diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java index cbf06926..a05a258b 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/StructurallyEquivalent.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2004, The JUNG Authors + * Copyright (c) 2004, The JUNG Authors * * All rights reserved. * Created on Jan 28, 2004 @@ -10,6 +10,9 @@ */ package edu.uci.ics.jung.algorithms.blockmodel; +import com.google.common.base.Function; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.util.Pair; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -20,155 +23,143 @@ import java.util.Map; import java.util.Set; -import com.google.common.base.Function; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - /** - * Identifies sets of structurally equivalent vertices in a graph. Vertices - * i and j are structurally equivalent iff the set of i's - * neighbors is identical to the set of j's neighbors, with the - * exception of i and j themselves. This algorithm finds all - * sets of equivalent vertices in O(V^2) time. - * - *

You can extend this class to have a different definition of equivalence (by - * overriding isStructurallyEquivalent), and may give it hints for - * accelerating the process by overriding canPossiblyCompare. - * (For example, in a bipartite graph, canPossiblyCompare may - * return false for vertices in - * different partitions. This function should be fast.) - * + * Identifies sets of structurally equivalent vertices in a graph. Vertices i and j + * are structurally equivalent iff the set of i's neighbors is identical to the set of + * j's neighbors, with the exception of i and j themselves. This algorithm + * finds all sets of equivalent vertices in O(V^2) time. + * + *

You can extend this class to have a different definition of equivalence (by overriding + * isStructurallyEquivalent), and may give it hints for accelerating the process by + * overriding canPossiblyCompare. (For example, in a bipartite graph, + * canPossiblyCompare may return false for vertices in different partitions. + * This function should be fast.) + * * @author Danyel Fisher */ -public class StructurallyEquivalent implements Function, VertexPartition> -{ - public VertexPartition apply(Graph g) - { - Set> vertex_pairs = getEquivalentPairs(g); - - Set> rv = new HashSet>(); - Map> intermediate = new HashMap>(); - for (Pair p : vertex_pairs) - { - Set res = intermediate.get(p.getFirst()); - if (res == null) - res = intermediate.get(p.getSecond()); - if (res == null) // we haven't seen this one before - res = new HashSet(); - res.add(p.getFirst()); - res.add(p.getSecond()); - intermediate.put(p.getFirst(), res); - intermediate.put(p.getSecond(), res); +public class StructurallyEquivalent implements Function, VertexPartition> { + public VertexPartition apply(Graph g) { + Set> vertex_pairs = getEquivalentPairs(g); + + Set> rv = new HashSet>(); + Map> intermediate = new HashMap>(); + for (Pair p : vertex_pairs) { + Set res = intermediate.get(p.getFirst()); + if (res == null) res = intermediate.get(p.getSecond()); + if (res == null) // we haven't seen this one before + res = new HashSet(); + res.add(p.getFirst()); + res.add(p.getSecond()); + intermediate.put(p.getFirst(), res); + intermediate.put(p.getSecond(), res); + } + rv.addAll(intermediate.values()); + + // pick up the vertices which don't appear in intermediate; they are + // singletons (equivalence classes of size 1) + Collection singletons = new ArrayList(g.getVertices()); + singletons.removeAll(intermediate.keySet()); + for (V v : singletons) { + Set v_set = Collections.singleton(v); + intermediate.put(v, v_set); + rv.add(v_set); + } + + return new VertexPartition(g, intermediate, rv); + } + + /** + * For 
each vertex pair v, v1 in G, checks whether v and v1 are fully equivalent: meaning that + * they connect to the exact same vertices. (Is this regular equivalence, or whathaveyou?) + * + * @param g the graph whose equivalent pairs are to be generated + * @return a Set of Pairs of vertices, where all the vertices in the inner Pairs are equivalent. + */ + protected Set> getEquivalentPairs(Graph g) { + + Set> rv = new HashSet>(); + Set alreadyEquivalent = new HashSet(); + + List l = new ArrayList(g.getVertices()); + + for (V v1 : l) { + if (alreadyEquivalent.contains(v1)) { + continue; + } + + for (Iterator iterator = l.listIterator(l.indexOf(v1) + 1); iterator.hasNext(); ) { + V v2 = iterator.next(); + + if (alreadyEquivalent.contains(v2)) { + continue; } - rv.addAll(intermediate.values()); - - // pick up the vertices which don't appear in intermediate; they are - // singletons (equivalence classes of size 1) - Collection singletons = new ArrayList(g.getVertices()); - singletons.removeAll(intermediate.keySet()); - for (V v : singletons) - { - Set v_set = Collections.singleton(v); - intermediate.put(v, v_set); - rv.add(v_set); + + if (!canBeEquivalent(v1, v2)) { + continue; } - return new VertexPartition(g, intermediate, rv); - } - - /** - * For each vertex pair v, v1 in G, checks whether v and v1 are fully - * equivalent: meaning that they connect to the exact same vertices. (Is - * this regular equivalence, or whathaveyou?) - * - * @param g the graph whose equivalent pairs are to be generated - * @return a Set of Pairs of vertices, where all the vertices in the inner - * Pairs are equivalent. 
- */ - protected Set> getEquivalentPairs(Graph g) { - - Set> rv = new HashSet>(); - Set alreadyEquivalent = new HashSet(); - - List l = new ArrayList(g.getVertices()); - - for (V v1 : l) - { - if (alreadyEquivalent.contains(v1)) - continue; - - for (Iterator iterator = l.listIterator(l.indexOf(v1) + 1); iterator.hasNext();) { - V v2 = iterator.next(); - - if (alreadyEquivalent.contains(v2)) - continue; - - if (!canBeEquivalent(v1, v2)) - continue; - - if (isStructurallyEquivalent(g, v1, v2)) { - Pair p = new Pair(v1, v2); - alreadyEquivalent.add(v2); - rv.add(p); - } - } - } - - return rv; - } - - /** - * @param g the graph in which the structural equivalence comparison is to take place - * @param v1 the vertex to check for structural equivalence to v2 - * @param v2 the vertex to check for structural equivalence to v1 - * @return {@code true} if {@code v1}'s predecessors/successors are equal to - * {@code v2}'s predecessors/successors - */ - protected boolean isStructurallyEquivalent(Graph g, V v1, V v2) { - - if( g.degree(v1) != g.degree(v2)) { - return false; - } - - Set n1 = new HashSet(g.getPredecessors(v1)); - n1.remove(v2); - n1.remove(v1); - Set n2 = new HashSet(g.getPredecessors(v2)); - n2.remove(v1); - n2.remove(v2); - - Set o1 = new HashSet(g.getSuccessors(v1)); - Set o2 = new HashSet(g.getSuccessors(v2)); - o1.remove(v1); - o1.remove(v2); - o2.remove(v1); - o2.remove(v2); - - // this neglects self-loops and directed edges from 1 to other - boolean b = (n1.equals(n2) && o1.equals(o2)); - if (!b) - return b; - - // if there's a directed edge v1->v2 then there's a directed edge v2->v1 - b &= ( g.isSuccessor(v1, v2) == g.isSuccessor(v2, v1)); - - // self-loop check - b &= ( g.isSuccessor(v1, v1) == g.isSuccessor(v2, v2)); - - return b; - - } - - /** - * This is a space for optimizations. For example, for a bipartite graph, - * vertices from different partitions cannot possibly be equivalent. 
- * - * @param v1 the first vertex to compare - * @param v2 the second vertex to compare - * @return {@code true} if the vertices can be equivalent - */ - protected boolean canBeEquivalent(V v1, V v2) { - return true; - } + if (isStructurallyEquivalent(g, v1, v2)) { + Pair p = new Pair(v1, v2); + alreadyEquivalent.add(v2); + rv.add(p); + } + } + } + + return rv; + } + + /** + * @param g the graph in which the structural equivalence comparison is to take place + * @param v1 the vertex to check for structural equivalence to v2 + * @param v2 the vertex to check for structural equivalence to v1 + * @return {@code true} if {@code v1}'s predecessors/successors are equal to {@code v2}'s + * predecessors/successors + */ + protected boolean isStructurallyEquivalent(Graph g, V v1, V v2) { + + if (g.degree(v1) != g.degree(v2)) { + return false; + } + + Set n1 = new HashSet(g.getPredecessors(v1)); + n1.remove(v2); + n1.remove(v1); + Set n2 = new HashSet(g.getPredecessors(v2)); + n2.remove(v1); + n2.remove(v2); + + Set o1 = new HashSet(g.getSuccessors(v1)); + Set o2 = new HashSet(g.getSuccessors(v2)); + o1.remove(v1); + o1.remove(v2); + o2.remove(v1); + o2.remove(v2); + + // this neglects self-loops and directed edges from 1 to other + boolean b = (n1.equals(n2) && o1.equals(o2)); + if (!b) { + return b; + } + + // if there's a directed edge v1->v2 then there's a directed edge v2->v1 + b &= (g.isSuccessor(v1, v2) == g.isSuccessor(v2, v1)); + + // self-loop check + b &= (g.isSuccessor(v1, v1) == g.isSuccessor(v2, v2)); + + return b; + } + + /** + * This is a space for optimizations. For example, for a bipartite graph, vertices from different + * partitions cannot possibly be equivalent. 
+ * + * @param v1 the first vertex to compare + * @param v2 the second vertex to compare + * @return {@code true} if the vertices can be equivalent + */ + protected boolean canBeEquivalent(V v1, V v2) { + return true; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java index dea478b2..5a8f2f01 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/blockmodel/VertexPartition.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. * @@ -12,120 +12,106 @@ */ package edu.uci.ics.jung.algorithms.blockmodel; -import java.util.*; - import edu.uci.ics.jung.graph.Graph; - +import java.util.*; /** - * Maintains information about a vertex partition of a graph. - * This can be built from a map from vertices to vertex sets - * or from a collection of (disjoint) vertex sets, - * such as those created by various clustering methods. + * Maintains information about a vertex partition of a graph. This can be built from a map from + * vertices to vertex sets or from a collection of (disjoint) vertex sets, such as those created by + * various clustering methods. */ -public class VertexPartition -{ - private Map> vertex_partition_map; - private Collection> vertex_sets; - private Graph graph; - - /** - * Creates an instance based on the specified graph and mapping from vertices - * to vertex sets, and generates a set of partitions based on this mapping. 
- * @param g the graph over which the vertex partition is defined - * @param partition_map the mapping from vertices to vertex sets (partitions) - */ - public VertexPartition(Graph g, Map> partition_map) - { - this.vertex_partition_map = Collections.unmodifiableMap(partition_map); - this.graph = g; - } +public class VertexPartition { + private Map> vertex_partition_map; + private Collection> vertex_sets; + private Graph graph; + + /** + * Creates an instance based on the specified graph and mapping from vertices to vertex sets, and + * generates a set of partitions based on this mapping. + * + * @param g the graph over which the vertex partition is defined + * @param partition_map the mapping from vertices to vertex sets (partitions) + */ + public VertexPartition(Graph g, Map> partition_map) { + this.vertex_partition_map = Collections.unmodifiableMap(partition_map); + this.graph = g; + } + + /** + * Creates an instance based on the specified graph, vertex-set mapping, and set of disjoint + * vertex sets. The vertex-set mapping and vertex partitions must be consistent; that is, the + * mapping must reflect the division of vertices into partitions, and each vertex must appear in + * exactly one partition. + * + * @param g the graph over which the vertex partition is defined + * @param partition_map the mapping from vertices to vertex sets (partitions) + * @param vertex_sets the set of disjoint vertex sets + */ + public VertexPartition( + Graph g, Map> partition_map, Collection> vertex_sets) { + this.vertex_partition_map = Collections.unmodifiableMap(partition_map); + this.vertex_sets = vertex_sets; + this.graph = g; + } + + /** + * Creates an instance based on the specified graph and set of disjoint vertex sets, and generates + * a vertex-to-partition map based on these sets. 
+ * + * @param g the graph over which the vertex partition is defined + * @param vertex_sets the set of disjoint vertex sets + */ + public VertexPartition(Graph g, Collection> vertex_sets) { + this.vertex_sets = vertex_sets; + this.graph = g; + } + + /** + * Returns the graph on which the partition is defined. + * + * @return the graph on which the partition is defined + */ + public Graph getGraph() { + return graph; + } - /** - * Creates an instance based on the specified graph, vertex-set mapping, - * and set of disjoint vertex sets. The vertex-set mapping and vertex - * partitions must be consistent; that is, the mapping must reflect the - * division of vertices into partitions, and each vertex must appear in - * exactly one partition. - * @param g the graph over which the vertex partition is defined - * @param partition_map the mapping from vertices to vertex sets (partitions) - * @param vertex_sets the set of disjoint vertex sets - */ - public VertexPartition(Graph g, Map> partition_map, - Collection> vertex_sets) - { - this.vertex_partition_map = Collections.unmodifiableMap(partition_map); - this.vertex_sets = vertex_sets; - this.graph = g; + /** + * Returns a map from each vertex in the input graph to its partition. This map is generated if it + * does not already exist. + * + * @return a map from each vertex in the input graph to a vertex set + */ + public Map> getVertexToPartitionMap() { + if (vertex_partition_map == null) { + this.vertex_partition_map = new HashMap>(); + for (Set set : this.vertex_sets) for (V v : set) this.vertex_partition_map.put(v, set); } + return vertex_partition_map; + } - /** - * Creates an instance based on the specified graph and set of disjoint vertex sets, - * and generates a vertex-to-partition map based on these sets. 
- * @param g the graph over which the vertex partition is defined - * @param vertex_sets the set of disjoint vertex sets - */ - public VertexPartition(Graph g, Collection> vertex_sets) - { - this.vertex_sets = vertex_sets; - this.graph = g; + /** + * Returns a collection of vertex sets, where each vertex in the input graph is in exactly one + * set. This collection is generated based on the vertex-to-partition map if it does not already + * exist. + * + * @return a collection of vertex sets such that each vertex in the instance's graph is in exactly + * one set + */ + public Collection> getVertexPartitions() { + if (vertex_sets == null) { + this.vertex_sets = new HashSet>(); + this.vertex_sets.addAll(vertex_partition_map.values()); } - - /** - * Returns the graph on which the partition is defined. - * @return the graph on which the partition is defined - */ - public Graph getGraph() - { - return graph; - } + return vertex_sets; + } - /** - * Returns a map from each vertex in the input graph to its partition. - * This map is generated if it does not already exist. - * @return a map from each vertex in the input graph to a vertex set - */ - public Map> getVertexToPartitionMap() - { - if (vertex_partition_map == null) - { - this.vertex_partition_map = new HashMap>(); - for (Set set : this.vertex_sets) - for (V v : set) - this.vertex_partition_map.put(v, set); - } - return vertex_partition_map; - } - - /** - * Returns a collection of vertex sets, where each vertex in the - * input graph is in exactly one set. - * This collection is generated based on the vertex-to-partition map - * if it does not already exist. - * @return a collection of vertex sets such that each vertex in the - * instance's graph is in exactly one set - */ - public Collection> getVertexPartitions() - { - if (vertex_sets == null) - { - this.vertex_sets = new HashSet>(); - this.vertex_sets.addAll(vertex_partition_map.values()); - } - return vertex_sets; - } + /** @return the number of partitions. 
*/ + public int numPartitions() { + return vertex_sets.size(); + } - /** - * @return the number of partitions. - */ - public int numPartitions() - { - return vertex_sets.size(); - } - - @Override - public String toString() - { - return "Partitions: " + vertex_partition_map; - } + @Override + public String toString() { + return "Partitions: " + vertex_partition_map; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java index 359f1005..baf64578 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/BicomponentClusterer.java @@ -1,14 +1,16 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.cluster; +import com.google.common.base.Function; +import edu.uci.ics.jung.graph.UndirectedGraph; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; @@ -16,150 +18,127 @@ import java.util.Set; import java.util.Stack; -import com.google.common.base.Function; - -import edu.uci.ics.jung.graph.UndirectedGraph; - /** - * Finds all biconnected components (bicomponents) of an undirected graph. - * A graph is a biconnected component if - * at least 2 vertices must be removed in order to disconnect the graph. (Graphs - * consisting of one vertex, or of two connected vertices, are also biconnected.) 
Biconnected - * components of three or more vertices have the property that every pair of vertices in the component - * are connected by two or more vertex-disjoint paths. - *

- * Running time: O(|V| + |E|) where |V| is the number of vertices and |E| is the number of edges + * Finds all biconnected components (bicomponents) of an undirected graph. A graph is a biconnected + * component if at least 2 vertices must be removed in order to disconnect the graph. (Graphs + * consisting of one vertex, or of two connected vertices, are also biconnected.) Biconnected + * components of three or more vertices have the property that every pair of vertices in the + * component are connected by two or more vertex-disjoint paths. + * + *

Running time: O(|V| + |E|) where |V| is the number of vertices and |E| is the number of edges + * * @see "Depth first search and linear graph algorithms by R. E. Tarjan (1972), SIAM J. Comp." - * * @author Joshua O'Madadhain */ -public class BicomponentClusterer implements Function, Set>> -{ - protected Map dfs_num; - protected Map high; - protected Map parents; - protected Stack stack; - protected int converse_depth; +public class BicomponentClusterer implements Function, Set>> { + protected Map dfs_num; + protected Map high; + protected Map parents; + protected Stack stack; + protected int converse_depth; - /** - * Constructs a new bicomponent finder - */ - public BicomponentClusterer() { - } + /** Constructs a new bicomponent finder */ + public BicomponentClusterer() {} - /** - * Extracts the bicomponents from the graph. - * @param theGraph the graph whose bicomponents are to be extracted - * @return the ClusterSet of bicomponents - */ - public Set> apply(UndirectedGraph theGraph) - { - Set> bicomponents = new LinkedHashSet>(); + /** + * Extracts the bicomponents from the graph. + * + * @param theGraph the graph whose bicomponents are to be extracted + * @return the ClusterSet of bicomponents + */ + public Set> apply(UndirectedGraph theGraph) { + Set> bicomponents = new LinkedHashSet>(); - if (theGraph.getVertices().isEmpty()) - return bicomponents; + if (theGraph.getVertices().isEmpty()) { + return bicomponents; + } - // initialize DFS number for each vertex to 0 - dfs_num = new HashMap(); - for (V v : theGraph.getVertices()) - { - dfs_num.put(v, 0); - } + // initialize DFS number for each vertex to 0 + dfs_num = new HashMap(); + for (V v : theGraph.getVertices()) { + dfs_num.put(v, 0); + } + + for (V v : theGraph.getVertices()) { + if (dfs_num.get(v).intValue() == 0) // if we haven't hit this vertex yet... 
+ { + high = new HashMap(); + stack = new Stack(); + parents = new HashMap(); + converse_depth = theGraph.getVertexCount(); + // find the biconnected components for this subgraph, starting from v + findBiconnectedComponents(theGraph, v, bicomponents); - for (V v : theGraph.getVertices()) - { - if (dfs_num.get(v).intValue() == 0) // if we haven't hit this vertex yet... - { - high = new HashMap(); - stack = new Stack(); - parents = new HashMap(); - converse_depth = theGraph.getVertexCount(); - // find the biconnected components for this subgraph, starting from v - findBiconnectedComponents(theGraph, v, bicomponents); - - // if we only visited one vertex, this method won't have - // ID'd it as a biconnected component, so mark it as one - if (theGraph.getVertexCount() - converse_depth == 1) - { - Set s = new HashSet(); - s.add(v); - bicomponents.add(s); - } - } + // if we only visited one vertex, this method won't have + // ID'd it as a biconnected component, so mark it as one + if (theGraph.getVertexCount() - converse_depth == 1) { + Set s = new HashSet(); + s.add(v); + bicomponents.add(s); } - - return bicomponents; + } } - /** - *

Stores, in bicomponents, all the biconnected - * components that are reachable from v. - * - *

The algorithm basically proceeds as follows: do a depth-first - * traversal starting from v, marking each vertex with - * a value that indicates the order in which it was encountered (dfs_num), - * and with - * a value that indicates the highest point in the DFS tree that is known - * to be reachable from this vertex using non-DFS edges (high). (Since it - * is measured on non-DFS edges, "high" tells you how far back in the DFS - * tree you can reach by two distinct paths, hence biconnectivity.) - * Each time a new vertex w is encountered, push the edge just traversed - * on a stack, and call this method recursively. If w.high is no greater than - * v.dfs_num, then the contents of the stack down to (v,w) is a - * biconnected component (and v is an articulation point, that is, a - * component boundary). In either case, set v.high to max(v.high, w.high), - * and continue. If w has already been encountered but is - * not v's parent, set v.high max(v.high, w.dfs_num) and continue. - * - *

(In case anyone cares, the version of this algorithm on p. 224 of - * Udi Manber's "Introduction to Algorithms: A Creative Approach" seems to be - * wrong: the stack should be initialized outside this method, - * (v,w) should only be put on the stack if w hasn't been seen already, - * and there's no real benefit to putting v on the stack separately: just - * check for (v,w) on the stack rather than v. Had I known this, I could - * have saved myself a few days. JRTOM) - * - * @param g the graph to check for biconnected components - * @param v the starting place for searching for biconnected components - * @param bicomponents storage for the biconnected components found by this algorithm - */ - protected void findBiconnectedComponents(UndirectedGraph g, V v, Set> bicomponents) - { - int v_dfs_num = converse_depth; - dfs_num.put(v, v_dfs_num); - converse_depth--; - high.put(v, v_dfs_num); + return bicomponents; + } + + /** + * Stores, in bicomponents, all the biconnected components that are reachable from + * v. + * + *

The algorithm basically proceeds as follows: do a depth-first traversal starting from + * v, marking each vertex with a value that indicates the order in which it was encountered + * (dfs_num), and with a value that indicates the highest point in the DFS tree that is known to + * be reachable from this vertex using non-DFS edges (high). (Since it is measured on non-DFS + * edges, "high" tells you how far back in the DFS tree you can reach by two distinct paths, hence + * biconnectivity.) Each time a new vertex w is encountered, push the edge just traversed on a + * stack, and call this method recursively. If w.high is no greater than v.dfs_num, then the + * contents of the stack down to (v,w) is a biconnected component (and v is an articulation point, + * that is, a component boundary). In either case, set v.high to max(v.high, w.high), and + * continue. If w has already been encountered but is not v's parent, set v.high max(v.high, + * w.dfs_num) and continue. + * + *

(In case anyone cares, the version of this algorithm on p. 224 of Udi Manber's "Introduction + * to Algorithms: A Creative Approach" seems to be wrong: the stack should be initialized outside + * this method, (v,w) should only be put on the stack if w hasn't been seen already, and there's + * no real benefit to putting v on the stack separately: just check for (v,w) on the stack rather + * than v. Had I known this, I could have saved myself a few days. JRTOM) + * + * @param g the graph to check for biconnected components + * @param v the starting place for searching for biconnected components + * @param bicomponents storage for the biconnected components found by this algorithm + */ + protected void findBiconnectedComponents(UndirectedGraph g, V v, Set> bicomponents) { + int v_dfs_num = converse_depth; + dfs_num.put(v, v_dfs_num); + converse_depth--; + high.put(v, v_dfs_num); - for (V w : g.getNeighbors(v)) - { - int w_dfs_num = dfs_num.get(w).intValue();//get(w, dfs_num); - E vw = g.findEdge(v,w); - if (w_dfs_num == 0) // w hasn't yet been visited - { - parents.put(w, v); // v is w's parent in the DFS tree - stack.push(vw); - findBiconnectedComponents(g, w, bicomponents); - int w_high = high.get(w).intValue();//get(w, high); - if (w_high <= v_dfs_num) - { - // v disconnects w from the rest of the graph, - // i.e., v is an articulation point - // thus, everything between the top of the stack and - // v is part of a single biconnected component - Set bicomponent = new HashSet(); - E e; - do - { - e = stack.pop(); - bicomponent.addAll(g.getIncidentVertices(e)); - } - while (e != vw); - bicomponents.add(bicomponent); - } - high.put(v, Math.max(w_high, high.get(v).intValue())); - } - else if (w != parents.get(v)) // (v,w) is a back or a forward edge - high.put(v, Math.max(w_dfs_num, high.get(v).intValue())); + for (V w : g.getNeighbors(v)) { + int w_dfs_num = dfs_num.get(w).intValue(); //get(w, dfs_num); + E vw = g.findEdge(v, w); + if (w_dfs_num == 0) // w hasn't yet 
been visited + { + parents.put(w, v); // v is w's parent in the DFS tree + stack.push(vw); + findBiconnectedComponents(g, w, bicomponents); + int w_high = high.get(w).intValue(); //get(w, high); + if (w_high <= v_dfs_num) { + // v disconnects w from the rest of the graph, + // i.e., v is an articulation point + // thus, everything between the top of the stack and + // v is part of a single biconnected component + Set bicomponent = new HashSet(); + E e; + do { + e = stack.pop(); + bicomponent.addAll(g.getIncidentVertices(e)); + } while (e != vw); + bicomponents.add(bicomponent); } + high.put(v, Math.max(w_high, high.get(v).intValue())); + } else if (w != parents.get(v)) // (v,w) is a back or a forward edge + high.put(v, Math.max(w_dfs_num, high.get(v).intValue())); } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java index 80e30c0d..54f93820 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/EdgeBetweennessClusterer.java @@ -1,109 +1,105 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.cluster; +import com.google.common.base.Function; +import edu.uci.ics.jung.algorithms.scoring.BetweennessCentrality; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.util.Pair; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; -import com.google.common.base.Function; - -import edu.uci.ics.jung.algorithms.scoring.BetweennessCentrality; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - - /** * An algorithm for computing clusters (community structure) in graphs based on edge betweenness. - * The betweenness of an edge is defined as the extent to which that edge lies along - * shortest paths between all pairs of nodes. + * The betweenness of an edge is defined as the extent to which that edge lies along shortest paths + * between all pairs of nodes. + * + *

This algorithm works by iteratively following the 2 step process: * - * This algorithm works by iteratively following the 2 step process: *

- *

- * Running time is: O(kmn) where k is the number of edges to remove, m is the total number of edges, and - * n is the total number of vertices. For very sparse graphs the running time is closer to O(kn^2) and for - * graphs with strong community structure, the complexity is even lower. - *

- * This algorithm is a slight modification of the algorithm discussed below in that the number of edges - * to be removed is parameterized. + * + *

Running time is: O(kmn) where k is the number of edges to remove, m is the total number of + * edges, and n is the total number of vertices. For very sparse graphs the running time is closer + * to O(kn^2) and for graphs with strong community structure, the complexity is even lower. + * + *

This algorithm is a slight modification of the algorithm discussed below in that the number of + * edges to be removed is parameterized. + * * @author Scott White * @author Tom Nelson (converted to jung2) * @see "Community structure in social and biological networks by Michelle Girvan and Mark Newman" */ -public class EdgeBetweennessClusterer implements Function,Set>> { - private int mNumEdgesToRemove; - private Map> edges_removed; +public class EdgeBetweennessClusterer implements Function, Set>> { + private int mNumEdgesToRemove; + private Map> edges_removed; - /** - * Constructs a new clusterer for the specified graph. - * @param numEdgesToRemove the number of edges to be progressively removed from the graph - */ - public EdgeBetweennessClusterer(int numEdgesToRemove) { - mNumEdgesToRemove = numEdgesToRemove; - edges_removed = new LinkedHashMap>(); - } + /** + * Constructs a new clusterer for the specified graph. + * + * @param numEdgesToRemove the number of edges to be progressively removed from the graph + */ + public EdgeBetweennessClusterer(int numEdgesToRemove) { + mNumEdgesToRemove = numEdgesToRemove; + edges_removed = new LinkedHashMap>(); + } - /** - * Finds the set of clusters which have the strongest "community structure". - * The more edges removed the smaller and more cohesive the clusters. - * @param graph the graph - */ - public Set> apply(Graph graph) { - - if (mNumEdgesToRemove < 0 || mNumEdgesToRemove > graph.getEdgeCount()) { - throw new IllegalArgumentException("Invalid number of edges passed in."); - } - - edges_removed.clear(); + /** + * Finds the set of clusters which have the strongest "community structure". The more edges + * removed the smaller and more cohesive the clusters. 
+ * + * @param graph the graph + */ + public Set> apply(Graph graph) { - for (int k=0;k bc = new BetweennessCentrality(graph); - E to_remove = null; - double score = 0; - for (E e : graph.getEdges()) - if (bc.getEdgeScore(e) > score) - { - to_remove = e; - score = bc.getEdgeScore(e); - } - edges_removed.put(to_remove, graph.getEndpoints(to_remove)); - graph.removeEdge(to_remove); - } + if (mNumEdgesToRemove < 0 || mNumEdgesToRemove > graph.getEdgeCount()) { + throw new IllegalArgumentException("Invalid number of edges passed in."); + } - WeakComponentClusterer wcSearch = new WeakComponentClusterer(); - Set> clusterSet = wcSearch.apply(graph); + edges_removed.clear(); - for (Map.Entry> entry : edges_removed.entrySet()) - { - Pair endpoints = entry.getValue(); - graph.addEdge(entry.getKey(), endpoints.getFirst(), endpoints.getSecond()); + for (int k = 0; k < mNumEdgesToRemove; k++) { + BetweennessCentrality bc = new BetweennessCentrality(graph); + E to_remove = null; + double score = 0; + for (E e : graph.getEdges()) + if (bc.getEdgeScore(e) > score) { + to_remove = e; + score = bc.getEdgeScore(e); } - return clusterSet; + edges_removed.put(to_remove, graph.getEndpoints(to_remove)); + graph.removeEdge(to_remove); } - /** - * Retrieves the list of all edges that were removed - * (assuming extract(...) was previously called). - * The edges returned - * are stored in order in which they were removed. - * - * @return the edges in the original graph - */ - public List getEdgesRemoved() - { - return new ArrayList(edges_removed.keySet()); + WeakComponentClusterer wcSearch = new WeakComponentClusterer(); + Set> clusterSet = wcSearch.apply(graph); + + for (Map.Entry> entry : edges_removed.entrySet()) { + Pair endpoints = entry.getValue(); + graph.addEdge(entry.getKey(), endpoints.getFirst(), endpoints.getSecond()); } + return clusterSet; + } + + /** + * Retrieves the list of all edges that were removed (assuming extract(...) was previously + * called). 
The edges returned are stored in order in which they were removed. + * + * @return the edges in the original graph + */ + public List getEdgesRemoved() { + return new ArrayList(edges_removed.keySet()); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java index cc2abf8b..41b4789b 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/VoltageClusterer.java @@ -16,7 +16,6 @@ import edu.uci.ics.jung.algorithms.util.KMeansClusterer; import edu.uci.ics.jung.algorithms.util.KMeansClusterer.NotEnoughClustersException; import edu.uci.ics.jung.graph.Graph; - import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -31,340 +30,293 @@ import java.util.Set; /** - *

Clusters vertices of a Graph based on their ranks as - * calculated by VoltageScorer. This algorithm is based on, - * but not identical with, the method described in the paper below. - * The primary difference is that Wu and Huberman assume a priori that the clusters - * are of approximately the same size, and therefore use a more complex - * method than k-means (which is used here) for determining cluster - * membership based on co-occurrence data. + * Clusters vertices of a Graph based on their ranks as calculated by + * VoltageScorer. This algorithm is based on, but not identical with, the method described in + * the paper below. The primary difference is that Wu and Huberman assume a priori that the clusters + * are of approximately the same size, and therefore use a more complex method than k-means (which + * is used here) for determining cluster membership based on co-occurrence data. * *

The algorithm proceeds as follows: + * *

* - *

NOTE: Depending on how the co-occurrence data splits the data into - * clusters, the number of clusters returned by this algorithm may be less than the - * number of clusters requested. The number of clusters will never be more than - * the number requested, however. + *

NOTE: Depending on how the co-occurrence data splits the data into clusters, the number + * of clusters returned by this algorithm may be less than the number of clusters requested. The + * number of clusters will never be more than the number requested, however. * * @author Joshua O'Madadhain - * @see "'Finding communities in linear time: a physics approach', Fang Wu and Bernardo Huberman, http://www.hpl.hp.com/research/idl/papers/linear/" + * @see "'Finding communities in linear time: a physics approach', Fang Wu and Bernardo Huberman, + * http://www.hpl.hp.com/research/idl/papers/linear/" * @see VoltageScorer * @see KMeansClusterer */ -public class VoltageClusterer -{ - protected int num_candidates; - protected KMeansClusterer kmc; - protected Random rand; - protected Graph g; - - /** - * Creates an instance of a VoltageCluster with the specified parameters. - * These are mostly parameters that are passed directly to VoltageScorer - * and KMeansClusterer. - * - * @param g the graph whose vertices are to be clustered - * @param num_candidates the number of candidate clusters to create - */ - public VoltageClusterer(Graph g, int num_candidates) - { - if (num_candidates < 1) - throw new IllegalArgumentException("must generate >=1 candidates"); - - this.num_candidates = num_candidates; - this.kmc = new KMeansClusterer(); - rand = new Random(); - this.g = g; - } - - protected void setRandomSeed(int random_seed) - { - rand = new Random(random_seed); - } - - /** - * @param v the vertex whose community we wish to discover - * @return a community (cluster) centered around v. - */ - public Collection> getCommunity(V v) - { - return cluster_internal(v, 2); +public class VoltageClusterer { + protected int num_candidates; + protected KMeansClusterer kmc; + protected Random rand; + protected Graph g; + + /** + * Creates an instance of a VoltageCluster with the specified parameters. These are mostly + * parameters that are passed directly to VoltageScorer and KMeansClusterer. 
+ * + * @param g the graph whose vertices are to be clustered + * @param num_candidates the number of candidate clusters to create + */ + public VoltageClusterer(Graph g, int num_candidates) { + if (num_candidates < 1) throw new IllegalArgumentException("must generate >=1 candidates"); + + this.num_candidates = num_candidates; + this.kmc = new KMeansClusterer(); + rand = new Random(); + this.g = g; + } + + protected void setRandomSeed(int random_seed) { + rand = new Random(random_seed); + } + + /** + * @param v the vertex whose community we wish to discover + * @return a community (cluster) centered around v. + */ + public Collection> getCommunity(V v) { + return cluster_internal(v, 2); + } + + /** + * Clusters the vertices of g into num_clusters clusters, based on their + * connectivity. + * + * @param num_clusters the number of clusters to identify + * @return a collection of clusters (sets of vertices) + */ + public Collection> cluster(int num_clusters) { + return cluster_internal(null, num_clusters); + } + + /** + * Does the work of getCommunity and cluster. 
+ * + * @param origin the vertex around which clustering is to be done + * @param num_clusters the (maximum) number of clusters to find + * @return a collection of clusters (sets of vertices) + */ + protected Collection> cluster_internal(V origin, int num_clusters) { + // generate candidate clusters + // repeat the following 'samples' times: + // * pick (widely separated) vertex pair, run VoltageScorer + // * use k-means to identify 2 communities in ranked graph + // * store resulting candidate communities + ArrayList v_array = new ArrayList(g.getVertices()); + + LinkedList> candidates = new LinkedList>(); + + for (int j = 0; j < num_candidates; j++) { + V source; + if (origin == null) source = v_array.get((int) (rand.nextDouble() * v_array.size())); + else source = origin; + V target = null; + do { + target = v_array.get((int) (rand.nextDouble() * v_array.size())); + } while (source == target); + VoltageScorer vs = new VoltageScorer(g, source, target); + vs.evaluate(); + + Map voltage_ranks = new HashMap(); + for (V v : g.getVertices()) voltage_ranks.put(v, new double[] {vs.getVertexScore(v)}); + + // addOneCandidateCluster(candidates, voltage_ranks); + addTwoCandidateClusters(candidates, voltage_ranks); } - /** - * Clusters the vertices of g into - * num_clusters clusters, based on their connectivity. 
- * @param num_clusters the number of clusters to identify - * @return a collection of clusters (sets of vertices) - */ - public Collection> cluster(int num_clusters) - { - return cluster_internal(null, num_clusters); + // repeat the following k-1 times: + // * pick a vertex v as a cluster seed + // (Wu/Huberman: most frequent vertex in candidates) + // * calculate co-occurrence (in candidate clusters) + // of this vertex with all others + // * use k-means to separate co-occurrence counts into high/low; + // high vertices are a cluster + // * remove v's vertices from candidate clusters + + Collection> clusters = new LinkedList>(); + Set remaining = new HashSet(g.getVertices()); + + List seed_candidates = getSeedCandidates(candidates); + int seed_index = 0; + + for (int j = 0; j < (num_clusters - 1); j++) { + if (remaining.isEmpty()) { + break; + } + + V seed; + if (seed_index == 0 && origin != null) { + seed = origin; + } else { + do { + seed = seed_candidates.get(seed_index++); + } while (!remaining.contains(seed)); + } + + Map occur_counts = getObjectCounts(candidates, seed); + if (occur_counts.size() < 2) { + break; + } + + // now that we have the counts, cluster them... + try { + Collection> high_low = kmc.cluster(occur_counts, 2); + // ...get the cluster with the highest-valued centroid... + Iterator> h_iter = high_low.iterator(); + Map cluster1 = h_iter.next(); + Map cluster2 = h_iter.next(); + double[] centroid1 = DiscreteDistribution.mean(cluster1.values()); + double[] centroid2 = DiscreteDistribution.mean(cluster2.values()); + Set new_cluster; + if (centroid1[0] >= centroid2[0]) new_cluster = cluster1.keySet(); + else new_cluster = cluster2.keySet(); + + // ...remove the elements of new_cluster from each candidate... 
+ for (Set cluster : candidates) cluster.removeAll(new_cluster); + clusters.add(new_cluster); + remaining.removeAll(new_cluster); + } catch (NotEnoughClustersException nece) { + // all remaining vertices are in the same cluster + break; + } } - /** - * Does the work of getCommunity and cluster. - * @param origin the vertex around which clustering is to be done - * @param num_clusters the (maximum) number of clusters to find - * @return a collection of clusters (sets of vertices) - */ - protected Collection> cluster_internal(V origin, int num_clusters) - { - // generate candidate clusters - // repeat the following 'samples' times: - // * pick (widely separated) vertex pair, run VoltageScorer - // * use k-means to identify 2 communities in ranked graph - // * store resulting candidate communities - ArrayList v_array = new ArrayList(g.getVertices()); - - LinkedList> candidates = new LinkedList>(); - - for (int j = 0; j < num_candidates; j++) - { - V source; - if (origin == null) - source = v_array.get((int)(rand.nextDouble() * v_array.size())); - else - source = origin; - V target = null; - do - { - target = v_array.get((int)(rand.nextDouble() * v_array.size())); - } - while (source == target); - VoltageScorer vs = new VoltageScorer(g, source, target); - vs.evaluate(); - - Map voltage_ranks = new HashMap(); - for (V v : g.getVertices()) - voltage_ranks.put(v, new double[] {vs.getVertexScore(v)}); - -// addOneCandidateCluster(candidates, voltage_ranks); - addTwoCandidateClusters(candidates, voltage_ranks); - } - - // repeat the following k-1 times: - // * pick a vertex v as a cluster seed - // (Wu/Huberman: most frequent vertex in candidates) - // * calculate co-occurrence (in candidate clusters) - // of this vertex with all others - // * use k-means to separate co-occurrence counts into high/low; - // high vertices are a cluster - // * remove v's vertices from candidate clusters - - Collection> clusters = new LinkedList>(); - Set remaining = new 
HashSet(g.getVertices()); - - List seed_candidates = getSeedCandidates(candidates); - int seed_index = 0; - - for (int j = 0; j < (num_clusters - 1); j++) - { - if (remaining.isEmpty()) - break; - - V seed; - if (seed_index == 0 && origin != null) - seed = origin; - else - { - do { seed = seed_candidates.get(seed_index++); } - while (!remaining.contains(seed)); - } - - Map occur_counts = getObjectCounts(candidates, seed); - if (occur_counts.size() < 2) - break; - - // now that we have the counts, cluster them... - try - { - Collection> high_low = kmc.cluster(occur_counts, 2); - // ...get the cluster with the highest-valued centroid... - Iterator> h_iter = high_low.iterator(); - Map cluster1 = h_iter.next(); - Map cluster2 = h_iter.next(); - double[] centroid1 = DiscreteDistribution.mean(cluster1.values()); - double[] centroid2 = DiscreteDistribution.mean(cluster2.values()); - Set new_cluster; - if (centroid1[0] >= centroid2[0]) - new_cluster = cluster1.keySet(); - else - new_cluster = cluster2.keySet(); - - // ...remove the elements of new_cluster from each candidate... - for (Set cluster : candidates) - cluster.removeAll(new_cluster); - clusters.add(new_cluster); - remaining.removeAll(new_cluster); - } - catch (NotEnoughClustersException nece) - { - // all remaining vertices are in the same cluster - break; - } - } - - // identify remaining vertices (if any) as a 'garbage' cluster - if (!remaining.isEmpty()) - clusters.add(remaining); - - return clusters; + // identify remaining vertices (if any) as a 'garbage' cluster + if (!remaining.isEmpty()) clusters.add(remaining); + + return clusters; + } + + /** + * Do k-means with three intervals and pick the smaller two clusters (presumed to be on the ends); + * this is closer to the Wu-Huberman method. 
+ * + * @param candidates the list of clusters to populate + * @param voltage_ranks the voltage values for each vertex + */ + protected void addTwoCandidateClusters( + LinkedList> candidates, Map voltage_ranks) { + try { + List> clusters = + new ArrayList>(kmc.cluster(voltage_ranks, 3)); + boolean b01 = clusters.get(0).size() > clusters.get(1).size(); + boolean b02 = clusters.get(0).size() > clusters.get(2).size(); + boolean b12 = clusters.get(1).size() > clusters.get(2).size(); + if (b01 && b02) { + candidates.add(clusters.get(1).keySet()); + candidates.add(clusters.get(2).keySet()); + } else if (!b01 && b12) { + candidates.add(clusters.get(0).keySet()); + candidates.add(clusters.get(2).keySet()); + } else if (!b02 && !b12) { + candidates.add(clusters.get(0).keySet()); + candidates.add(clusters.get(1).keySet()); + } + } catch (NotEnoughClustersException e) { + // no valid candidates, continue } - - /** - * Do k-means with three intervals and pick the smaller two clusters - * (presumed to be on the ends); this is closer to the Wu-Huberman method. 
- * @param candidates the list of clusters to populate - * @param voltage_ranks the voltage values for each vertex - */ - protected void addTwoCandidateClusters(LinkedList> candidates, - Map voltage_ranks) - { - try - { - List> clusters = new ArrayList>(kmc.cluster(voltage_ranks, 3)); - boolean b01 = clusters.get(0).size() > clusters.get(1).size(); - boolean b02 = clusters.get(0).size() > clusters.get(2).size(); - boolean b12 = clusters.get(1).size() > clusters.get(2).size(); - if (b01 && b02) - { - candidates.add(clusters.get(1).keySet()); - candidates.add(clusters.get(2).keySet()); - } - else if (!b01 && b12) - { - candidates.add(clusters.get(0).keySet()); - candidates.add(clusters.get(2).keySet()); - } - else if (!b02 && !b12) - { - candidates.add(clusters.get(0).keySet()); - candidates.add(clusters.get(1).keySet()); - } - } - catch (NotEnoughClustersException e) - { - // no valid candidates, continue - } + } + + /** + * alternative to addTwoCandidateClusters(): cluster vertices by voltages into 2 clusters. We only + * consider the smaller of the two clusters returned by k-means to be a 'true' cluster candidate; + * the other is a garbage cluster. + * + * @param candidates the list of clusters to populate + * @param voltage_ranks the voltage values for each vertex + */ + protected void addOneCandidateCluster( + LinkedList> candidates, Map voltage_ranks) { + try { + List> clusters; + clusters = new ArrayList>(kmc.cluster(voltage_ranks, 2)); + if (clusters.get(0).size() < clusters.get(1).size()) candidates.add(clusters.get(0).keySet()); + else candidates.add(clusters.get(1).keySet()); + } catch (NotEnoughClustersException e) { + // no valid candidates, continue } - - /** - * alternative to addTwoCandidateClusters(): cluster vertices by voltages into 2 clusters. - * We only consider the smaller of the two clusters returned - * by k-means to be a 'true' cluster candidate; the other is a garbage cluster. 
- * @param candidates the list of clusters to populate - * @param voltage_ranks the voltage values for each vertex - */ - protected void addOneCandidateCluster(LinkedList> candidates, - Map voltage_ranks) - { - try - { - List> clusters; - clusters = new ArrayList>(kmc.cluster(voltage_ranks, 2)); - if (clusters.get(0).size() < clusters.get(1).size()) - candidates.add(clusters.get(0).keySet()); - else - candidates.add(clusters.get(1).keySet()); - } - catch (NotEnoughClustersException e) - { - // no valid candidates, continue + } + + /** + * Returns a list of cluster seeds, ranked in decreasing order of number of appearances in the + * specified collection of candidate clusters. + * + * @param candidates the set of candidate clusters + * @return a set of cluster seeds + */ + protected List getSeedCandidates(Collection> candidates) { + final Map occur_counts = getObjectCounts(candidates, null); + + ArrayList occurrences = new ArrayList(occur_counts.keySet()); + Collections.sort(occurrences, new MapValueArrayComparator(occur_counts)); + + // System.out.println("occurrences: "); + for (int i = 0; i < occurrences.size(); i++) + System.out.println(occur_counts.get(occurrences.get(i))[0]); + + return occurrences; + } + + protected Map getObjectCounts(Collection> candidates, V seed) { + Map occur_counts = new HashMap(); + for (V v : g.getVertices()) occur_counts.put(v, new double[] {0}); + + for (Set candidate : candidates) { + if (seed == null) System.out.println(candidate.size()); + if (seed == null || candidate.contains(seed)) { + for (V element : candidate) { + double[] count = occur_counts.get(element); + count[0]++; } + } } - /** - * Returns a list of cluster seeds, ranked in decreasing order - * of number of appearances in the specified collection of candidate - * clusters. 
- * @param candidates the set of candidate clusters - * @return a set of cluster seeds - */ - protected List getSeedCandidates(Collection> candidates) - { - final Map occur_counts = getObjectCounts(candidates, null); - - ArrayList occurrences = new ArrayList(occur_counts.keySet()); - Collections.sort(occurrences, new MapValueArrayComparator(occur_counts)); - -// System.out.println("occurrences: "); - for (int i = 0; i < occurrences.size(); i++) - System.out.println(occur_counts.get(occurrences.get(i))[0]); - - return occurrences; + if (seed == null) { + System.out.println("occur_counts size: " + occur_counts.size()); + for (V v : occur_counts.keySet()) System.out.println(occur_counts.get(v)[0]); } - protected Map getObjectCounts(Collection> candidates, V seed) - { - Map occur_counts = new HashMap(); - for (V v : g.getVertices()) - occur_counts.put(v, new double[]{0}); - - for (Set candidate : candidates) - { - if (seed == null) - System.out.println(candidate.size()); - if (seed == null || candidate.contains(seed)) - { - for (V element : candidate) - { - double[] count = occur_counts.get(element); - count[0]++; - } - } - } + return occur_counts; + } - if (seed == null) - { - System.out.println("occur_counts size: " + occur_counts.size()); - for (V v : occur_counts.keySet()) - System.out.println(occur_counts.get(v)[0]); - } + protected class MapValueArrayComparator implements Comparator { + private Map map; - return occur_counts; + protected MapValueArrayComparator(Map map) { + this.map = map; } - protected class MapValueArrayComparator implements Comparator - { - private Map map; - - protected MapValueArrayComparator(Map map) - { - this.map = map; - } - - public int compare(V o1, V o2) - { - double[] count0 = map.get(o1); - double[] count1 = map.get(o2); - if (count0[0] < count1[0]) - return 1; - else if (count0[0] > count1[0]) - return -1; - return 0; - } - + public int compare(V o1, V o2) { + double[] count0 = map.get(o1); + double[] count1 = map.get(o2); + if 
(count0[0] < count1[0]) { + return 1; + } else if (count0[0] > count1[0]) { + return -1; + } + return 0; } - + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java index 310cfdac..024c04c3 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClusterer.java @@ -1,73 +1,70 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.cluster; +import com.google.common.base.Function; +import edu.uci.ics.jung.graph.Graph; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.Queue; import java.util.Set; -import com.google.common.base.Function; - -import edu.uci.ics.jung.graph.Graph; - - - /** - * Finds all weak components in a graph as sets of vertex sets. A weak component is defined as - * a maximal subgraph in which all pairs of vertices in the subgraph are reachable from one - * another in the underlying undirected subgraph. - *

This implementation identifies components as sets of vertex sets. - * To create the induced graphs from any or all of these vertex sets, - * see algorithms.filters.FilterUtils. - *

- * Running time: O(|V| + |E|) where |V| is the number of vertices and |E| is the number of edges. + * Finds all weak components in a graph as sets of vertex sets. A weak component is defined as a + * maximal subgraph in which all pairs of vertices in the subgraph are reachable from one another in + * the underlying undirected subgraph. + * + *

This implementation identifies components as sets of vertex sets. To create the induced graphs + * from any or all of these vertex sets, see algorithms.filters.FilterUtils. + * + *

Running time: O(|V| + |E|) where |V| is the number of vertices and |E| is the number of edges. + * * @author Scott White */ -public class WeakComponentClusterer implements Function, Set>> -{ - /** - * Extracts the weak components from a graph. - * @param graph the graph whose weak components are to be extracted - * @return the list of weak components - */ - public Set> apply(Graph graph) { +public class WeakComponentClusterer implements Function, Set>> { + /** + * Extracts the weak components from a graph. + * + * @param graph the graph whose weak components are to be extracted + * @return the list of weak components + */ + public Set> apply(Graph graph) { - Set> clusterSet = new HashSet>(); + Set> clusterSet = new HashSet>(); - HashSet unvisitedVertices = new HashSet(graph.getVertices()); + HashSet unvisitedVertices = new HashSet(graph.getVertices()); - while (!unvisitedVertices.isEmpty()) { - Set cluster = new HashSet(); - V root = unvisitedVertices.iterator().next(); - unvisitedVertices.remove(root); - cluster.add(root); + while (!unvisitedVertices.isEmpty()) { + Set cluster = new HashSet(); + V root = unvisitedVertices.iterator().next(); + unvisitedVertices.remove(root); + cluster.add(root); - Queue queue = new LinkedList(); - queue.add(root); + Queue queue = new LinkedList(); + queue.add(root); - while (!queue.isEmpty()) { - V currentVertex = queue.remove(); - Collection neighbors = graph.getNeighbors(currentVertex); + while (!queue.isEmpty()) { + V currentVertex = queue.remove(); + Collection neighbors = graph.getNeighbors(currentVertex); - for(V neighbor : neighbors) { - if (unvisitedVertices.contains(neighbor)) { - queue.add(neighbor); - unvisitedVertices.remove(neighbor); - cluster.add(neighbor); - } - } - } - clusterSet.add(cluster); + for (V neighbor : neighbors) { + if (unvisitedVertices.contains(neighbor)) { + queue.add(neighbor); + unvisitedVertices.remove(neighbor); + cluster.add(neighbor); + } } - return clusterSet; + } + clusterSet.add(cluster); } 
+ return clusterSet; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java index 7c474c36..8b8609a4 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/EdgePredicateFilter.java @@ -1,7 +1,7 @@ /* * Created on May 19, 2008 * - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * * All rights reserved. * @@ -12,59 +12,45 @@ package edu.uci.ics.jung.algorithms.filters; import com.google.common.base.Predicate; - import edu.uci.ics.jung.graph.Graph; /** - * Transforms the input graph into one which contains only those edges - * that pass the specified Predicate. The filtered graph - * is a copy of the original graph (same type, uses the same vertex and - * edge objects). All vertices from the original graph - * are copied into the new graph (even if they are not incident to any - * edges in the new graph). - * + * Transforms the input graph into one which contains only those edges that pass the specified + * Predicate. The filtered graph is a copy of the original graph (same type, uses the + * same vertex and edge objects). All vertices from the original graph are copied into the new graph + * (even if they are not incident to any edges in the new graph). + * * @author Joshua O'Madadhain */ -public class EdgePredicateFilter implements Filter -{ - protected Predicate edge_pred; +public class EdgePredicateFilter implements Filter { + protected Predicate edge_pred; - /** - * Creates an instance based on the specified edge Predicate. - * @param edge_pred the predicate that specifies which edges to add to the filtered graph - */ - public EdgePredicateFilter(Predicate edge_pred) - { - this.edge_pred = edge_pred; + /** + * Creates an instance based on the specified edge Predicate. 
+ * + * @param edge_pred the predicate that specifies which edges to add to the filtered graph + */ + public EdgePredicateFilter(Predicate edge_pred) { + this.edge_pred = edge_pred; + } + + @SuppressWarnings("unchecked") + public Graph apply(Graph g) { + Graph filtered; + try { + filtered = g.getClass().newInstance(); + } catch (InstantiationException e) { + throw new RuntimeException("Unable to create copy of existing graph: ", e); + } catch (IllegalAccessException e) { + throw new RuntimeException("Unable to create copy of existing graph: ", e); } - - @SuppressWarnings("unchecked") - public Graph apply(Graph g) - { - Graph filtered; - try - { - filtered = g.getClass().newInstance(); - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - for (V v : g.getVertices()) - filtered.addVertex(v); - - for (E e : g.getEdges()) - { - if (edge_pred.apply(e)) - filtered.addEdge(e, g.getIncidentVertices(e)); - } - - return filtered; + for (V v : g.getVertices()) filtered.addVertex(v); + + for (E e : g.getEdges()) { + if (edge_pred.apply(e)) filtered.addEdge(e, g.getIncidentVertices(e)); } + return filtered; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/Filter.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/Filter.java index 48e5da0f..b88e5a40 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/Filter.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/Filter.java @@ -1,26 +1,22 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
-*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.filters; import com.google.common.base.Function; - import edu.uci.ics.jung.graph.Graph; - - /** - * An interface for classes that return a subset of the input Graph - * as a Graph. The Graph returned may be either a - * new graph or a view into an existing graph; the documentation for the filter - * must specify which. - * + * An interface for classes that return a subset of the input Graph as a Graph + * . The Graph returned may be either a new graph or a view into an existing + * graph; the documentation for the filter must specify which. + * * @author danyelf */ -public interface Filter extends Function, Graph>{ } +public interface Filter extends Function, Graph> {} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/FilterUtils.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/FilterUtils.java index 671b8af0..207411b1 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/FilterUtils.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/FilterUtils.java @@ -1,100 +1,79 @@ /** - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * - * All rights reserved. + *

All rights reserved. * - * This software is open-source under the BSD license; see either - * "license.txt" or - * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * Created on Jun 7, 2008 - * + *

This software is open-source under the BSD license; see either "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. Created on Jun 7, 2008 */ package edu.uci.ics.jung.algorithms.filters; +import edu.uci.ics.jung.graph.Hypergraph; import java.util.ArrayList; import java.util.Collection; -import edu.uci.ics.jung.graph.Hypergraph; +/** Utility methods relating to filtering. */ +public class FilterUtils { + /** + * Creates the induced subgraph from graph whose vertex set is equal to + * vertices. The graph returned has vertices as its vertex set, and includes + * all edges from graph which are incident only to elements of vertices. + * + * @param the vertex type + * @param the edge type + * @param the graph type + * @param vertices the subset of graph's vertices around which the subgraph is to be + * constructed + * @param graph the graph whose subgraph is to be constructed + * @return the subgraph induced by vertices + * @throws IllegalArgumentException if any vertex in vertices is not in graph + * + */ + @SuppressWarnings("unchecked") + public static > G createInducedSubgraph( + Collection vertices, G graph) { + G subgraph = null; + try { + subgraph = (G) graph.getClass().newInstance(); -/** - * Utility methods relating to filtering. - */ -public class FilterUtils -{ - /** - * Creates the induced subgraph from graph whose vertex set - * is equal to vertices. The graph returned has - * vertices as its vertex set, and includes all edges from - * graph which are incident only to elements of - * vertices. 
- * - * @param the vertex type - * @param the edge type - * @param the graph type - * @param vertices the subset of graph's vertices around - * which the subgraph is to be constructed - * @param graph the graph whose subgraph is to be constructed - * @return the subgraph induced by vertices - * @throws IllegalArgumentException if any vertex in - * vertices is not in graph - */ - @SuppressWarnings("unchecked") - public static > G createInducedSubgraph(Collection - vertices, G graph) - { - G subgraph = null; - try - { - subgraph = (G)graph.getClass().newInstance(); - - for (V v : vertices) - { - if (!graph.containsVertex(v)) - throw new IllegalArgumentException("Vertex " + v + - " is not an element of " + graph); - subgraph.addVertex(v); - } + for (V v : vertices) { + if (!graph.containsVertex(v)) + throw new IllegalArgumentException("Vertex " + v + " is not an element of " + graph); + subgraph.addVertex(v); + } + + for (E e : graph.getEdges()) { + Collection incident = graph.getIncidentVertices(e); + if (vertices.containsAll(incident)) subgraph.addEdge(e, incident, graph.getEdgeType(e)); + } + } catch (InstantiationException e) { + throw new RuntimeException("Unable to create copy of existing graph: ", e); + } catch (IllegalAccessException e) { + throw new RuntimeException("Unable to create copy of existing graph: ", e); + } + return subgraph; + } + + /** + * Creates the induced subgraphs of graph associated with each element of + * vertex_collections. Note that these vertex collections need not be disjoint. 
+ * + * @param the vertex type + * @param the edge type + * @param the graph type + * @param vertex_collections the collections of vertex collections to be used to induce the + * subgraphs + * @param graph the graph whose subgraphs are to be created + * @return the induced subgraphs of graph associated with each element of + * vertex_collections + */ + public static > Collection createAllInducedSubgraphs( + Collection> vertex_collections, G graph) { + Collection subgraphs = new ArrayList(); + + for (Collection vertex_set : vertex_collections) + subgraphs.add(createInducedSubgraph(vertex_set, graph)); - for (E e : graph.getEdges()) - { - Collection incident = graph.getIncidentVertices(e); - if (vertices.containsAll(incident)) - subgraph.addEdge(e, incident, graph.getEdgeType(e)); - } - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - return subgraph; - } - - /** - * Creates the induced subgraphs of graph associated with each - * element of vertex_collections. - * Note that these vertex collections need not be disjoint. 
- * @param the vertex type - * @param the edge type - * @param the graph type - * @param vertex_collections the collections of vertex collections to be - * used to induce the subgraphs - * @param graph the graph whose subgraphs are to be created - * @return the induced subgraphs of graph associated with each - * element of vertex_collections - */ - public static > Collection - createAllInducedSubgraphs(Collection> - vertex_collections, G graph) - { - Collection subgraphs = new ArrayList(); - - for (Collection vertex_set : vertex_collections) - subgraphs.add(createInducedSubgraph(vertex_set, graph)); - - return subgraphs; - } + return subgraphs; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java index 9f189ab4..38af1124 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/KNeighborhoodFilter.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. * @@ -12,131 +12,127 @@ * */ package edu.uci.ics.jung.algorithms.filters; + +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.util.Pair; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; -import edu.uci.ics.jung.algorithms.filters.Filter; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - /** - * A filter used to extract the k-neighborhood around one or more root node(s). - * The k-neighborhood is defined as the subgraph induced by the set of - * vertices that are k or fewer hops (unweighted shortest-path distance) - * away from the root node. - * + * A filter used to extract the k-neighborhood around one or more root node(s). 
The k-neighborhood + * is defined as the subgraph induced by the set of vertices that are k or fewer hops (unweighted + * shortest-path distance) away from the root node. + * * @author Danyel Fisher */ -public class KNeighborhoodFilter implements Filter { +public class KNeighborhoodFilter implements Filter { + + /** The type of edge to follow for defining the neighborhood. */ + public static enum EdgeType { + IN_OUT, + IN, + OUT + } + + private Set rootNodes; + private int radiusK; + private EdgeType edgeType; - /** - * The type of edge to follow for defining the neighborhood. - */ - public static enum EdgeType { IN_OUT, IN, OUT } - private Set rootNodes; - private int radiusK; - private EdgeType edgeType; - - /** - * Constructs a new instance of the filter. - * @param rootNodes the set of root nodes - * @param radiusK the neighborhood radius around the root set - * @param edgeType 0 for in/out edges, 1 for in-edges, 2 for out-edges - */ - public KNeighborhoodFilter(Set rootNodes, int radiusK, EdgeType edgeType) { - this.rootNodes = rootNodes; - this.radiusK = radiusK; - this.edgeType = edgeType; - } - - /** - * Constructs a new instance of the filter. - * @param rootNode the root node - * @param radiusK the neighborhood radius around the root set - * @param edgeType 0 for in/out edges, 1 for in-edges, 2 for out-edges - */ - public KNeighborhoodFilter(V rootNode, int radiusK, EdgeType edgeType) { - this.rootNodes = new HashSet(); - this.rootNodes.add(rootNode); - this.radiusK = radiusK; - this.edgeType = edgeType; - } - - /** - * Constructs an unassembled graph containing the k-neighborhood around the root node(s). 
- */ - @SuppressWarnings("unchecked") - public Graph apply(Graph graph) { - // generate a Set of Vertices we want - // add all to the UG - int currentDepth = 0; - List currentVertices = new ArrayList(); - Set visitedVertices = new HashSet(); - Set visitedEdges = new HashSet(); - Set acceptedVertices = new HashSet(); - //Copy, mark, and add all the root nodes to the new subgraph - for (V currentRoot : rootNodes) { + /** + * Constructs a new instance of the filter. + * + * @param rootNodes the set of root nodes + * @param radiusK the neighborhood radius around the root set + * @param edgeType 0 for in/out edges, 1 for in-edges, 2 for out-edges + */ + public KNeighborhoodFilter(Set rootNodes, int radiusK, EdgeType edgeType) { + this.rootNodes = rootNodes; + this.radiusK = radiusK; + this.edgeType = edgeType; + } - visitedVertices.add(currentRoot); - acceptedVertices.add(currentRoot); - currentVertices.add(currentRoot); - } - ArrayList newVertices = null; - //Use BFS to locate the neighborhood around the root nodes within distance k - while (currentDepth < radiusK) { - newVertices = new ArrayList(); - for (V currentVertex : currentVertices) { + /** + * Constructs a new instance of the filter. + * + * @param rootNode the root node + * @param radiusK the neighborhood radius around the root set + * @param edgeType 0 for in/out edges, 1 for in-edges, 2 for out-edges + */ + public KNeighborhoodFilter(V rootNode, int radiusK, EdgeType edgeType) { + this.rootNodes = new HashSet(); + this.rootNodes.add(rootNode); + this.radiusK = radiusK; + this.edgeType = edgeType; + } - Collection edges = null; - switch (edgeType) { - case IN_OUT : - edges = graph.getIncidentEdges(currentVertex); - break; - case IN : - edges = graph.getInEdges(currentVertex); - break; - case OUT : - edges = graph.getOutEdges(currentVertex); - break; - } - for (E currentEdge : edges) { + /** Constructs an unassembled graph containing the k-neighborhood around the root node(s). 
*/ + @SuppressWarnings("unchecked") + public Graph apply(Graph graph) { + // generate a Set of Vertices we want + // add all to the UG + int currentDepth = 0; + List currentVertices = new ArrayList(); + Set visitedVertices = new HashSet(); + Set visitedEdges = new HashSet(); + Set acceptedVertices = new HashSet(); + //Copy, mark, and add all the root nodes to the new subgraph + for (V currentRoot : rootNodes) { + + visitedVertices.add(currentRoot); + acceptedVertices.add(currentRoot); + currentVertices.add(currentRoot); + } + ArrayList newVertices = null; + //Use BFS to locate the neighborhood around the root nodes within distance k + while (currentDepth < radiusK) { + newVertices = new ArrayList(); + for (V currentVertex : currentVertices) { + + Collection edges = null; + switch (edgeType) { + case IN_OUT: + edges = graph.getIncidentEdges(currentVertex); + break; + case IN: + edges = graph.getInEdges(currentVertex); + break; + case OUT: + edges = graph.getOutEdges(currentVertex); + break; + } + for (E currentEdge : edges) { - V currentNeighbor = - graph.getOpposite(currentVertex, currentEdge); - if (!visitedEdges.contains(currentEdge)) { - visitedEdges.add(currentEdge); - if (!visitedVertices.contains(currentNeighbor)) { - visitedVertices.add(currentNeighbor); - acceptedVertices.add(currentNeighbor); - newVertices.add(currentNeighbor); - } - } - } - } - currentVertices = newVertices; - currentDepth++; - } - Graph ug = null; - try { - ug = graph.getClass().newInstance(); - for(E edge : graph.getEdges()) { - Pair endpoints = graph.getEndpoints(edge); - if(acceptedVertices.containsAll(endpoints)) { - ug.addEdge(edge, endpoints.getFirst(), endpoints.getSecond()); - } - } - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); + V currentNeighbor = graph.getOpposite(currentVertex, currentEdge); + if (!visitedEdges.contains(currentEdge)) { + visitedEdges.add(currentEdge); + if 
(!visitedVertices.contains(currentNeighbor)) { + visitedVertices.add(currentNeighbor); + acceptedVertices.add(currentNeighbor); + newVertices.add(currentNeighbor); + } + } } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); + } + currentVertices = newVertices; + currentDepth++; + } + Graph ug = null; + try { + ug = graph.getClass().newInstance(); + for (E edge : graph.getEdges()) { + Pair endpoints = graph.getEndpoints(edge); + if (acceptedVertices.containsAll(endpoints)) { + ug.addEdge(edge, endpoints.getFirst(), endpoints.getSecond()); } - return ug; - } + } + } catch (InstantiationException e) { + throw new RuntimeException("Unable to create copy of existing graph: ", e); + } catch (IllegalAccessException e) { + throw new RuntimeException("Unable to create copy of existing graph: ", e); + } + return ug; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java index 81a9d0e5..d52e3eb7 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/filters/VertexPredicateFilter.java @@ -1,7 +1,7 @@ /* * Created on May 19, 2008 * - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * * All rights reserved. * @@ -11,65 +11,50 @@ */ package edu.uci.ics.jung.algorithms.filters; -import java.util.Collection; - import com.google.common.base.Predicate; - import edu.uci.ics.jung.graph.Graph; +import java.util.Collection; /** - * Transforms the input graph into one which contains only those vertices - * that pass the specified Predicate. The filtered graph - * is a copy of the original graph (same type, uses the same vertex and - * edge objects). 
Only those edges whose entire incident vertex collection - * passes the predicate are copied into the new graph. - * + * Transforms the input graph into one which contains only those vertices that pass the specified + * Predicate. The filtered graph is a copy of the original graph (same type, uses the + * same vertex and edge objects). Only those edges whose entire incident vertex collection passes + * the predicate are copied into the new graph. + * * @author Joshua O'Madadhain */ -public class VertexPredicateFilter implements Filter -{ - protected Predicate vertex_pred; +public class VertexPredicateFilter implements Filter { + protected Predicate vertex_pred; + + /** + * Creates an instance based on the specified vertex Predicate. + * + * @param vertex_pred the predicate that specifies which vertices to add to the filtered graph + */ + public VertexPredicateFilter(Predicate vertex_pred) { + this.vertex_pred = vertex_pred; + } - /** - * Creates an instance based on the specified vertex Predicate. 
- * @param vertex_pred the predicate that specifies which vertices to add to the filtered graph - */ - public VertexPredicateFilter(Predicate vertex_pred) - { - this.vertex_pred = vertex_pred; + @SuppressWarnings("unchecked") + public Graph apply(Graph g) { + Graph filtered; + try { + filtered = g.getClass().newInstance(); + } catch (InstantiationException e) { + throw new RuntimeException("Unable to create copy of existing graph: ", e); + } catch (IllegalAccessException e) { + throw new RuntimeException("Unable to create copy of existing graph: ", e); } - - @SuppressWarnings("unchecked") - public Graph apply(Graph g) - { - Graph filtered; - try - { - filtered = g.getClass().newInstance(); - } - catch (InstantiationException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - catch (IllegalAccessException e) - { - throw new RuntimeException("Unable to create copy of existing graph: ", e); - } - for (V v : g.getVertices()) - if (vertex_pred.apply(v)) - filtered.addVertex(v); - - Collection filtered_vertices = filtered.getVertices(); - - for (E e : g.getEdges()) - { - Collection incident = g.getIncidentVertices(e); - if (filtered_vertices.containsAll(incident)) - filtered.addEdge(e, incident); - } - - return filtered; + for (V v : g.getVertices()) if (vertex_pred.apply(v)) filtered.addVertex(v); + + Collection filtered_vertices = filtered.getVertices(); + + for (E e : g.getEdges()) { + Collection incident = g.getIncidentVertices(e); + if (filtered_vertices.containsAll(incident)) filtered.addEdge(e, incident); } + return filtered; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java index 6653469e..dce834ff 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java +++ 
b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/flows/EdmondsKarpMaxFlow.java @@ -1,14 +1,19 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.flows; +import com.google.common.base.Function; +import com.google.common.base.Supplier; +import edu.uci.ics.jung.algorithms.util.IterativeProcess; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.util.EdgeType; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -19,296 +24,289 @@ import java.util.Queue; import java.util.Set; -import com.google.common.base.Function; -import com.google.common.base.Supplier; - -import edu.uci.ics.jung.algorithms.util.IterativeProcess; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.util.EdgeType; - - /** - * Implements the Edmonds-Karp maximum flow algorithm for solving the maximum flow problem. - * After the algorithm is executed, - * the input {@code Map} is populated with a {@code Number} for each edge that indicates - * the flow along that edge. - *

- * An example of using this algorithm is as follows: + * Implements the Edmonds-Karp maximum flow algorithm for solving the maximum flow problem. After + * the algorithm is executed, the input {@code Map} is populated with a {@code Number} for each edge + * that indicates the flow along that edge. + * + *

An example of using this algorithm is as follows: + * *

- * EdmondsKarpMaxFlow ek = new EdmondsKarpMaxFlow(graph, source, sink, edge_capacities, edge_flows, 
+ * EdmondsKarpMaxFlow ek = new EdmondsKarpMaxFlow(graph, source, sink, edge_capacities, edge_flows,
  * edge_factory);
  * ek.evaluate(); // This instructs the class to compute the max flow
  * 
* * @see "Introduction to Algorithms by Cormen, Leiserson, Rivest, and Stein." * @see "Network Flows by Ahuja, Magnanti, and Orlin." - * @see "Theoretical improvements in algorithmic efficiency for network flow problems by Edmonds and Karp, 1972." + * @see "Theoretical improvements in algorithmic efficiency for network flow problems by Edmonds and + * Karp, 1972." * @author Scott White, adapted to jung2 by Tom Nelson */ -public class EdmondsKarpMaxFlow extends IterativeProcess { - - private DirectedGraph mFlowGraph; - private DirectedGraph mOriginalGraph; - private V source; - private V target; - private int mMaxFlow; - private Set mSourcePartitionNodes; - private Set mSinkPartitionNodes; - private Set mMinCutEdges; - - private Map residualCapacityMap = new HashMap(); - private Map parentMap = new HashMap(); - private Map parentCapacityMap = new HashMap(); - private Function edgeCapacityTransformer; - private Map edgeFlowMap; - private Supplier edgeFactory; - - /** - * Constructs a new instance of the algorithm solver for a given graph, source, and sink. - * Source and sink vertices must be elements of the specified graph, and must be - * distinct. - * @param directedGraph the flow graph - * @param source the source vertex - * @param sink the sink vertex - * @param edgeCapacityTransformer the Function that gets the capacity for each edge. 
- * @param edgeFlowMap the map where the solver will place the value of the flow for each edge - * @param edgeFactory used to create new edge instances for backEdges - */ - @SuppressWarnings("unchecked") - public EdmondsKarpMaxFlow(DirectedGraph directedGraph, V source, V sink, - Function edgeCapacityTransformer, Map edgeFlowMap, - Supplier edgeFactory) { - - if(directedGraph.getVertices().contains(source) == false || - directedGraph.getVertices().contains(sink) == false) { - throw new IllegalArgumentException("source and sink vertices must be elements of the specified graph"); - } - if (source.equals(sink)) { - throw new IllegalArgumentException("source and sink vertices must be distinct"); - } - mOriginalGraph = directedGraph; - - this.source = source; - this.target = sink; - this.edgeFlowMap = edgeFlowMap; - this.edgeCapacityTransformer = edgeCapacityTransformer; - this.edgeFactory = edgeFactory; - try { - mFlowGraph = directedGraph.getClass().newInstance(); - for(E e : mOriginalGraph.getEdges()) { - mFlowGraph.addEdge(e, mOriginalGraph.getSource(e), - mOriginalGraph.getDest(e), EdgeType.DIRECTED); - } - for(V v : mOriginalGraph.getVertices()) { - mFlowGraph.addVertex(v); - } - - } catch (InstantiationException e) { - e.printStackTrace(); - } catch (IllegalAccessException e) { - e.printStackTrace(); - } - mMaxFlow = 0; - mSinkPartitionNodes = new HashSet(); - mSourcePartitionNodes = new HashSet(); - mMinCutEdges = new HashSet(); +public class EdmondsKarpMaxFlow extends IterativeProcess { + + private DirectedGraph mFlowGraph; + private DirectedGraph mOriginalGraph; + private V source; + private V target; + private int mMaxFlow; + private Set mSourcePartitionNodes; + private Set mSinkPartitionNodes; + private Set mMinCutEdges; + + private Map residualCapacityMap = new HashMap(); + private Map parentMap = new HashMap(); + private Map parentCapacityMap = new HashMap(); + private Function edgeCapacityTransformer; + private Map edgeFlowMap; + private Supplier 
edgeFactory; + + /** + * Constructs a new instance of the algorithm solver for a given graph, source, and sink. Source + * and sink vertices must be elements of the specified graph, and must be distinct. + * + * @param directedGraph the flow graph + * @param source the source vertex + * @param sink the sink vertex + * @param edgeCapacityTransformer the Function that gets the capacity for each edge. + * @param edgeFlowMap the map where the solver will place the value of the flow for each edge + * @param edgeFactory used to create new edge instances for backEdges + */ + @SuppressWarnings("unchecked") + public EdmondsKarpMaxFlow( + DirectedGraph directedGraph, + V source, + V sink, + Function edgeCapacityTransformer, + Map edgeFlowMap, + Supplier edgeFactory) { + + if (directedGraph.getVertices().contains(source) == false + || directedGraph.getVertices().contains(sink) == false) { + throw new IllegalArgumentException( + "source and sink vertices must be elements of the specified graph"); } - - private void clearParentValues() { - parentMap.clear(); - parentCapacityMap.clear(); - parentCapacityMap.put(source, Integer.MAX_VALUE); - parentMap.put(source, source); + if (source.equals(sink)) { + throw new IllegalArgumentException("source and sink vertices must be distinct"); + } + mOriginalGraph = directedGraph; + + this.source = source; + this.target = sink; + this.edgeFlowMap = edgeFlowMap; + this.edgeCapacityTransformer = edgeCapacityTransformer; + this.edgeFactory = edgeFactory; + try { + mFlowGraph = directedGraph.getClass().newInstance(); + for (E e : mOriginalGraph.getEdges()) { + mFlowGraph.addEdge( + e, mOriginalGraph.getSource(e), mOriginalGraph.getDest(e), EdgeType.DIRECTED); + } + for (V v : mOriginalGraph.getVertices()) { + mFlowGraph.addVertex(v); + } + + } catch (InstantiationException e) { + e.printStackTrace(); + } catch (IllegalAccessException e) { + e.printStackTrace(); } + mMaxFlow = 0; + mSinkPartitionNodes = new HashSet(); + mSourcePartitionNodes = 
new HashSet(); + mMinCutEdges = new HashSet(); + } - protected boolean hasAugmentingPath() { - - mSinkPartitionNodes.clear(); - mSourcePartitionNodes.clear(); - mSinkPartitionNodes.addAll(mFlowGraph.getVertices()); - - Set visitedEdgesMap = new HashSet(); - Queue queue = new LinkedList(); - queue.add(source); - - while (!queue.isEmpty()) { - V currentVertex = queue.remove(); - mSinkPartitionNodes.remove(currentVertex); - mSourcePartitionNodes.add(currentVertex); - Number currentCapacity = parentCapacityMap.get(currentVertex); - - Collection neighboringEdges = mFlowGraph.getOutEdges(currentVertex); - - for (E neighboringEdge : neighboringEdges) { - - V neighboringVertex = mFlowGraph.getDest(neighboringEdge); - - Number residualCapacity = residualCapacityMap.get(neighboringEdge); - if (residualCapacity.intValue() <= 0 || visitedEdgesMap.contains(neighboringEdge)) - continue; - - V neighborsParent = parentMap.get(neighboringVertex); - Number neighborCapacity = parentCapacityMap.get(neighboringVertex); - int newCapacity = Math.min(residualCapacity.intValue(),currentCapacity.intValue()); - - if ((neighborsParent == null) || newCapacity > neighborCapacity.intValue()) { - parentMap.put(neighboringVertex, currentVertex); - parentCapacityMap.put(neighboringVertex, new Integer(newCapacity)); - visitedEdgesMap.add(neighboringEdge); - if (neighboringVertex != target) { - queue.add(neighboringVertex); - } - } - } - } + private void clearParentValues() { + parentMap.clear(); + parentCapacityMap.clear(); + parentCapacityMap.put(source, Integer.MAX_VALUE); + parentMap.put(source, source); + } - boolean hasAugmentingPath = false; - Number targetsParentCapacity = parentCapacityMap.get(target); - if (targetsParentCapacity != null && targetsParentCapacity.intValue() > 0) { - updateResidualCapacities(); - hasAugmentingPath = true; - } - clearParentValues(); - return hasAugmentingPath; - } + protected boolean hasAugmentingPath() { - @Override - public void step() { - while 
(hasAugmentingPath()) { - } - computeMinCut(); -// return 0; - } + mSinkPartitionNodes.clear(); + mSourcePartitionNodes.clear(); + mSinkPartitionNodes.addAll(mFlowGraph.getVertices()); - private void computeMinCut() { - - for (E e : mOriginalGraph.getEdges()) { - - V source = mOriginalGraph.getSource(e); - V destination = mOriginalGraph.getDest(e); - if (mSinkPartitionNodes.contains(source) && mSinkPartitionNodes.contains(destination)) { - continue; - } - if (mSourcePartitionNodes.contains(source) && mSourcePartitionNodes.contains(destination)) { - continue; - } - if (mSinkPartitionNodes.contains(source) && mSourcePartitionNodes.contains(destination)) { - continue; - } - mMinCutEdges.add(e); - } - } + Set visitedEdgesMap = new HashSet(); + Queue queue = new LinkedList(); + queue.add(source); - /** - * @return the value of the maximum flow from the source to the sink. - */ - public int getMaxFlow() { - return mMaxFlow; - } + while (!queue.isEmpty()) { + V currentVertex = queue.remove(); + mSinkPartitionNodes.remove(currentVertex); + mSourcePartitionNodes.add(currentVertex); + Number currentCapacity = parentCapacityMap.get(currentVertex); - /** - * @return the nodes which share the same partition (as defined by the min-cut edges) - * as the sink node. - */ - public Set getNodesInSinkPartition() { - return mSinkPartitionNodes; - } + Collection neighboringEdges = mFlowGraph.getOutEdges(currentVertex); - /** - * @return the nodes which share the same partition (as defined by the min-cut edges) - * as the source node. - */ - public Set getNodesInSourcePartition() { - return mSourcePartitionNodes; - } + for (E neighboringEdge : neighboringEdges) { - /** - * @return the edges in the minimum cut. 
- */ - public Set getMinCutEdges() { - return mMinCutEdges; - } + V neighboringVertex = mFlowGraph.getDest(neighboringEdge); + + Number residualCapacity = residualCapacityMap.get(neighboringEdge); + if (residualCapacity.intValue() <= 0 || visitedEdgesMap.contains(neighboringEdge)) { + continue; + } - /** - * @return the graph for which the maximum flow is calculated. - */ - public DirectedGraph getFlowGraph() { - return mFlowGraph; + V neighborsParent = parentMap.get(neighboringVertex); + Number neighborCapacity = parentCapacityMap.get(neighboringVertex); + int newCapacity = Math.min(residualCapacity.intValue(), currentCapacity.intValue()); + + if ((neighborsParent == null) || newCapacity > neighborCapacity.intValue()) { + parentMap.put(neighboringVertex, currentVertex); + parentCapacityMap.put(neighboringVertex, new Integer(newCapacity)); + visitedEdgesMap.add(neighboringEdge); + if (neighboringVertex != target) { + queue.add(neighboringVertex); + } + } + } } - @Override - protected void initializeIterations() { - parentCapacityMap.put(source, Integer.MAX_VALUE); - parentMap.put(source, source); + boolean hasAugmentingPath = false; + Number targetsParentCapacity = parentCapacityMap.get(target); + if (targetsParentCapacity != null && targetsParentCapacity.intValue() > 0) { + updateResidualCapacities(); + hasAugmentingPath = true; + } + clearParentValues(); + return hasAugmentingPath; + } + + @Override + public void step() { + while (hasAugmentingPath()) {} + computeMinCut(); + // return 0; + } + + private void computeMinCut() { + + for (E e : mOriginalGraph.getEdges()) { + + V source = mOriginalGraph.getSource(e); + V destination = mOriginalGraph.getDest(e); + if (mSinkPartitionNodes.contains(source) && mSinkPartitionNodes.contains(destination)) { + continue; + } + if (mSourcePartitionNodes.contains(source) && mSourcePartitionNodes.contains(destination)) { + continue; + } + if (mSinkPartitionNodes.contains(source) && mSourcePartitionNodes.contains(destination)) { + 
continue; + } + mMinCutEdges.add(e); + } + } + + /** @return the value of the maximum flow from the source to the sink. */ + public int getMaxFlow() { + return mMaxFlow; + } + + /** + * @return the nodes which share the same partition (as defined by the min-cut edges) as the sink + * node. + */ + public Set getNodesInSinkPartition() { + return mSinkPartitionNodes; + } + + /** + * @return the nodes which share the same partition (as defined by the min-cut edges) as the + * source node. + */ + public Set getNodesInSourcePartition() { + return mSourcePartitionNodes; + } + + /** @return the edges in the minimum cut. */ + public Set getMinCutEdges() { + return mMinCutEdges; + } + + /** @return the graph for which the maximum flow is calculated. */ + public DirectedGraph getFlowGraph() { + return mFlowGraph; + } + + @Override + protected void initializeIterations() { + parentCapacityMap.put(source, Integer.MAX_VALUE); + parentMap.put(source, source); + + List edgeList = new ArrayList(mFlowGraph.getEdges()); + + for (int eIdx = 0; eIdx < edgeList.size(); eIdx++) { + E edge = edgeList.get(eIdx); + Number capacity = edgeCapacityTransformer.apply(edge); + + if (capacity == null) { + throw new IllegalArgumentException( + "Edge capacities must be provided in Function passed to constructor"); + } + residualCapacityMap.put(edge, capacity); + + V source = mFlowGraph.getSource(edge); + V destination = mFlowGraph.getDest(edge); + + if (mFlowGraph.isPredecessor(source, destination) == false) { + E backEdge = edgeFactory.get(); + mFlowGraph.addEdge(backEdge, destination, source, EdgeType.DIRECTED); + residualCapacityMap.put(backEdge, 0); + } + } + } - List edgeList = new ArrayList(mFlowGraph.getEdges()); + @Override + protected void finalizeIterations() { - for (int eIdx=0;eIdx< edgeList.size();eIdx++) { - E edge = edgeList.get(eIdx); - Number capacity = edgeCapacityTransformer.apply(edge); + for (E currentEdge : mFlowGraph.getEdges()) { + Number capacity = 
edgeCapacityTransformer.apply(currentEdge); - if (capacity == null) { - throw new IllegalArgumentException("Edge capacities must be provided in Function passed to constructor"); - } - residualCapacityMap.put(edge, capacity); + Number residualCapacity = residualCapacityMap.get(currentEdge); + if (capacity != null) { + Integer flowValue = new Integer(capacity.intValue() - residualCapacity.intValue()); + this.edgeFlowMap.put(currentEdge, flowValue); + } + } - V source = mFlowGraph.getSource(edge); - V destination = mFlowGraph.getDest(edge); + Set backEdges = new HashSet(); + for (E currentEdge : mFlowGraph.getEdges()) { - if(mFlowGraph.isPredecessor(source, destination) == false) { - E backEdge = edgeFactory.get(); - mFlowGraph.addEdge(backEdge, destination, source, EdgeType.DIRECTED); - residualCapacityMap.put(backEdge, 0); - } - } + if (edgeCapacityTransformer.apply(currentEdge) == null) { + backEdges.add(currentEdge); + } else { + residualCapacityMap.remove(currentEdge); + } } - - @Override - protected void finalizeIterations() { - - for (E currentEdge : mFlowGraph.getEdges()) { - Number capacity = edgeCapacityTransformer.apply(currentEdge); - - Number residualCapacity = residualCapacityMap.get(currentEdge); - if (capacity != null) { - Integer flowValue = new Integer(capacity.intValue()-residualCapacity.intValue()); - this.edgeFlowMap.put(currentEdge, flowValue); - } - } - - Set backEdges = new HashSet(); - for (E currentEdge: mFlowGraph.getEdges()) { - - if (edgeCapacityTransformer.apply(currentEdge) == null) { - backEdges.add(currentEdge); - } else { - residualCapacityMap.remove(currentEdge); - } - } - for(E e : backEdges) { - mFlowGraph.removeEdge(e); - } + for (E e : backEdges) { + mFlowGraph.removeEdge(e); } + } - private void updateResidualCapacities() { + private void updateResidualCapacities() { - Number augmentingPathCapacity = parentCapacityMap.get(target); - mMaxFlow += augmentingPathCapacity.intValue(); - V currentVertex = target; - V parentVertex = 
null; - while ((parentVertex = parentMap.get(currentVertex)) != currentVertex) { - E currentEdge = mFlowGraph.findEdge(parentVertex, currentVertex); + Number augmentingPathCapacity = parentCapacityMap.get(target); + mMaxFlow += augmentingPathCapacity.intValue(); + V currentVertex = target; + V parentVertex = null; + while ((parentVertex = parentMap.get(currentVertex)) != currentVertex) { + E currentEdge = mFlowGraph.findEdge(parentVertex, currentVertex); - Number residualCapacity = residualCapacityMap.get(currentEdge); + Number residualCapacity = residualCapacityMap.get(currentEdge); - residualCapacity = residualCapacity.intValue() - augmentingPathCapacity.intValue(); - residualCapacityMap.put(currentEdge, residualCapacity); + residualCapacity = residualCapacity.intValue() - augmentingPathCapacity.intValue(); + residualCapacityMap.put(currentEdge, residualCapacity); - E backEdge = mFlowGraph.findEdge(currentVertex, parentVertex); - residualCapacity = residualCapacityMap.get(backEdge); - residualCapacity = residualCapacity.intValue() + augmentingPathCapacity.intValue(); - residualCapacityMap.put(backEdge, residualCapacity); - currentVertex = parentVertex; - } + E backEdge = mFlowGraph.findEdge(currentVertex, parentVertex); + residualCapacity = residualCapacityMap.get(backEdge); + residualCapacity = residualCapacity.intValue() + augmentingPathCapacity.intValue(); + residualCapacityMap.put(backEdge, residualCapacity); + currentVertex = parentVertex; } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java index eddcf968..94eea089 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/EvolvingGraphGenerator.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright 
(c) 2003, The JUNG Authors * * All rights reserved. * @@ -9,23 +9,24 @@ */ package edu.uci.ics.jung.algorithms.generators; - - /** * An interface for algorithms that generate graphs that evolve iteratively. + * * @author Scott White */ -public interface EvolvingGraphGenerator extends GraphGenerator { +public interface EvolvingGraphGenerator extends GraphGenerator { - /** - * Instructs the algorithm to evolve the graph N steps. - * @param numSteps number of steps to iterate from the current state - */ - void evolveGraph(int numSteps); + /** + * Instructs the algorithm to evolve the graph N steps. + * + * @param numSteps number of steps to iterate from the current state + */ + void evolveGraph(int numSteps); - /** - * Retrieves the total number of steps elapsed. - * @return number of elapsed steps - */ - int numIterations(); + /** + * Retrieves the total number of steps elapsed. + * + * @return number of elapsed steps + */ + int numIterations(); } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java index 6f1529f3..a072b511 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/GraphGenerator.java @@ -1,21 +1,20 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.generators; import com.google.common.base.Supplier; - import edu.uci.ics.jung.graph.Graph; /** * An interface for algorithms that generate graphs. + * * @author Scott White */ -public interface GraphGenerator extends Supplier>{ } - +public interface GraphGenerator extends Supplier> {} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java index cad79cd7..ad11bb8d 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/Lattice2DGenerator.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2009, The JUNG Authors + * Copyright (c) 2009, The JUNG Authors * * All rights reserved. * @@ -10,187 +10,179 @@ package edu.uci.ics.jung.algorithms.generators; -import java.util.ArrayList; -import java.util.List; - import com.google.common.base.Supplier; - import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.EdgeType; +import java.util.ArrayList; +import java.util.List; /** - * Simple generator of an m x n lattice where each vertex - * is incident with each of its neighbors (to the left, right, up, and down). - * May be toroidal, in which case the vertices on the edges are connected to - * their counterparts on the opposite edges as well. - * - *

If the graph Supplier supplied has a default edge type of {@code EdgeType.DIRECTED}, - * then edges will be created in both directions between adjacent vertices. - * + * Simple generator of an m x n lattice where each vertex is incident with each of its neighbors (to + * the left, right, up, and down). May be toroidal, in which case the vertices on the edges are + * connected to their counterparts on the opposite edges as well. + * + *

If the graph Supplier supplied has a default edge type of {@code EdgeType.DIRECTED}, then + * edges will be created in both directions between adjacent vertices. + * * @author Joshua O'Madadhain */ -public class Lattice2DGenerator implements GraphGenerator -{ - protected int row_count; - protected int col_count; - protected boolean is_toroidal; - protected boolean is_directed; - protected Supplier> graph_factory; - protected Supplier vertex_factory; - protected Supplier edge_factory; - private List v_array; - - /** - * Constructs a generator of square lattices of size {@code latticeSize} - * with the specified parameters. - * - * @param graph_factory used to create the {@code Graph} for the lattice - * @param vertex_factory used to create the lattice vertices - * @param edge_factory used to create the lattice edges - * @param latticeSize the number of rows and columns of the lattice - * @param isToroidal if true, the created lattice wraps from top to bottom and left to right - */ - public Lattice2DGenerator(Supplier> graph_factory, Supplier vertex_factory, - Supplier edge_factory, int latticeSize, boolean isToroidal) - { - this(graph_factory, vertex_factory, edge_factory, latticeSize, latticeSize, isToroidal); - } +public class Lattice2DGenerator implements GraphGenerator { + protected int row_count; + protected int col_count; + protected boolean is_toroidal; + protected boolean is_directed; + protected Supplier> graph_factory; + protected Supplier vertex_factory; + protected Supplier edge_factory; + private List v_array; - /** - * Creates a generator of {@code row_count} x {@code col_count} lattices - * with the specified parameters. 
- * - * @param graph_factory used to create the {@code Graph} for the lattice - * @param vertex_factory used to create the lattice vertices - * @param edge_factory used to create the lattice edges - * @param row_count the number of rows in the lattice - * @param col_count the number of columns in the lattice - * @param isToroidal if true, the created lattice wraps from top to bottom and left to right - */ - public Lattice2DGenerator(Supplier> graph_factory, Supplier vertex_factory, - Supplier edge_factory, int row_count, int col_count, boolean isToroidal) - { - if (row_count < 2 || col_count < 2) - { - throw new IllegalArgumentException("Row and column counts must each be at least 2."); - } - - this.row_count = row_count; - this.col_count = col_count; - this.is_toroidal = isToroidal; - this.graph_factory = graph_factory; - this.vertex_factory = vertex_factory; - this.edge_factory = edge_factory; - this.is_directed = (graph_factory.get().getDefaultEdgeType() == EdgeType.DIRECTED); - } - - /** - * Generates a graph based on the constructor-specified settings. - * - * @return the generated graph - */ - public Graph get() - { - int vertex_count = row_count * col_count; - Graph graph = graph_factory.get(); - v_array = new ArrayList(vertex_count); - for (int i = 0; i < vertex_count; i++) - { - V v = vertex_factory.get(); - graph.addVertex(v); - v_array.add(i, v); - } - - int start = is_toroidal ? 0 : 1; - int end_row = is_toroidal ? row_count : row_count - 1; - int end_col = is_toroidal ? col_count : col_count - 1; - - // fill in edges - // down - for (int i = 0; i < end_row; i++) - for (int j = 0; j < col_count; j++) - graph.addEdge(edge_factory.get(), getVertex(i,j), getVertex(i+1, j)); - // right - for (int i = 0; i < row_count; i++) - for (int j = 0; j < end_col; j++) - graph.addEdge(edge_factory.get(), getVertex(i,j), getVertex(i, j+1)); - - // if the graph is directed, fill in the edges going the other direction... 
- if (graph.getDefaultEdgeType() == EdgeType.DIRECTED) - { - // up - for (int i = start; i < row_count; i++) - for (int j = 0; j < col_count; j++) - graph.addEdge(edge_factory.get(), getVertex(i,j), getVertex(i-1, j)); - // left - for (int i = 0; i < row_count; i++) - for (int j = start; j < col_count; j++) - graph.addEdge(edge_factory.get(), getVertex(i,j), getVertex(i, j-1)); - } - - return graph; - } + /** + * Constructs a generator of square lattices of size {@code latticeSize} with the specified + * parameters. + * + * @param graph_factory used to create the {@code Graph} for the lattice + * @param vertex_factory used to create the lattice vertices + * @param edge_factory used to create the lattice edges + * @param latticeSize the number of rows and columns of the lattice + * @param isToroidal if true, the created lattice wraps from top to bottom and left to right + */ + public Lattice2DGenerator( + Supplier> graph_factory, + Supplier vertex_factory, + Supplier edge_factory, + int latticeSize, + boolean isToroidal) { + this(graph_factory, vertex_factory, edge_factory, latticeSize, latticeSize, isToroidal); + } - /** - * Returns the number of edges found in a lattice of this generator's specifications. - * (This is useful for subclasses that may modify the generated graphs to add more edges.) - * - * @return the number of edges that this generator will generate - */ - public int getGridEdgeCount() - { - int boundary_adjustment = (is_toroidal ? 0 : 1); - int vertical_edge_count = col_count * (row_count - boundary_adjustment); - int horizontal_edge_count = row_count * (col_count - boundary_adjustment); - - return (vertical_edge_count + horizontal_edge_count) * (is_directed ? 2 : 1); - } - - protected int getIndex(int i, int j) - { - return ((mod(i, row_count)) * col_count) + (mod(j, col_count)); + /** + * Creates a generator of {@code row_count} x {@code col_count} lattices with the specified + * parameters. 
+ * + * @param graph_factory used to create the {@code Graph} for the lattice + * @param vertex_factory used to create the lattice vertices + * @param edge_factory used to create the lattice edges + * @param row_count the number of rows in the lattice + * @param col_count the number of columns in the lattice + * @param isToroidal if true, the created lattice wraps from top to bottom and left to right + */ + public Lattice2DGenerator( + Supplier> graph_factory, + Supplier vertex_factory, + Supplier edge_factory, + int row_count, + int col_count, + boolean isToroidal) { + if (row_count < 2 || col_count < 2) { + throw new IllegalArgumentException("Row and column counts must each be at least 2."); } - protected int mod(int i, int modulus) - { - int i_mod = i % modulus; - return i_mod >= 0 ? i_mod : i_mod + modulus; - } - - /** - * @param i row index into the lattice - * @param j column index into the lattice - * @return the vertex at position ({@code i mod row_count, j mod col_count}) - */ - protected V getVertex(int i, int j) - { - return v_array.get(getIndex(i, j)); - } - - /** - * @param i row index into the lattice - * @return the {@code i}th vertex (counting row-wise) - */ - protected V getVertex(int i) - { - return v_array.get(i); - } - - /** - * @param i index of the vertex whose row we want - * @return the row in which the vertex with index {@code i} is found - */ - protected int getRow(int i) - { - return i / col_count; + this.row_count = row_count; + this.col_count = col_count; + this.is_toroidal = isToroidal; + this.graph_factory = graph_factory; + this.vertex_factory = vertex_factory; + this.edge_factory = edge_factory; + this.is_directed = (graph_factory.get().getDefaultEdgeType() == EdgeType.DIRECTED); + } + + /** + * Generates a graph based on the constructor-specified settings. 
+ * + * @return the generated graph + */ + public Graph get() { + int vertex_count = row_count * col_count; + Graph graph = graph_factory.get(); + v_array = new ArrayList(vertex_count); + for (int i = 0; i < vertex_count; i++) { + V v = vertex_factory.get(); + graph.addVertex(v); + v_array.add(i, v); } - - /** - * @param i index of the vertex whose column we want - * @return the column in which the vertex with index {@code i} is found - */ - protected int getCol(int i) - { - return i % col_count; + + int start = is_toroidal ? 0 : 1; + int end_row = is_toroidal ? row_count : row_count - 1; + int end_col = is_toroidal ? col_count : col_count - 1; + + // fill in edges + // down + for (int i = 0; i < end_row; i++) + for (int j = 0; j < col_count; j++) + graph.addEdge(edge_factory.get(), getVertex(i, j), getVertex(i + 1, j)); + // right + for (int i = 0; i < row_count; i++) + for (int j = 0; j < end_col; j++) + graph.addEdge(edge_factory.get(), getVertex(i, j), getVertex(i, j + 1)); + + // if the graph is directed, fill in the edges going the other direction... + if (graph.getDefaultEdgeType() == EdgeType.DIRECTED) { + // up + for (int i = start; i < row_count; i++) + for (int j = 0; j < col_count; j++) + graph.addEdge(edge_factory.get(), getVertex(i, j), getVertex(i - 1, j)); + // left + for (int i = 0; i < row_count; i++) + for (int j = start; j < col_count; j++) + graph.addEdge(edge_factory.get(), getVertex(i, j), getVertex(i, j - 1)); } + + return graph; + } + + /** + * Returns the number of edges found in a lattice of this generator's specifications. (This is + * useful for subclasses that may modify the generated graphs to add more edges.) + * + * @return the number of edges that this generator will generate + */ + public int getGridEdgeCount() { + int boundary_adjustment = (is_toroidal ? 
0 : 1); + int vertical_edge_count = col_count * (row_count - boundary_adjustment); + int horizontal_edge_count = row_count * (col_count - boundary_adjustment); + + return (vertical_edge_count + horizontal_edge_count) * (is_directed ? 2 : 1); + } + + protected int getIndex(int i, int j) { + return ((mod(i, row_count)) * col_count) + (mod(j, col_count)); + } + + protected int mod(int i, int modulus) { + int i_mod = i % modulus; + return i_mod >= 0 ? i_mod : i_mod + modulus; + } + + /** + * @param i row index into the lattice + * @param j column index into the lattice + * @return the vertex at position ({@code i mod row_count, j mod col_count}) + */ + protected V getVertex(int i, int j) { + return v_array.get(getIndex(i, j)); + } + + /** + * @param i row index into the lattice + * @return the {@code i}th vertex (counting row-wise) + */ + protected V getVertex(int i) { + return v_array.get(i); + } + + /** + * @param i index of the vertex whose row we want + * @return the row in which the vertex with index {@code i} is found + */ + protected int getRow(int i) { + return i / col_count; + } + + /** + * @param i index of the vertex whose column we want + * @return the column in which the vertex with index {@code i} is found + */ + protected int getCol(int i) { + return i % col_count; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java index a834a2cc..c2257dbf 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/BarabasiAlbertGenerator.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. 
* @@ -9,71 +9,59 @@ */ package edu.uci.ics.jung.algorithms.generators.random; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; - import com.google.common.base.Preconditions; import com.google.common.base.Supplier; - import edu.uci.ics.jung.algorithms.generators.EvolvingGraphGenerator; import edu.uci.ics.jung.algorithms.util.WeightedChoice; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.MultiGraph; import edu.uci.ics.jung.graph.util.EdgeType; import edu.uci.ics.jung.graph.util.Pair; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; /** - *

- * Simple evolving scale-free random graph generator. At each time step, a new - * vertex is created and is connected to existing vertices according to the - * principle of "preferential attachment", whereby vertices with higher degree - * have a higher probability of being selected for attachment. - * - *

- * At a given timestep, the probability p of creating an edge - * between an existing vertex v and the newly added vertex is - * + * Simple evolving scale-free random graph generator. At each time step, a new vertex is created and + * is connected to existing vertices according to the principle of "preferential attachment", + * whereby vertices with higher degree have a higher probability of being selected for attachment. + * + *

At a given timestep, the probability p of creating an edge between an existing + * vertex v and the newly added vertex is + * *

  * p = (degree(v) + 1) / (|E| + |V|);
  * 
- * - *

- * where |E| and |V| are, respectively, the number of - * edges and vertices currently in the network (counting neither the new vertex - * nor the other edges that are being attached to it). - * - *

- * Note that the formula specified in the original paper (cited below) was - * + * + *

where |E| and |V| are, respectively, the number of edges and + * vertices currently in the network (counting neither the new vertex nor the other edges that are + * being attached to it). + * + *

Note that the formula specified in the original paper (cited below) was + * *

  * p = degree(v) / |E|
  * 
- * - * - *

- * However, this would have meant that the probability of attachment for any - * existing isolated vertex would be 0. This version uses Lagrangian smoothing - * to give each existing vertex a positive attachment probability. - * - *

- * The graph created may be either directed or undirected (controlled by a - * constructor parameter); the default is undirected. If the graph is specified - * to be directed, then the edges added will be directed from the newly added - * vertex u to the existing vertex v, with probability proportional to the - * indegree of v (number of edges directed towards v). If the graph is specified - * to be undirected, then the (undirected) edges added will connect u to v, with - * probability proportional to the degree of v. - * - *

- * The parallel constructor parameter specifies whether parallel - * edges may be created. - * + * + *

However, this would have meant that the probability of attachment for any existing isolated + * vertex would be 0. This version uses Lagrangian smoothing to give each existing vertex a positive + * attachment probability. + * + *

The graph created may be either directed or undirected (controlled by a constructor + * parameter); the default is undirected. If the graph is specified to be directed, then the edges + * added will be directed from the newly added vertex u to the existing vertex v, with probability + * proportional to the indegree of v (number of edges directed towards v). If the graph is specified + * to be undirected, then the (undirected) edges added will connect u to v, with probability + * proportional to the degree of v. + * + *

The parallel constructor parameter specifies whether parallel edges may be + * created. + * * @see "A.-L. Barabasi and R. Albert, Emergence of scaling in random networks, Science 286, 1999." * @author Scott White * @author Joshua O'Madadhain @@ -81,213 +69,224 @@ * @author James Marchant */ public class BarabasiAlbertGenerator implements EvolvingGraphGenerator { - private Graph mGraph = null; - private int mNumEdgesToAttachPerStep; - private int mElapsedTimeSteps; - private Random mRandom; - protected List vertex_index; - protected int init_vertices; - protected Map index_vertex; - protected Supplier> graphFactory; - protected Supplier vertexFactory; - protected Supplier edgeFactory; + private Graph mGraph = null; + private int mNumEdgesToAttachPerStep; + private int mElapsedTimeSteps; + private Random mRandom; + protected List vertex_index; + protected int init_vertices; + protected Map index_vertex; + protected Supplier> graphFactory; + protected Supplier vertexFactory; + protected Supplier edgeFactory; - /** - * Constructs a new instance of the generator. 
- * - * @param graphFactory - * factory for graphs of the appropriate type - * @param vertexFactory - * factory for vertices of the appropriate type - * @param edgeFactory - * factory for edges of the appropriate type - * @param init_vertices - * number of unconnected 'seed' vertices that the graph should - * start with - * @param numEdgesToAttach - * the number of edges that should be attached from the new - * vertex to pre-existing vertices at each time step - * @param seed - * random number seed - * @param seedVertices - * storage for the seed vertices that this graph creates - */ - // TODO: seedVertices is a bizarre way of exposing that information, - // refactor - public BarabasiAlbertGenerator(Supplier> graphFactory, Supplier vertexFactory, - Supplier edgeFactory, int init_vertices, int numEdgesToAttach, int seed, Set seedVertices) { - Preconditions.checkArgument(init_vertices > 0, - "Number of initial unconnected 'seed' vertices must be positive"); - Preconditions.checkArgument(numEdgesToAttach > 0, - "Number of edges to attach at each time step must be positive"); - Preconditions.checkArgument(numEdgesToAttach <= init_vertices, - "Number of edges to attach at each time step must less than or equal to the number of initial vertices"); + /** + * Constructs a new instance of the generator. 
+ * + * @param graphFactory factory for graphs of the appropriate type + * @param vertexFactory factory for vertices of the appropriate type + * @param edgeFactory factory for edges of the appropriate type + * @param init_vertices number of unconnected 'seed' vertices that the graph should start with + * @param numEdgesToAttach the number of edges that should be attached from the new vertex to + * pre-existing vertices at each time step + * @param seed random number seed + * @param seedVertices storage for the seed vertices that this graph creates + */ + // TODO: seedVertices is a bizarre way of exposing that information, + // refactor + public BarabasiAlbertGenerator( + Supplier> graphFactory, + Supplier vertexFactory, + Supplier edgeFactory, + int init_vertices, + int numEdgesToAttach, + int seed, + Set seedVertices) { + Preconditions.checkArgument( + init_vertices > 0, "Number of initial unconnected 'seed' vertices must be positive"); + Preconditions.checkArgument( + numEdgesToAttach > 0, "Number of edges to attach at each time step must be positive"); + Preconditions.checkArgument( + numEdgesToAttach <= init_vertices, + "Number of edges to attach at each time step must less than or equal to the number of initial vertices"); - mNumEdgesToAttachPerStep = numEdgesToAttach; - mRandom = new Random(seed); - this.graphFactory = graphFactory; - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - this.init_vertices = init_vertices; - initialize(seedVertices); - } + mNumEdgesToAttachPerStep = numEdgesToAttach; + mRandom = new Random(seed); + this.graphFactory = graphFactory; + this.vertexFactory = vertexFactory; + this.edgeFactory = edgeFactory; + this.init_vertices = init_vertices; + initialize(seedVertices); + } - /** - * Constructs a new instance of the generator, whose output will be an - * undirected graph, and which will use the current time as a seed for the - * random number generation. 
- * - * @param graphFactory - * factory for graphs of the appropriate type - * @param vertexFactory - * factory for vertices of the appropriate type - * @param edgeFactory - * factory for edges of the appropriate type - * @param init_vertices - * number of vertices that the graph should start with - * @param numEdgesToAttach - * the number of edges that should be attached from the new - * vertex to pre-existing vertices at each time step - * @param seedVertices - * storage for the seed vertices that this graph creates - */ - public BarabasiAlbertGenerator(Supplier> graphFactory, Supplier vertexFactory, - Supplier edgeFactory, int init_vertices, int numEdgesToAttach, Set seedVertices) { - this(graphFactory, vertexFactory, edgeFactory, init_vertices, numEdgesToAttach, - (int) System.currentTimeMillis(), seedVertices); - } + /** + * Constructs a new instance of the generator, whose output will be an undirected graph, and which + * will use the current time as a seed for the random number generation. 
+ * + * @param graphFactory factory for graphs of the appropriate type + * @param vertexFactory factory for vertices of the appropriate type + * @param edgeFactory factory for edges of the appropriate type + * @param init_vertices number of vertices that the graph should start with + * @param numEdgesToAttach the number of edges that should be attached from the new vertex to + * pre-existing vertices at each time step + * @param seedVertices storage for the seed vertices that this graph creates + */ + public BarabasiAlbertGenerator( + Supplier> graphFactory, + Supplier vertexFactory, + Supplier edgeFactory, + int init_vertices, + int numEdgesToAttach, + Set seedVertices) { + this( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + numEdgesToAttach, + (int) System.currentTimeMillis(), + seedVertices); + } - private void initialize(Set seedVertices) { - mGraph = graphFactory.get(); + private void initialize(Set seedVertices) { + mGraph = graphFactory.get(); - vertex_index = new ArrayList(2 * init_vertices); - index_vertex = new HashMap(2 * init_vertices); - for (int i = 0; i < init_vertices; i++) { - V v = vertexFactory.get(); - mGraph.addVertex(v); - vertex_index.add(v); - index_vertex.put(v, i); - seedVertices.add(v); - } + vertex_index = new ArrayList(2 * init_vertices); + index_vertex = new HashMap(2 * init_vertices); + for (int i = 0; i < init_vertices; i++) { + V v = vertexFactory.get(); + mGraph.addVertex(v); + vertex_index.add(v); + index_vertex.put(v, i); + seedVertices.add(v); + } - mElapsedTimeSteps = 0; - } + mElapsedTimeSteps = 0; + } - public void evolveGraph(int numTimeSteps) { + public void evolveGraph(int numTimeSteps) { - for (int i = 0; i < numTimeSteps; i++) { - evolveGraph(); - mElapsedTimeSteps++; - } - } + for (int i = 0; i < numTimeSteps; i++) { + evolveGraph(); + mElapsedTimeSteps++; + } + } - private void evolveGraph() { - Collection preexistingNodes = mGraph.getVertices(); - V newVertex = vertexFactory.get(); + private void 
evolveGraph() { + Collection preexistingNodes = mGraph.getVertices(); + V newVertex = vertexFactory.get(); - mGraph.addVertex(newVertex); + mGraph.addVertex(newVertex); - // generate and store the new edges; don't add them to the graph - // yet because we don't want to bias the degree calculations - // (all new edges in a timestep should be added in parallel) - Set> added_pairs = createRandomEdges(preexistingNodes, newVertex, mNumEdgesToAttachPerStep); + // generate and store the new edges; don't add them to the graph + // yet because we don't want to bias the degree calculations + // (all new edges in a timestep should be added in parallel) + Set> added_pairs = + createRandomEdges(preexistingNodes, newVertex, mNumEdgesToAttachPerStep); - for (Pair pair : added_pairs) { - V v1 = pair.getFirst(); - V v2 = pair.getSecond(); - if (mGraph.getDefaultEdgeType() != EdgeType.UNDIRECTED || !mGraph.isNeighbor(v1, v2)) - mGraph.addEdge(edgeFactory.get(), pair); - } - // now that we're done attaching edges to this new vertex, - // add it to the index - vertex_index.add(newVertex); - index_vertex.put(newVertex, new Integer(vertex_index.size() - 1)); - } + for (Pair pair : added_pairs) { + V v1 = pair.getFirst(); + V v2 = pair.getSecond(); + if (mGraph.getDefaultEdgeType() != EdgeType.UNDIRECTED || !mGraph.isNeighbor(v1, v2)) + mGraph.addEdge(edgeFactory.get(), pair); + } + // now that we're done attaching edges to this new vertex, + // add it to the index + vertex_index.add(newVertex); + index_vertex.put(newVertex, new Integer(vertex_index.size() - 1)); + } - private Set> createRandomEdges(Collection preexistingNodes, V newVertex, int numEdges) { - Set> added_pairs = new HashSet>(numEdges * 3); + private Set> createRandomEdges( + Collection preexistingNodes, V newVertex, int numEdges) { + Set> added_pairs = new HashSet>(numEdges * 3); - /* Generate the probability distribution */ - Map item_weights = new HashMap(); - for (V v : preexistingNodes) { - /* - * as preexistingNodes 
is a view onto the vertex set, it will - * contain the new node. In the construction of Barabasi-Albert, - * there should be no self-loops. - */ - if (v == newVertex) - continue; + /* Generate the probability distribution */ + Map item_weights = new HashMap(); + for (V v : preexistingNodes) { + /* + * as preexistingNodes is a view onto the vertex set, it will + * contain the new node. In the construction of Barabasi-Albert, + * there should be no self-loops. + */ + if (v == newVertex) { + continue; + } - double degree; - double denominator; + double degree; + double denominator; - /* - * Attachment probability is dependent on whether the graph is - * directed or undirected. - * - * Subtract 1 from numVertices because we don't want to count - * newVertex (which has already been added to the graph, but not to - * vertex_index). - */ - if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED) { - degree = mGraph.degree(v); - denominator = (2 * mGraph.getEdgeCount()) + mGraph.getVertexCount() - 1; - } else { - degree = mGraph.inDegree(v); - denominator = mGraph.getEdgeCount() + mGraph.getVertexCount() - 1; - } + /* + * Attachment probability is dependent on whether the graph is + * directed or undirected. + * + * Subtract 1 from numVertices because we don't want to count + * newVertex (which has already been added to the graph, but not to + * vertex_index). 
+ */ + if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED) { + degree = mGraph.degree(v); + denominator = (2 * mGraph.getEdgeCount()) + mGraph.getVertexCount() - 1; + } else { + degree = mGraph.inDegree(v); + denominator = mGraph.getEdgeCount() + mGraph.getVertexCount() - 1; + } - double prob = (degree + 1) / denominator; - item_weights.put(v, prob); - } - WeightedChoice nodeProbabilities = new WeightedChoice(item_weights, mRandom); + double prob = (degree + 1) / denominator; + item_weights.put(v, prob); + } + WeightedChoice nodeProbabilities = new WeightedChoice(item_weights, mRandom); - for (int i = 0; i < numEdges; i++) { - createRandomEdge(preexistingNodes, newVertex, added_pairs, nodeProbabilities); - } + for (int i = 0; i < numEdges; i++) { + createRandomEdge(preexistingNodes, newVertex, added_pairs, nodeProbabilities); + } - return added_pairs; - } + return added_pairs; + } - private void createRandomEdge(Collection preexistingNodes, V newVertex, Set> added_pairs, - WeightedChoice weightedProbabilities) { - V attach_point; - boolean created_edge = false; - Pair endpoints; + private void createRandomEdge( + Collection preexistingNodes, + V newVertex, + Set> added_pairs, + WeightedChoice weightedProbabilities) { + V attach_point; + boolean created_edge = false; + Pair endpoints; - do { - attach_point = weightedProbabilities.nextItem(); + do { + attach_point = weightedProbabilities.nextItem(); - endpoints = new Pair(newVertex, attach_point); + endpoints = new Pair(newVertex, attach_point); - /* - * If parallel edges are not allowed, skip attach_point if - * already exists; note that because of - * the way the new node's edges are added, we only need to check the - * list of candidate edges for duplicates. 
- */ - if (!(mGraph instanceof MultiGraph)) { - if (added_pairs.contains(endpoints)) - continue; - if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED - && added_pairs.contains(new Pair(attach_point, newVertex))) - continue; - } - created_edge = true; - } while (!created_edge); + /* + * If parallel edges are not allowed, skip attach_point if + * already exists; note that because of + * the way the new node's edges are added, we only need to check the + * list of candidate edges for duplicates. + */ + if (!(mGraph instanceof MultiGraph)) { + if (added_pairs.contains(endpoints)) { + continue; + } + if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED + && added_pairs.contains(new Pair(attach_point, newVertex))) { + continue; + } + } + created_edge = true; + } while (!created_edge); - added_pairs.add(endpoints); + added_pairs.add(endpoints); - if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED) { - added_pairs.add(new Pair(attach_point, newVertex)); - } - } + if (mGraph.getDefaultEdgeType() == EdgeType.UNDIRECTED) { + added_pairs.add(new Pair(attach_point, newVertex)); + } + } - public int numIterations() { - return mElapsedTimeSteps; - } + public int numIterations() { + return mElapsedTimeSteps; + } - public Graph get() { - return mGraph; - } + public Graph get() { + return mGraph; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java index d02cdba2..ec5e1f60 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/EppsteinPowerLawGenerator.java @@ -1,128 +1,134 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. 
-* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.generators.random; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - import com.google.common.base.Supplier; - import edu.uci.ics.jung.algorithms.generators.GraphGenerator; import edu.uci.ics.jung.graph.Graph; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; /** * Graph generator that generates undirected graphs with power-law degree distributions. + * * @author Scott White * @see "A Steady State Model for Graph Power Law by David Eppstein and Joseph Wang" */ -public class EppsteinPowerLawGenerator implements GraphGenerator { - private int mNumVertices; - private int mNumEdges; - private int mNumIterations; - private double mMaxDegree; - private Random mRandom; - private Supplier> graphFactory; - private Supplier vertexFactory; - private Supplier edgeFactory; - - /** - * Creates an instance with the specified factories and specifications. 
- * @param graphFactory the Supplier to use to generate the graph - * @param vertexFactory the Supplier to use to create vertices - * @param edgeFactory the Supplier to use to create edges - * @param numVertices the number of vertices for the generated graph - * @param numEdges the number of edges the generated graph will have, should be Theta(numVertices) - * @param r the number of iterations to use; the larger the value the better the graph's degree - * distribution will approximate a power-law - */ - public EppsteinPowerLawGenerator(Supplier> graphFactory, - Supplier vertexFactory, Supplier edgeFactory, - int numVertices, int numEdges, int r) { - this.graphFactory = graphFactory; - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - mNumVertices = numVertices; - mNumEdges = numEdges; - mNumIterations = r; - mRandom = new Random(); +public class EppsteinPowerLawGenerator implements GraphGenerator { + private int mNumVertices; + private int mNumEdges; + private int mNumIterations; + private double mMaxDegree; + private Random mRandom; + private Supplier> graphFactory; + private Supplier vertexFactory; + private Supplier edgeFactory; + + /** + * Creates an instance with the specified factories and specifications. 
+ * + * @param graphFactory the Supplier to use to generate the graph + * @param vertexFactory the Supplier to use to create vertices + * @param edgeFactory the Supplier to use to create edges + * @param numVertices the number of vertices for the generated graph + * @param numEdges the number of edges the generated graph will have, should be Theta(numVertices) + * @param r the number of iterations to use; the larger the value the better the graph's degree + * distribution will approximate a power-law + */ + public EppsteinPowerLawGenerator( + Supplier> graphFactory, + Supplier vertexFactory, + Supplier edgeFactory, + int numVertices, + int numEdges, + int r) { + this.graphFactory = graphFactory; + this.vertexFactory = vertexFactory; + this.edgeFactory = edgeFactory; + mNumVertices = numVertices; + mNumEdges = numEdges; + mNumIterations = r; + mRandom = new Random(); + } + + protected Graph initializeGraph() { + Graph graph = null; + graph = graphFactory.get(); + for (int i = 0; i < mNumVertices; i++) { + graph.addVertex(vertexFactory.get()); + } + List vertices = new ArrayList(graph.getVertices()); + while (graph.getEdgeCount() < mNumEdges) { + V u = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); + V v = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); + if (!graph.isSuccessor(v, u)) { + graph.addEdge(edgeFactory.get(), u, v); + } } - protected Graph initializeGraph() { - Graph graph = null; - graph = graphFactory.get(); - for(int i=0; i vertices = new ArrayList(graph.getVertices()); - while (graph.getEdgeCount() < mNumEdges) { - V u = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - V v = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - if (!graph.isSuccessor(v,u)) { - graph.addEdge(edgeFactory.get(), u, v); - } - } - - double maxDegree = 0; - for (V v : graph.getVertices()) { - maxDegree = Math.max(graph.degree(v),maxDegree); - } - mMaxDegree = maxDegree; //(maxDegree+1)*(maxDegree)/2; - - return graph; + double 
maxDegree = 0; + for (V v : graph.getVertices()) { + maxDegree = Math.max(graph.degree(v), maxDegree); } + mMaxDegree = maxDegree; //(maxDegree+1)*(maxDegree)/2; - /** - * Generates a graph whose degree distribution approximates a power-law. - * @return the generated graph - */ - public Graph get() { - Graph graph = initializeGraph(); + return graph; + } - List vertices = new ArrayList(graph.getVertices()); - for (int rIdx = 0; rIdx < mNumIterations; rIdx++) { + /** + * Generates a graph whose degree distribution approximates a power-law. + * + * @return the generated graph + */ + public Graph get() { + Graph graph = initializeGraph(); - V v = null; - int degree = 0; - do { - v = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - degree = graph.degree(v); + List vertices = new ArrayList(graph.getVertices()); + for (int rIdx = 0; rIdx < mNumIterations; rIdx++) { - } while (degree == 0); + V v = null; + int degree = 0; + do { + v = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); + degree = graph.degree(v); - List edges = new ArrayList(graph.getIncidentEdges(v)); - E randomExistingEdge = edges.get((int) (mRandom.nextDouble()*degree)); + } while (degree == 0); - // FIXME: look at email thread on a more efficient RNG for arbitrary distributions - - V x = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); - V y = null; - do { - y = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); + List edges = new ArrayList(graph.getIncidentEdges(v)); + E randomExistingEdge = edges.get((int) (mRandom.nextDouble() * degree)); - } while (mRandom.nextDouble() > ((graph.degree(y)+1)/mMaxDegree)); + // FIXME: look at email thread on a more efficient RNG for arbitrary distributions - if (!graph.isSuccessor(y,x) && x != y) { - graph.removeEdge(randomExistingEdge); - graph.addEdge(edgeFactory.get(), x, y); - } - } + V x = vertices.get((int) (mRandom.nextDouble() * mNumVertices)); + V y = null; + do { + y = vertices.get((int) (mRandom.nextDouble() * 
mNumVertices)); - return graph; - } + } while (mRandom.nextDouble() > ((graph.degree(y) + 1) / mMaxDegree)); - /** - * Sets the seed for the random number generator. - * @param seed input to the random number generator. - */ - public void setSeed(long seed) { - mRandom.setSeed(seed); + if (!graph.isSuccessor(y, x) && x != y) { + graph.removeEdge(randomExistingEdge); + graph.addEdge(edgeFactory.get(), x, y); + } } + + return graph; + } + + /** + * Sets the seed for the random number generator. + * + * @param seed input to the random number generator. + */ + public void setSeed(long seed) { + mRandom.setSeed(seed); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java index 68f146fd..d26281d8 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/ErdosRenyiGenerator.java @@ -1,105 +1,93 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.generators.random; -import java.util.ArrayList; -import java.util.List; -import java.util.Random; - import com.google.common.base.Supplier; - import edu.uci.ics.jung.algorithms.generators.GraphGenerator; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.UndirectedGraph; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; /** - * Generates a random graph using the Erdos-Renyi binomial model - * (each pair of vertices is connected with probability p). - * - * @author William Giordano, Scott White, Joshua O'Madadhain + * Generates a random graph using the Erdos-Renyi binomial model (each pair of vertices is connected + * with probability p). + * + * @author William Giordano, Scott White, Joshua O'Madadhain */ -public class ErdosRenyiGenerator implements GraphGenerator { - private int mNumVertices; - private double mEdgeConnectionProbability; - private Random mRandom; - Supplier> graphFactory; - Supplier vertexFactory; - Supplier edgeFactory; - - /** - * - * @param graphFactory factory for graphs of the appropriate type - * @param vertexFactory factory for vertices of the appropriate type - * @param edgeFactory factory for edges of the appropriate type - * @param numVertices number of vertices graph should have - * @param p Connection's probability between 2 vertices - */ - public ErdosRenyiGenerator(Supplier> graphFactory, - Supplier vertexFactory, Supplier edgeFactory, - int numVertices,double p) - { - if (numVertices <= 0) { - throw new IllegalArgumentException("A positive # of vertices must be specified."); - } - mNumVertices = numVertices; - if (p < 0 || p > 1) { - throw new IllegalArgumentException("p must be between 0 and 1."); - } - this.graphFactory = graphFactory; - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - mEdgeConnectionProbability = p; - mRandom = new Random(); - } - - /** - * Returns a graph in which each pair of vertices is connected by - * 
an undirected edge with the probability specified by the constructor. - */ - public Graph get() { - UndirectedGraph g = graphFactory.get(); - for(int i=0; i list = new ArrayList(g.getVertices()); - - for (int i = 0; i < mNumVertices-1; i++) { - V v_i = list.get(i); - for (int j = i+1; j < mNumVertices; j++) { - V v_j = list.get(j); - if (mRandom.nextDouble() < mEdgeConnectionProbability) { - g.addEdge(edgeFactory.get(), v_i, v_j); - } - } - } - return g; +public class ErdosRenyiGenerator implements GraphGenerator { + private int mNumVertices; + private double mEdgeConnectionProbability; + private Random mRandom; + Supplier> graphFactory; + Supplier vertexFactory; + Supplier edgeFactory; + + /** + * @param graphFactory factory for graphs of the appropriate type + * @param vertexFactory factory for vertices of the appropriate type + * @param edgeFactory factory for edges of the appropriate type + * @param numVertices number of vertices graph should have + * @param p Connection's probability between 2 vertices + */ + public ErdosRenyiGenerator( + Supplier> graphFactory, + Supplier vertexFactory, + Supplier edgeFactory, + int numVertices, + double p) { + if (numVertices <= 0) { + throw new IllegalArgumentException("A positive # of vertices must be specified."); } - - /** - * Sets the seed of the internal random number generator to {@code seed}. - * Enables consistent behavior. - * - * @param seed the seed to use for the internal random number generator - */ - public void setSeed(long seed) { - mRandom.setSeed(seed); + mNumVertices = numVertices; + if (p < 0 || p > 1) { + throw new IllegalArgumentException("p must be between 0 and 1."); + } + this.graphFactory = graphFactory; + this.vertexFactory = vertexFactory; + this.edgeFactory = edgeFactory; + mEdgeConnectionProbability = p; + mRandom = new Random(); + } + + /** + * Returns a graph in which each pair of vertices is connected by an undirected edge with the + * probability specified by the constructor. 
+ */ + public Graph get() { + UndirectedGraph g = graphFactory.get(); + for (int i = 0; i < mNumVertices; i++) { + g.addVertex(vertexFactory.get()); + } + List list = new ArrayList(g.getVertices()); + + for (int i = 0; i < mNumVertices - 1; i++) { + V v_i = list.get(i); + for (int j = i + 1; j < mNumVertices; j++) { + V v_j = list.get(j); + if (mRandom.nextDouble() < mEdgeConnectionProbability) { + g.addEdge(edgeFactory.get(), v_i, v_j); + } + } } + return g; + } + + /** + * Sets the seed of the internal random number generator to {@code seed}. Enables consistent + * behavior. + * + * @param seed the seed to use for the internal random number generator + */ + public void setSeed(long seed) { + mRandom.setSeed(seed); + } } - - - - - - - - - - - diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java index 98b0d490..515f3c51 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/KleinbergSmallWorldGenerator.java @@ -2,191 +2,192 @@ package edu.uci.ics.jung.algorithms.generators.random; /* -* Copyright (c) 2009, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ - -import java.util.HashMap; -import java.util.Map; -import java.util.Random; + * Copyright (c) 2009, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ import com.google.common.base.Supplier; - import edu.uci.ics.jung.algorithms.generators.Lattice2DGenerator; import edu.uci.ics.jung.algorithms.util.WeightedChoice; import edu.uci.ics.jung.graph.Graph; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; /** - * Graph generator that produces a random graph with small world properties. - * The underlying model is an mxn (optionally toroidal) lattice. Each node u - * has four local connections, one to each of its neighbors, and - * in addition 1+ long range connections to some node v where v is chosen randomly according to - * probability proportional to d^-alpha where d is the lattice distance between u and v and alpha - * is the clustering exponent. - * + * Graph generator that produces a random graph with small world properties. The underlying model is + * an mxn (optionally toroidal) lattice. Each node u has four local connections, one to each of its + * neighbors, and in addition 1+ long range connections to some node v where v is chosen randomly + * according to probability proportional to d^-alpha where d is the lattice distance between u and v + * and alpha is the clustering exponent. + * * @see "Navigation in a small world J. Kleinberg, Nature 406(2000), 845." * @author Joshua O'Madadhain */ public class KleinbergSmallWorldGenerator extends Lattice2DGenerator { - private double clustering_exponent; - private Random random; - private int num_connections = 1; - - /** - * Creates an instance with the specified parameters, whose underlying lattice is (a) of size - * {@code latticeSize} x {@code latticeSize}, and (b) toroidal. 
- * @param graphFactory factory for graphs of the appropriate type - * @param vertexFactory factory for vertices of the appropriate type - * @param edgeFactory factory for edges of the appropriate type - * @param latticeSize the number of rows and columns of the underlying lattice - * @param clusteringExponent the clustering exponent - */ - public KleinbergSmallWorldGenerator(Supplier> graphFactory, - Supplier vertexFactory, Supplier edgeFactory, - int latticeSize, double clusteringExponent) - { - this(graphFactory, vertexFactory, edgeFactory, latticeSize, latticeSize, clusteringExponent); - } + private double clustering_exponent; + private Random random; + private int num_connections = 1; - /** - * Creates an instance with the specified parameters, whose underlying lattice is toroidal. - * @param graphFactory factory for graphs of the appropriate type - * @param vertexFactory factory for vertices of the appropriate type - * @param edgeFactory factory for edges of the appropriate type - * @param row_count number of rows of the underlying lattice - * @param col_count number of columns of the underlying lattice - * @param clusteringExponent the clustering exponent - */ - public KleinbergSmallWorldGenerator(Supplier> graphFactory, - Supplier vertexFactory, Supplier edgeFactory, - int row_count, int col_count, double clusteringExponent) - { - super(graphFactory, vertexFactory, edgeFactory, row_count, col_count, true); - clustering_exponent = clusteringExponent; - initialize(); - } + /** + * Creates an instance with the specified parameters, whose underlying lattice is (a) of size + * {@code latticeSize} x {@code latticeSize}, and (b) toroidal. 
+ * + * @param graphFactory factory for graphs of the appropriate type + * @param vertexFactory factory for vertices of the appropriate type + * @param edgeFactory factory for edges of the appropriate type + * @param latticeSize the number of rows and columns of the underlying lattice + * @param clusteringExponent the clustering exponent + */ + public KleinbergSmallWorldGenerator( + Supplier> graphFactory, + Supplier vertexFactory, + Supplier edgeFactory, + int latticeSize, + double clusteringExponent) { + this(graphFactory, vertexFactory, edgeFactory, latticeSize, latticeSize, clusteringExponent); + } - /** - * Creates an instance with the specified parameters. - * @param graphFactory factory for graphs of the appropriate type - * @param vertexFactory factory for vertices of the appropriate type - * @param edgeFactory factory for edges of the appropriate type - * @param row_count number of rows of the underlying lattice - * @param col_count number of columns of the underlying lattice - * @param clusteringExponent the clustering exponent - * @param isToroidal whether the underlying lattice is toroidal - */ - public KleinbergSmallWorldGenerator(Supplier> graphFactory, - Supplier vertexFactory, Supplier edgeFactory, - int row_count, int col_count, double clusteringExponent, boolean isToroidal) - { - super(graphFactory, vertexFactory, edgeFactory, row_count, col_count, isToroidal); - clustering_exponent = clusteringExponent; - initialize(); - } + /** + * Creates an instance with the specified parameters, whose underlying lattice is toroidal. 
+ * + * @param graphFactory factory for graphs of the appropriate type + * @param vertexFactory factory for vertices of the appropriate type + * @param edgeFactory factory for edges of the appropriate type + * @param row_count number of rows of the underlying lattice + * @param col_count number of columns of the underlying lattice + * @param clusteringExponent the clustering exponent + */ + public KleinbergSmallWorldGenerator( + Supplier> graphFactory, + Supplier vertexFactory, + Supplier edgeFactory, + int row_count, + int col_count, + double clusteringExponent) { + super(graphFactory, vertexFactory, edgeFactory, row_count, col_count, true); + clustering_exponent = clusteringExponent; + initialize(); + } - private void initialize() - { - this.random = new Random(); - } - - /** - * Sets the {@code Random} instance used by this instance. Useful for - * unit testing. - * @param random the {@code Random} instance for this class to use - */ - public void setRandom(Random random) - { - this.random = random; - } - - /** - * Sets the seed of the internal random number generator. May be used to provide repeatable - * experiments. - * @param seed the random seed that this class's random number generator is to use - */ - public void setRandomSeed(long seed) - { - random.setSeed(seed); - } + /** + * Creates an instance with the specified parameters. 
+ * + * @param graphFactory factory for graphs of the appropriate type + * @param vertexFactory factory for vertices of the appropriate type + * @param edgeFactory factory for edges of the appropriate type + * @param row_count number of rows of the underlying lattice + * @param col_count number of columns of the underlying lattice + * @param clusteringExponent the clustering exponent + * @param isToroidal whether the underlying lattice is toroidal + */ + public KleinbergSmallWorldGenerator( + Supplier> graphFactory, + Supplier vertexFactory, + Supplier edgeFactory, + int row_count, + int col_count, + double clusteringExponent, + boolean isToroidal) { + super(graphFactory, vertexFactory, edgeFactory, row_count, col_count, isToroidal); + clustering_exponent = clusteringExponent; + initialize(); + } - /** - * Sets the number of new 'small-world' connections (outgoing edges) to be added to each vertex. - * @param num_connections the number of outgoing small-world edges to add to each vertex - */ - public void setConnectionCount(int num_connections) - { - if (num_connections <= 0) - { - throw new IllegalArgumentException("Number of new connections per vertex must be >= 1"); - } - this.num_connections = num_connections; - } + private void initialize() { + this.random = new Random(); + } + + /** + * Sets the {@code Random} instance used by this instance. Useful for unit testing. + * + * @param random the {@code Random} instance for this class to use + */ + public void setRandom(Random random) { + this.random = random; + } + + /** + * Sets the seed of the internal random number generator. May be used to provide repeatable + * experiments. 
+ * + * @param seed the random seed that this class's random number generator is to use + */ + public void setRandomSeed(long seed) { + random.setSeed(seed); + } - /** - * @return the number of new 'small-world' connections that will originate at each vertex - */ - public int getConnectionCount() - { - return this.num_connections; + /** + * Sets the number of new 'small-world' connections (outgoing edges) to be added to each vertex. + * + * @param num_connections the number of outgoing small-world edges to add to each vertex + */ + public void setConnectionCount(int num_connections) { + if (num_connections <= 0) { + throw new IllegalArgumentException("Number of new connections per vertex must be >= 1"); } - - /** - * Generates a random small world network according to the parameters given - * @return a random small world graph - */ - @Override - public Graph get() - { - Graph graph = super.get(); - - // TODO: For toroidal graphs, we can make this more clever by pre-creating the WeightedChoice object - // and using the output as an offset to the current vertex location. - WeightedChoice weighted_choice; - - // Add long range connections - for (int i = 0; i < graph.getVertexCount(); i++) - { - V source = getVertex(i); - int row = getRow(i); - int col = getCol(i); - int row_offset = row < row_count/2 ? -row_count : row_count; - int col_offset = col < col_count/2 ? 
-col_count : col_count; - - Map vertex_weights = new HashMap(); - for (int j = 0; j < row_count; j++) - { - for (int k = 0; k < col_count; k++) - { - if (j == row && k == col) - continue; - int v_dist = Math.abs(j - row); - int h_dist = Math.abs(k - col); - if (is_toroidal) - { - v_dist = Math.min(v_dist, Math.abs(j - row+row_offset)); - h_dist = Math.min(h_dist, Math.abs(k - col+col_offset)); - } - int distance = v_dist + h_dist; - if (distance < 2) - continue; - else - vertex_weights.put(getVertex(j,k), (float)Math.pow(distance, -clustering_exponent)); - } - } - - for (int j = 0; j < this.num_connections; j++) { - weighted_choice = new WeightedChoice(vertex_weights, random); - V target = weighted_choice.nextItem(); - graph.addEdge(edge_factory.get(), source, target); - } + this.num_connections = num_connections; + } + + /** @return the number of new 'small-world' connections that will originate at each vertex */ + public int getConnectionCount() { + return this.num_connections; + } + + /** + * Generates a random small world network according to the parameters given + * + * @return a random small world graph + */ + @Override + public Graph get() { + Graph graph = super.get(); + + // TODO: For toroidal graphs, we can make this more clever by pre-creating the WeightedChoice object + // and using the output as an offset to the current vertex location. + WeightedChoice weighted_choice; + + // Add long range connections + for (int i = 0; i < graph.getVertexCount(); i++) { + V source = getVertex(i); + int row = getRow(i); + int col = getCol(i); + int row_offset = row < row_count / 2 ? -row_count : row_count; + int col_offset = col < col_count / 2 ? 
-col_count : col_count; + + Map vertex_weights = new HashMap(); + for (int j = 0; j < row_count; j++) { + for (int k = 0; k < col_count; k++) { + if (j == row && k == col) { + continue; + } + int v_dist = Math.abs(j - row); + int h_dist = Math.abs(k - col); + if (is_toroidal) { + v_dist = Math.min(v_dist, Math.abs(j - row + row_offset)); + h_dist = Math.min(h_dist, Math.abs(k - col + col_offset)); + } + int distance = v_dist + h_dist; + if (distance < 2) { + continue; + } else { + vertex_weights.put(getVertex(j, k), (float) Math.pow(distance, -clustering_exponent)); + } } + } - return graph; + for (int j = 0; j < this.num_connections; j++) { + weighted_choice = new WeightedChoice(vertex_weights, random); + V target = weighted_choice.nextItem(); + graph.addEdge(edge_factory.get(), source, target); + } } + + return graph; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java index 6a932ca5..43f883ef 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/generators/random/MixedRandomGraphGenerator.java @@ -1,80 +1,82 @@ /* * Copyright (c) 2003, The JUNG Authors * All rights reserved. - * + * * This software is open-source under the BSD license; see either "license.txt" * or https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
*/ /* * Created on Jul 2, 2003 - * + * */ package edu.uci.ics.jung.algorithms.generators.random; -import java.util.Map; -import java.util.Set; - import com.google.common.base.Supplier; - import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.EdgeType; +import java.util.Map; +import java.util.Set; /** - * Generates a mixed-mode random graph (with random edge weights) based on the output of - * BarabasiAlbertGenerator. - * Primarily intended for providing a heterogeneous sample graph for visualization testing, etc. + * Generates a mixed-mode random graph (with random edge weights) based on the output of + * BarabasiAlbertGenerator. Primarily intended for providing a heterogeneous sample graph for + * visualization testing, etc. */ public class MixedRandomGraphGenerator { - /** - * Returns a random mixed-mode graph. Starts with a randomly generated - * Barabasi-Albert (preferential attachment) generator - * (4 initial vertices, 3 edges added at each step, and num_vertices - 4 evolution steps). - * Then takes the resultant graph, replaces random undirected edges with directed - * edges, and assigns random weights to each edge. 
- * @param the vertex type - * @param the edge type - * @param graphFactory factory for graphs of the appropriate type - * @param vertexFactory factory for vertices of the appropriate type - * @param edgeFactory factory for edges of the appropriate type - * @param edge_weights storage for the edge weights that this generator creates - * @param num_vertices number of vertices to generate - * @param seedVertices storage for the seed vertices that this generator creates - * @return the generated graph - */ - public static Graph generateMixedRandomGraph( - Supplier> graphFactory, - Supplier vertexFactory, - Supplier edgeFactory, - Map edge_weights, - int num_vertices, Set seedVertices) - { - int seed = (int)(Math.random() * 10000); - BarabasiAlbertGenerator bag = - new BarabasiAlbertGenerator(graphFactory, vertexFactory, edgeFactory, - 4, 3, //false, parallel, - seed, seedVertices); - bag.evolveGraph(num_vertices - 4); - Graph ug = bag.get(); + /** + * Returns a random mixed-mode graph. Starts with a randomly generated Barabasi-Albert + * (preferential attachment) generator (4 initial vertices, 3 edges added at each step, and + * num_vertices - 4 evolution steps). Then takes the resultant graph, replaces random undirected + * edges with directed edges, and assigns random weights to each edge. 
+ * + * @param the vertex type + * @param the edge type + * @param graphFactory factory for graphs of the appropriate type + * @param vertexFactory factory for vertices of the appropriate type + * @param edgeFactory factory for edges of the appropriate type + * @param edge_weights storage for the edge weights that this generator creates + * @param num_vertices number of vertices to generate + * @param seedVertices storage for the seed vertices that this generator creates + * @return the generated graph + */ + public static Graph generateMixedRandomGraph( + Supplier> graphFactory, + Supplier vertexFactory, + Supplier edgeFactory, + Map edge_weights, + int num_vertices, + Set seedVertices) { + int seed = (int) (Math.random() * 10000); + BarabasiAlbertGenerator bag = + new BarabasiAlbertGenerator( + graphFactory, + vertexFactory, + edgeFactory, + 4, + 3, //false, parallel, + seed, + seedVertices); + bag.evolveGraph(num_vertices - 4); + Graph ug = bag.get(); + + Graph g = graphFactory.get(); + for (V v : ug.getVertices()) { + g.addVertex(v); + } - Graph g = graphFactory.get(); - for(V v : ug.getVertices()) { - g.addVertex(v); - } - - // randomly replace some of the edges by directed edges to - // get a mixed-mode graph, add random weights - - for(E e : ug.getEdges()) { - V v1 = ug.getEndpoints(e).getFirst(); - V v2 = ug.getEndpoints(e).getSecond(); + // randomly replace some of the edges by directed edges to + // get a mixed-mode graph, add random weights - E me = edgeFactory.get(); - g.addEdge(me, v1, v2, Math.random() < .5 ? EdgeType.DIRECTED : EdgeType.UNDIRECTED); - edge_weights.put(me, Math.random()); - } - - return g; + for (E e : ug.getEdges()) { + V v1 = ug.getEndpoints(e).getFirst(); + V v2 = ug.getEndpoints(e).getSecond(); + + E me = edgeFactory.get(); + g.addEdge(me, v1, v2, Math.random() < .5 ? 
EdgeType.DIRECTED : EdgeType.UNDIRECTED); + edge_weights.put(me, Math.random()); } - + + return g; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java index a9aa61ce..1fcacfcf 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/AbstractRanker.java @@ -1,14 +1,19 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.importance; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import edu.uci.ics.jung.algorithms.util.IterativeProcess; +import edu.uci.ics.jung.graph.Graph; import java.text.DecimalFormat; import java.text.Format; import java.util.ArrayList; @@ -18,375 +23,368 @@ import java.util.List; import java.util.Map; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; - -import edu.uci.ics.jung.algorithms.util.IterativeProcess; -import edu.uci.ics.jung.graph.Graph; - /** - * Abstract class for algorithms that rank nodes or edges by some "importance" metric. Provides a common set of - * services such as: + * Abstract class for algorithms that rank nodes or edges by some "importance" metric. Provides a + * common set of services such as: + * *

    - *
  • storing rank scores
  • - *
  • getters and setters for rank scores
  • - *
  • computing default edge weights
  • - *
  • normalizing default or user-provided edge transition weights
  • - *
  • normalizing rank scores
  • - *
  • automatic cleanup of decorations
  • - *
  • creation of Ranking list
  • - *
  • print rankings in sorted order by rank
  • + *
  • storing rank scores + *
  • getters and setters for rank scores + *
  • computing default edge weights + *
  • normalizing default or user-provided edge transition weights + *
  • normalizing rank scores + *
  • automatic cleanup of decorations + *
  • creation of Ranking list + *
  • print rankings in sorted order by rank *
- *

- * By default, all rank scores are removed from the vertices (or edges) being ranked. + * + *

By default, all rank scores are removed from the vertices (or edges) being ranked. + * * @author Scott White */ -public abstract class AbstractRanker extends IterativeProcess { - private Graph mGraph; - private List> mRankings; - private boolean mRemoveRankScoresOnFinalize; - private boolean mRankNodes; - private boolean mRankEdges; - private boolean mNormalizeRankings; - protected LoadingCache> vertexRankScores - = CacheBuilder.newBuilder().build(new CacheLoader>() { - public Map load(Object o) { - return new HashMap(); - } - }); - protected LoadingCache> edgeRankScores - = CacheBuilder.newBuilder().build(new CacheLoader>() { - public Map load(Object o) { - return new HashMap(); - } - }); - - private Map edgeWeights = new HashMap(); - - protected void initialize(Graph graph, boolean isNodeRanker, - boolean isEdgeRanker) { - if (!isNodeRanker && !isEdgeRanker) - throw new IllegalArgumentException("Must rank edges, vertices, or both"); - mGraph = graph; - mRemoveRankScoresOnFinalize = true; - mNormalizeRankings = true; - mRankNodes = isNodeRanker; - mRankEdges = isEdgeRanker; - } - - /** - * @return all rankScores - */ - public Map> getVertexRankScores() { - return vertexRankScores.asMap(); - } - - public Map> getEdgeRankScores() { - return edgeRankScores.asMap(); - } - - /** - * @param key the rank score key whose scores are to be retrieved - * @return the rank scores for the specified key - */ - public Map getVertexRankScores(Object key) { - return vertexRankScores.getUnchecked(key); - } - - public Map getEdgeRankScores(Object key) { - return edgeRankScores.getUnchecked(key); - } - - protected Collection getVertices() { - return mGraph.getVertices(); - } - - protected int getVertexCount() { - return mGraph.getVertexCount(); - } - - protected Graph getGraph() { - return mGraph; - } - - @Override - public void reset() { - } - - /** - * @return true if this ranker ranks nodes, and - * false otherwise. 
- */ - public boolean isRankingNodes() { - return mRankNodes; - } - - /** - * @return true if this ranker ranks edges, and - * false otherwise. - */ - public boolean isRankingEdges() { - return mRankEdges; - } - - /** - * Instructs the ranker whether or not it should remove the rank scores from the nodes (or edges) once the ranks - * have been computed. - * @param removeRankScoresOnFinalize true if the rank scores are to be removed, false otherwise - */ - public void setRemoveRankScoresOnFinalize(boolean removeRankScoresOnFinalize) { - this.mRemoveRankScoresOnFinalize = removeRankScoresOnFinalize; - } - - protected void onFinalize(Object e) {} - - /** - * The user datum key used to store the rank score. - * @return the key - */ - abstract public Object getRankScoreKey(); - - - @SuppressWarnings("unchecked") - @Override - protected void finalizeIterations() { - List> sortedRankings = new ArrayList>(); - - int id = 1; - if (mRankNodes) { - for (V currentVertex : getVertices()) { - Ranking ranking = new Ranking(id,getVertexRankScore(currentVertex),currentVertex); - sortedRankings.add(ranking); - if (mRemoveRankScoresOnFinalize) { - this.vertexRankScores.getUnchecked(getRankScoreKey()).remove(currentVertex); +public abstract class AbstractRanker extends IterativeProcess { + private Graph mGraph; + private List> mRankings; + private boolean mRemoveRankScoresOnFinalize; + private boolean mRankNodes; + private boolean mRankEdges; + private boolean mNormalizeRankings; + protected LoadingCache> vertexRankScores = + CacheBuilder.newBuilder() + .build( + new CacheLoader>() { + public Map load(Object o) { + return new HashMap(); } - id++; - onFinalize(currentVertex); - } - } - if (mRankEdges) { - for (E currentEdge : mGraph.getEdges()) { - - Ranking ranking = new Ranking(id,getEdgeRankScore(currentEdge),currentEdge); - sortedRankings.add(ranking); - if (mRemoveRankScoresOnFinalize) { - this.edgeRankScores.getUnchecked(getRankScoreKey()).remove(currentEdge); + }); + protected 
LoadingCache> edgeRankScores = + CacheBuilder.newBuilder() + .build( + new CacheLoader>() { + public Map load(Object o) { + return new HashMap(); } - id++; - onFinalize(currentEdge); - } + }); + + private Map edgeWeights = new HashMap(); + + protected void initialize(Graph graph, boolean isNodeRanker, boolean isEdgeRanker) { + if (!isNodeRanker && !isEdgeRanker) + throw new IllegalArgumentException("Must rank edges, vertices, or both"); + mGraph = graph; + mRemoveRankScoresOnFinalize = true; + mNormalizeRankings = true; + mRankNodes = isNodeRanker; + mRankEdges = isEdgeRanker; + } + + /** @return all rankScores */ + public Map> getVertexRankScores() { + return vertexRankScores.asMap(); + } + + public Map> getEdgeRankScores() { + return edgeRankScores.asMap(); + } + + /** + * @param key the rank score key whose scores are to be retrieved + * @return the rank scores for the specified key + */ + public Map getVertexRankScores(Object key) { + return vertexRankScores.getUnchecked(key); + } + + public Map getEdgeRankScores(Object key) { + return edgeRankScores.getUnchecked(key); + } + + protected Collection getVertices() { + return mGraph.getVertices(); + } + + protected int getVertexCount() { + return mGraph.getVertexCount(); + } + + protected Graph getGraph() { + return mGraph; + } + + @Override + public void reset() {} + + /** @return true if this ranker ranks nodes, and false otherwise. */ + public boolean isRankingNodes() { + return mRankNodes; + } + + /** @return true if this ranker ranks edges, and false otherwise. */ + public boolean isRankingEdges() { + return mRankEdges; + } + + /** + * Instructs the ranker whether or not it should remove the rank scores from the nodes (or edges) + * once the ranks have been computed. 
+ * + * @param removeRankScoresOnFinalize true if the rank scores are to be removed, + * false otherwise + */ + public void setRemoveRankScoresOnFinalize(boolean removeRankScoresOnFinalize) { + this.mRemoveRankScoresOnFinalize = removeRankScoresOnFinalize; + } + + protected void onFinalize(Object e) {} + + /** + * The user datum key used to store the rank score. + * + * @return the key + */ + public abstract Object getRankScoreKey(); + + @SuppressWarnings("unchecked") + @Override + protected void finalizeIterations() { + List> sortedRankings = new ArrayList>(); + + int id = 1; + if (mRankNodes) { + for (V currentVertex : getVertices()) { + Ranking ranking = new Ranking(id, getVertexRankScore(currentVertex), currentVertex); + sortedRankings.add(ranking); + if (mRemoveRankScoresOnFinalize) { + this.vertexRankScores.getUnchecked(getRankScoreKey()).remove(currentVertex); } - - mRankings = sortedRankings; - Collections.sort(mRankings); - } - - /** - * Retrieves the list of ranking instances in descending sorted order by rank score - * If the algorithm is ranking edges, the instances will be of type EdgeRanking, otherwise - * if the algorithm is ranking nodes the instances will be of type NodeRanking - * @return the list of rankings - */ - public List> getRankings() { - return mRankings; + id++; + onFinalize(currentVertex); + } } + if (mRankEdges) { + for (E currentEdge : mGraph.getEdges()) { - /** - * Return a list of the top k rank scores. 
- * @param topKRankings the value of k to use - * @return list of rank scores - */ - public List getRankScores(int topKRankings) { - List scores = new ArrayList(); - int count=1; - for (Ranking currentRanking : getRankings()) { - if (count > topKRankings) { - return scores; - } - scores.add(currentRanking.rankScore); - count++; + Ranking ranking = new Ranking(id, getEdgeRankScore(currentEdge), currentEdge); + sortedRankings.add(ranking); + if (mRemoveRankScoresOnFinalize) { + this.edgeRankScores.getUnchecked(getRankScoreKey()).remove(currentEdge); } + id++; + onFinalize(currentEdge); + } + } + mRankings = sortedRankings; + Collections.sort(mRankings); + } + + /** + * Retrieves the list of ranking instances in descending sorted order by rank score If the + * algorithm is ranking edges, the instances will be of type EdgeRanking, otherwise + * if the algorithm is ranking nodes the instances will be of type NodeRanking + * + * @return the list of rankings + */ + public List> getRankings() { + return mRankings; + } + + /** + * Return a list of the top k rank scores. + * + * @param topKRankings the value of k to use + * @return list of rank scores + */ + public List getRankScores(int topKRankings) { + List scores = new ArrayList(); + int count = 1; + for (Ranking currentRanking : getRankings()) { + if (count > topKRankings) { return scores; + } + scores.add(currentRanking.rankScore); + count++; } - /** - * Given a node, returns the corresponding rank score. This is a default - * implementation of getRankScore which assumes the decorations are of type MutableDouble. - * This method only returns legal values if setRemoveRankScoresOnFinalize(false) was called - * prior to evaluate(). - * - * @param v the node whose rank score is to be returned. 
- * @return the rank score value - */ - public double getVertexRankScore(V v) { - Number rankScore = vertexRankScores.getUnchecked(getRankScoreKey()).get(v); - if (rankScore != null) { - return rankScore.doubleValue(); - } else { - throw new RuntimeException("setRemoveRankScoresOnFinalize(false) must be called before evaluate()."); - } + return scores; + } + + /** + * Given a node, returns the corresponding rank score. This is a default implementation of + * getRankScore which assumes the decorations are of type MutableDouble. This method only returns + * legal values if setRemoveRankScoresOnFinalize(false) was called prior to + * evaluate(). + * + * @param v the node whose rank score is to be returned. + * @return the rank score value + */ + public double getVertexRankScore(V v) { + Number rankScore = vertexRankScores.getUnchecked(getRankScoreKey()).get(v); + if (rankScore != null) { + return rankScore.doubleValue(); + } else { + throw new RuntimeException( + "setRemoveRankScoresOnFinalize(false) must be called before evaluate()."); } - - public double getVertexRankScore(V v, Object key) { - return vertexRankScores.getUnchecked(key).get(v).doubleValue(); + } + + public double getVertexRankScore(V v, Object key) { + return vertexRankScores.getUnchecked(key).get(v).doubleValue(); + } + + public double getEdgeRankScore(E e) { + Number rankScore = edgeRankScores.getUnchecked(getRankScoreKey()).get(e); + if (rankScore != null) { + return rankScore.doubleValue(); + } else { + throw new RuntimeException( + "setRemoveRankScoresOnFinalize(false) must be called before evaluate()."); } + } - public double getEdgeRankScore(E e) { - Number rankScore = edgeRankScores.getUnchecked(getRankScoreKey()).get(e); - if (rankScore != null) { - return rankScore.doubleValue(); - } else { - throw new RuntimeException("setRemoveRankScoresOnFinalize(false) must be called before evaluate()."); - } - } - - public double getEdgeRankScore(E e, Object key) { - return 
edgeRankScores.getUnchecked(key).get(e).doubleValue(); - } + public double getEdgeRankScore(E e, Object key) { + return edgeRankScores.getUnchecked(key).get(e).doubleValue(); + } - protected void setVertexRankScore(V v, double rankValue, Object key) { - vertexRankScores.getUnchecked(key).put(v, rankValue); - } + protected void setVertexRankScore(V v, double rankValue, Object key) { + vertexRankScores.getUnchecked(key).put(v, rankValue); + } - protected void setEdgeRankScore(E e, double rankValue, Object key) { - edgeRankScores.getUnchecked(key).put(e, rankValue); - } + protected void setEdgeRankScore(E e, double rankValue, Object key) { + edgeRankScores.getUnchecked(key).put(e, rankValue); + } - protected void setVertexRankScore(V v, double rankValue) { - setVertexRankScore(v,rankValue, getRankScoreKey()); - } + protected void setVertexRankScore(V v, double rankValue) { + setVertexRankScore(v, rankValue, getRankScoreKey()); + } - protected void setEdgeRankScore(E e, double rankValue) { - setEdgeRankScore(e, rankValue, getRankScoreKey()); - } + protected void setEdgeRankScore(E e, double rankValue) { + setEdgeRankScore(e, rankValue, getRankScoreKey()); + } - protected void removeVertexRankScore(V v, Object key) { - vertexRankScores.getUnchecked(key).remove(v); - } + protected void removeVertexRankScore(V v, Object key) { + vertexRankScores.getUnchecked(key).remove(v); + } - protected void removeEdgeRankScore(E e, Object key) { - edgeRankScores.getUnchecked(key).remove(e); - } + protected void removeEdgeRankScore(E e, Object key) { + edgeRankScores.getUnchecked(key).remove(e); + } - protected void removeVertexRankScore(V v) { - vertexRankScores.getUnchecked(getRankScoreKey()).remove(v); - } + protected void removeVertexRankScore(V v) { + vertexRankScores.getUnchecked(getRankScoreKey()).remove(v); + } - protected void removeEdgeRankScore(E e) { - edgeRankScores.getUnchecked(getRankScoreKey()).remove(e); - } + protected void removeEdgeRankScore(E e) { + 
edgeRankScores.getUnchecked(getRankScoreKey()).remove(e); + } - protected double getEdgeWeight(E e) { - return edgeWeights.get(e).doubleValue(); - } + protected double getEdgeWeight(E e) { + return edgeWeights.get(e).doubleValue(); + } - protected void setEdgeWeight(E e, double weight) { - edgeWeights.put(e, weight); - } - - public void setEdgeWeights(Map edgeWeights) { - this.edgeWeights = edgeWeights; - } + protected void setEdgeWeight(E e, double weight) { + edgeWeights.put(e, weight); + } - /** - * @return the edgeWeights - */ - public Map getEdgeWeights() { - return edgeWeights; - } + public void setEdgeWeights(Map edgeWeights) { + this.edgeWeights = edgeWeights; + } - protected void assignDefaultEdgeTransitionWeights() { + /** @return the edgeWeights */ + public Map getEdgeWeights() { + return edgeWeights; + } - for (V currentVertex : getVertices()) { + protected void assignDefaultEdgeTransitionWeights() { - Collection outgoingEdges = mGraph.getOutEdges(currentVertex); + for (V currentVertex : getVertices()) { - double numOutEdges = outgoingEdges.size(); - for (E currentEdge : outgoingEdges) { - setEdgeWeight(currentEdge,1.0/numOutEdges); - } - } + Collection outgoingEdges = mGraph.getOutEdges(currentVertex); + + double numOutEdges = outgoingEdges.size(); + for (E currentEdge : outgoingEdges) { + setEdgeWeight(currentEdge, 1.0 / numOutEdges); + } } + } - protected void normalizeEdgeTransitionWeights() { + protected void normalizeEdgeTransitionWeights() { - for (V currentVertex : getVertices()) { + for (V currentVertex : getVertices()) { - Collection outgoingEdges = mGraph.getOutEdges(currentVertex); + Collection outgoingEdges = mGraph.getOutEdges(currentVertex); - double totalEdgeWeight = 0; - for (E currentEdge : outgoingEdges) { - totalEdgeWeight += getEdgeWeight(currentEdge); - } + double totalEdgeWeight = 0; + for (E currentEdge : outgoingEdges) { + totalEdgeWeight += getEdgeWeight(currentEdge); + } - for (E currentEdge : outgoingEdges) { - 
setEdgeWeight(currentEdge,getEdgeWeight(currentEdge)/totalEdgeWeight); - } - } + for (E currentEdge : outgoingEdges) { + setEdgeWeight(currentEdge, getEdgeWeight(currentEdge) / totalEdgeWeight); + } } + } - protected void normalizeRankings() { - if (!mNormalizeRankings) { - return; - } - double totalWeight = 0; - - for (V currentVertex : getVertices()) { - totalWeight += getVertexRankScore(currentVertex); - } - - for (V currentVertex : getVertices()) { - setVertexRankScore(currentVertex,getVertexRankScore(currentVertex)/totalWeight); - } + protected void normalizeRankings() { + if (!mNormalizeRankings) { + return; } + double totalWeight = 0; - /** - * Print the rankings to standard out in descending order of rank score - * @param verbose if true, include information about the actual rank order as well as - * the original position of the vertex before it was ranked - * @param printScore if true, include the actual value of the rank score - */ - public void printRankings(boolean verbose,boolean printScore) { - double total = 0; - Format formatter = new DecimalFormat("#0.#######"); - int rank = 1; - - for (Ranking currentRanking : getRankings()) { - double rankScore = currentRanking.rankScore; - if (verbose) { - System.out.print("Rank " + rank + ": "); - if (printScore) { - System.out.print(formatter.format(rankScore)); - } - System.out.print("\tVertex Id: " + currentRanking.originalPos); - System.out.print(" (" + currentRanking.getRanked() + ")"); - System.out.println(); - } else { - System.out.print(rank + "\t"); - if (printScore) { - System.out.print(formatter.format(rankScore)); - } - System.out.println("\t" + currentRanking.originalPos); - - } - total += rankScore; - rank++; - } + for (V currentVertex : getVertices()) { + totalWeight += getVertexRankScore(currentVertex); + } - if (verbose) { - System.out.println("Total: " + formatter.format(total)); - } + for (V currentVertex : getVertices()) { + setVertexRankScore(currentVertex, getVertexRankScore(currentVertex) 
/ totalWeight); + } + } + + /** + * Print the rankings to standard out in descending order of rank score + * + * @param verbose if true, include information about the actual rank order as well as + * the original position of the vertex before it was ranked + * @param printScore if true, include the actual value of the rank score + */ + public void printRankings(boolean verbose, boolean printScore) { + double total = 0; + Format formatter = new DecimalFormat("#0.#######"); + int rank = 1; + + for (Ranking currentRanking : getRankings()) { + double rankScore = currentRanking.rankScore; + if (verbose) { + System.out.print("Rank " + rank + ": "); + if (printScore) { + System.out.print(formatter.format(rankScore)); + } + System.out.print("\tVertex Id: " + currentRanking.originalPos); + System.out.print(" (" + currentRanking.getRanked() + ")"); + System.out.println(); + } else { + System.out.print(rank + "\t"); + if (printScore) { + System.out.print(formatter.format(rankScore)); + } + System.out.println("\t" + currentRanking.originalPos); + } + total += rankScore; + rank++; } - /** - * Allows the user to specify whether or not s/he wants the rankings to be normalized. - * In some cases, this will have no effect since the algorithm doesn't allow normalization - * as an option - * @param normalizeRankings {@code true} iff the ranking are to be normalized - */ - public void setNormalizeRankings(boolean normalizeRankings) { - mNormalizeRankings = normalizeRankings; + if (verbose) { + System.out.println("Total: " + formatter.format(total)); } + } + + /** + * Allows the user to specify whether or not s/he wants the rankings to be normalized. 
In some + * cases, this will have no effect since the algorithm doesn't allow normalization as an option + * + * @param normalizeRankings {@code true} iff the ranking are to be normalized + */ + public void setNormalizeRankings(boolean normalizeRankings) { + mNormalizeRankings = normalizeRankings; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java index e2e776be..e7d0c166 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/BetweennessCentrality.java @@ -1,14 +1,16 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.importance; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.UndirectedGraph; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -18,16 +20,15 @@ import java.util.Queue; import java.util.Stack; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; - /** - * Computes betweenness centrality for each vertex and edge in the graph. The result is that each vertex - * and edge has a UserData element of type MutableDouble whose key is 'centrality.BetweennessCentrality'. - * Note: Many social network researchers like to normalize the betweenness values by dividing the values by - * (n-1)(n-2)/2. The values given here are unnormalized.

+ * Computes betweenness centrality for each vertex and edge in the graph. The result is that each + * vertex and edge has a UserData element of type MutableDouble whose key is + * 'centrality.BetweennessCentrality'. Note: Many social network researchers like to normalize the + * betweenness values by dividing the values by (n-1)(n-2)/2. The values given here are + * unnormalized. + * + *

A simple example of usage is: * - * A simple example of usage is: *

  * BetweennessCentrality ranker = new BetweennessCentrality(someGraph);
  * ranker.evaluate();
@@ -35,155 +36,155 @@
  * 
* * Running time is: O(n^2 + nm). - * @see "Ulrik Brandes: A Faster Algorithm for Betweenness Centrality. Journal of Mathematical Sociology 25(2):163-177, 2001." + * + * @see "Ulrik Brandes: A Faster Algorithm for Betweenness Centrality. Journal of Mathematical + * Sociology 25(2):163-177, 2001." * @author Scott White * @author Tom Nelson converted to jung2 */ +public class BetweennessCentrality extends AbstractRanker { -public class BetweennessCentrality extends AbstractRanker { + public static final String CENTRALITY = "centrality.BetweennessCentrality"; - public static final String CENTRALITY = "centrality.BetweennessCentrality"; + /** + * Constructor which initializes the algorithm + * + * @param g the graph whose nodes are to be analyzed + */ + public BetweennessCentrality(Graph g) { + initialize(g, true, true); + } - /** - * Constructor which initializes the algorithm - * @param g the graph whose nodes are to be analyzed - */ - public BetweennessCentrality(Graph g) { - initialize(g, true, true); - } + public BetweennessCentrality(Graph g, boolean rankNodes) { + initialize(g, rankNodes, true); + } - public BetweennessCentrality(Graph g, boolean rankNodes) { - initialize(g, rankNodes, true); - } + public BetweennessCentrality(Graph g, boolean rankNodes, boolean rankEdges) { + initialize(g, rankNodes, rankEdges); + } - public BetweennessCentrality(Graph g, boolean rankNodes, boolean rankEdges) - { - initialize(g, rankNodes, rankEdges); - } - - protected void computeBetweenness(Graph graph) { - - Map decorator = new HashMap(); - Map bcVertexDecorator = - vertexRankScores.getUnchecked(getRankScoreKey()); - bcVertexDecorator.clear(); - Map bcEdgeDecorator = - edgeRankScores.getUnchecked(getRankScoreKey()); - bcEdgeDecorator.clear(); - - Collection vertices = graph.getVertices(); - - for (V s : vertices) { - - initializeData(graph,decorator); - - decorator.get(s).numSPs = 1; - decorator.get(s).distance = 0; - - Stack stack = new Stack(); - Queue queue = new 
LinkedList(); - queue.add(s); - - while (!queue.isEmpty()) { - V v = queue.remove(); - stack.push(v); - - for(V w : getGraph().getSuccessors(v)) { - - if (decorator.get(w).distance < 0) { - queue.add(w); - decorator.get(w).distance = decorator.get(v).distance + 1; - } - - if (decorator.get(w).distance == decorator.get(v).distance + 1) { - decorator.get(w).numSPs += decorator.get(v).numSPs; - decorator.get(w).predecessors.add(v); - } - } - } - - while (!stack.isEmpty()) { - V w = stack.pop(); - - for (V v : decorator.get(w).predecessors) { - - double partialDependency = (decorator.get(v).numSPs / decorator.get(w).numSPs); - partialDependency *= (1.0 + decorator.get(w).dependency); - decorator.get(v).dependency += partialDependency; - E currentEdge = getGraph().findEdge(v, w); - double edgeValue = bcEdgeDecorator.get(currentEdge).doubleValue(); - edgeValue += partialDependency; - bcEdgeDecorator.put(currentEdge, edgeValue); - } - if (w != s) { - double bcValue = bcVertexDecorator.get(w).doubleValue(); - bcValue += decorator.get(w).dependency; - bcVertexDecorator.put(w, bcValue); - } - } - } + protected void computeBetweenness(Graph graph) { - if(graph instanceof UndirectedGraph) { - for (V v : vertices) { - double bcValue = bcVertexDecorator.get(v).doubleValue(); - bcValue /= 2.0; - bcVertexDecorator.put(v, bcValue); - } - for (E e : graph.getEdges()) { - double bcValue = bcEdgeDecorator.get(e).doubleValue(); - bcValue /= 2.0; - bcEdgeDecorator.put(e, bcValue); - } - } + Map decorator = new HashMap(); + Map bcVertexDecorator = vertexRankScores.getUnchecked(getRankScoreKey()); + bcVertexDecorator.clear(); + Map bcEdgeDecorator = edgeRankScores.getUnchecked(getRankScoreKey()); + bcEdgeDecorator.clear(); - for (V vertex : vertices) { - decorator.remove(vertex); - } - } + Collection vertices = graph.getVertices(); + + for (V s : vertices) { + + initializeData(graph, decorator); + + decorator.get(s).numSPs = 1; + decorator.get(s).distance = 0; - private void 
initializeData(Graph g, Map decorator) { - for (V vertex : g.getVertices()) { + Stack stack = new Stack(); + Queue queue = new LinkedList(); + queue.add(s); - Map bcVertexDecorator = vertexRankScores.getUnchecked(getRankScoreKey()); - if(bcVertexDecorator.containsKey(vertex) == false) { - bcVertexDecorator.put(vertex, 0.0); - } - decorator.put(vertex, new BetweennessData()); + while (!queue.isEmpty()) { + V v = queue.remove(); + stack.push(v); + + for (V w : getGraph().getSuccessors(v)) { + + if (decorator.get(w).distance < 0) { + queue.add(w); + decorator.get(w).distance = decorator.get(v).distance + 1; + } + + if (decorator.get(w).distance == decorator.get(v).distance + 1) { + decorator.get(w).numSPs += decorator.get(v).numSPs; + decorator.get(w).predecessors.add(v); + } } - for (E e : g.getEdges()) { + } + + while (!stack.isEmpty()) { + V w = stack.pop(); - Map bcEdgeDecorator = edgeRankScores.getUnchecked(getRankScoreKey()); - if(bcEdgeDecorator.containsKey(e) == false) { - bcEdgeDecorator.put(e, 0.0); - } + for (V v : decorator.get(w).predecessors) { + + double partialDependency = (decorator.get(v).numSPs / decorator.get(w).numSPs); + partialDependency *= (1.0 + decorator.get(w).dependency); + decorator.get(v).dependency += partialDependency; + E currentEdge = getGraph().findEdge(v, w); + double edgeValue = bcEdgeDecorator.get(currentEdge).doubleValue(); + edgeValue += partialDependency; + bcEdgeDecorator.put(currentEdge, edgeValue); + } + if (w != s) { + double bcValue = bcVertexDecorator.get(w).doubleValue(); + bcValue += decorator.get(w).dependency; + bcVertexDecorator.put(w, bcValue); } + } } - - /** - * the user datum key used to store the rank scores - * @return the key - */ - @Override - public String getRankScoreKey() { - return CENTRALITY; + + if (graph instanceof UndirectedGraph) { + for (V v : vertices) { + double bcValue = bcVertexDecorator.get(v).doubleValue(); + bcValue /= 2.0; + bcVertexDecorator.put(v, bcValue); + } + for (E e : 
graph.getEdges()) { + double bcValue = bcEdgeDecorator.get(e).doubleValue(); + bcValue /= 2.0; + bcEdgeDecorator.put(e, bcValue); + } } - @Override - public void step() { - computeBetweenness(getGraph()); + for (V vertex : vertices) { + decorator.remove(vertex); } + } - class BetweennessData { - double distance; - double numSPs; - List predecessors; - double dependency; - - BetweennessData() { - distance = -1; - numSPs = 0; - predecessors = new ArrayList(); - dependency = 0; - } + private void initializeData(Graph g, Map decorator) { + for (V vertex : g.getVertices()) { + + Map bcVertexDecorator = vertexRankScores.getUnchecked(getRankScoreKey()); + if (bcVertexDecorator.containsKey(vertex) == false) { + bcVertexDecorator.put(vertex, 0.0); + } + decorator.put(vertex, new BetweennessData()); + } + for (E e : g.getEdges()) { + + Map bcEdgeDecorator = edgeRankScores.getUnchecked(getRankScoreKey()); + if (bcEdgeDecorator.containsKey(e) == false) { + bcEdgeDecorator.put(e, 0.0); + } + } + } + + /** + * the user datum key used to store the rank scores + * + * @return the key + */ + @Override + public String getRankScoreKey() { + return CENTRALITY; + } + + @Override + public void step() { + computeBetweenness(getGraph()); + } + + class BetweennessData { + double distance; + double numSPs; + List predecessors; + double dependency; + + BetweennessData() { + distance = -1; + numSPs = 0; + predecessors = new ArrayList(); + dependency = 0; } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java index 717c30b8..cda1e1b5 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/KStepMarkov.java @@ -1,135 +1,142 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. 
-* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.importance; +import edu.uci.ics.jung.graph.DirectedGraph; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.Set; -import edu.uci.ics.jung.graph.DirectedGraph; - - /** - * Algorithm variant of PageRankWithPriors that computes the importance of a node based upon taking fixed-length random - * walks out from the root set and then computing the stationary probability of being at each node. Specifically, it computes - * the relative probability that the markov chain will spend at any particular node, given that it start in the root - * set and ends after k steps. - *

- * A simple example of usage is: + * Algorithm variant of PageRankWithPriors that computes the importance of a node based + * upon taking fixed-length random walks out from the root set and then computing the stationary + * probability of being at each node. Specifically, it computes the relative probability that the + * markov chain will spend at any particular node, given that it start in the root set and ends + * after k steps. + * + *

A simple example of usage is: + * *

  * KStepMarkov ranker = new KStepMarkov(someGraph,rootSet,6,null);
  * ranker.evaluate();
  * ranker.printRankings();
  * 
+ * *

* * @author Scott White * @author Tom Nelson - adapter to jung2 - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" + * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, + * 2003" */ -public class KStepMarkov extends RelativeAuthorityRanker { - public final static String RANK_SCORE = "jung.algorithms.importance.KStepMarkovExperimental.RankScore"; - private final static String CURRENT_RANK = "jung.algorithms.importance.KStepMarkovExperimental.CurrentRank"; - private int mNumSteps; - HashMap mPreviousRankingsMap; - - /** - * Construct the algorihm instance and initializes the algorithm. - * @param graph the graph to be analyzed - * @param priors the set of root nodes - * @param k positive integer parameter which controls the relative tradeoff between a distribution "biased" towards - * R and the steady-state distribution which is independent of where the Markov-process started. Generally values - * between 4-8 are reasonable - * @param edgeWeights the weight for each edge - */ - public KStepMarkov(DirectedGraph graph, Set priors, int k, Map edgeWeights) { - super.initialize(graph,true,false); - mNumSteps = k; - setPriors(priors); - initializeRankings(); - if (edgeWeights == null) { - assignDefaultEdgeTransitionWeights(); - } else { - setEdgeWeights(edgeWeights); - } - normalizeEdgeTransitionWeights(); - } - - /** - * The user datum key used to store the rank scores. - * @return the key - */ - @Override - public String getRankScoreKey() { - return RANK_SCORE; +public class KStepMarkov extends RelativeAuthorityRanker { + public static final String RANK_SCORE = + "jung.algorithms.importance.KStepMarkovExperimental.RankScore"; + private static final String CURRENT_RANK = + "jung.algorithms.importance.KStepMarkovExperimental.CurrentRank"; + private int mNumSteps; + HashMap mPreviousRankingsMap; + + /** + * Construct the algorihm instance and initializes the algorithm. 
+ * + * @param graph the graph to be analyzed + * @param priors the set of root nodes + * @param k positive integer parameter which controls the relative tradeoff between a distribution + * "biased" towards R and the steady-state distribution which is independent of where the + * Markov-process started. Generally values between 4-8 are reasonable + * @param edgeWeights the weight for each edge + */ + public KStepMarkov(DirectedGraph graph, Set priors, int k, Map edgeWeights) { + super.initialize(graph, true, false); + mNumSteps = k; + setPriors(priors); + initializeRankings(); + if (edgeWeights == null) { + assignDefaultEdgeTransitionWeights(); + } else { + setEdgeWeights(edgeWeights); } - - protected void incrementRankScore(V v, double rankValue) { - double value = getVertexRankScore(v, RANK_SCORE); - value += rankValue; - setVertexRankScore(v, value, RANK_SCORE); - } - - protected double getCurrentRankScore(V v) { - return getVertexRankScore(v, CURRENT_RANK); + normalizeEdgeTransitionWeights(); + } + + /** + * The user datum key used to store the rank scores. 
+ * + * @return the key + */ + @Override + public String getRankScoreKey() { + return RANK_SCORE; + } + + protected void incrementRankScore(V v, double rankValue) { + double value = getVertexRankScore(v, RANK_SCORE); + value += rankValue; + setVertexRankScore(v, value, RANK_SCORE); + } + + protected double getCurrentRankScore(V v) { + return getVertexRankScore(v, CURRENT_RANK); + } + + protected void setCurrentRankScore(V v, double rankValue) { + setVertexRankScore(v, rankValue, CURRENT_RANK); + } + + protected void initializeRankings() { + mPreviousRankingsMap = new HashMap(); + for (V v : getVertices()) { + Set priors = getPriors(); + double numPriors = priors.size(); + + if (getPriors().contains(v)) { + setVertexRankScore(v, 1.0 / numPriors); + setCurrentRankScore(v, 1.0 / numPriors); + mPreviousRankingsMap.put(v, 1.0 / numPriors); + } else { + setVertexRankScore(v, 0); + setCurrentRankScore(v, 0); + mPreviousRankingsMap.put(v, 0); + } } - - protected void setCurrentRankScore(V v, double rankValue) { - setVertexRankScore(v, rankValue, CURRENT_RANK); - } - - protected void initializeRankings() { - mPreviousRankingsMap = new HashMap(); - for (V v : getVertices()) { - Set priors = getPriors(); - double numPriors = priors.size(); - - if (getPriors().contains(v)) { - setVertexRankScore(v, 1.0/ numPriors); - setCurrentRankScore(v, 1.0/ numPriors); - mPreviousRankingsMap.put(v,1.0/numPriors); - } else { - setVertexRankScore(v, 0); - setCurrentRankScore(v, 0); - mPreviousRankingsMap.put(v, 0); - } - } - } - @Override - public void step() { - - for (int i=0;i incomingEdges = getGraph().getInEdges(v); + Collection incomingEdges = getGraph().getInEdges(v); - double currentPageRankSum = 0; - for (E e : incomingEdges) { - double currentWeight = getEdgeWeight(e); - currentPageRankSum += - mPreviousRankingsMap.get(getGraph().getOpposite(v,e)).doubleValue()*currentWeight; - } - setCurrentRankScore(v,currentPageRankSum); - } + double currentPageRankSum = 0; + for (E e : 
incomingEdges) { + double currentWeight = getEdgeWeight(e); + currentPageRankSum += + mPreviousRankingsMap.get(getGraph().getOpposite(v, e)).doubleValue() * currentWeight; + } + setCurrentRankScore(v, currentPageRankSum); } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/Ranking.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/Ranking.java index 84cbd66e..d490b549 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/Ranking.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/Ranking.java @@ -1,79 +1,72 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.importance; - /** - * Abstract data container for ranking objects. Stores common data relevant to both node and edge rankings, namely, - * the original position of the instance in the list and the actual ranking score. + * Abstract data container for ranking objects. Stores common data relevant to both node and edge + * rankings, namely, the original position of the instance in the list and the actual ranking score. 
+ * * @author Scott White */ @SuppressWarnings("rawtypes") public class Ranking implements Comparable { - /** - * The original (0-indexed) position of the instance being ranked - */ - public int originalPos; - /** - * The actual rank score (normally between 0 and 1) - */ - public double rankScore; - - /** - * what is being ranked - */ - private V ranked; + /** The original (0-indexed) position of the instance being ranked */ + public int originalPos; + /** The actual rank score (normally between 0 and 1) */ + public double rankScore; + + /** what is being ranked */ + private V ranked; - /** - * Constructor which allows values to be set on construction - * @param originalPos The original (0-indexed) position of the instance being ranked - * @param rankScore The actual rank score (normally between 0 and 1) - * @param ranked the vertex being ranked - */ - public Ranking(int originalPos, double rankScore, V ranked) { - this.originalPos = originalPos; - this.rankScore = rankScore; - this.ranked = ranked; - } + /** + * Constructor which allows values to be set on construction + * + * @param originalPos The original (0-indexed) position of the instance being ranked + * @param rankScore The actual rank score (normally between 0 and 1) + * @param ranked the vertex being ranked + */ + public Ranking(int originalPos, double rankScore, V ranked) { + this.originalPos = originalPos; + this.rankScore = rankScore; + this.ranked = ranked; + } - /** - * Compares two ranking based on the rank score. - * @param other The other ranking - * @return -1 if the other ranking is higher, 0 if they are equal, and 1 if this ranking is higher - */ - public int compareTo(Object other) { - @SuppressWarnings("unchecked") - Ranking otherRanking = (Ranking) other; - return Double.compare(otherRanking.rankScore,rankScore); - } + /** + * Compares two ranking based on the rank score. 
+ * + * @param other The other ranking + * @return -1 if the other ranking is higher, 0 if they are equal, and 1 if this ranking is higher + */ + public int compareTo(Object other) { + @SuppressWarnings("unchecked") + Ranking otherRanking = (Ranking) other; + return Double.compare(otherRanking.rankScore, rankScore); + } - /** - * Returns the rank score as a string. - * @return the stringified rank score - */ - @Override - public String toString() { - return String.valueOf(rankScore); - } + /** + * Returns the rank score as a string. + * + * @return the stringified rank score + */ + @Override + public String toString() { + return String.valueOf(rankScore); + } - /** - * @return the ranked element - */ - public V getRanked() { - return ranked; - } + /** @return the ranked element */ + public V getRanked() { + return ranked; + } - /** - * @param ranked the ranked to set - */ - public void setRanked(V ranked) { - this.ranked = ranked; - } + /** @param ranked the ranked to set */ + public void setRanked(V ranked) { + this.ranked = ranked; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java index 262df484..6c1e531a 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/RelativeAuthorityRanker.java @@ -1,73 +1,75 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.importance; import java.util.HashMap; import java.util.Map; import java.util.Set; - /** - * This class provides basic infrastructure for relative authority algorithms that compute the importance of nodes - * relative to one or more root nodes. The services provided are: + * This class provides basic infrastructure for relative authority algorithms that compute the + * importance of nodes relative to one or more root nodes. The services provided are: + * *

    - *
  • The set of root nodes (priors) is stored and maintained
  • - *
  • Getters and setters for the prior rank score are provided
  • + *
  • The set of root nodes (priors) is stored and maintained + *
  • Getters and setters for the prior rank score are provided *
- * + * * @author Scott White */ -public abstract class RelativeAuthorityRanker extends AbstractRanker { - private Set mPriors; - /** - * The default key used for the user datum key corresponding to prior rank scores. - */ - - protected Map priorRankScoreMap = new HashMap(); - /** - * Cleans up all of the prior rank scores on finalize. - */ - @Override - protected void finalizeIterations() { - super.finalizeIterations(); - priorRankScoreMap.clear(); - } - - /** - * Retrieves the value of the prior rank score. - * @param v the root node (prior) - * @return the prior rank score - */ - protected double getPriorRankScore(V v) { - return priorRankScoreMap.get(v).doubleValue(); +public abstract class RelativeAuthorityRanker extends AbstractRanker { + private Set mPriors; + /** The default key used for the user datum key corresponding to prior rank scores. */ + protected Map priorRankScoreMap = new HashMap(); + /** Cleans up all of the prior rank scores on finalize. */ + @Override + protected void finalizeIterations() { + super.finalizeIterations(); + priorRankScoreMap.clear(); + } - } + /** + * Retrieves the value of the prior rank score. + * + * @param v the root node (prior) + * @return the prior rank score + */ + protected double getPriorRankScore(V v) { + return priorRankScoreMap.get(v).doubleValue(); + } - /** - * Allows the user to specify a value to set for the prior rank score - * @param v the root node (prior) - * @param value the score to set to - */ - public void setPriorRankScore(V v, double value) { - this.priorRankScoreMap.put(v, value); - } + /** + * Allows the user to specify a value to set for the prior rank score + * + * @param v the root node (prior) + * @param value the score to set to + */ + public void setPriorRankScore(V v, double value) { + this.priorRankScoreMap.put(v, value); + } - /** - * Retrieves the set of priors. 
- * @return the set of root nodes (priors) - */ - protected Set getPriors() { return mPriors; } + /** + * Retrieves the set of priors. + * + * @return the set of root nodes (priors) + */ + protected Set getPriors() { + return mPriors; + } - /** - * Specifies which vertices are root nodes (priors). - * @param priors the root nodes - */ - protected void setPriors(Set priors) { mPriors = priors; } + /** + * Specifies which vertices are root nodes (priors). + * + * @param priors the root nodes + */ + protected void setPriors(Set priors) { + mPriors = priors; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java index 29d8a6ef..6ddcadb9 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/importance/WeightedNIPaths.java @@ -1,14 +1,16 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.importance; +import com.google.common.base.Supplier; +import edu.uci.ics.jung.graph.DirectedGraph; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -17,180 +19,193 @@ import java.util.Map; import java.util.Set; -import com.google.common.base.Supplier; - -import edu.uci.ics.jung.graph.DirectedGraph; - - - /** - * This algorithm measures the importance of nodes based upon both the number and length of disjoint paths that lead - * to a given node from each of the nodes in the root set. Specifically the formula for measuring the importance of a - * node is given by: I(t|R) = sum_i=1_|P(r,t)|_{alpha^|p_i|} where alpha is the path decay coefficient, p_i is path i - * and P(r,t) is a set of maximum-sized node-disjoint paths from r to t. - *

- * This algorithm uses heuristic breadth-first search to try and find the maximum-sized set of node-disjoint paths - * between two nodes. As such, it is not guaranteed to give exact answers. - *

- * A simple example of usage is: + * This algorithm measures the importance of nodes based upon both the number and length of disjoint + * paths that lead to a given node from each of the nodes in the root set. Specifically the formula + * for measuring the importance of a node is given by: I(t|R) = sum_i=1_|P(r,t)|_{alpha^|p_i|} where + * alpha is the path decay coefficient, p_i is path i and P(r,t) is a set of maximum-sized + * node-disjoint paths from r to t. + * + *

This algorithm uses heuristic breadth-first search to try and find the maximum-sized set of + * node-disjoint paths between two nodes. As such, it is not guaranteed to give exact answers. + * + *

A simple example of usage is: + * *

  * WeightedNIPaths ranker = new WeightedNIPaths(someGraph,2.0,6,rootSet);
  * ranker.evaluate();
  * ranker.printRankings();
  * 
- * + * * @author Scott White - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" + * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, + * 2003" */ -public class WeightedNIPaths extends AbstractRanker { - public final static String WEIGHTED_NIPATHS_KEY = "jung.algorithms.importance.WEIGHTED_NIPATHS_KEY"; - private double mAlpha; - private int mMaxDepth; - private Set mPriors; - private Map pathIndices = new HashMap(); - private Map roots = new HashMap(); - private Map> pathsSeenMap = new HashMap>(); - private Supplier vertexFactory; - private Supplier edgeFactory; - - /** - * Constructs and initializes the algorithm. - * @param graph the graph whose nodes are being measured for their importance - * @param vertexFactory used to generate instances of V - * @param edgeFactory used to generate instances of E - * @param alpha the path decay coefficient (≥1); 2 is recommended - * @param maxDepth the maximal depth to search out from the root set - * @param priors the root set (starting vertices) - */ - public WeightedNIPaths(DirectedGraph graph, Supplier vertexFactory, - Supplier edgeFactory, double alpha, int maxDepth, Set priors) { - super.initialize(graph, true,false); - this.vertexFactory = vertexFactory; - this.edgeFactory = edgeFactory; - mAlpha = alpha; - mMaxDepth = maxDepth; - mPriors = priors; - for (V v : graph.getVertices()) { - super.setVertexRankScore(v, 0.0); - } +public class WeightedNIPaths extends AbstractRanker { + public static final String WEIGHTED_NIPATHS_KEY = + "jung.algorithms.importance.WEIGHTED_NIPATHS_KEY"; + private double mAlpha; + private int mMaxDepth; + private Set mPriors; + private Map pathIndices = new HashMap(); + private Map roots = new HashMap(); + private Map> pathsSeenMap = new HashMap>(); + private Supplier vertexFactory; + private Supplier edgeFactory; + + /** + * Constructs and initializes the algorithm. 
+ * + * @param graph the graph whose nodes are being measured for their importance + * @param vertexFactory used to generate instances of V + * @param edgeFactory used to generate instances of E + * @param alpha the path decay coefficient (≥1); 2 is recommended + * @param maxDepth the maximal depth to search out from the root set + * @param priors the root set (starting vertices) + */ + public WeightedNIPaths( + DirectedGraph graph, + Supplier vertexFactory, + Supplier edgeFactory, + double alpha, + int maxDepth, + Set priors) { + super.initialize(graph, true, false); + this.vertexFactory = vertexFactory; + this.edgeFactory = edgeFactory; + mAlpha = alpha; + mMaxDepth = maxDepth; + mPriors = priors; + for (V v : graph.getVertices()) { + super.setVertexRankScore(v, 0.0); } + } - protected void incrementRankScore(V v, double rankValue) { - setVertexRankScore(v, getVertexRankScore(v) + rankValue); - } + protected void incrementRankScore(V v, double rankValue) { + setVertexRankScore(v, getVertexRankScore(v) + rankValue); + } - protected void computeWeightedPathsFromSource(V root, int depth) { + protected void computeWeightedPathsFromSource(V root, int depth) { - int pathIdx = 1; + int pathIdx = 1; - for (E e : getGraph().getOutEdges(root)) { - this.pathIndices.put(e, pathIdx); - this.roots.put(e, root); - newVertexEncountered(pathIdx, getGraph().getEndpoints(e).getSecond(), root); - pathIdx++; - } + for (E e : getGraph().getOutEdges(root)) { + this.pathIndices.put(e, pathIdx); + this.roots.put(e, root); + newVertexEncountered(pathIdx, getGraph().getEndpoints(e).getSecond(), root); + pathIdx++; + } - List edges = new ArrayList(); - - V virtualNode = vertexFactory.get(); - getGraph().addVertex(virtualNode); - E virtualSinkEdge = edgeFactory.get(); - - getGraph().addEdge(virtualSinkEdge, virtualNode, root); - edges.add(virtualSinkEdge); - - int currentDepth = 0; - while (currentDepth <= depth) { - - double currentWeight = Math.pow(mAlpha, -1.0 * currentDepth); - for (E 
currentEdge : edges) { - incrementRankScore(getGraph().getEndpoints(currentEdge).getSecond(),// - currentWeight); - } - - if ((currentDepth == depth) || (edges.size() == 0)) break; - - List newEdges = new ArrayList(); - - for (E currentSourceEdge : edges) { //Iterator sourceEdgeIt = edges.iterator(); sourceEdgeIt.hasNext();) { - Number sourcePathIndex = this.pathIndices.get(currentSourceEdge); - - // from the currentSourceEdge, get its opposite end - // then iterate over the out edges of that opposite end - V newDestVertex = getGraph().getEndpoints(currentSourceEdge).getSecond(); - Collection outs = getGraph().getOutEdges(newDestVertex); - for (E currentDestEdge : outs) { - V destEdgeRoot = this.roots.get(currentDestEdge); - V destEdgeDest = getGraph().getEndpoints(currentDestEdge).getSecond(); - - if (currentSourceEdge == virtualSinkEdge) { - newEdges.add(currentDestEdge); - continue; - } - if (destEdgeRoot == root) { - continue; - } - if (destEdgeDest == getGraph().getEndpoints(currentSourceEdge).getFirst()) {//currentSourceEdge.getSource()) { - continue; - } - Set pathsSeen = this.pathsSeenMap.get(destEdgeDest); - - if (pathsSeen == null) { - newVertexEncountered(sourcePathIndex.intValue(), destEdgeDest, root); - } else if (roots.get(destEdgeDest) != root) { - roots.put(destEdgeDest,root); - pathsSeen.clear(); - pathsSeen.add(sourcePathIndex); - } else if (!pathsSeen.contains(sourcePathIndex)) { - pathsSeen.add(sourcePathIndex); - } else { - continue; - } - - this.pathIndices.put(currentDestEdge, sourcePathIndex); - this.roots.put(currentDestEdge, root); - newEdges.add(currentDestEdge); - } - } - - edges = newEdges; - currentDepth++; + List edges = new ArrayList(); + + V virtualNode = vertexFactory.get(); + getGraph().addVertex(virtualNode); + E virtualSinkEdge = edgeFactory.get(); + + getGraph().addEdge(virtualSinkEdge, virtualNode, root); + edges.add(virtualSinkEdge); + + int currentDepth = 0; + while (currentDepth <= depth) { + + double currentWeight = 
Math.pow(mAlpha, -1.0 * currentDepth); + for (E currentEdge : edges) { + incrementRankScore( + getGraph().getEndpoints(currentEdge).getSecond(), // + currentWeight); + } + + if ((currentDepth == depth) || (edges.size() == 0)) { + break; + } + + List newEdges = new ArrayList(); + + for (E currentSourceEdge : + edges) { //Iterator sourceEdgeIt = edges.iterator(); sourceEdgeIt.hasNext();) { + Number sourcePathIndex = this.pathIndices.get(currentSourceEdge); + + // from the currentSourceEdge, get its opposite end + // then iterate over the out edges of that opposite end + V newDestVertex = getGraph().getEndpoints(currentSourceEdge).getSecond(); + Collection outs = getGraph().getOutEdges(newDestVertex); + for (E currentDestEdge : outs) { + V destEdgeRoot = this.roots.get(currentDestEdge); + V destEdgeDest = getGraph().getEndpoints(currentDestEdge).getSecond(); + + if (currentSourceEdge == virtualSinkEdge) { + newEdges.add(currentDestEdge); + continue; + } + if (destEdgeRoot == root) { + continue; + } + if (destEdgeDest + == getGraph() + .getEndpoints(currentSourceEdge) + .getFirst()) { //currentSourceEdge.getSource()) { + continue; + } + Set pathsSeen = this.pathsSeenMap.get(destEdgeDest); + + if (pathsSeen == null) { + newVertexEncountered(sourcePathIndex.intValue(), destEdgeDest, root); + } else if (roots.get(destEdgeDest) != root) { + roots.put(destEdgeDest, root); + pathsSeen.clear(); + pathsSeen.add(sourcePathIndex); + } else if (!pathsSeen.contains(sourcePathIndex)) { + pathsSeen.add(sourcePathIndex); + } else { + continue; + } + + this.pathIndices.put(currentDestEdge, sourcePathIndex); + this.roots.put(currentDestEdge, root); + newEdges.add(currentDestEdge); } + } - getGraph().removeVertex(virtualNode); + edges = newEdges; + currentDepth++; } - private void newVertexEncountered(int sourcePathIndex, V dest, V root) { - Set pathsSeen = new HashSet(); - pathsSeen.add(sourcePathIndex); - this.pathsSeenMap.put(dest, pathsSeen); - roots.put(dest, root); - } + 
getGraph().removeVertex(virtualNode); + } - @Override - public void step() { - for (V v : mPriors) { - computeWeightedPathsFromSource(v, mMaxDepth); - } + private void newVertexEncountered(int sourcePathIndex, V dest, V root) { + Set pathsSeen = new HashSet(); + pathsSeen.add(sourcePathIndex); + this.pathsSeenMap.put(dest, pathsSeen); + roots.put(dest, root); + } - normalizeRankings(); -// return 0; - } - - /** - * Given a node, returns the corresponding rank score. This implementation of getRankScore assumes - * the decoration representing the rank score is of type MutableDouble. - * @return the rank score for this node - */ - @Override - public String getRankScoreKey() { - return WEIGHTED_NIPATHS_KEY; + @Override + public void step() { + for (V v : mPriors) { + computeWeightedPathsFromSource(v, mMaxDepth); } - @Override - protected void onFinalize(Object udc) { - pathIndices.remove(udc); - roots.remove(udc); - pathsSeenMap.remove(udc); - } + normalizeRankings(); + // return 0; + } + + /** + * Given a node, returns the corresponding rank score. This implementation of getRankScore + * assumes the decoration representing the rank score is of type MutableDouble + * . + * + * @return the rank score for this node + */ + @Override + public String getRankScoreKey() { + return WEIGHTED_NIPATHS_KEY; + } + + @Override + protected void onFinalize(Object udc) { + pathIndices.remove(udc); + roots.remove(udc); + pathsSeenMap.remove(udc); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java index 10d4e7c1..f611fd46 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/AbstractLayout.java @@ -1,292 +1,284 @@ /* * Copyright (c) 2003, The JUNG Authors * All rights reserved. 
- * + * * This software is open-source under the BSD license; see either "license.txt" * or https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * + * * Created on Jul 7, 2003 - * + * */ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; -import java.util.HashSet; -import java.util.Set; - import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Preconditions; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; - import edu.uci.ics.jung.graph.Graph; +import java.awt.Dimension; +import java.awt.geom.Point2D; +import java.util.ConcurrentModificationException; +import java.util.HashSet; +import java.util.Set; /** - * Abstract class for implementations of {@code Layout}. It handles some of the - * basic functions: storing coordinates, maintaining the dimensions, initializing - * the locations, maintaining locked vertices. - * + * Abstract class for implementations of {@code Layout}. It handles some of the basic functions: + * storing coordinates, maintaining the dimensions, initializing the locations, maintaining locked + * vertices. 
+ * * @author Danyel Fisher, Scott White * @author Tom Nelson - converted to jung2 * @param the vertex type * @param the edge type */ -abstract public class AbstractLayout implements Layout { +public abstract class AbstractLayout implements Layout { - /** - * A set of vertices that are fixed in place and not affected by the layout algorithm - */ - private Set dontmove = new HashSet(); + /** A set of vertices that are fixed in place and not affected by the layout algorithm */ + private Set dontmove = new HashSet(); - protected Dimension size; - protected Graph graph; - protected boolean initialized; + protected Dimension size; + protected Graph graph; + protected boolean initialized; - protected LoadingCache locations = - CacheBuilder.newBuilder().build(new CacheLoader() { - public Point2D load(V vertex) { - return new Point2D.Double(); - } - }); + protected LoadingCache locations = + CacheBuilder.newBuilder() + .build( + new CacheLoader() { + public Point2D load(V vertex) { + return new Point2D.Double(); + } + }); - /** - * Creates an instance for {@code graph} which does not initialize the vertex locations. - * - * @param graph the graph on which the layout algorithm is to operate - */ - protected AbstractLayout(Graph graph) { - if (graph == null) - { - throw new IllegalArgumentException("Graph must be non-null"); - } - this.graph = graph; - } - - /** - * Creates an instance for {@code graph} which initializes the vertex locations - * using {@code initializer}. 
- * - * @param graph the graph on which the layout algorithm is to operate - * @param initializer specifies the starting positions of the vertices - */ - protected AbstractLayout(Graph graph, Function initializer) { - this.graph = graph; - Function chain = - Functions.compose( - new Function(){ - public Point2D apply(Point2D p) { - return (Point2D)p.clone(); - }}, - initializer - ); - this.locations = CacheBuilder.newBuilder().build(CacheLoader.from(chain)); - initialized = true; - } - - /** - * Creates an instance for {@code graph} which sets the size of the layout to {@code size}. - * - * @param graph the graph on which the layout algorithm is to operate - * @param size the dimensions of the region in which the layout algorithm will place vertices - */ - protected AbstractLayout(Graph graph, Dimension size) { - this.graph = graph; - this.size = size; - } - - /** - * Creates an instance for {@code graph} which initializes the vertex locations - * using {@code initializer} and sets the size of the layout to {@code size}. - * - * @param graph the graph on which the layout algorithm is to operate - * @param initializer specifies the starting positions of the vertices - * @param size the dimensions of the region in which the layout algorithm will place vertices - */ - protected AbstractLayout(Graph graph, Function initializer, Dimension size) { - this.graph = graph; - Function chain = - Functions.compose( - new Function(){ - public Point2D apply(Point2D p) { - return (Point2D)p.clone(); - }}, - initializer - ); - this.locations = CacheBuilder.newBuilder().build(CacheLoader.from(chain)); - this.size = size; - } - - public void setGraph(Graph graph) { - this.graph = graph; - if(size != null && graph != null) { - initialize(); - } + /** + * Creates an instance for {@code graph} which does not initialize the vertex locations. 
+ * + * @param graph the graph on which the layout algorithm is to operate + */ + protected AbstractLayout(Graph graph) { + if (graph == null) { + throw new IllegalArgumentException("Graph must be non-null"); + } + this.graph = graph; + } + + /** + * Creates an instance for {@code graph} which initializes the vertex locations using {@code + * initializer}. + * + * @param graph the graph on which the layout algorithm is to operate + * @param initializer specifies the starting positions of the vertices + */ + protected AbstractLayout(Graph graph, Function initializer) { + this.graph = graph; + Function chain = + Functions.compose( + new Function() { + public Point2D apply(Point2D p) { + return (Point2D) p.clone(); + } + }, + initializer); + this.locations = CacheBuilder.newBuilder().build(CacheLoader.from(chain)); + initialized = true; + } + + /** + * Creates an instance for {@code graph} which sets the size of the layout to {@code size}. + * + * @param graph the graph on which the layout algorithm is to operate + * @param size the dimensions of the region in which the layout algorithm will place vertices + */ + protected AbstractLayout(Graph graph, Dimension size) { + this.graph = graph; + this.size = size; + } + + /** + * Creates an instance for {@code graph} which initializes the vertex locations using {@code + * initializer} and sets the size of the layout to {@code size}. 
+ * + * @param graph the graph on which the layout algorithm is to operate + * @param initializer specifies the starting positions of the vertices + * @param size the dimensions of the region in which the layout algorithm will place vertices + */ + protected AbstractLayout(Graph graph, Function initializer, Dimension size) { + this.graph = graph; + Function chain = + Functions.compose( + new Function() { + public Point2D apply(Point2D p) { + return (Point2D) p.clone(); + } + }, + initializer); + this.locations = CacheBuilder.newBuilder().build(CacheLoader.from(chain)); + this.size = size; + } + + public void setGraph(Graph graph) { + this.graph = graph; + if (size != null && graph != null) { + initialize(); } - - /** - * When a visualization is resized, it presumably wants to fix the - * locations of the vertices and possibly to reinitialize its data. The - * current method calls initializeLocations followed by initialize_local. - */ - public void setSize(Dimension size) { - - if(size != null && graph != null) { - - Dimension oldSize = this.size; - this.size = size; - initialize(); - - if(oldSize != null) { - adjustLocations(oldSize, size); - } - } - } - - private void adjustLocations(Dimension oldSize, Dimension size) { + } + + /** + * When a visualization is resized, it presumably wants to fix the locations of the vertices and + * possibly to reinitialize its data. The current method calls initializeLocations + * followed by initialize_local. 
+ */ + public void setSize(Dimension size) { + + if (size != null && graph != null) { - int xOffset = (size.width - oldSize.width) / 2; - int yOffset = (size.height - oldSize.height) / 2; + Dimension oldSize = this.size; + this.size = size; + initialize(); - // now, move each vertex to be at the new screen center - while(true) { - try { - for(V v : getGraph().getVertices()) { - offsetVertex(v, xOffset, yOffset); - } - break; - } catch(ConcurrentModificationException cme) { - } - } - } - - public boolean isLocked(V v) { - return dontmove.contains(v); + if (oldSize != null) { + adjustLocations(oldSize, size); + } } - - public void setInitializer(Function initializer) { - if(this.equals(initializer)) { - throw new IllegalArgumentException("Layout cannot be initialized with itself"); - } - Function chain = - Functions.compose( - new Function(){ - public Point2D apply(Point2D p) { - return (Point2D)p.clone(); - }}, - initializer - ); - this.locations = CacheBuilder.newBuilder().build(CacheLoader.from(chain)); - initialized = true; + } + + private void adjustLocations(Dimension oldSize, Dimension size) { + + int xOffset = (size.width - oldSize.width) / 2; + int yOffset = (size.height - oldSize.height) / 2; + + // now, move each vertex to be at the new screen center + while (true) { + try { + for (V v : getGraph().getVertices()) { + offsetVertex(v, xOffset, yOffset); + } + break; + } catch (ConcurrentModificationException cme) { + } } - - /** - * Returns the current size of the visualization space, accoring to the - * last call to resize(). - * - * @return the current size of the screen - */ - public Dimension getSize() { - return size; - } + } - /** - * Returns the Coordinates object that stores the vertex' x and y location. - * - * @param v - * A Vertex that is a part of the Graph being visualized. - * @return A Coordinates object with x and y locations. 
- */ - private Point2D getCoordinates(V v) { - return locations.getUnchecked(v); - } - - public Point2D apply(V v) { - return getCoordinates(v); - } - - /** - * Returns the x coordinate of the vertex from the Coordinates object. - * in most cases you will be better off calling transform(v). - * - * @param v the vertex whose x coordinate is to be returned - * @return the x coordinate of {@code v} - */ - public double getX(V v) { - Preconditions.checkNotNull(getCoordinates(v), "Cannot getX for an unmapped vertex "+v); - return getCoordinates(v).getX(); - } + public boolean isLocked(V v) { + return dontmove.contains(v); + } + + public void setInitializer(Function initializer) { + if (this.equals(initializer)) { + throw new IllegalArgumentException("Layout cannot be initialized with itself"); + } + Function chain = + Functions.compose( + new Function() { + public Point2D apply(Point2D p) { + return (Point2D) p.clone(); + } + }, + initializer); + this.locations = CacheBuilder.newBuilder().build(CacheLoader.from(chain)); + initialized = true; + } - /** - * Returns the y coordinate of the vertex from the Coordinates object. - * In most cases you will be better off calling transform(v). - * - * @param v the vertex whose y coordinate is to be returned - * @return the y coordinate of {@code v} - */ - public double getY(V v) { - Preconditions.checkNotNull(getCoordinates(v), "Cannot getY for an unmapped vertex "+v); - return getCoordinates(v).getY(); - } - - /** - * @param v the vertex whose coordinates are to be offset - * @param xOffset the change to apply to this vertex's x coordinate - * @param yOffset the change to apply to this vertex's y coordinate - */ - protected void offsetVertex(V v, double xOffset, double yOffset) { - Point2D c = getCoordinates(v); - c.setLocation(c.getX()+xOffset, c.getY()+yOffset); - setLocation(v, c); - } + /** + * Returns the current size of the visualization space, accoring to the last call to resize(). 
+ * + * @return the current size of the screen + */ + public Dimension getSize() { + return size; + } - /** - * @return the graph that this layout operates on - */ - public Graph getGraph() { - return graph; - } - - /** - * Forcibly moves a vertex to the (x,y) location by setting its x and y - * locations to the specified location. Does not add the vertex to the - * "dontmove" list, and (in the default implementation) does not make any - * adjustments to the rest of the graph. - * @param picked the vertex whose location is being set - * @param x the x coordinate of the location to set - * @param y the y coordinate of the location to set - */ - public void setLocation(V picked, double x, double y) { - Point2D coord = getCoordinates(picked); - coord.setLocation(x, y); - } + /** + * Returns the Coordinates object that stores the vertex' x and y location. + * + * @param v A Vertex that is a part of the Graph being visualized. + * @return A Coordinates object with x and y locations. + */ + private Point2D getCoordinates(V v) { + return locations.getUnchecked(v); + } - public void setLocation(V picked, Point2D p) { - Point2D coord = getCoordinates(picked); - coord.setLocation(p); - } + public Point2D apply(V v) { + return getCoordinates(v); + } - /** - * Locks {@code v} in place if {@code state} is {@code true}, otherwise unlocks it. - * @param v the vertex whose position is to be (un)locked - * @param state {@code true} if the vertex is to be locked, {@code false} if to be unlocked - */ - public void lock(V v, boolean state) { - if(state == true) - dontmove.add(v); - else - dontmove.remove(v); - } - - /** - * @param lock {@code true} to lock all vertices in place, {@code false} to unlock all vertices - */ - public void lock(boolean lock) { - for(V v : graph.getVertices()) { - lock(v, lock); - } - } + /** + * Returns the x coordinate of the vertex from the Coordinates object. in most cases you will be + * better off calling transform(v). 
+ * + * @param v the vertex whose x coordinate is to be returned + * @return the x coordinate of {@code v} + */ + public double getX(V v) { + Preconditions.checkNotNull(getCoordinates(v), "Cannot getX for an unmapped vertex " + v); + return getCoordinates(v).getX(); + } + + /** + * Returns the y coordinate of the vertex from the Coordinates object. In most cases you will be + * better off calling transform(v). + * + * @param v the vertex whose y coordinate is to be returned + * @return the y coordinate of {@code v} + */ + public double getY(V v) { + Preconditions.checkNotNull(getCoordinates(v), "Cannot getY for an unmapped vertex " + v); + return getCoordinates(v).getY(); + } + + /** + * @param v the vertex whose coordinates are to be offset + * @param xOffset the change to apply to this vertex's x coordinate + * @param yOffset the change to apply to this vertex's y coordinate + */ + protected void offsetVertex(V v, double xOffset, double yOffset) { + Point2D c = getCoordinates(v); + c.setLocation(c.getX() + xOffset, c.getY() + yOffset); + setLocation(v, c); + } + + /** @return the graph that this layout operates on */ + public Graph getGraph() { + return graph; + } + + /** + * Forcibly moves a vertex to the (x,y) location by setting its x and y locations to the specified + * location. Does not add the vertex to the "dontmove" list, and (in the default implementation) + * does not make any adjustments to the rest of the graph. + * + * @param picked the vertex whose location is being set + * @param x the x coordinate of the location to set + * @param y the y coordinate of the location to set + */ + public void setLocation(V picked, double x, double y) { + Point2D coord = getCoordinates(picked); + coord.setLocation(x, y); + } + + public void setLocation(V picked, Point2D p) { + Point2D coord = getCoordinates(picked); + coord.setLocation(p); + } + + /** + * Locks {@code v} in place if {@code state} is {@code true}, otherwise unlocks it. 
+ * + * @param v the vertex whose position is to be (un)locked + * @param state {@code true} if the vertex is to be locked, {@code false} if to be unlocked + */ + public void lock(V v, boolean state) { + if (state == true) dontmove.add(v); + else dontmove.remove(v); + } + + /** + * @param lock {@code true} to lock all vertices in place, {@code false} to unlock all vertices + */ + public void lock(boolean lock) { + for (V v : graph.getVertices()) { + lock(v, lock); + } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java index 7d338a17..f1c50fbb 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/AggregateLayout.java @@ -1,250 +1,235 @@ /* * Copyright (c) 2003, The JUNG Authors * All rights reserved. - * + * * This software is open-source under the BSD license; see either "license.txt" * or https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * - * - * + * + * + * */ package edu.uci.ics.jung.algorithms.layout; +import com.google.common.base.Function; +import edu.uci.ics.jung.algorithms.util.IterativeContext; +import edu.uci.ics.jung.graph.Graph; import java.awt.Dimension; import java.awt.geom.AffineTransform; import java.awt.geom.Point2D; import java.util.HashMap; import java.util.Map; -import com.google.common.base.Function; - -import edu.uci.ics.jung.algorithms.util.IterativeContext; -import edu.uci.ics.jung.graph.Graph; - /** - * A {@code Layout} implementation that combines - * multiple other layouts so that they may be manipulated - * as one layout. The relaxer thread will step each layout - * in sequence. - * - * @author Tom Nelson - tomnelson@dev.java.net + * A {@code Layout} implementation that combines multiple other layouts so that they may be + * manipulated as one layout. 
The relaxer thread will step each layout in sequence. * + * @author Tom Nelson - tomnelson@dev.java.net * @param the vertex type * @param the edge type */ -public class AggregateLayout implements Layout, IterativeContext { - - protected Layout delegate; - protected Map,Point2D> layouts = new HashMap,Point2D>(); - - /** - * Creates an instance backed by the specified {@code delegate}. - * @param delegate the layout to which this instance is delegating - */ - public AggregateLayout(Layout delegate) { - this.delegate = delegate; - } - - /** - * @return the delegate - */ - public Layout getDelegate() { - return delegate; - } - - /** - * @param delegate the delegate to set - */ - public void setDelegate(Layout delegate) { - this.delegate = delegate; - } - - /** - * Adds the passed layout as a sublayout, and specifies - * the center of where this sublayout should appear. - * @param layout the layout algorithm to use as a sublayout - * @param center the center of the coordinates for the sublayout - */ - public void put(Layout layout, Point2D center) { - layouts.put(layout,center); - } - - /** - * @param layout the layout whose center is to be returned - * @return the center of the passed layout - */ - public Point2D get(Layout layout) { - return layouts.get(layout); - } - - /** - * Removes {@code layout} from this instance. - * @param layout the layout to remove - */ - public void remove(Layout layout) { - layouts.remove(layout); - } - - /** - * Removes all layouts from this instance. 
- */ - public void removeAll() { - layouts.clear(); - } - - public Graph getGraph() { - return delegate.getGraph(); - } - - public Dimension getSize() { - return delegate.getSize(); - } - - public void initialize() { - delegate.initialize(); - for(Layout layout : layouts.keySet()) { - layout.initialize(); - } - } - - /** - * @param v the vertex whose locked state is to be returned - * @return true if v is locked in any of the layouts, and false otherwise - */ - public boolean isLocked(V v) { - for(Layout layout : layouts.keySet()) { - if (layout.isLocked(v)) { - return true; - } - } - return delegate.isLocked(v); - } - - /** - * Locks this vertex in the main layout and in any sublayouts whose graph contains - * this vertex. - * @param v the vertex whose locked state is to be set - * @param state {@code true} if the vertex is to be locked, and {@code false} if unlocked - */ - public void lock(V v, boolean state) { - for(Layout layout : layouts.keySet()) { - if(layout.getGraph().getVertices().contains(v)) { - layout.lock(v, state); - } - } - delegate.lock(v, state); - } - - public void reset() { - for(Layout layout : layouts.keySet()) { - layout.reset(); - } - delegate.reset(); - } - - public void setGraph(Graph graph) { - delegate.setGraph(graph); - } - - public void setInitializer(Function initializer) { - delegate.setInitializer(initializer); - } - - public void setLocation(V v, Point2D location) { - boolean wasInSublayout = false; - for(Layout layout : layouts.keySet()) { - if(layout.getGraph().getVertices().contains(v)) { - Point2D center = layouts.get(layout); - // transform by the layout itself, but offset to the - // center of the sublayout - Dimension d = layout.getSize(); - - AffineTransform at = - AffineTransform.getTranslateInstance(-center.getX()+d.width/2,-center.getY()+d.height/2); - Point2D localLocation = at.transform(location, null); - layout.setLocation(v, localLocation); - wasInSublayout = true; - } - } - if(wasInSublayout == false && 
getGraph().getVertices().contains(v)) { - delegate.setLocation(v, location); - } - } - - public void setSize(Dimension d) { - delegate.setSize(d); - } - - /** - * @return a map from each {@code Layout} instance to its center point. - */ - public Map,Point2D> getLayouts() { - return layouts; - } - - /** - * Returns the location of the vertex. The location is specified first - * by the sublayouts, and then by the base layout if no sublayouts operate - * on this vertex. - * @return the location of the vertex - */ - public Point2D apply(V v) { - boolean wasInSublayout = false; - for(Layout layout : layouts.keySet()) { - if(layout.getGraph().getVertices().contains(v)) { - wasInSublayout = true; - Point2D center = layouts.get(layout); - // transform by the layout itself, but offset to the - // center of the sublayout - Dimension d = layout.getSize(); - AffineTransform at = - AffineTransform.getTranslateInstance(center.getX()-d.width/2, - center.getY()-d.height/2); - return at.transform(layout.apply(v),null); - } - } - if(wasInSublayout == false) { - return delegate.apply(v); - } - return null; - - } - - /** - * @return {@code true} iff the delegate layout and all sublayouts are done - */ - public boolean done() { - for (Layout layout : layouts.keySet()) { - if (layout instanceof IterativeContext) { - if (! 
((IterativeContext) layout).done() ) { - return false; - } - } - } - if(delegate instanceof IterativeContext) { - return ((IterativeContext)delegate).done(); - } - return true; - } - - /** - * Call step on any sublayout that is also an IterativeContext and is not done - */ - public void step() { - for(Layout layout : layouts.keySet()) { - if(layout instanceof IterativeContext) { - IterativeContext context = (IterativeContext)layout; - if(context.done() == false) { - context.step(); - } - } - } - if(delegate instanceof IterativeContext) { - IterativeContext context = (IterativeContext)delegate; - if(context.done() == false) { - context.step(); - } - } - } - +public class AggregateLayout implements Layout, IterativeContext { + + protected Layout delegate; + protected Map, Point2D> layouts = new HashMap, Point2D>(); + + /** + * Creates an instance backed by the specified {@code delegate}. + * + * @param delegate the layout to which this instance is delegating + */ + public AggregateLayout(Layout delegate) { + this.delegate = delegate; + } + + /** @return the delegate */ + public Layout getDelegate() { + return delegate; + } + + /** @param delegate the delegate to set */ + public void setDelegate(Layout delegate) { + this.delegate = delegate; + } + + /** + * Adds the passed layout as a sublayout, and specifies the center of where this sublayout should + * appear. + * + * @param layout the layout algorithm to use as a sublayout + * @param center the center of the coordinates for the sublayout + */ + public void put(Layout layout, Point2D center) { + layouts.put(layout, center); + } + + /** + * @param layout the layout whose center is to be returned + * @return the center of the passed layout + */ + public Point2D get(Layout layout) { + return layouts.get(layout); + } + + /** + * Removes {@code layout} from this instance. 
+ * + * @param layout the layout to remove + */ + public void remove(Layout layout) { + layouts.remove(layout); + } + + /** Removes all layouts from this instance. */ + public void removeAll() { + layouts.clear(); + } + + public Graph getGraph() { + return delegate.getGraph(); + } + + public Dimension getSize() { + return delegate.getSize(); + } + + public void initialize() { + delegate.initialize(); + for (Layout layout : layouts.keySet()) { + layout.initialize(); + } + } + + /** + * @param v the vertex whose locked state is to be returned + * @return true if v is locked in any of the layouts, and false otherwise + */ + public boolean isLocked(V v) { + for (Layout layout : layouts.keySet()) { + if (layout.isLocked(v)) { + return true; + } + } + return delegate.isLocked(v); + } + + /** + * Locks this vertex in the main layout and in any sublayouts whose graph contains this vertex. + * + * @param v the vertex whose locked state is to be set + * @param state {@code true} if the vertex is to be locked, and {@code false} if unlocked + */ + public void lock(V v, boolean state) { + for (Layout layout : layouts.keySet()) { + if (layout.getGraph().getVertices().contains(v)) { + layout.lock(v, state); + } + } + delegate.lock(v, state); + } + + public void reset() { + for (Layout layout : layouts.keySet()) { + layout.reset(); + } + delegate.reset(); + } + + public void setGraph(Graph graph) { + delegate.setGraph(graph); + } + + public void setInitializer(Function initializer) { + delegate.setInitializer(initializer); + } + + public void setLocation(V v, Point2D location) { + boolean wasInSublayout = false; + for (Layout layout : layouts.keySet()) { + if (layout.getGraph().getVertices().contains(v)) { + Point2D center = layouts.get(layout); + // transform by the layout itself, but offset to the + // center of the sublayout + Dimension d = layout.getSize(); + + AffineTransform at = + AffineTransform.getTranslateInstance( + -center.getX() + d.width / 2, -center.getY() + 
d.height / 2); + Point2D localLocation = at.transform(location, null); + layout.setLocation(v, localLocation); + wasInSublayout = true; + } + } + if (wasInSublayout == false && getGraph().getVertices().contains(v)) { + delegate.setLocation(v, location); + } + } + + public void setSize(Dimension d) { + delegate.setSize(d); + } + + /** @return a map from each {@code Layout} instance to its center point. */ + public Map, Point2D> getLayouts() { + return layouts; + } + + /** + * Returns the location of the vertex. The location is specified first by the sublayouts, and then + * by the base layout if no sublayouts operate on this vertex. + * + * @return the location of the vertex + */ + public Point2D apply(V v) { + boolean wasInSublayout = false; + for (Layout layout : layouts.keySet()) { + if (layout.getGraph().getVertices().contains(v)) { + wasInSublayout = true; + Point2D center = layouts.get(layout); + // transform by the layout itself, but offset to the + // center of the sublayout + Dimension d = layout.getSize(); + AffineTransform at = + AffineTransform.getTranslateInstance( + center.getX() - d.width / 2, center.getY() - d.height / 2); + return at.transform(layout.apply(v), null); + } + } + if (wasInSublayout == false) { + return delegate.apply(v); + } + return null; + } + + /** @return {@code true} iff the delegate layout and all sublayouts are done */ + public boolean done() { + for (Layout layout : layouts.keySet()) { + if (layout instanceof IterativeContext) { + if (!((IterativeContext) layout).done()) { + return false; + } + } + } + if (delegate instanceof IterativeContext) { + return ((IterativeContext) delegate).done(); + } + return true; + } + + /** Call step on any sublayout that is also an IterativeContext and is not done */ + public void step() { + for (Layout layout : layouts.keySet()) { + if (layout instanceof IterativeContext) { + IterativeContext context = (IterativeContext) layout; + if (context.done() == false) { + context.step(); + } + } + } + 
if (delegate instanceof IterativeContext) { + IterativeContext context = (IterativeContext) delegate; + if (context.done() == false) { + context.step(); + } + } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java index 9badb229..542bbc09 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/BalloonLayout.java @@ -9,137 +9,134 @@ */ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; - import edu.uci.ics.jung.graph.Forest; import edu.uci.ics.jung.graph.util.TreeUtils; +import java.awt.Dimension; +import java.awt.geom.Point2D; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; /** - * A {@code Layout} implementation that assigns positions to {@code Tree} or - * {@code Forest} vertices using associations with nested circles ("balloons"). - * A balloon is nested inside another balloon if the first balloon's subtree - * is a subtree of the second balloon's subtree. - * - * @author Tom Nelson - * + * A {@code Layout} implementation that assigns positions to {@code Tree} or {@code Forest} vertices + * using associations with nested circles ("balloons"). A balloon is nested inside another balloon + * if the first balloon's subtree is a subtree of the second balloon's subtree. 
+ * + * @author Tom Nelson */ -public class BalloonLayout extends TreeLayout { - - protected LoadingCache polarLocations - = CacheBuilder.newBuilder().build(new CacheLoader() { - public PolarPoint load(V vertex) { - return new PolarPoint(); - } - }); - - protected Map radii = new HashMap(); - - /** - * Creates an instance based on the input forest. - * @param g the forest on which this layout will operate - */ - public BalloonLayout(Forest g) - { - super(g); - } - - protected void setRootPolars() - { - List roots = TreeUtils.getRoots(graph); - if(roots.size() == 1) { - // its a Tree - V root = roots.get(0); - setRootPolar(root); - setPolars(new ArrayList(graph.getChildren(root)), - getCenter(), getSize().width/2); - } else if (roots.size() > 1) { - // its a Forest - setPolars(roots, getCenter(), getSize().width/2); - } +public class BalloonLayout extends TreeLayout { + + protected LoadingCache polarLocations = + CacheBuilder.newBuilder() + .build( + new CacheLoader() { + public PolarPoint load(V vertex) { + return new PolarPoint(); + } + }); + + protected Map radii = new HashMap(); + + /** + * Creates an instance based on the input forest. 
+ * + * @param g the forest on which this layout will operate + */ + public BalloonLayout(Forest g) { + super(g); + } + + protected void setRootPolars() { + List roots = TreeUtils.getRoots(graph); + if (roots.size() == 1) { + // its a Tree + V root = roots.get(0); + setRootPolar(root); + setPolars(new ArrayList(graph.getChildren(root)), getCenter(), getSize().width / 2); + } else if (roots.size() > 1) { + // its a Forest + setPolars(roots, getCenter(), getSize().width / 2); } - - protected void setRootPolar(V root) { - PolarPoint pp = new PolarPoint(0,0); - Point2D p = getCenter(); - polarLocations.put(root, pp); - locations.put(root, p); + } + + protected void setRootPolar(V root) { + PolarPoint pp = new PolarPoint(0, 0); + Point2D p = getCenter(); + polarLocations.put(root, pp); + locations.put(root, p); + } + + protected void setPolars(List kids, Point2D parentLocation, double parentRadius) { + + int childCount = kids.size(); + if (childCount == 0) { + return; } - - - protected void setPolars(List kids, Point2D parentLocation, double parentRadius) { - - int childCount = kids.size(); - if(childCount == 0) return; - // handle the 1-child case with 0 limit on angle. - double angle = Math.max(0, Math.PI / 2 * (1 - 2.0/childCount)); - double childRadius = parentRadius*Math.cos(angle) / (1 + Math.cos(angle)); - double radius = parentRadius - childRadius; - - double rand = Math.random(); - - for(int i=0; i< childCount; i++) { - V child = kids.get(i); - double theta = i* 2*Math.PI/childCount + rand; - radii.put(child, childRadius); - - PolarPoint pp = new PolarPoint(theta, radius); - polarLocations.put(child, pp); - - Point2D p = PolarPoint.polarToCartesian(pp); - p.setLocation(p.getX()+parentLocation.getX(), p.getY()+parentLocation.getY()); - locations.put(child, p); - setPolars(new ArrayList(graph.getChildren(child)), p, childRadius); - } + // handle the 1-child case with 0 limit on angle. 
+ double angle = Math.max(0, Math.PI / 2 * (1 - 2.0 / childCount)); + double childRadius = parentRadius * Math.cos(angle) / (1 + Math.cos(angle)); + double radius = parentRadius - childRadius; + + double rand = Math.random(); + + for (int i = 0; i < childCount; i++) { + V child = kids.get(i); + double theta = i * 2 * Math.PI / childCount + rand; + radii.put(child, childRadius); + + PolarPoint pp = new PolarPoint(theta, radius); + polarLocations.put(child, pp); + + Point2D p = PolarPoint.polarToCartesian(pp); + p.setLocation(p.getX() + parentLocation.getX(), p.getY() + parentLocation.getY()); + locations.put(child, p); + setPolars(new ArrayList(graph.getChildren(child)), p, childRadius); } + } + + @Override + public void setSize(Dimension size) { + this.size = size; + setRootPolars(); + } - @Override - public void setSize(Dimension size) { - this.size = size; - setRootPolars(); + /** + * @param v the vertex whose center is to be returned + * @return the coordinates of {@code v}'s parent, or the center of this layout's area if it's a + * root. + */ + public Point2D getCenter(V v) { + V parent = graph.getParent(v); + if (parent == null) { + return getCenter(); } + return locations.getUnchecked(parent); + } + + @Override + public void setLocation(V v, Point2D location) { + Point2D c = getCenter(v); + Point2D pv = new Point2D.Double(location.getX() - c.getX(), location.getY() - c.getY()); + PolarPoint newLocation = PolarPoint.cartesianToPolar(pv); + polarLocations.getUnchecked(v).setLocation(newLocation); + + Point2D center = getCenter(v); + pv.setLocation(pv.getX() + center.getX(), pv.getY() + center.getY()); + locations.put(v, pv); + } + + @Override + public Point2D apply(V v) { + return locations.getUnchecked(v); + } - /** - * @param v the vertex whose center is to be returned - * @return the coordinates of {@code v}'s parent, or the center of this layout's area if it's a root. 
- */ - public Point2D getCenter(V v) { - V parent = graph.getParent(v); - if(parent == null) { - return getCenter(); - } - return locations.getUnchecked(parent); - } - - @Override - public void setLocation(V v, Point2D location) { - Point2D c = getCenter(v); - Point2D pv = new Point2D.Double(location.getX()-c.getX(),location.getY()-c.getY()); - PolarPoint newLocation = PolarPoint.cartesianToPolar(pv); - polarLocations.getUnchecked(v).setLocation(newLocation); - - Point2D center = getCenter(v); - pv.setLocation(pv.getX()+center.getX(), pv.getY()+center.getY()); - locations.put(v, pv); - } - - @Override - public Point2D apply(V v) { - return locations.getUnchecked(v); - } - - /** - * @return the radii - */ - public Map getRadii() { - return radii; - } + /** @return the radii */ + public Map getRadii() { + return radii; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/CircleLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/CircleLayout.java index 5aad08e7..05655ca6 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/CircleLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/CircleLayout.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. 
* @@ -12,6 +12,10 @@ */ package edu.uci.ics.jung.algorithms.layout; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import edu.uci.ics.jung.graph.Graph; import java.awt.Dimension; import java.awt.geom.Point2D; import java.util.ArrayList; @@ -19,130 +23,118 @@ import java.util.Comparator; import java.util.List; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; - -import edu.uci.ics.jung.graph.Graph; - - - /** * A {@code Layout} implementation that positions vertices equally spaced on a regular circle. * * @author Masanori Harada */ -public class CircleLayout extends AbstractLayout { - - private double radius; - private List vertex_ordered_list; - - protected LoadingCache circleVertexDatas = - CacheBuilder.newBuilder().build(new CacheLoader() { - public CircleVertexData load(V vertex) { - return new CircleVertexData(); - } - }); - - public CircleLayout(Graph g) { - super(g); - } - - /** - * @return the radius of the circle. - */ - public double getRadius() { - return radius; - } - - /** - * Sets the radius of the circle. Must be called before {@code initialize()} is called. - * @param radius the radius of the circle - */ - public void setRadius(double radius) { - this.radius = radius; - } - - /** - * Sets the order of the vertices in the layout according to the ordering - * specified by {@code comparator}. - * @param comparator the comparator to use to order the vertices - */ - public void setVertexOrder(Comparator comparator) - { - if (vertex_ordered_list == null) - vertex_ordered_list = new ArrayList(getGraph().getVertices()); - Collections.sort(vertex_ordered_list, comparator); - } - - /** - * Sets the order of the vertices in the layout according to the ordering - * of {@code vertex_list}. 
- * @param vertex_list a list specifying the ordering of the vertices - */ - public void setVertexOrder(List vertex_list) - { - if (!vertex_list.containsAll(getGraph().getVertices())) - throw new IllegalArgumentException("Supplied list must include " + - "all vertices of the graph"); - this.vertex_ordered_list = vertex_list; - } - - public void reset() { - initialize(); - } - - public void initialize() - { - Dimension d = getSize(); - - if (d != null) - { - if (vertex_ordered_list == null) - setVertexOrder(new ArrayList(getGraph().getVertices())); - - double height = d.getHeight(); - double width = d.getWidth(); - - if (radius <= 0) { - radius = 0.45 * (height < width ? height : width); - } - - int i = 0; - for (V v : vertex_ordered_list) - { - Point2D coord = apply(v); - - double angle = (2 * Math.PI * i) / vertex_ordered_list.size(); - - coord.setLocation(Math.cos(angle) * radius + width / 2, - Math.sin(angle) * radius + height / 2); - - CircleVertexData data = getCircleData(v); - data.setAngle(angle); - i++; - } - } - } - - protected CircleVertexData getCircleData(V v) { - return circleVertexDatas.getUnchecked(v); - } - - protected static class CircleVertexData { - private double angle; - - protected double getAngle() { - return angle; - } - - protected void setAngle(double angle) { - this.angle = angle; - } - - @Override - public String toString() { - return "CircleVertexData: angle=" + angle; - } - } +public class CircleLayout extends AbstractLayout { + + private double radius; + private List vertex_ordered_list; + + protected LoadingCache circleVertexDatas = + CacheBuilder.newBuilder() + .build( + new CacheLoader() { + public CircleVertexData load(V vertex) { + return new CircleVertexData(); + } + }); + + public CircleLayout(Graph g) { + super(g); + } + + /** @return the radius of the circle. */ + public double getRadius() { + return radius; + } + + /** + * Sets the radius of the circle. Must be called before {@code initialize()} is called. 
+ * + * @param radius the radius of the circle + */ + public void setRadius(double radius) { + this.radius = radius; + } + + /** + * Sets the order of the vertices in the layout according to the ordering specified by {@code + * comparator}. + * + * @param comparator the comparator to use to order the vertices + */ + public void setVertexOrder(Comparator comparator) { + if (vertex_ordered_list == null) + vertex_ordered_list = new ArrayList(getGraph().getVertices()); + Collections.sort(vertex_ordered_list, comparator); + } + + /** + * Sets the order of the vertices in the layout according to the ordering of {@code vertex_list}. + * + * @param vertex_list a list specifying the ordering of the vertices + */ + public void setVertexOrder(List vertex_list) { + if (!vertex_list.containsAll(getGraph().getVertices())) + throw new IllegalArgumentException( + "Supplied list must include " + "all vertices of the graph"); + this.vertex_ordered_list = vertex_list; + } + + public void reset() { + initialize(); + } + + public void initialize() { + Dimension d = getSize(); + + if (d != null) { + if (vertex_ordered_list == null) setVertexOrder(new ArrayList(getGraph().getVertices())); + + double height = d.getHeight(); + double width = d.getWidth(); + + if (radius <= 0) { + radius = 0.45 * (height < width ? 
height : width); + } + + int i = 0; + for (V v : vertex_ordered_list) { + Point2D coord = apply(v); + + double angle = (2 * Math.PI * i) / vertex_ordered_list.size(); + + coord.setLocation( + Math.cos(angle) * radius + width / 2, Math.sin(angle) * radius + height / 2); + + CircleVertexData data = getCircleData(v); + data.setAngle(angle); + i++; + } + } + } + + protected CircleVertexData getCircleData(V v) { + return circleVertexDatas.getUnchecked(v); + } + + protected static class CircleVertexData { + private double angle; + + protected double getAngle() { + return angle; + } + + protected void setAngle(double angle) { + this.angle = angle; + } + + @Override + public String toString() { + return "CircleVertexData: angle=" + angle; + } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/DAGLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/DAGLayout.java index 06006931..04c5d376 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/DAGLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/DAGLayout.java @@ -11,330 +11,301 @@ */ package edu.uci.ics.jung.algorithms.layout; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.util.Pair; import java.awt.Dimension; import java.awt.geom.Point2D; import java.util.HashMap; import java.util.Map; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; - /** - * An implementation of {@code Layout} suitable for tree-like directed - * acyclic graphs. Parts of it will probably not terminate if the graph is - * cyclic! The layout will result in directed edges pointing generally upwards. - * Any vertices with no successors are considered to be level 0, and tend - * towards the top of the layout. Any vertex has a level one greater than the - * maximum level of all its successors. - * + * An implementation of {@code Layout} suitable for tree-like directed acyclic graphs. 
Parts of it + * will probably not terminate if the graph is cyclic! The layout will result in directed edges + * pointing generally upwards. Any vertices with no successors are considered to be level 0, and + * tend towards the top of the layout. Any vertex has a level one greater than the maximum level of + * all its successors. * * @author John Yesberg */ -public class DAGLayout extends SpringLayout { - - /** - * Each vertex has a minimumLevel. Any vertex with no successors has - * minimumLevel of zero. The minimumLevel of any vertex must be strictly - * greater than the minimumLevel of its parents. (Vertex A is a parent of - * Vertex B iff there is an edge from B to A.) Typically, a vertex will - * have a minimumLevel which is one greater than the minimumLevel of its - * parent's. However, if the vertex has two parents, its minimumLevel will - * be one greater than the maximum of the parents'. We need to calculate - * the minimumLevel for each vertex. When we layout the graph, vertices - * cannot be drawn any higher than the minimumLevel. The graphHeight of a - * graph is the greatest minimumLevel that is used. We will modify the - * SpringLayout calculations so that nodes cannot move above their assigned - * minimumLevel. - */ - private Map minLevels = new HashMap(); - // Simpler than the "pair" technique. - static int graphHeight; - static int numRoots; - final double SPACEFACTOR = 1.3; - // How much space do we allow for additional floating at the bottom. - final double LEVELATTRACTIONRATE = 0.8; - - /** - * A bunch of parameters to help work out when to stop quivering. - * - * If the MeanSquareVel(ocity) ever gets below the MSV_THRESHOLD, then we - * will start a final cool-down phase of COOL_DOWN_INCREMENT increments. If - * the MeanSquareVel ever exceeds the threshold, we will exit the cool down - * phase, and continue looking for another opportunity. 
- */ - final double MSV_THRESHOLD = 10.0; - double meanSquareVel; - boolean stoppingIncrements = false; - int incrementsLeft; - final int COOL_DOWN_INCREMENTS = 200; - - public DAGLayout(Graph g) { - super(g); - } - - /** - * Calculates the level of each vertex in the graph. Level 0 is - * allocated to each vertex with no successors. Level n+1 is allocated to - * any vertex whose successors' maximum level is n. - */ - public void setRoot() { - numRoots = 0; - Graph g = getGraph(); - for(V v : g.getVertices()) { - if (g.getSuccessors(v).isEmpty()) { - setRoot(v); - numRoots++; - } - } - } - - /** - * Set vertex v to be level 0. - * @param v the vertex to set as root - */ - public void setRoot(V v) { - minLevels.put(v, new Integer(0)); - // set all the levels. - propagateMinimumLevel(v); - } - - /** - * A recursive method for allocating the level for each vertex. Ensures - * that all predecessors of v have a level which is at least one greater - * than the level of v. - * - * @param v the vertex whose minimum level is to be calculated - */ - public void propagateMinimumLevel(V v) { - int level = minLevels.get(v).intValue(); - for(V child : getGraph().getPredecessors(v)) { - int oldLevel, newLevel; - Number o = minLevels.get(child); - if (o != null) - oldLevel = o.intValue(); - else - oldLevel = 0; - newLevel = Math.max(oldLevel, level + 1); - minLevels.put(child, new Integer(newLevel)); - - if (newLevel > graphHeight) - graphHeight = newLevel; - propagateMinimumLevel(child); - } - } - - /** - * Sets a random location for a vertex within the dimensions of the space. 
- * - * @param v the vertex whose position is to be set - * @param coord the coordinates of the vertex once the position has been set - * @param d the dimensions of the space - */ - private void initializeLocation( - V v, - Point2D coord, - Dimension d) { - - int level = minLevels.get(v).intValue(); - int minY = (int) (level * d.getHeight() / (graphHeight * SPACEFACTOR)); - double x = Math.random() * d.getWidth(); - double y = Math.random() * (d.getHeight() - minY) + minY; - coord.setLocation(x,y); - } - - @Override - public void setSize(Dimension size) { - super.setSize(size); - for(V v : getGraph().getVertices()) { - initializeLocation(v,apply(v),getSize()); - } - } - - /** - * Had to override this one as well, to ensure that setRoot() is called. - */ - @Override - public void initialize() { - super.initialize(); - setRoot(); - } - - /** - * Override the moveNodes() method from SpringLayout. The only change we - * need to make is to make sure that nodes don't float higher than the minY - * coordinate, as calculated by their minimumLevel. - */ - @Override - protected void moveNodes() { - // Dimension d = currentSize; - double oldMSV = meanSquareVel; - meanSquareVel = 0; - - synchronized (getSize()) { - - for(V v : getGraph().getVertices()) { - if (isLocked(v)) - continue; - SpringLayout.SpringVertexData vd = springVertexData.getUnchecked(v); - Point2D xyd = apply(v); - - int width = getSize().width; - int height = getSize().height; - - // (JY addition: three lines are new) - int level = - minLevels.get(v).intValue(); - int minY = (int) (level * height / (graphHeight * SPACEFACTOR)); - int maxY = - level == 0 - ? (int) (height / (graphHeight * SPACEFACTOR * 2)) - : height; - - // JY added 2* - double the sideways repulsion. - vd.dx += 2 * vd.repulsiondx + vd.edgedx; - vd.dy += vd.repulsiondy + vd.edgedy; - - // JY Addition: Attract the vertex towards it's minimumLevel - // height. 
- double delta = xyd.getY() - minY; - vd.dy -= delta * LEVELATTRACTIONRATE; - if (level == 0) - vd.dy -= delta * LEVELATTRACTIONRATE; - // twice as much at the top. - - // JY addition: - meanSquareVel += (vd.dx * vd.dx + vd.dy * vd.dy); - - // keeps nodes from moving any faster than 5 per time unit - xyd.setLocation(xyd.getX()+Math.max(-5, Math.min(5, vd.dx)) , xyd.getY()+Math.max(-5, Math.min(5, vd.dy)) ); - - if (xyd.getX() < 0) { - xyd.setLocation(0, xyd.getY()); - } else if (xyd.getX() > width) { - xyd.setLocation(width, xyd.getY()); - } - - // (JY addition: These two lines replaced 0 with minY) - if (xyd.getY() < minY) { - xyd.setLocation(xyd.getX(), minY); - // (JY addition: replace height with maxY) - } else if (xyd.getY() > maxY) { - xyd.setLocation(xyd.getX(), maxY); - } - - // (JY addition: if there's only one root, anchor it in the - // middle-top of the screen) - if (numRoots == 1 && level == 0) { - xyd.setLocation(width/2, xyd.getY()); - } - } - } - //System.out.println("MeanSquareAccel="+meanSquareVel); - if (!stoppingIncrements - && Math.abs(meanSquareVel - oldMSV) < MSV_THRESHOLD) { - stoppingIncrements = true; - incrementsLeft = COOL_DOWN_INCREMENTS; - } else if ( - stoppingIncrements - && Math.abs(meanSquareVel - oldMSV) <= MSV_THRESHOLD) { - incrementsLeft--; - if (incrementsLeft <= 0) - incrementsLeft = 0; - } - } - - /** - * Override incrementsAreDone so that we can eventually stop. - */ - @Override - public boolean done() { - if (stoppingIncrements && incrementsLeft == 0) - return true; - else - return false; - } - - /** - * Override forceMove so that if someone moves a node, we can re-layout - * everything. 
- * @param picked the vertex whose location is to be set - * @param x the x coordinate of the location to set - * @param y the y coordinate of the location to set - */ - @Override - public void setLocation(V picked, double x, double y) { - Point2D coord = apply(picked); - coord.setLocation(x,y); - stoppingIncrements = false; - } - - /** - * Override forceMove so that if someone moves a node, we can re-layout - * everything. - * @param picked the vertex whose location is to be set - * @param p the location to set - */ - @Override - public void setLocation(V picked, Point2D p) { - setLocation(picked, p.getX(), p.getY()); - } - - /** - * Overridden relaxEdges. This one reduces the effect of edges between - * greatly different levels. - * - */ - @Override - protected void relaxEdges() { - for(E e : getGraph().getEdges()) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - Point2D p1 = apply(v1); - Point2D p2 = apply(v2); - double vx = p1.getX() - p2.getX(); - double vy = p1.getY() - p2.getY(); - double len = Math.sqrt(vx * vx + vy * vy); - - // JY addition. - int level1 = - minLevels.get(v1).intValue(); - int level2 = - minLevels.get(v2).intValue(); - - double desiredLen = lengthFunction.apply(e); - - // round from zero, if needed [zero would be Bad.]. - len = (len == 0) ? .0001 : len; - - // force factor: optimal length minus actual length, - // is made smaller as the current actual length gets larger. - // why? - - // System.out.println("Desired : " + getLength( e )); - double f = force_multiplier * (desiredLen - len) / len; - - f = f * Math.pow(stretch / 100.0, - (getGraph().degree(v1) + getGraph().degree(v2) -2)); - - // JY addition. If this is an edge which stretches a long way, - // don't be so concerned about it. - if (level1 != level2) - f = f / Math.pow(Math.abs(level2 - level1), 1.5); - - // the actual movement distance 'dx' is the force multiplied by the - // distance to go. 
- double dx = f * vx; - double dy = f * vy; - SpringVertexData v1D, v2D; - v1D = springVertexData.getUnchecked(v1); - v2D = springVertexData.getUnchecked(v2); - - v1D.edgedx += dx; - v1D.edgedy += dy; - v2D.edgedx += -dx; - v2D.edgedy += -dy; - } - } +public class DAGLayout extends SpringLayout { + + /** + * Each vertex has a minimumLevel. Any vertex with no successors has minimumLevel of zero. The + * minimumLevel of any vertex must be strictly greater than the minimumLevel of its parents. + * (Vertex A is a parent of Vertex B iff there is an edge from B to A.) Typically, a vertex will + * have a minimumLevel which is one greater than the minimumLevel of its parent's. However, if the + * vertex has two parents, its minimumLevel will be one greater than the maximum of the parents'. + * We need to calculate the minimumLevel for each vertex. When we layout the graph, vertices + * cannot be drawn any higher than the minimumLevel. The graphHeight of a graph is the greatest + * minimumLevel that is used. We will modify the SpringLayout calculations so that nodes cannot + * move above their assigned minimumLevel. + */ + private Map minLevels = new HashMap(); + // Simpler than the "pair" technique. + static int graphHeight; + static int numRoots; + final double SPACEFACTOR = 1.3; + // How much space do we allow for additional floating at the bottom. + final double LEVELATTRACTIONRATE = 0.8; + + /** + * A bunch of parameters to help work out when to stop quivering. + * + *

If the MeanSquareVel(ocity) ever gets below the MSV_THRESHOLD, then we will start a final + * cool-down phase of COOL_DOWN_INCREMENT increments. If the MeanSquareVel ever exceeds the + * threshold, we will exit the cool down phase, and continue looking for another opportunity. + */ + final double MSV_THRESHOLD = 10.0; + + double meanSquareVel; + boolean stoppingIncrements = false; + int incrementsLeft; + final int COOL_DOWN_INCREMENTS = 200; + + public DAGLayout(Graph g) { + super(g); + } + + /** + * Calculates the level of each vertex in the graph. Level 0 is allocated to each vertex with no + * successors. Level n+1 is allocated to any vertex whose successors' maximum level is n. + */ + public void setRoot() { + numRoots = 0; + Graph g = getGraph(); + for (V v : g.getVertices()) { + if (g.getSuccessors(v).isEmpty()) { + setRoot(v); + numRoots++; + } + } + } + + /** + * Set vertex v to be level 0. + * + * @param v the vertex to set as root + */ + public void setRoot(V v) { + minLevels.put(v, new Integer(0)); + // set all the levels. + propagateMinimumLevel(v); + } + + /** + * A recursive method for allocating the level for each vertex. Ensures that all predecessors of v + * have a level which is at least one greater than the level of v. + * + * @param v the vertex whose minimum level is to be calculated + */ + public void propagateMinimumLevel(V v) { + int level = minLevels.get(v).intValue(); + for (V child : getGraph().getPredecessors(v)) { + int oldLevel, newLevel; + Number o = minLevels.get(child); + if (o != null) oldLevel = o.intValue(); + else oldLevel = 0; + newLevel = Math.max(oldLevel, level + 1); + minLevels.put(child, new Integer(newLevel)); + + if (newLevel > graphHeight) graphHeight = newLevel; + propagateMinimumLevel(child); + } + } + + /** + * Sets a random location for a vertex within the dimensions of the space. 
+ * + * @param v the vertex whose position is to be set + * @param coord the coordinates of the vertex once the position has been set + * @param d the dimensions of the space + */ + private void initializeLocation(V v, Point2D coord, Dimension d) { + + int level = minLevels.get(v).intValue(); + int minY = (int) (level * d.getHeight() / (graphHeight * SPACEFACTOR)); + double x = Math.random() * d.getWidth(); + double y = Math.random() * (d.getHeight() - minY) + minY; + coord.setLocation(x, y); + } + + @Override + public void setSize(Dimension size) { + super.setSize(size); + for (V v : getGraph().getVertices()) { + initializeLocation(v, apply(v), getSize()); + } + } + + /** Had to override this one as well, to ensure that setRoot() is called. */ + @Override + public void initialize() { + super.initialize(); + setRoot(); + } + + /** + * Override the moveNodes() method from SpringLayout. The only change we need to make is to make + * sure that nodes don't float higher than the minY coordinate, as calculated by their + * minimumLevel. + */ + @Override + protected void moveNodes() { + // Dimension d = currentSize; + double oldMSV = meanSquareVel; + meanSquareVel = 0; + + synchronized (getSize()) { + for (V v : getGraph().getVertices()) { + if (isLocked(v)) { + continue; + } + SpringLayout.SpringVertexData vd = springVertexData.getUnchecked(v); + Point2D xyd = apply(v); + + int width = getSize().width; + int height = getSize().height; + + // (JY addition: three lines are new) + int level = minLevels.get(v).intValue(); + int minY = (int) (level * height / (graphHeight * SPACEFACTOR)); + int maxY = level == 0 ? (int) (height / (graphHeight * SPACEFACTOR * 2)) : height; + + // JY added 2* - double the sideways repulsion. + vd.dx += 2 * vd.repulsiondx + vd.edgedx; + vd.dy += vd.repulsiondy + vd.edgedy; + + // JY Addition: Attract the vertex towards it's minimumLevel + // height. 
+ double delta = xyd.getY() - minY; + vd.dy -= delta * LEVELATTRACTIONRATE; + if (level == 0) vd.dy -= delta * LEVELATTRACTIONRATE; + // twice as much at the top. + + // JY addition: + meanSquareVel += (vd.dx * vd.dx + vd.dy * vd.dy); + + // keeps nodes from moving any faster than 5 per time unit + xyd.setLocation( + xyd.getX() + Math.max(-5, Math.min(5, vd.dx)), + xyd.getY() + Math.max(-5, Math.min(5, vd.dy))); + + if (xyd.getX() < 0) { + xyd.setLocation(0, xyd.getY()); + } else if (xyd.getX() > width) { + xyd.setLocation(width, xyd.getY()); + } + + // (JY addition: These two lines replaced 0 with minY) + if (xyd.getY() < minY) { + xyd.setLocation(xyd.getX(), minY); + // (JY addition: replace height with maxY) + } else if (xyd.getY() > maxY) { + xyd.setLocation(xyd.getX(), maxY); + } + + // (JY addition: if there's only one root, anchor it in the + // middle-top of the screen) + if (numRoots == 1 && level == 0) { + xyd.setLocation(width / 2, xyd.getY()); + } + } + } + //System.out.println("MeanSquareAccel="+meanSquareVel); + if (!stoppingIncrements && Math.abs(meanSquareVel - oldMSV) < MSV_THRESHOLD) { + stoppingIncrements = true; + incrementsLeft = COOL_DOWN_INCREMENTS; + } else if (stoppingIncrements && Math.abs(meanSquareVel - oldMSV) <= MSV_THRESHOLD) { + incrementsLeft--; + if (incrementsLeft <= 0) incrementsLeft = 0; + } + } + + /** Override incrementsAreDone so that we can eventually stop. */ + @Override + public boolean done() { + if (stoppingIncrements && incrementsLeft == 0) { + return true; + } else { + return false; + } + } + + /** + * Override forceMove so that if someone moves a node, we can re-layout everything. 
+ * + * @param picked the vertex whose location is to be set + * @param x the x coordinate of the location to set + * @param y the y coordinate of the location to set + */ + @Override + public void setLocation(V picked, double x, double y) { + Point2D coord = apply(picked); + coord.setLocation(x, y); + stoppingIncrements = false; + } + + /** + * Override forceMove so that if someone moves a node, we can re-layout everything. + * + * @param picked the vertex whose location is to be set + * @param p the location to set + */ + @Override + public void setLocation(V picked, Point2D p) { + setLocation(picked, p.getX(), p.getY()); + } + + /** + * Overridden relaxEdges. This one reduces the effect of edges between greatly different levels. + */ + @Override + protected void relaxEdges() { + for (E e : getGraph().getEdges()) { + Pair endpoints = getGraph().getEndpoints(e); + V v1 = endpoints.getFirst(); + V v2 = endpoints.getSecond(); + + Point2D p1 = apply(v1); + Point2D p2 = apply(v2); + double vx = p1.getX() - p2.getX(); + double vy = p1.getY() - p2.getY(); + double len = Math.sqrt(vx * vx + vy * vy); + + // JY addition. + int level1 = minLevels.get(v1).intValue(); + int level2 = minLevels.get(v2).intValue(); + + double desiredLen = lengthFunction.apply(e); + + // round from zero, if needed [zero would be Bad.]. + len = (len == 0) ? .0001 : len; + + // force factor: optimal length minus actual length, + // is made smaller as the current actual length gets larger. + // why? + + // System.out.println("Desired : " + getLength( e )); + double f = force_multiplier * (desiredLen - len) / len; + + f = f * Math.pow(stretch / 100.0, (getGraph().degree(v1) + getGraph().degree(v2) - 2)); + + // JY addition. If this is an edge which stretches a long way, + // don't be so concerned about it. + if (level1 != level2) f = f / Math.pow(Math.abs(level2 - level1), 1.5); + + // the actual movement distance 'dx' is the force multiplied by the + // distance to go. 
+ double dx = f * vx; + double dy = f * vy; + SpringVertexData v1D, v2D; + v1D = springVertexData.getUnchecked(v1); + v2D = springVertexData.getUnchecked(v2); + + v1D.edgedx += dx; + v1D.edgedy += dy; + v2D.edgedx += -dx; + v2D.edgedy += -dy; + } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout.java index e6ec131a..9de1adea 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout.java @@ -7,312 +7,310 @@ */ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; - import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; - import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; import edu.uci.ics.jung.algorithms.util.IterativeContext; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.Pair; +import java.awt.Dimension; +import java.awt.geom.Point2D; +import java.util.ConcurrentModificationException; /** * Implements the Fruchterman-Reingold force-directed algorithm for node layout. - * + * *

Behavior is determined by the following settable parameters: + * *

    - *
  • attraction multiplier: how much edges try to keep their vertices together - *
  • repulsion multiplier: how much vertices try to push each other apart - *
  • maximum iterations: how many iterations this algorithm will use before stopping + *
  • attraction multiplier: how much edges try to keep their vertices together + *
  • repulsion multiplier: how much vertices try to push each other apart + *
  • maximum iterations: how many iterations this algorithm will use before stopping *
+ * * Each of the first two defaults to 0.75; the maximum number of iterations defaults to 700. * * @see "Fruchterman and Reingold, 'Graph Drawing by Force-directed Placement'" - * @see "http://i11www.ilkd.uni-karlsruhe.de/teaching/SS_04/visualisierung/papers/fruchterman91graph.pdf" + * @see + * "http://i11www.ilkd.uni-karlsruhe.de/teaching/SS_04/visualisierung/papers/fruchterman91graph.pdf" * @author Scott White, Yan-Biao Boey, Danyel Fisher */ public class FRLayout extends AbstractLayout implements IterativeContext { - private double forceConstant; + private double forceConstant; - private double temperature; + private double temperature; - private int currentIteration; + private int currentIteration; - private int mMaxIterations = 700; + private int mMaxIterations = 700; - protected LoadingCache frVertexData = - CacheBuilder.newBuilder().build(new CacheLoader() { - public FRVertexData load(V vertex) { - return new FRVertexData(); - } - }); + protected LoadingCache frVertexData = + CacheBuilder.newBuilder() + .build( + new CacheLoader() { + public FRVertexData load(V vertex) { + return new FRVertexData(); + } + }); - private double attraction_multiplier = 0.75; + private double attraction_multiplier = 0.75; - private double attraction_constant; + private double attraction_constant; - private double repulsion_multiplier = 0.75; + private double repulsion_multiplier = 0.75; - private double repulsion_constant; + private double repulsion_constant; - private double max_dimension; + private double max_dimension; - public FRLayout(Graph g) { - super(g); - } + public FRLayout(Graph g) { + super(g); + } - public FRLayout(Graph g, Dimension d) { - super(g, new RandomLocationTransformer(d), d); - initialize(); - max_dimension = Math.max(d.height, d.width); - } + public FRLayout(Graph g, Dimension d) { + super(g, new RandomLocationTransformer(d), d); + initialize(); + max_dimension = Math.max(d.height, d.width); + } - @Override - public void setSize(Dimension size) { - 
if(initialized == false) { - setInitializer(new RandomLocationTransformer(size)); - } - super.setSize(size); - max_dimension = Math.max(size.height, size.width); - } - - public void setAttractionMultiplier(double attraction) { - this.attraction_multiplier = attraction; + @Override + public void setSize(Dimension size) { + if (initialized == false) { + setInitializer(new RandomLocationTransformer(size)); } + super.setSize(size); + max_dimension = Math.max(size.height, size.width); + } - public void setRepulsionMultiplier(double repulsion) { - this.repulsion_multiplier = repulsion; - } + public void setAttractionMultiplier(double attraction) { + this.attraction_multiplier = attraction; + } - public void reset() { - doInit(); - } + public void setRepulsionMultiplier(double repulsion) { + this.repulsion_multiplier = repulsion; + } - public void initialize() { - doInit(); - } + public void reset() { + doInit(); + } - private void doInit() { - Graph graph = getGraph(); - Dimension d = getSize(); - if(graph != null && d != null) { - currentIteration = 0; - temperature = d.getWidth() / 10; - - forceConstant = - Math - .sqrt(d.getHeight() - * d.getWidth() - / graph.getVertexCount()); - - attraction_constant = attraction_multiplier * forceConstant; - repulsion_constant = repulsion_multiplier * forceConstant; - } - } + public void initialize() { + doInit(); + } - private double EPSILON = 0.000001D; + private void doInit() { + Graph graph = getGraph(); + Dimension d = getSize(); + if (graph != null && d != null) { + currentIteration = 0; + temperature = d.getWidth() / 10; - /** - * Moves the iteration forward one notch, calculation attraction and - * repulsion between vertices and edges and cooling the temperature. 
- */ - public synchronized void step() { - currentIteration++; + forceConstant = Math.sqrt(d.getHeight() * d.getWidth() / graph.getVertexCount()); - /** - * Calculate repulsion - */ - while(true) { + attraction_constant = attraction_multiplier * forceConstant; + repulsion_constant = repulsion_multiplier * forceConstant; + } + } - try { - for(V v1 : getGraph().getVertices()) { - calcRepulsion(v1); - } - break; - } catch(ConcurrentModificationException cme) {} - } + private double EPSILON = 0.000001D; - /** - * Calculate attraction - */ - while(true) { - try { - for(E e : getGraph().getEdges()) { - - calcAttraction(e); - } - break; - } catch(ConcurrentModificationException cme) {} - } + /** + * Moves the iteration forward one notch, calculation attraction and repulsion between vertices + * and edges and cooling the temperature. + */ + public synchronized void step() { + currentIteration++; + /** Calculate repulsion */ + while (true) { - while(true) { - try { - for(V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - calcPositions(v); - } - break; - } catch(ConcurrentModificationException cme) {} + try { + for (V v1 : getGraph().getVertices()) { + calcRepulsion(v1); } - cool(); + break; + } catch (ConcurrentModificationException cme) { + } } - protected synchronized void calcPositions(V v) { - FRVertexData fvd = getFRData(v); - if(fvd == null) return; - Point2D xyd = apply(v); - double deltaLength = Math.max(EPSILON, fvd.norm()); - - double newXDisp = fvd.getX() / deltaLength - * Math.min(deltaLength, temperature); - - if (Double.isNaN(newXDisp)) { - throw new IllegalArgumentException( - "Unexpected mathematical result in FRLayout:calcPositions [xdisp]"); } - - double newYDisp = fvd.getY() / deltaLength - * Math.min(deltaLength, temperature); - xyd.setLocation(xyd.getX()+newXDisp, xyd.getY()+newYDisp); - - double borderWidth = getSize().getWidth() / 50.0; - double newXPos = xyd.getX(); - if (newXPos < borderWidth) { - newXPos = borderWidth + Math.random() 
* borderWidth * 2.0; - } else if (newXPos > (getSize().getWidth() - borderWidth)) { - newXPos = getSize().getWidth() - borderWidth - Math.random() - * borderWidth * 2.0; - } + /** Calculate attraction */ + while (true) { + try { + for (E e : getGraph().getEdges()) { - double newYPos = xyd.getY(); - if (newYPos < borderWidth) { - newYPos = borderWidth + Math.random() * borderWidth * 2.0; - } else if (newYPos > (getSize().getHeight() - borderWidth)) { - newYPos = getSize().getHeight() - borderWidth - - Math.random() * borderWidth * 2.0; + calcAttraction(e); } - - xyd.setLocation(newXPos, newYPos); + break; + } catch (ConcurrentModificationException cme) { + } } - protected void calcAttraction(E e) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - boolean v1_locked = isLocked(v1); - boolean v2_locked = isLocked(v2); - - if(v1_locked && v2_locked) { - // both locked, do nothing - return; + while (true) { + try { + for (V v : getGraph().getVertices()) { + if (isLocked(v)) { + continue; + } + calcPositions(v); } - Point2D p1 = apply(v1); - Point2D p2 = apply(v2); - if(p1 == null || p2 == null) return; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); + break; + } catch (ConcurrentModificationException cme) { + } + } + cool(); + } - double deltaLength = Math.max(EPSILON, Math.sqrt((xDelta * xDelta) - + (yDelta * yDelta))); + protected synchronized void calcPositions(V v) { + FRVertexData fvd = getFRData(v); + if (fvd == null) { + return; + } + Point2D xyd = apply(v); + double deltaLength = Math.max(EPSILON, fvd.norm()); - double force = (deltaLength * deltaLength) / attraction_constant; + double newXDisp = fvd.getX() / deltaLength * Math.min(deltaLength, temperature); - if (Double.isNaN(force)) { throw new IllegalArgumentException( - "Unexpected mathematical result in FRLayout:calcPositions [force]"); } + if (Double.isNaN(newXDisp)) { + throw new IllegalArgumentException( + 
"Unexpected mathematical result in FRLayout:calcPositions [xdisp]"); + } - double dx = (xDelta / deltaLength) * force; - double dy = (yDelta / deltaLength) * force; - if(v1_locked == false) { - FRVertexData fvd1 = getFRData(v1); - fvd1.offset(-dx, -dy); - } - if(v2_locked == false) { - FRVertexData fvd2 = getFRData(v2); - fvd2.offset(dx, dy); - } + double newYDisp = fvd.getY() / deltaLength * Math.min(deltaLength, temperature); + xyd.setLocation(xyd.getX() + newXDisp, xyd.getY() + newYDisp); + + double borderWidth = getSize().getWidth() / 50.0; + double newXPos = xyd.getX(); + if (newXPos < borderWidth) { + newXPos = borderWidth + Math.random() * borderWidth * 2.0; + } else if (newXPos > (getSize().getWidth() - borderWidth)) { + newXPos = getSize().getWidth() - borderWidth - Math.random() * borderWidth * 2.0; } - protected void calcRepulsion(V v1) { - FRVertexData fvd1 = getFRData(v1); - if(fvd1 == null) - return; - fvd1.setLocation(0, 0); + double newYPos = xyd.getY(); + if (newYPos < borderWidth) { + newYPos = borderWidth + Math.random() * borderWidth * 2.0; + } else if (newYPos > (getSize().getHeight() - borderWidth)) { + newYPos = getSize().getHeight() - borderWidth - Math.random() * borderWidth * 2.0; + } - try { - for(V v2 : getGraph().getVertices()) { + xyd.setLocation(newXPos, newYPos); + } -// if (isLocked(v2)) continue; - if (v1 != v2) { - Point2D p1 = apply(v1); - Point2D p2 = apply(v2); - if(p1 == null || p2 == null) continue; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); + protected void calcAttraction(E e) { + Pair endpoints = getGraph().getEndpoints(e); + V v1 = endpoints.getFirst(); + V v2 = endpoints.getSecond(); + boolean v1_locked = isLocked(v1); + boolean v2_locked = isLocked(v2); - double deltaLength = Math.max(EPSILON, Math - .sqrt((xDelta * xDelta) + (yDelta * yDelta))); + if (v1_locked && v2_locked) { + // both locked, do nothing + return; + } + Point2D p1 = apply(v1); + Point2D p2 = apply(v2); + if (p1 == 
null || p2 == null) { + return; + } + double xDelta = p1.getX() - p2.getX(); + double yDelta = p1.getY() - p2.getY(); - double force = (repulsion_constant * repulsion_constant) / deltaLength; + double deltaLength = Math.max(EPSILON, Math.sqrt((xDelta * xDelta) + (yDelta * yDelta))); - if (Double.isNaN(force)) { throw new RuntimeException( - "Unexpected mathematical result in FRLayout:calcPositions [repulsion]"); } + double force = (deltaLength * deltaLength) / attraction_constant; - fvd1.offset((xDelta / deltaLength) * force, - (yDelta / deltaLength) * force); - } - } - } catch(ConcurrentModificationException cme) { - calcRepulsion(v1); - } + if (Double.isNaN(force)) { + throw new IllegalArgumentException( + "Unexpected mathematical result in FRLayout:calcPositions [force]"); } - private void cool() { - temperature *= (1.0 - currentIteration / (double) mMaxIterations); + double dx = (xDelta / deltaLength) * force; + double dy = (yDelta / deltaLength) * force; + if (v1_locked == false) { + FRVertexData fvd1 = getFRData(v1); + fvd1.offset(-dx, -dy); } - - public void setMaxIterations(int maxIterations) { - mMaxIterations = maxIterations; + if (v2_locked == false) { + FRVertexData fvd2 = getFRData(v2); + fvd2.offset(dx, dy); } + } - protected FRVertexData getFRData(V v) { - return frVertexData.getUnchecked(v); + protected void calcRepulsion(V v1) { + FRVertexData fvd1 = getFRData(v1); + if (fvd1 == null) { + return; } + fvd1.setLocation(0, 0); - /** - * @return true - */ - public boolean isIncremental() { - return true; - } + try { + for (V v2 : getGraph().getVertices()) { - /** - * @return true once the current iteration has passed the maximum count. 
- */ - public boolean done() { - if (currentIteration > mMaxIterations || temperature < 1.0/max_dimension) - { - return true; + // if (isLocked(v2)) { + // continue; + // } + if (v1 != v2) { + Point2D p1 = apply(v1); + Point2D p2 = apply(v2); + if (p1 == null || p2 == null) { + continue; + } + double xDelta = p1.getX() - p2.getX(); + double yDelta = p1.getY() - p2.getY(); + + double deltaLength = Math.max(EPSILON, Math.sqrt((xDelta * xDelta) + (yDelta * yDelta))); + + double force = (repulsion_constant * repulsion_constant) / deltaLength; + + if (Double.isNaN(force)) { + throw new RuntimeException( + "Unexpected mathematical result in FRLayout:calcPositions [repulsion]"); + } + + fvd1.offset((xDelta / deltaLength) * force, (yDelta / deltaLength) * force); } - return false; + } + } catch (ConcurrentModificationException cme) { + calcRepulsion(v1); } + } - @SuppressWarnings("serial") - protected static class FRVertexData extends Point2D.Double - { - protected void offset(double x, double y) - { - this.x += x; - this.y += y; - } + private void cool() { + temperature *= (1.0 - currentIteration / (double) mMaxIterations); + } - protected double norm() - { - return Math.sqrt(x*x + y*y); - } - } -} \ No newline at end of file + public void setMaxIterations(int maxIterations) { + mMaxIterations = maxIterations; + } + + protected FRVertexData getFRData(V v) { + return frVertexData.getUnchecked(v); + } + + /** @return true */ + public boolean isIncremental() { + return true; + } + + /** @return true once the current iteration has passed the maximum count. 
*/ + public boolean done() { + if (currentIteration > mMaxIterations || temperature < 1.0 / max_dimension) { + return true; + } + return false; + } + + @SuppressWarnings("serial") + protected static class FRVertexData extends Point2D.Double { + protected void offset(double x, double y) { + this.x += x; + this.y += y; + } + + protected double norm() { + return Math.sqrt(x * x + y * y); + } + } +} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout2.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout2.java index 4214e124..4e189478 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout2.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/FRLayout2.java @@ -1,317 +1,319 @@ /* * Copyright (c) 2003, The JUNG Authors * All rights reserved. - * + * * This software is open-source under the BSD license; see either "license.txt" * or https://github.com/jrtom/jung/blob/master/LICENSE for a description. */ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.awt.geom.Rectangle2D; -import java.util.ConcurrentModificationException; - import com.google.common.base.Preconditions; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; - import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; import edu.uci.ics.jung.algorithms.util.IterativeContext; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.Pair; +import java.awt.Dimension; +import java.awt.geom.Point2D; +import java.awt.geom.Rectangle2D; +import java.util.ConcurrentModificationException; /** - * Implements the Fruchterman-Reingold force-directed algorithm for node layout. 
- * This is an experimental attempt at optimizing {@code FRLayout}; if it is successful - * it will be folded back into {@code FRLayout} (and this class will disappear). - * + * Implements the Fruchterman-Reingold force-directed algorithm for node layout. This is an + * experimental attempt at optimizing {@code FRLayout}; if it is successful it will be folded back + * into {@code FRLayout} (and this class will disappear). + * *

Behavior is determined by the following settable parameters: + * *

    - *
  • attraction multiplier: how much edges try to keep their vertices together - *
  • repulsion multiplier: how much vertices try to push each other apart - *
  • maximum iterations: how many iterations this algorithm will use before stopping + *
  • attraction multiplier: how much edges try to keep their vertices together + *
  • repulsion multiplier: how much vertices try to push each other apart + *
  • maximum iterations: how many iterations this algorithm will use before stopping *
+ * * Each of the first two defaults to 0.75; the maximum number of iterations defaults to 700. - - * + * * @see "Fruchterman and Reingold, 'Graph Drawing by Force-directed Placement'" - * @see "http://i11www.ilkd.uni-karlsruhe.de/teaching/SS_04/visualisierung/papers/fruchterman91graph.pdf" - * + * @see + * "http://i11www.ilkd.uni-karlsruhe.de/teaching/SS_04/visualisierung/papers/fruchterman91graph.pdf" * @author Tom Nelson * @author Scott White, Yan-Biao Boey, Danyel Fisher */ public class FRLayout2 extends AbstractLayout implements IterativeContext { - private double forceConstant; - - private double temperature; - - private int currentIteration; - - private int maxIterations = 700; - - protected LoadingCache frVertexData = - CacheBuilder.newBuilder().build(new CacheLoader() { - public Point2D load(V vertex) { - return new Point2D.Double(); - } - }); - - private double attraction_multiplier = 0.75; - - private double attraction_constant; - - private double repulsion_multiplier = 0.75; - - private double repulsion_constant; - - private double max_dimension; - - private Rectangle2D innerBounds = new Rectangle2D.Double(); - - private boolean checked = false; - - public FRLayout2(Graph g) { - super(g); - } - - public FRLayout2(Graph g, Dimension d) { - super(g, new RandomLocationTransformer(d), d); - max_dimension = Math.max(d.height, d.width); - initialize(); - } - - @Override - public void setSize(Dimension size) { - if(initialized == false) - setInitializer(new RandomLocationTransformer(size)); - super.setSize(size); - double t = size.width/50.0; - innerBounds.setFrameFromDiagonal(t,t,size.width-t,size.height-t); - max_dimension = Math.max(size.height, size.width); - } - - public void setAttractionMultiplier(double attraction) { - this.attraction_multiplier = attraction; - } - - public void setRepulsionMultiplier(double repulsion) { - this.repulsion_multiplier = repulsion; - } - - public void reset() { - doInit(); - } - - public void initialize() { - doInit(); - } 
+ private double forceConstant; - private void doInit() { - Graph graph = getGraph(); - Dimension d = getSize(); - if(graph != null && d != null) { - currentIteration = 0; - temperature = d.getWidth() / 10; - - forceConstant = - Math - .sqrt(d.getHeight() - * d.getWidth() - / graph.getVertexCount()); - - attraction_constant = attraction_multiplier * forceConstant; - repulsion_constant = repulsion_multiplier * forceConstant; - } - } + private double temperature; - private double EPSILON = 0.000001D; - - /** - * Moves the iteration forward one notch, calculation attraction and - * repulsion between vertices and edges and cooling the temperature. - */ - public synchronized void step() { - currentIteration++; - - /** - * Calculate repulsion - */ - while(true) { - - try { - for(V v1 : getGraph().getVertices()) { - calcRepulsion(v1); - } - break; - } catch(ConcurrentModificationException cme) {} - } + private int currentIteration; - /** - * Calculate attraction - */ - while(true) { - try { - for(E e : getGraph().getEdges()) { - calcAttraction(e); + private int maxIterations = 700; + + protected LoadingCache frVertexData = + CacheBuilder.newBuilder() + .build( + new CacheLoader() { + public Point2D load(V vertex) { + return new Point2D.Double(); } - break; - } catch(ConcurrentModificationException cme) {} - } + }); + private double attraction_multiplier = 0.75; - while(true) { - try { - for(V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - calcPositions(v); - } - break; - } catch(ConcurrentModificationException cme) {} - } - cool(); - } + private double attraction_constant; + + private double repulsion_multiplier = 0.75; + + private double repulsion_constant; + + private double max_dimension; + + private Rectangle2D innerBounds = new Rectangle2D.Double(); + + private boolean checked = false; + + public FRLayout2(Graph g) { + super(g); + } - protected synchronized void calcPositions(V v) { - Point2D fvd = this.frVertexData.getUnchecked(v); - if(fvd == null) 
return; - Point2D xyd = apply(v); - double deltaLength = Math.max(EPSILON, - Math.sqrt(fvd.getX()*fvd.getX()+fvd.getY()*fvd.getY())); - - double newXDisp = fvd.getX() / deltaLength - * Math.min(deltaLength, temperature); - - Preconditions.checkState(!Double.isNaN(newXDisp), - "Unexpected mathematical result in FRLayout:calcPositions [xdisp]"); - - double newYDisp = fvd.getY() / deltaLength - * Math.min(deltaLength, temperature); - double newX = xyd.getX()+Math.max(-5, Math.min(5,newXDisp)); - double newY = xyd.getY()+Math.max(-5, Math.min(5,newYDisp)); - - newX = Math.max(innerBounds.getMinX(), Math.min(newX, innerBounds.getMaxX())); - newY = Math.max(innerBounds.getMinY(), Math.min(newY, innerBounds.getMaxY())); - - xyd.setLocation(newX, newY); + public FRLayout2(Graph g, Dimension d) { + super(g, new RandomLocationTransformer(d), d); + max_dimension = Math.max(d.height, d.width); + initialize(); + } + @Override + public void setSize(Dimension size) { + if (initialized == false) setInitializer(new RandomLocationTransformer(size)); + super.setSize(size); + double t = size.width / 50.0; + innerBounds.setFrameFromDiagonal(t, t, size.width - t, size.height - t); + max_dimension = Math.max(size.height, size.width); + } + + public void setAttractionMultiplier(double attraction) { + this.attraction_multiplier = attraction; + } + + public void setRepulsionMultiplier(double repulsion) { + this.repulsion_multiplier = repulsion; + } + + public void reset() { + doInit(); + } + + public void initialize() { + doInit(); + } + + private void doInit() { + Graph graph = getGraph(); + Dimension d = getSize(); + if (graph != null && d != null) { + currentIteration = 0; + temperature = d.getWidth() / 10; + + forceConstant = Math.sqrt(d.getHeight() * d.getWidth() / graph.getVertexCount()); + + attraction_constant = attraction_multiplier * forceConstant; + repulsion_constant = repulsion_multiplier * forceConstant; } + } - protected void calcAttraction(E e) { - Pair endpoints = 
getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - boolean v1_locked = isLocked(v1); - boolean v2_locked = isLocked(v2); - - if(v1_locked && v2_locked) { - // both locked, do nothing - return; - } - Point2D p1 = apply(v1); - Point2D p2 = apply(v2); - if(p1 == null || p2 == null) return; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, p1.distance(p2)); - - double force = deltaLength / attraction_constant; - - Preconditions.checkState(!Double.isNaN(force), - "Unexpected mathematical result in FRLayout:calcPositions [force]"); - - double dx = xDelta * force; - double dy = yDelta * force; - Point2D fvd1 = frVertexData.getUnchecked(v1); - Point2D fvd2 = frVertexData.getUnchecked(v2); - if(v2_locked) { - // double the offset for v1, as v2 will not be moving in - // the opposite direction - fvd1.setLocation(fvd1.getX()-2*dx, fvd1.getY()-2*dy); - } else { - fvd1.setLocation(fvd1.getX()-dx, fvd1.getY()-dy); + private double EPSILON = 0.000001D; + + /** + * Moves the iteration forward one notch, calculation attraction and repulsion between vertices + * and edges and cooling the temperature. 
+ */ + public synchronized void step() { + currentIteration++; + + /** Calculate repulsion */ + while (true) { + + try { + for (V v1 : getGraph().getVertices()) { + calcRepulsion(v1); } - if(v1_locked) { - // double the offset for v2, as v1 will not be moving in - // the opposite direction - fvd2.setLocation(fvd2.getX()+2*dx, fvd2.getY()+2*dy); - } else { - fvd2.setLocation(fvd2.getX()+dx, fvd2.getY()+dy); + break; + } catch (ConcurrentModificationException cme) { + } } + + /** Calculate attraction */ + while (true) { + try { + for (E e : getGraph().getEdges()) { + calcAttraction(e); + } + break; + } catch (ConcurrentModificationException cme) { + } } - protected void calcRepulsion(V v1) { - Point2D fvd1 = frVertexData.getUnchecked(v1); - if(fvd1 == null) return; - fvd1.setLocation(0, 0); - boolean v1_locked = isLocked(v1); - - try { - for(V v2 : getGraph().getVertices()) { - - boolean v2_locked = isLocked(v2); - if (v1_locked && v2_locked) continue; - if (v1 != v2) { - Point2D p1 = apply(v1); - Point2D p2 = apply(v2); - if(p1 == null || p2 == null) continue; - double xDelta = p1.getX() - p2.getX(); - double yDelta = p1.getY() - p2.getY(); - - double deltaLength = Math.max(EPSILON, p1.distanceSq(p2)); - - double force = (repulsion_constant * repulsion_constant);// / deltaLength; - - double forceOverDeltaLength = force / deltaLength; - - Preconditions.checkState(!Double.isNaN(force), - "Unexpected mathematical result in FRLayout:calcPositions [repulsion]"); - - if(v2_locked) { - // double the offset for v1, as v2 will not be moving in - // the opposite direction - fvd1.setLocation(fvd1.getX()+2 * xDelta * forceOverDeltaLength, - fvd1.getY()+ 2 * yDelta * forceOverDeltaLength); - } else { - fvd1.setLocation(fvd1.getX()+xDelta * forceOverDeltaLength, - fvd1.getY()+yDelta * forceOverDeltaLength); - } - } - } - } catch(ConcurrentModificationException cme) { - calcRepulsion(v1); + while (true) { + try { + for (V v : getGraph().getVertices()) { + if (isLocked(v)) { + 
continue; + } + calcPositions(v); } + break; + } catch (ConcurrentModificationException cme) { + } } + cool(); + } - private void cool() { - temperature *= (1.0 - currentIteration / (double) maxIterations); + protected synchronized void calcPositions(V v) { + Point2D fvd = this.frVertexData.getUnchecked(v); + if (fvd == null) { + return; } + Point2D xyd = apply(v); + double deltaLength = + Math.max(EPSILON, Math.sqrt(fvd.getX() * fvd.getX() + fvd.getY() * fvd.getY())); - public void setMaxIterations(int maxIterations) { - this.maxIterations = maxIterations; + double newXDisp = fvd.getX() / deltaLength * Math.min(deltaLength, temperature); + + Preconditions.checkState( + !Double.isNaN(newXDisp), + "Unexpected mathematical result in FRLayout:calcPositions [xdisp]"); + + double newYDisp = fvd.getY() / deltaLength * Math.min(deltaLength, temperature); + double newX = xyd.getX() + Math.max(-5, Math.min(5, newXDisp)); + double newY = xyd.getY() + Math.max(-5, Math.min(5, newYDisp)); + + newX = Math.max(innerBounds.getMinX(), Math.min(newX, innerBounds.getMaxX())); + newY = Math.max(innerBounds.getMinY(), Math.min(newY, innerBounds.getMaxY())); + + xyd.setLocation(newX, newY); + } + + protected void calcAttraction(E e) { + Pair endpoints = getGraph().getEndpoints(e); + V v1 = endpoints.getFirst(); + V v2 = endpoints.getSecond(); + boolean v1_locked = isLocked(v1); + boolean v2_locked = isLocked(v2); + + if (v1_locked && v2_locked) { + // both locked, do nothing + return; + } + Point2D p1 = apply(v1); + Point2D p2 = apply(v2); + if (p1 == null || p2 == null) { + return; } + double xDelta = p1.getX() - p2.getX(); + double yDelta = p1.getY() - p2.getY(); + + double deltaLength = Math.max(EPSILON, p1.distance(p2)); + + double force = deltaLength / attraction_constant; + + Preconditions.checkState( + !Double.isNaN(force), "Unexpected mathematical result in FRLayout:calcPositions [force]"); + + double dx = xDelta * force; + double dy = yDelta * force; + Point2D fvd1 = 
frVertexData.getUnchecked(v1); + Point2D fvd2 = frVertexData.getUnchecked(v2); + if (v2_locked) { + // double the offset for v1, as v2 will not be moving in + // the opposite direction + fvd1.setLocation(fvd1.getX() - 2 * dx, fvd1.getY() - 2 * dy); + } else { + fvd1.setLocation(fvd1.getX() - dx, fvd1.getY() - dy); + } + if (v1_locked) { + // double the offset for v2, as v1 will not be moving in + // the opposite direction + fvd2.setLocation(fvd2.getX() + 2 * dx, fvd2.getY() + 2 * dy); + } else { + fvd2.setLocation(fvd2.getX() + dx, fvd2.getY() + dy); + } + } - /** - * @return true - */ - public boolean isIncremental() { - return true; + protected void calcRepulsion(V v1) { + Point2D fvd1 = frVertexData.getUnchecked(v1); + if (fvd1 == null) { + return; } + fvd1.setLocation(0, 0); + boolean v1_locked = isLocked(v1); - /** - * @return true once the current iteration has passed the maximum count. - */ - public boolean done() { - if (currentIteration > maxIterations || temperature < 1.0/max_dimension) { - if (!checked) - { - checked = true; - } - return true; - } - return false; + try { + for (V v2 : getGraph().getVertices()) { + + boolean v2_locked = isLocked(v2); + if (v1_locked && v2_locked) { + continue; + } + if (v1 != v2) { + Point2D p1 = apply(v1); + Point2D p2 = apply(v2); + if (p1 == null || p2 == null) { + continue; + } + double xDelta = p1.getX() - p2.getX(); + double yDelta = p1.getY() - p2.getY(); + + double deltaLength = Math.max(EPSILON, p1.distanceSq(p2)); + + double force = (repulsion_constant * repulsion_constant); // / deltaLength; + + double forceOverDeltaLength = force / deltaLength; + + Preconditions.checkState( + !Double.isNaN(force), + "Unexpected mathematical result in FRLayout:calcPositions [repulsion]"); + + if (v2_locked) { + // double the offset for v1, as v2 will not be moving in + // the opposite direction + fvd1.setLocation( + fvd1.getX() + 2 * xDelta * forceOverDeltaLength, + fvd1.getY() + 2 * yDelta * forceOverDeltaLength); + } else { + 
fvd1.setLocation( + fvd1.getX() + xDelta * forceOverDeltaLength, + fvd1.getY() + yDelta * forceOverDeltaLength); + } + } + } + } catch (ConcurrentModificationException cme) { + calcRepulsion(v1); + } + } + + private void cool() { + temperature *= (1.0 - currentIteration / (double) maxIterations); + } + + public void setMaxIterations(int maxIterations) { + this.maxIterations = maxIterations; + } + + /** @return true */ + public boolean isIncremental() { + return true; + } + + /** @return true once the current iteration has passed the maximum count. */ + public boolean done() { + if (currentIteration > maxIterations || temperature < 1.0 / max_dimension) { + if (!checked) { + checked = true; + } + return true; } + return false; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java index cfce5eea..9836acce 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/GraphElementAccessor.java @@ -1,10 +1,10 @@ /* * Copyright (c) 2005, The JUNG Authors * All rights reserved. - * + * * This software is open-source under the BSD license; see either "license.txt" * or https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * + * * * Created on Apr 12, 2005 */ @@ -15,35 +15,35 @@ /** * Interface for coordinate-based selection of graph components. + * * @author Tom Nelson * @author Joshua O'Madadhain */ -public interface GraphElementAccessor -{ - /** - * Returns the vertex, if any, associated with (x, y). 
- * - * @param layout the layout instance that records the positions for all vertices - * @param x the x coordinate of the pick point - * @param y the y coordinate of the pick point - * @return the vertex associated with (x, y) - */ - V getVertex(Layout layout, double x, double y); - - /** - * @param layout the layout instance that records the positions for all vertices - * @param rectangle the region in which the returned vertices are located - * @return the vertices whose locations given by {@code layout} - * are contained within {@code rectangle} - */ - Collection getVertices(Layout layout, Shape rectangle); +public interface GraphElementAccessor { + /** + * Returns the vertex, if any, associated with (x, y). + * + * @param layout the layout instance that records the positions for all vertices + * @param x the x coordinate of the pick point + * @param y the y coordinate of the pick point + * @return the vertex associated with (x, y) + */ + V getVertex(Layout layout, double x, double y); + + /** + * @param layout the layout instance that records the positions for all vertices + * @param rectangle the region in which the returned vertices are located + * @return the vertices whose locations given by {@code layout} are contained within {@code + * rectangle} + */ + Collection getVertices(Layout layout, Shape rectangle); - /** - * @param layout the context in which the location is defined - * @param x the x coordinate of the location - * @param y the y coordinate of the location - * @return an edge which is associated with the location {@code (x,y)} - * as given by {@code layout}, generally by reference to the edge's endpoints - */ - E getEdge(Layout layout, double x, double y); -} \ No newline at end of file + /** + * @param layout the context in which the location is defined + * @param x the x coordinate of the location + * @param y the y coordinate of the location + * @return an edge which is associated with the location {@code (x,y)} as given by {@code layout}, + 
* generally by reference to the edge's endpoints + */ + E getEdge(Layout layout, double x, double y); +} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java index 22ad2b8b..cb7e60d1 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/ISOMLayout.java @@ -1,225 +1,223 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.layout; -import java.awt.geom.Point2D; -import java.util.ArrayList; -import java.util.Collection; -import java.util.ConcurrentModificationException; -import java.util.List; - import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; - import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; import edu.uci.ics.jung.algorithms.util.IterativeContext; import edu.uci.ics.jung.graph.Graph; +import java.awt.geom.Point2D; +import java.util.ArrayList; +import java.util.Collection; +import java.util.ConcurrentModificationException; +import java.util.List; /** - * Implements a self-organizing map layout algorithm, based on Meyer's - * self-organizing graph methods. + * Implements a self-organizing map layout algorithm, based on Meyer's self-organizing graph + * methods. 
* * @author Yan Biao Boey */ -public class ISOMLayout extends AbstractLayout implements IterativeContext { - - protected LoadingCache isomVertexData = - CacheBuilder.newBuilder().build(new CacheLoader() { - public ISOMVertexData load(V vertex) { - return new ISOMVertexData(); - } - }); - - private int maxEpoch; - private int epoch; - - private int radiusConstantTime; - private int radius; - private int minRadius; - - private double adaption; - private double initialAdaption; - private double minAdaption; - - protected GraphElementAccessor elementAccessor = - new RadiusGraphElementAccessor(); - - private double coolingFactor; - - private List queue = new ArrayList(); - private String status = null; - - /** - * @return the current number of epochs and execution status, as a string. - */ - public String getStatus() { - return status; - } - - public ISOMLayout(Graph g) { - super(g); - } - - public void initialize() { - - setInitializer(new RandomLocationTransformer(getSize())); - maxEpoch = 2000; - epoch = 1; - - radiusConstantTime = 100; - radius = 5; - minRadius = 1; - - initialAdaption = 90.0D / 100.0D; - adaption = initialAdaption; - minAdaption = 0; - - //factor = 0; //Will be set later on - coolingFactor = 2; - - //temperature = 0.03; - //initialJumpRadius = 100; - //jumpRadius = initialJumpRadius; - - //delay = 100; - } - - - /** - * Advances the current positions of the graph elements. 
- */ - public void step() { - status = "epoch: " + epoch + "; "; - if (epoch < maxEpoch) { - adjust(); - updateParameters(); - status += " status: running"; - } else { - status += "adaption: " + adaption + "; "; - status += "status: done"; -// done = true; - } - } - - private synchronized void adjust() { - //Generate random position in graph space - Point2D tempXYD = new Point2D.Double(); - - // creates a new XY data location - tempXYD.setLocation(10 + Math.random() * getSize().getWidth(), - 10 + Math.random() * getSize().getHeight()); - - //Get closest vertex to random position - V winner = elementAccessor.getVertex(this, tempXYD.getX(), tempXYD.getY()); - - while(true) { - try { - for(V v : getGraph().getVertices()) { - ISOMVertexData ivd = getISOMVertexData(v); - ivd.distance = 0; - ivd.visited = false; - } - break; - } catch(ConcurrentModificationException cme) {} +public class ISOMLayout extends AbstractLayout implements IterativeContext { + + protected LoadingCache isomVertexData = + CacheBuilder.newBuilder() + .build( + new CacheLoader() { + public ISOMVertexData load(V vertex) { + return new ISOMVertexData(); + } + }); + + private int maxEpoch; + private int epoch; + + private int radiusConstantTime; + private int radius; + private int minRadius; + + private double adaption; + private double initialAdaption; + private double minAdaption; + + protected GraphElementAccessor elementAccessor = new RadiusGraphElementAccessor(); + + private double coolingFactor; + + private List queue = new ArrayList(); + private String status = null; + + /** @return the current number of epochs and execution status, as a string. 
*/ + public String getStatus() { + return status; + } + + public ISOMLayout(Graph g) { + super(g); + } + + public void initialize() { + + setInitializer(new RandomLocationTransformer(getSize())); + maxEpoch = 2000; + epoch = 1; + + radiusConstantTime = 100; + radius = 5; + minRadius = 1; + + initialAdaption = 90.0D / 100.0D; + adaption = initialAdaption; + minAdaption = 0; + + //factor = 0; //Will be set later on + coolingFactor = 2; + + //temperature = 0.03; + //initialJumpRadius = 100; + //jumpRadius = initialJumpRadius; + + //delay = 100; + } + + /** Advances the current positions of the graph elements. */ + public void step() { + status = "epoch: " + epoch + "; "; + if (epoch < maxEpoch) { + adjust(); + updateParameters(); + status += " status: running"; + } else { + status += "adaption: " + adaption + "; "; + status += "status: done"; + // done = true; + } + } + + private synchronized void adjust() { + //Generate random position in graph space + Point2D tempXYD = new Point2D.Double(); + + // creates a new XY data location + tempXYD.setLocation( + 10 + Math.random() * getSize().getWidth(), 10 + Math.random() * getSize().getHeight()); + + //Get closest vertex to random position + V winner = elementAccessor.getVertex(this, tempXYD.getX(), tempXYD.getY()); + + while (true) { + try { + for (V v : getGraph().getVertices()) { + ISOMVertexData ivd = getISOMVertexData(v); + ivd.distance = 0; + ivd.visited = false; + } + break; + } catch (ConcurrentModificationException cme) { + } + } + adjustVertex(winner, tempXYD); + } + + private synchronized void updateParameters() { + epoch++; + double factor = Math.exp(-1 * coolingFactor * (1.0 * epoch / maxEpoch)); + adaption = Math.max(minAdaption, factor * initialAdaption); + //jumpRadius = (int) factor * jumpRadius; + //temperature = factor * temperature; + if ((radius > minRadius) && (epoch % radiusConstantTime == 0)) { + radius--; + } + } + + private synchronized void adjustVertex(V v, Point2D tempXYD) { + queue.clear(); + 
ISOMVertexData ivd = getISOMVertexData(v); + ivd.distance = 0; + ivd.visited = true; + queue.add(v); + V current; + + while (!queue.isEmpty()) { + current = queue.remove(0); + ISOMVertexData currData = getISOMVertexData(current); + Point2D currXYData = apply(current); + + double dx = tempXYD.getX() - currXYData.getX(); + double dy = tempXYD.getY() - currXYData.getY(); + double factor = adaption / Math.pow(2, currData.distance); + + currXYData.setLocation(currXYData.getX() + (factor * dx), currXYData.getY() + (factor * dy)); + + if (currData.distance < radius) { + Collection s = getGraph().getNeighbors(current); + while (true) { + try { + for (V child : s) { + ISOMVertexData childData = getISOMVertexData(child); + if (childData != null && !childData.visited) { + childData.visited = true; + childData.distance = currData.distance + 1; + queue.add(child); + } + } + break; + } catch (ConcurrentModificationException cme) { + } } - adjustVertex(winner, tempXYD); - } - - private synchronized void updateParameters() { - epoch++; - double factor = Math.exp(-1 * coolingFactor * (1.0 * epoch / maxEpoch)); - adaption = Math.max(minAdaption, factor * initialAdaption); - //jumpRadius = (int) factor * jumpRadius; - //temperature = factor * temperature; - if ((radius > minRadius) && (epoch % radiusConstantTime == 0)) { - radius--; - } - } - - private synchronized void adjustVertex(V v, Point2D tempXYD) { - queue.clear(); - ISOMVertexData ivd = getISOMVertexData(v); - ivd.distance = 0; - ivd.visited = true; - queue.add(v); - V current; - - while (!queue.isEmpty()) { - current = queue.remove(0); - ISOMVertexData currData = getISOMVertexData(current); - Point2D currXYData = apply(current); - - double dx = tempXYD.getX() - currXYData.getX(); - double dy = tempXYD.getY() - currXYData.getY(); - double factor = adaption / Math.pow(2, currData.distance); - - currXYData.setLocation(currXYData.getX()+(factor*dx), currXYData.getY()+(factor*dy)); - - if (currData.distance < radius) { - 
Collection s = getGraph().getNeighbors(current); - while(true) { - try { - for(V child : s) { - ISOMVertexData childData = getISOMVertexData(child); - if (childData != null && !childData.visited) { - childData.visited = true; - childData.distance = currData.distance + 1; - queue.add(child); - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - } - } - } - - protected ISOMVertexData getISOMVertexData(V v) { - return isomVertexData.getUnchecked(v); - } - - /** - * This one is an incremental visualization. - * @return true is the layout algorithm is incremental, false otherwise - */ - public boolean isIncremental() { - return true; - } - - /** - * Returns true if the vertex positions are no longer being - * updated. Currently ISOMLayout stops updating vertex - * positions after a certain number of iterations have taken place. - * @return true if the vertex position updates have stopped, - * false otherwise - */ - public boolean done() { - return epoch >= maxEpoch; - } - - protected static class ISOMVertexData { - int distance; - boolean visited; - - protected ISOMVertexData() { - distance = 0; - visited = false; - } - } - - /** - * Resets the layout iteration count to 0, which allows the layout algorithm to - * continue updating vertex positions. - */ - public void reset() { - epoch = 0; - } -} \ No newline at end of file + } + } + } + + protected ISOMVertexData getISOMVertexData(V v) { + return isomVertexData.getUnchecked(v); + } + + /** + * This one is an incremental visualization. + * + * @return true is the layout algorithm is incremental, false otherwise + */ + public boolean isIncremental() { + return true; + } + + /** + * Returns true if the vertex positions are no longer being updated. Currently + * ISOMLayout stops updating vertex positions after a certain number of iterations have + * taken place. 
+ * + * @return true if the vertex position updates have stopped, false + * otherwise + */ + public boolean done() { + return epoch >= maxEpoch; + } + + protected static class ISOMVertexData { + int distance; + boolean visited; + + protected ISOMVertexData() { + distance = 0; + visited = false; + } + } + + /** + * Resets the layout iteration count to 0, which allows the layout algorithm to continue updating + * vertex positions. + */ + public void reset() { + epoch = 0; + } +} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/KKLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/KKLayout.java index dcac0f76..f6845f60 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/KKLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/KKLayout.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. * @@ -13,405 +13,386 @@ * https://github.com/jrtom/jung/blob/master/LICENSE for a description. */ -import java.awt.Dimension; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; - import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; import edu.uci.ics.jung.algorithms.shortestpath.Distance; import edu.uci.ics.jung.algorithms.shortestpath.DistanceStatistics; import edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath; import edu.uci.ics.jung.algorithms.util.IterativeContext; import edu.uci.ics.jung.graph.Graph; +import java.awt.Dimension; +import java.awt.geom.Point2D; +import java.util.ConcurrentModificationException; /** - * Implements the Kamada-Kawai algorithm for node layout. - * Does not respect filter calls, and sometimes crashes when the view changes to it. - * - * @see "Tomihisa Kamada and Satoru Kawai: An algorithm for drawing general indirect graphs. 
Information Processing Letters 31(1):7-15, 1989" - * @see "Tomihisa Kamada: On visualization of abstract objects and relations. Ph.D. dissertation, Dept. of Information Science, Univ. of Tokyo, Dec. 1988." + * Implements the Kamada-Kawai algorithm for node layout. Does not respect filter calls, and + * sometimes crashes when the view changes to it. * + * @see "Tomihisa Kamada and Satoru Kawai: An algorithm for drawing general indirect graphs. + * Information Processing Letters 31(1):7-15, 1989" + * @see "Tomihisa Kamada: On visualization of abstract objects and relations. Ph.D. dissertation, + * Dept. of Information Science, Univ. of Tokyo, Dec. 1988." * @author Masanori Harada */ -public class KKLayout extends AbstractLayout implements IterativeContext { - - private double EPSILON = 0.1d; - - private int currentIteration; - private int maxIterations = 2000; - private String status = "KKLayout"; - - private double L; // the ideal length of an edge - private double K = 1; // arbitrary const number - private double[][] dm; // distance matrix - - private boolean adjustForGravity = true; - private boolean exchangeVertices = true; - - private V[] vertices; - private Point2D[] xydata; - - /** - * Retrieves graph distances between vertices of the visible graph - */ - protected Distance distance; - - /** - * The diameter of the visible graph. In other words, the maximum over all pairs - * of vertices of the length of the shortest path between a and bf the visible graph. - */ - protected double diameter; - - /** - * A multiplicative factor which partly specifies the "preferred" length of an edge (L). - */ - private double length_factor = 0.9; - - /** - * A multiplicative factor which specifies the fraction of the graph's diameter to be - * used as the inter-vertex distance between disconnected vertices. 
- */ - private double disconnected_multiplier = 0.5; - - public KKLayout(Graph g) - { - this(g, new UnweightedShortestPath(g)); - } - - /** - * Creates an instance for the specified graph and distance metric. - * @param g the graph on which the layout algorithm is to operate - * @param distance specifies the distance between pairs of vertices - */ - public KKLayout(Graph g, Distance distance){ - super(g); - this.distance = distance; +public class KKLayout extends AbstractLayout implements IterativeContext { + + private double EPSILON = 0.1d; + + private int currentIteration; + private int maxIterations = 2000; + private String status = "KKLayout"; + + private double L; // the ideal length of an edge + private double K = 1; // arbitrary const number + private double[][] dm; // distance matrix + + private boolean adjustForGravity = true; + private boolean exchangeVertices = true; + + private V[] vertices; + private Point2D[] xydata; + + /** Retrieves graph distances between vertices of the visible graph */ + protected Distance distance; + + /** + * The diameter of the visible graph. In other words, the maximum over all pairs of vertices of + * the length of the shortest path between a and bf the visible graph. + */ + protected double diameter; + + /** A multiplicative factor which partly specifies the "preferred" length of an edge (L). */ + private double length_factor = 0.9; + + /** + * A multiplicative factor which specifies the fraction of the graph's diameter to be used as the + * inter-vertex distance between disconnected vertices. + */ + private double disconnected_multiplier = 0.5; + + public KKLayout(Graph g) { + this(g, new UnweightedShortestPath(g)); + } + + /** + * Creates an instance for the specified graph and distance metric. 
+ * + * @param g the graph on which the layout algorithm is to operate + * @param distance specifies the distance between pairs of vertices + */ + public KKLayout(Graph g, Distance distance) { + super(g); + this.distance = distance; + } + + /** + * @param length_factor a multiplicative factor which partially specifies the preferred length of + * an edge + */ + public void setLengthFactor(double length_factor) { + this.length_factor = length_factor; + } + + /** + * @param disconnected_multiplier a multiplicative factor that specifies the fraction of the + * graph's diameter to be used as the inter-vertex distance between disconnected vertices + */ + public void setDisconnectedDistanceMultiplier(double disconnected_multiplier) { + this.disconnected_multiplier = disconnected_multiplier; + } + + /** @return a string with information about the current status of the algorithm. */ + public String getStatus() { + return status + this.getSize(); + } + + public void setMaxIterations(int maxIterations) { + this.maxIterations = maxIterations; + } + + /** @return true */ + public boolean isIncremental() { + return true; + } + + /** @return true if the current iteration has passed the maximum count. 
*/ + public boolean done() { + if (currentIteration > maxIterations) { + return true; } - - /** - * @param length_factor a multiplicative factor which partially specifies - * the preferred length of an edge - */ - public void setLengthFactor(double length_factor){ - this.length_factor = length_factor; + return false; + } + + @SuppressWarnings("unchecked") + public void initialize() { + currentIteration = 0; + + if (graph != null && size != null) { + + double height = size.getHeight(); + double width = size.getWidth(); + + int n = graph.getVertexCount(); + dm = new double[n][n]; + vertices = (V[]) graph.getVertices().toArray(); + xydata = new Point2D[n]; + + // assign IDs to all visible vertices + while (true) { + try { + int index = 0; + for (V v : graph.getVertices()) { + Point2D xyd = apply(v); + vertices[index] = v; + xydata[index] = xyd; + index++; + } + break; + } catch (ConcurrentModificationException cme) { + } + } + + diameter = DistanceStatistics.diameter(graph, distance, true); + + double L0 = Math.min(height, width); + L = (L0 / diameter) * length_factor; // length_factor used to be hardcoded to 0.9 + //L = 0.75 * Math.sqrt(height * width / n); + + for (int i = 0; i < n - 1; i++) { + for (int j = i + 1; j < n; j++) { + Number d_ij = distance.getDistance(vertices[i], vertices[j]); + Number d_ji = distance.getDistance(vertices[j], vertices[i]); + double dist = diameter * disconnected_multiplier; + if (d_ij != null) dist = Math.min(d_ij.doubleValue(), dist); + if (d_ji != null) dist = Math.min(d_ji.doubleValue(), dist); + dm[i][j] = dm[j][i] = dist; + } + } + } + } + + public void step() { + try { + currentIteration++; + double energy = calcEnergy(); + status = + "Kamada-Kawai V=" + + getGraph().getVertexCount() + + "(" + + getGraph().getVertexCount() + + ")" + + " IT: " + + currentIteration + + " E=" + + energy; + + int n = getGraph().getVertexCount(); + if (n == 0) { + return; + } + + double maxDeltaM = 0; + int pm = -1; // the node having max deltaM + 
for (int i = 0; i < n; i++) { + if (isLocked(vertices[i])) { + continue; + } + double deltam = calcDeltaM(i); + + if (maxDeltaM < deltam) { + maxDeltaM = deltam; + pm = i; + } + } + if (pm == -1) { + return; + } + + for (int i = 0; i < 100; i++) { + double[] dxy = calcDeltaXY(pm); + xydata[pm].setLocation(xydata[pm].getX() + dxy[0], xydata[pm].getY() + dxy[1]); + + double deltam = calcDeltaM(pm); + if (deltam < EPSILON) { + break; + } + } + + if (adjustForGravity) adjustForGravity(); + + if (exchangeVertices && maxDeltaM < EPSILON) { + energy = calcEnergy(); + for (int i = 0; i < n - 1; i++) { + if (isLocked(vertices[i])) { + continue; + } + for (int j = i + 1; j < n; j++) { + if (isLocked(vertices[j])) { + continue; + } + double xenergy = calcEnergyIfExchanged(i, j); + if (energy > xenergy) { + double sx = xydata[i].getX(); + double sy = xydata[i].getY(); + xydata[i].setLocation(xydata[j]); + xydata[j].setLocation(sx, sy); + return; + } + } + } + } + } finally { + // fireStateChanged(); + } + } + + /** Shift all vertices so that the center of gravity is located at the center of the screen. 
*/ + public void adjustForGravity() { + Dimension d = getSize(); + double height = d.getHeight(); + double width = d.getWidth(); + double gx = 0; + double gy = 0; + for (int i = 0; i < xydata.length; i++) { + gx += xydata[i].getX(); + gy += xydata[i].getY(); + } + gx /= xydata.length; + gy /= xydata.length; + double diffx = width / 2 - gx; + double diffy = height / 2 - gy; + for (int i = 0; i < xydata.length; i++) { + xydata[i].setLocation(xydata[i].getX() + diffx, xydata[i].getY() + diffy); + } + } + + @Override + public void setSize(Dimension size) { + if (initialized == false) setInitializer(new RandomLocationTransformer(size)); + super.setSize(size); + } + + public void setAdjustForGravity(boolean on) { + adjustForGravity = on; + } + + public boolean getAdjustForGravity() { + return adjustForGravity; + } + + /** + * Enable or disable the local minimum escape technique by exchanging vertices. + * + * @param on iff the local minimum escape technique is to be enabled + */ + public void setExchangeVertices(boolean on) { + exchangeVertices = on; + } + + public boolean getExchangeVertices() { + return exchangeVertices; + } + + /** Determines a step to new position of the vertex m. */ + private double[] calcDeltaXY(int m) { + double dE_dxm = 0; + double dE_dym = 0; + double d2E_d2xm = 0; + double d2E_dxmdym = 0; + double d2E_dymdxm = 0; + double d2E_d2ym = 0; + + for (int i = 0; i < vertices.length; i++) { + if (i != m) { + + double dist = dm[m][i]; + double l_mi = L * dist; + double k_mi = K / (dist * dist); + double dx = xydata[m].getX() - xydata[i].getX(); + double dy = xydata[m].getY() - xydata[i].getY(); + double d = Math.sqrt(dx * dx + dy * dy); + double ddd = d * d * d; + + dE_dxm += k_mi * (1 - l_mi / d) * dx; + dE_dym += k_mi * (1 - l_mi / d) * dy; + d2E_d2xm += k_mi * (1 - l_mi * dy * dy / ddd); + d2E_dxmdym += k_mi * l_mi * dx * dy / ddd; + d2E_d2ym += k_mi * (1 - l_mi * dx * dx / ddd); + } + } + // d2E_dymdxm equals to d2E_dxmdym. 
+ d2E_dymdxm = d2E_dxmdym; + + double denomi = d2E_d2xm * d2E_d2ym - d2E_dxmdym * d2E_dymdxm; + double deltaX = (d2E_dxmdym * dE_dym - d2E_d2ym * dE_dxm) / denomi; + double deltaY = (d2E_dymdxm * dE_dxm - d2E_d2xm * dE_dym) / denomi; + return new double[] {deltaX, deltaY}; + } + + /** Calculates the gradient of energy function at the vertex m. */ + private double calcDeltaM(int m) { + double dEdxm = 0; + double dEdym = 0; + for (int i = 0; i < vertices.length; i++) { + if (i != m) { + double dist = dm[m][i]; + double l_mi = L * dist; + double k_mi = K / (dist * dist); + + double dx = xydata[m].getX() - xydata[i].getX(); + double dy = xydata[m].getY() - xydata[i].getY(); + double d = Math.sqrt(dx * dx + dy * dy); + + double common = k_mi * (1 - l_mi / d); + dEdxm += common * dx; + dEdym += common * dy; + } } - - /** - * @param disconnected_multiplier a multiplicative factor that specifies the fraction of the - * graph's diameter to be used as the inter-vertex distance between disconnected vertices - */ - public void setDisconnectedDistanceMultiplier(double disconnected_multiplier){ - this.disconnected_multiplier = disconnected_multiplier; + return Math.sqrt(dEdxm * dEdxm + dEdym * dEdym); + } + + /** Calculates the energy function E. */ + private double calcEnergy() { + double energy = 0; + for (int i = 0; i < vertices.length - 1; i++) { + for (int j = i + 1; j < vertices.length; j++) { + double dist = dm[i][j]; + double l_ij = L * dist; + double k_ij = K / (dist * dist); + double dx = xydata[i].getX() - xydata[j].getX(); + double dy = xydata[i].getY() - xydata[j].getY(); + double d = Math.sqrt(dx * dx + dy * dy); + + energy += k_ij / 2 * (dx * dx + dy * dy + l_ij * l_ij - 2 * l_ij * d); + } } - - /** - * @return a string with information about the current status of the algorithm. 
- */ - public String getStatus() { - return status + this.getSize(); - } - - public void setMaxIterations(int maxIterations) { - this.maxIterations = maxIterations; + return energy; + } + + /** Calculates the energy function E as if positions of the specified vertices are exchanged. */ + private double calcEnergyIfExchanged(int p, int q) { + if (p >= q) throw new RuntimeException("p should be < q"); + double energy = 0; // < 0 + for (int i = 0; i < vertices.length - 1; i++) { + for (int j = i + 1; j < vertices.length; j++) { + int ii = i; + int jj = j; + if (i == p) ii = q; + if (j == q) jj = p; + + double dist = dm[i][j]; + double l_ij = L * dist; + double k_ij = K / (dist * dist); + double dx = xydata[ii].getX() - xydata[jj].getX(); + double dy = xydata[ii].getY() - xydata[jj].getY(); + double d = Math.sqrt(dx * dx + dy * dy); + + energy += k_ij / 2 * (dx * dx + dy * dy + l_ij * l_ij - 2 * l_ij * d); + } } + return energy; + } - /** - * @return true - */ - public boolean isIncremental() { - return true; - } - - /** - * @return true if the current iteration has passed the maximum count. 
- */ - public boolean done() { - if (currentIteration > maxIterations) { - return true; - } - return false; - } - - @SuppressWarnings("unchecked") - public void initialize() { - currentIteration = 0; - - if(graph != null && size != null) { - - double height = size.getHeight(); - double width = size.getWidth(); - - int n = graph.getVertexCount(); - dm = new double[n][n]; - vertices = (V[])graph.getVertices().toArray(); - xydata = new Point2D[n]; - - // assign IDs to all visible vertices - while(true) { - try { - int index = 0; - for(V v : graph.getVertices()) { - Point2D xyd = apply(v); - vertices[index] = v; - xydata[index] = xyd; - index++; - } - break; - } catch(ConcurrentModificationException cme) {} - } - - diameter = DistanceStatistics.diameter(graph, distance, true); - - double L0 = Math.min(height, width); - L = (L0 / diameter) * length_factor; // length_factor used to be hardcoded to 0.9 - //L = 0.75 * Math.sqrt(height * width / n); - - for (int i = 0; i < n - 1; i++) { - for (int j = i + 1; j < n; j++) { - Number d_ij = distance.getDistance(vertices[i], vertices[j]); - Number d_ji = distance.getDistance(vertices[j], vertices[i]); - double dist = diameter * disconnected_multiplier; - if (d_ij != null) - dist = Math.min(d_ij.doubleValue(), dist); - if (d_ji != null) - dist = Math.min(d_ji.doubleValue(), dist); - dm[i][j] = dm[j][i] = dist; - } - } - } - } - - public void step() { - try { - currentIteration++; - double energy = calcEnergy(); - status = "Kamada-Kawai V=" + getGraph().getVertexCount() - + "(" + getGraph().getVertexCount() + ")" - + " IT: " + currentIteration - + " E=" + energy - ; - - int n = getGraph().getVertexCount(); - if (n == 0) - return; - - double maxDeltaM = 0; - int pm = -1; // the node having max deltaM - for (int i = 0; i < n; i++) { - if (isLocked(vertices[i])) - continue; - double deltam = calcDeltaM(i); - - if (maxDeltaM < deltam) { - maxDeltaM = deltam; - pm = i; - } - } - if (pm == -1) - return; - - for (int i = 0; i < 100; 
i++) { - double[] dxy = calcDeltaXY(pm); - xydata[pm].setLocation(xydata[pm].getX()+dxy[0], xydata[pm].getY()+dxy[1]); - - double deltam = calcDeltaM(pm); - if (deltam < EPSILON) - break; - } - - if (adjustForGravity) - adjustForGravity(); - - if (exchangeVertices && maxDeltaM < EPSILON) { - energy = calcEnergy(); - for (int i = 0; i < n - 1; i++) { - if (isLocked(vertices[i])) - continue; - for (int j = i + 1; j < n; j++) { - if (isLocked(vertices[j])) - continue; - double xenergy = calcEnergyIfExchanged(i, j); - if (energy > xenergy) { - double sx = xydata[i].getX(); - double sy = xydata[i].getY(); - xydata[i].setLocation(xydata[j]); - xydata[j].setLocation(sx, sy); - return; - } - } - } - } - } - finally { -// fireStateChanged(); - } - } - - /** - * Shift all vertices so that the center of gravity is located at - * the center of the screen. - */ - public void adjustForGravity() { - Dimension d = getSize(); - double height = d.getHeight(); - double width = d.getWidth(); - double gx = 0; - double gy = 0; - for (int i = 0; i < xydata.length; i++) { - gx += xydata[i].getX(); - gy += xydata[i].getY(); - } - gx /= xydata.length; - gy /= xydata.length; - double diffx = width / 2 - gx; - double diffy = height / 2 - gy; - for (int i = 0; i < xydata.length; i++) { - xydata[i].setLocation(xydata[i].getX()+diffx, xydata[i].getY()+diffy); - } - } - - @Override - public void setSize(Dimension size) { - if(initialized == false) - setInitializer(new RandomLocationTransformer(size)); - super.setSize(size); - } - - public void setAdjustForGravity(boolean on) { - adjustForGravity = on; - } - - public boolean getAdjustForGravity() { - return adjustForGravity; - } - - /** - * Enable or disable the local minimum escape technique by - * exchanging vertices. 
- * @param on iff the local minimum escape technique is to be enabled - */ - public void setExchangeVertices(boolean on) { - exchangeVertices = on; - } - - public boolean getExchangeVertices() { - return exchangeVertices; - } - - /** - * Determines a step to new position of the vertex m. - */ - private double[] calcDeltaXY(int m) { - double dE_dxm = 0; - double dE_dym = 0; - double d2E_d2xm = 0; - double d2E_dxmdym = 0; - double d2E_dymdxm = 0; - double d2E_d2ym = 0; - - for (int i = 0; i < vertices.length; i++) { - if (i != m) { - - double dist = dm[m][i]; - double l_mi = L * dist; - double k_mi = K / (dist * dist); - double dx = xydata[m].getX() - xydata[i].getX(); - double dy = xydata[m].getY() - xydata[i].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - double ddd = d * d * d; - - dE_dxm += k_mi * (1 - l_mi / d) * dx; - dE_dym += k_mi * (1 - l_mi / d) * dy; - d2E_d2xm += k_mi * (1 - l_mi * dy * dy / ddd); - d2E_dxmdym += k_mi * l_mi * dx * dy / ddd; - d2E_d2ym += k_mi * (1 - l_mi * dx * dx / ddd); - } - } - // d2E_dymdxm equals to d2E_dxmdym. - d2E_dymdxm = d2E_dxmdym; - - double denomi = d2E_d2xm * d2E_d2ym - d2E_dxmdym * d2E_dymdxm; - double deltaX = (d2E_dxmdym * dE_dym - d2E_d2ym * dE_dxm) / denomi; - double deltaY = (d2E_dymdxm * dE_dxm - d2E_d2xm * dE_dym) / denomi; - return new double[]{deltaX, deltaY}; - } - - /** - * Calculates the gradient of energy function at the vertex m. - */ - private double calcDeltaM(int m) { - double dEdxm = 0; - double dEdym = 0; - for (int i = 0; i < vertices.length; i++) { - if (i != m) { - double dist = dm[m][i]; - double l_mi = L * dist; - double k_mi = K / (dist * dist); - - double dx = xydata[m].getX() - xydata[i].getX(); - double dy = xydata[m].getY() - xydata[i].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - - double common = k_mi * (1 - l_mi / d); - dEdxm += common * dx; - dEdym += common * dy; - } - } - return Math.sqrt(dEdxm * dEdxm + dEdym * dEdym); - } - - /** - * Calculates the energy function E. 
- */ - private double calcEnergy() { - double energy = 0; - for (int i = 0; i < vertices.length - 1; i++) { - for (int j = i + 1; j < vertices.length; j++) { - double dist = dm[i][j]; - double l_ij = L * dist; - double k_ij = K / (dist * dist); - double dx = xydata[i].getX() - xydata[j].getX(); - double dy = xydata[i].getY() - xydata[j].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - - - energy += k_ij / 2 * (dx * dx + dy * dy + l_ij * l_ij - - 2 * l_ij * d); - } - } - return energy; - } - - /** - * Calculates the energy function E as if positions of the - * specified vertices are exchanged. - */ - private double calcEnergyIfExchanged(int p, int q) { - if (p >= q) - throw new RuntimeException("p should be < q"); - double energy = 0; // < 0 - for (int i = 0; i < vertices.length - 1; i++) { - for (int j = i + 1; j < vertices.length; j++) { - int ii = i; - int jj = j; - if (i == p) ii = q; - if (j == q) jj = p; - - double dist = dm[i][j]; - double l_ij = L * dist; - double k_ij = K / (dist * dist); - double dx = xydata[ii].getX() - xydata[jj].getX(); - double dy = xydata[ii].getY() - xydata[jj].getY(); - double d = Math.sqrt(dx * dx + dy * dy); - - energy += k_ij / 2 * (dx * dx + dy * dy + l_ij * l_ij - - 2 * l_ij * d); - } - } - return energy; - } - - public void reset() { - currentIteration = 0; - } + public void reset() { + currentIteration = 0; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/Layout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/Layout.java index 36ed57e3..1a82adf7 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/Layout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/Layout.java @@ -1,87 +1,74 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
-*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; - import com.google.common.base.Function; - import edu.uci.ics.jung.graph.Graph; +import java.awt.Dimension; +import java.awt.geom.Point2D; /** - * A generalized interface is a mechanism for returning (x,y) coordinates - * from vertices. In general, most of these methods are used to both control and - * get information from the layout algorithm. + * A generalized interface is a mechanism for returning (x,y) coordinates from vertices. In general, + * most of these methods are used to both control and get information from the layout algorithm. + * *

+ * * @author danyelf * @author tom nelson */ -public interface Layout extends Function { - - /** - * Initializes fields in the node that may not have - * been set during the constructor. Must be called before - * the iterations begin. - */ - void initialize(); - - /** - * @param initializer a function that specifies initial locations for all vertices - */ - void setInitializer(Function initializer); - - /** - * @param graph the graph that this algorithm is to operate on - */ - void setGraph(Graph graph); +public interface Layout extends Function { + + /** + * Initializes fields in the node that may not have been set during the constructor. Must be + * called before the iterations begin. + */ + void initialize(); + + /** @param initializer a function that specifies initial locations for all vertices */ + void setInitializer(Function initializer); + + /** @param graph the graph that this algorithm is to operate on */ + void setGraph(Graph graph); + + /** @return the graph that this Layout refers to */ + Graph getGraph(); + + void reset(); - /** - * @return the graph that this Layout refers to - */ - Graph getGraph(); - - void reset(); - - /** - * @param d the space to use to lay out this graph - */ - void setSize(Dimension d); - - /** - * @return the current size of the visualization's space - */ - Dimension getSize(); + /** @param d the space to use to lay out this graph */ + void setSize(Dimension d); + /** @return the current size of the visualization's space */ + Dimension getSize(); - /** - * Locks or unlocks the specified vertex. Locking the vertex fixes it at its current position, - * so that it will not be affected by the layout algorithm. Unlocking it allows the layout - * algorithm to change the vertex's position. - * - * @param v the vertex to lock/unlock - * @param state {@code true} to lock the vertex, {@code false} to unlock it - */ - void lock(V v, boolean state); + /** + * Locks or unlocks the specified vertex. 
Locking the vertex fixes it at its current position, so + * that it will not be affected by the layout algorithm. Unlocking it allows the layout algorithm + * to change the vertex's position. + * + * @param v the vertex to lock/unlock + * @param state {@code true} to lock the vertex, {@code false} to unlock it + */ + void lock(V v, boolean state); - /** - * @param v the vertex whose locked state is being queried - * @return true if the position of vertex v is locked - */ - boolean isLocked(V v); + /** + * @param v the vertex whose locked state is being queried + * @return true if the position of vertex v is locked + */ + boolean isLocked(V v); - /** - * Changes the layout coordinates of {@code v} to {@code location}. - * @param v the vertex whose location is to be specified - * @param location the coordinates of the specified location - */ - void setLocation(V v, Point2D location); - + /** + * Changes the layout coordinates of {@code v} to {@code location}. + * + * @param v the vertex whose location is to be specified + * @param location the coordinates of the specified location + */ + void setLocation(V v, Point2D location); } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java index 921ec99b..783f42d5 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/LayoutDecorator.java @@ -10,98 +10,94 @@ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; - import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.util.IterativeContext; import edu.uci.ics.jung.graph.Graph; +import java.awt.Dimension; +import java.awt.geom.Point2D; /** - * a pure decorator for the Layout interface. 
Intended to be overridden - * to provide specific behavior decoration - * - * @author Tom Nelson + * a pure decorator for the Layout interface. Intended to be overridden to provide specific behavior + * decoration * + * @author Tom Nelson */ public abstract class LayoutDecorator implements Layout, IterativeContext { - - protected Layout delegate; - - /** - * Creates an instance backed by the specified {@code delegate}. - * @param delegate the layout to which this instance is delegating - */ - public LayoutDecorator(Layout delegate) { - this.delegate = delegate; - } - /** - * @return the backing (delegate) layout. - */ - public Layout getDelegate() { - return delegate; + protected Layout delegate; + + /** + * Creates an instance backed by the specified {@code delegate}. + * + * @param delegate the layout to which this instance is delegating + */ + public LayoutDecorator(Layout delegate) { + this.delegate = delegate; + } + + /** @return the backing (delegate) layout. */ + public Layout getDelegate() { + return delegate; + } + + public void setDelegate(Layout delegate) { + this.delegate = delegate; + } + + public void step() { + if (delegate instanceof IterativeContext) { + ((IterativeContext) delegate).step(); } + } - public void setDelegate(Layout delegate) { - this.delegate = delegate; - } + public void initialize() { + delegate.initialize(); + } - public void step() { - if(delegate instanceof IterativeContext) { - ((IterativeContext)delegate).step(); - } - } + public void setInitializer(Function initializer) { + delegate.setInitializer(initializer); + } - public void initialize() { - delegate.initialize(); - } + public void setLocation(V v, Point2D location) { + delegate.setLocation(v, location); + } - public void setInitializer(Function initializer) { - delegate.setInitializer(initializer); - } + public Dimension getSize() { + return delegate.getSize(); + } - public void setLocation(V v, Point2D location) { - delegate.setLocation(v, location); - } + public Graph 
getGraph() { + return delegate.getGraph(); + } - public Dimension getSize() { - return delegate.getSize(); - } + public Point2D transform(V v) { + return delegate.apply(v); + } - public Graph getGraph() { - return delegate.getGraph(); + public boolean done() { + if (delegate instanceof IterativeContext) { + return ((IterativeContext) delegate).done(); } + return true; + } - public Point2D transform(V v) { - return delegate.apply(v); - } + public void lock(V v, boolean state) { + delegate.lock(v, state); + } - public boolean done() { - if(delegate instanceof IterativeContext) { - return ((IterativeContext)delegate).done(); - } - return true; - } + public boolean isLocked(V v) { + return delegate.isLocked(v); + } - public void lock(V v, boolean state) { - delegate.lock(v, state); - } + public void setSize(Dimension d) { + delegate.setSize(d); + } - public boolean isLocked(V v) { - return delegate.isLocked(v); - } - - public void setSize(Dimension d) { - delegate.setSize(d); - } + public void reset() { + delegate.reset(); + } - public void reset() { - delegate.reset(); - } - - public void setGraph(Graph graph) { - delegate.setGraph(graph); - } + public void setGraph(Graph graph) { + delegate.setGraph(graph); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/PolarPoint.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/PolarPoint.java index bd7d1b6f..a5e2ab24 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/PolarPoint.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/PolarPoint.java @@ -1,106 +1,108 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. 
+ * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.layout; import java.awt.geom.Point2D; /** - * Represents a point in polar coordinates: distance and angle from the origin. - * Includes conversions between polar and Cartesian - * coordinates (Point2D). - * + * Represents a point in polar coordinates: distance and angle from the origin. Includes conversions + * between polar and Cartesian coordinates (Point2D). + * * @author Tom Nelson - tomnelson@dev.java.net */ -public class PolarPoint -{ - double theta; - double radius; - - /** - * Creates a new instance with radius and angle each 0. - */ - public PolarPoint() { - this(0,0); - } +public class PolarPoint { + double theta; + double radius; + + /** Creates a new instance with radius and angle each 0. */ + public PolarPoint() { + this(0, 0); + } + + /** + * Creates a new instance with the specified radius and angle. + * + * @param theta the angle of the point to create + * @param radius the distance from the origin of the point to create + */ + public PolarPoint(double theta, double radius) { + this.theta = theta; + this.radius = radius; + } + + /** @return the angle for this point */ + public double getTheta() { + return theta; + } + + /** @return the radius for this point */ + public double getRadius() { + return radius; + } + + public void setTheta(double theta) { + this.theta = theta; + } + + public void setRadius(double radius) { + this.radius = radius; + } - /** - * Creates a new instance with the specified radius and angle. 
- * @param theta the angle of the point to create - * @param radius the distance from the origin of the point to create - */ - public PolarPoint(double theta, double radius) { - this.theta = theta; - this.radius = radius; - } - - /** - * @return the angle for this point - */ - public double getTheta() { return theta; } + /** + * @param polar the input location to convert + * @return the result of converting polar to Cartesian coordinates. + */ + public static Point2D polarToCartesian(PolarPoint polar) { + return polarToCartesian(polar.getTheta(), polar.getRadius()); + } - /** - * @return the radius for this point - */ - public double getRadius() { return radius; } - - public void setTheta(double theta) { this.theta = theta; } - - public void setRadius(double radius) { this.radius = radius; } + /** + * @param theta the angle of the input location + * @param radius the distance from the origin of the input location + * @return the result of converting (theta, radius) to Cartesian coordinates. + */ + public static Point2D polarToCartesian(double theta, double radius) { + return new Point2D.Double(radius * Math.cos(theta), radius * Math.sin(theta)); + } - /** - * @param polar the input location to convert - * @return the result of converting polar to Cartesian coordinates. - */ - public static Point2D polarToCartesian(PolarPoint polar) { - return polarToCartesian(polar.getTheta(), polar.getRadius()); - } + /** + * @param point the input location + * @return the result of converting point to polar coordinates. + */ + public static PolarPoint cartesianToPolar(Point2D point) { + return cartesianToPolar(point.getX(), point.getY()); + } - /** - * @param theta the angle of the input location - * @param radius the distance from the origin of the input location - * @return the result of converting (theta, radius) to Cartesian coordinates. 
- */ - public static Point2D polarToCartesian(double theta, double radius) { - return new Point2D.Double(radius*Math.cos(theta), radius*Math.sin(theta)); - } + /** + * @param x the x coordinate of the input location + * @param y the y coordinate of the input location + * @return the result of converting (x, y) to polar coordinates. + */ + public static PolarPoint cartesianToPolar(double x, double y) { + double theta = Math.atan2(y, x); + double radius = Math.sqrt(x * x + y * y); + return new PolarPoint(theta, radius); + } - /** - * @param point the input location - * @return the result of converting point to polar coordinates. - */ - public static PolarPoint cartesianToPolar(Point2D point) { - return cartesianToPolar(point.getX(), point.getY()); - } + @Override + public String toString() { + return "PolarPoint[" + radius + "," + theta + "]"; + } - /** - * @param x the x coordinate of the input location - * @param y the y coordinate of the input location - * @return the result of converting (x, y) to polar coordinates. - */ - public static PolarPoint cartesianToPolar(double x, double y) { - double theta = Math.atan2(y,x); - double radius = Math.sqrt(x*x+y*y); - return new PolarPoint(theta, radius); - } - - @Override - public String toString() { - return "PolarPoint[" + radius + "," + theta +"]"; - } - - /** - * Sets the angle and radius of this point to those of {@code p}. - * @param p the point whose location is copied into this instance - */ - public void setLocation(PolarPoint p) { - this.theta = p.getTheta(); - this.radius = p.getRadius(); - } -} \ No newline at end of file + /** + * Sets the angle and radius of this point to those of {@code p}. 
+ * + * @param p the point whose location is copied into this instance + */ + public void setLocation(PolarPoint p) { + this.theta = p.getTheta(); + this.radius = p.getRadius(); + } +} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java index 5be2fe26..c56ce7e9 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/RadialTreeLayout.java @@ -9,108 +9,101 @@ */ package edu.uci.ics.jung.algorithms.layout; + +import edu.uci.ics.jung.graph.Forest; import java.awt.Dimension; import java.awt.geom.Point2D; import java.util.HashMap; import java.util.Map; -import edu.uci.ics.jung.graph.Forest; - /** * A radial layout for Tree or Forest graphs. - * - * @author Tom Nelson - * + * + * @author Tom Nelson */ -public class RadialTreeLayout extends TreeLayout { +public class RadialTreeLayout extends TreeLayout { - protected Map polarLocations; + protected Map polarLocations; - public RadialTreeLayout(Forest g) { - this(g, DEFAULT_DISTX, DEFAULT_DISTY); - } + public RadialTreeLayout(Forest g) { + this(g, DEFAULT_DISTX, DEFAULT_DISTY); + } - public RadialTreeLayout(Forest g, int distx) { - this(g, distx, DEFAULT_DISTY); - } + public RadialTreeLayout(Forest g, int distx) { + this(g, distx, DEFAULT_DISTY); + } - public RadialTreeLayout(Forest g, int distx, int disty) { - super(g, distx, disty); - } - - @Override - protected void buildTree() { - super.buildTree(); - this.polarLocations = new HashMap(); - setRadialLocations(); - } + public RadialTreeLayout(Forest g, int distx, int disty) { + super(g, distx, disty); + } - @Override - public void setSize(Dimension size) { - this.size = size; - buildTree(); - } + @Override + protected void buildTree() { + super.buildTree(); + this.polarLocations = new HashMap(); + setRadialLocations(); + } + 
+ @Override + public void setSize(Dimension size) { + this.size = size; + buildTree(); + } + + @Override + protected void setCurrentPositionFor(V vertex) { + locations.getUnchecked(vertex).setLocation(m_currentPoint); + } + + @Override + public void setLocation(V v, Point2D location) { + Point2D c = getCenter(); + Point2D pv = new Point2D.Double(location.getX() - c.getX(), location.getY() - c.getY()); + PolarPoint newLocation = PolarPoint.cartesianToPolar(pv); + PolarPoint currentLocation = polarLocations.get(v); + if (currentLocation == null) polarLocations.put(v, newLocation); + else currentLocation.setLocation(newLocation); + } - @Override - protected void setCurrentPositionFor(V vertex) { - locations.getUnchecked(vertex).setLocation(m_currentPoint); + /** @return a map from vertices to their locations in polar coordinates. */ + public Map getPolarLocations() { + return polarLocations; + } + + @Override + public Point2D apply(V v) { + PolarPoint pp = polarLocations.get(v); + double centerX = getSize().getWidth() / 2; + double centerY = getSize().getHeight() / 2; + Point2D cartesian = PolarPoint.polarToCartesian(pp); + cartesian.setLocation(cartesian.getX() + centerX, cartesian.getY() + centerY); + return cartesian; + } + + private Point2D getMaxXY() { + double maxx = 0; + double maxy = 0; + for (Point2D p : locations.asMap().values()) { + maxx = Math.max(maxx, p.getX()); + maxy = Math.max(maxy, p.getY()); } + return new Point2D.Double(maxx, maxy); + } - @Override - public void setLocation(V v, Point2D location) - { - Point2D c = getCenter(); - Point2D pv = new Point2D.Double(location.getX() - c.getX(), - location.getY() - c.getY()); - PolarPoint newLocation = PolarPoint.cartesianToPolar(pv); - PolarPoint currentLocation = polarLocations.get(v); - if (currentLocation == null) - polarLocations.put(v, newLocation); - else - currentLocation.setLocation(newLocation); - } - - /** - * @return a map from vertices to their locations in polar coordinates. 
- */ - public Map getPolarLocations() { - return polarLocations; - } - - @Override - public Point2D apply(V v) { - PolarPoint pp = polarLocations.get(v); - double centerX = getSize().getWidth()/2; - double centerY = getSize().getHeight()/2; - Point2D cartesian = PolarPoint.polarToCartesian(pp); - cartesian.setLocation(cartesian.getX()+centerX,cartesian.getY()+centerY); - return cartesian; - } - - private Point2D getMaxXY() { - double maxx = 0; - double maxy = 0; - for(Point2D p : locations.asMap().values()) { - maxx = Math.max(maxx, p.getX()); - maxy = Math.max(maxy, p.getY()); - } - return new Point2D.Double(maxx,maxy); - } - - private void setRadialLocations() { - Point2D max = getMaxXY(); - double maxx = max.getX(); - double maxy = max.getY(); - maxx = Math.max(maxx, size.width); - double theta = 2*Math.PI/maxx; - - double deltaRadius = size.width/2/maxy; - for(Map.Entry entry : locations.asMap().entrySet()) { - V v = entry.getKey(); - Point2D p = entry.getValue(); - PolarPoint polarPoint = - new PolarPoint(p.getX()*theta, (p.getY() - this.distY)*deltaRadius); - polarLocations.put(v, polarPoint); - } - } + private void setRadialLocations() { + Point2D max = getMaxXY(); + double maxx = max.getX(); + double maxy = max.getY(); + maxx = Math.max(maxx, size.width); + double theta = 2 * Math.PI / maxx; + + double deltaRadius = size.width / 2 / maxy; + for (Map.Entry entry : locations.asMap().entrySet()) { + V v = entry.getKey(); + Point2D p = entry.getValue(); + PolarPoint polarPoint = + new PolarPoint(p.getX() * theta, (p.getY() - this.distY) * deltaRadius); + polarLocations.put(v, polarPoint); + } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java index 4438a9b4..03da1887 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java +++ 
b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/RadiusGraphElementAccessor.java @@ -1,15 +1,16 @@ /* * Copyright (c) 2005, The JUNG Authors * All rights reserved. - * + * * This software is open-source under the BSD license; see either "license.txt" * or https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * + * * * Created on Apr 12, 2005 */ package edu.uci.ics.jung.algorithms.layout; +import edu.uci.ics.jung.graph.Graph; import java.awt.Shape; import java.awt.geom.Point2D; import java.util.Collection; @@ -18,175 +19,165 @@ import java.util.Iterator; import java.util.Set; -import edu.uci.ics.jung.graph.Graph; - - /** - * Simple implementation of PickSupport that returns the vertex or edge - * that is closest to the specified location. This implementation - * provides the same picking options that were available in + * Simple implementation of PickSupport that returns the vertex or edge that is closest to the + * specified location. This implementation provides the same picking options that were available in * previous versions of AbstractLayout. - * - *

No element will be returned that is farther away than the specified - * maximum distance. - * + * + *

No element will be returned that is farther away than the specified maximum distance. + * * @author Tom Nelson * @author Joshua O'Madadhain */ public class RadiusGraphElementAccessor implements GraphElementAccessor { - - protected double maxDistance; - - /** - * Creates an instance with an effectively infinite default maximum distance. - */ - public RadiusGraphElementAccessor() { - this(Math.sqrt(Double.MAX_VALUE - 1000)); - } - - /** - * Creates an instance with the specified default maximum distance. - * @param maxDistance the maximum distance at which any element can be from a specified location - * and still be returned - */ - public RadiusGraphElementAccessor(double maxDistance) { - this.maxDistance = maxDistance; + + protected double maxDistance; + + /** Creates an instance with an effectively infinite default maximum distance. */ + public RadiusGraphElementAccessor() { + this(Math.sqrt(Double.MAX_VALUE - 1000)); + } + + /** + * Creates an instance with the specified default maximum distance. + * + * @param maxDistance the maximum distance at which any element can be from a specified location + * and still be returned + */ + public RadiusGraphElementAccessor(double maxDistance) { + this.maxDistance = maxDistance; + } + + /** + * Gets the vertex nearest to the location of the (x,y) location selected, within a distance of + * maxDistance. Iterates through all visible vertices and checks their distance from the + * click. Override this method to provide a more efficient implementation. 
+ * + * @param layout the context in which the location is defined + * @param x the x coordinate of the location + * @param y the y coordinate of the location + * @return a vertex which is associated with the location {@code (x,y)} as given by {@code layout} + */ + public V getVertex(Layout layout, double x, double y) { + return getVertex(layout, x, y, this.maxDistance); + } + + /** + * Gets the vertex nearest to the location of the (x,y) location selected, within a distance of + * {@code maxDistance}. Iterates through all visible vertices and checks their distance from the + * location. Override this method to provide a more efficient implementation. + * + * @param layout the context in which the location is defined + * @param x the x coordinate of the location + * @param y the y coordinate of the location + * @param maxDistance the maximum distance at which any element can be from a specified location + * and still be returned + * @return a vertex which is associated with the location {@code (x,y)} as given by {@code layout} + */ + public V getVertex(Layout layout, double x, double y, double maxDistance) { + double minDistance = maxDistance * maxDistance; + V closest = null; + while (true) { + try { + for (V v : layout.getGraph().getVertices()) { + + Point2D p = layout.apply(v); + double dx = p.getX() - x; + double dy = p.getY() - y; + double dist = dx * dx + dy * dy; + if (dist < minDistance) { + minDistance = dist; + closest = v; + } + } + break; + } catch (ConcurrentModificationException cme) { + } } - - /** - * Gets the vertex nearest to the location of the (x,y) location selected, - * within a distance of maxDistance. Iterates through all - * visible vertices and checks their distance from the click. Override this - * method to provide a more efficient implementation. 
- * - * @param layout the context in which the location is defined - * @param x the x coordinate of the location - * @param y the y coordinate of the location - * @return a vertex which is associated with the location {@code (x,y)} - * as given by {@code layout} - */ - public V getVertex(Layout layout, double x, double y) { - return getVertex(layout, x, y, this.maxDistance); - } + return closest; + } + + public Collection getVertices(Layout layout, Shape rectangle) { + Set pickedVertices = new HashSet(); + while (true) { + try { + for (V v : layout.getGraph().getVertices()) { - /** - * Gets the vertex nearest to the location of the (x,y) location selected, - * within a distance of {@code maxDistance}. Iterates through all - * visible vertices and checks their distance from the location. Override this - * method to provide a more efficient implementation. - * - * @param layout the context in which the location is defined - * @param x the x coordinate of the location - * @param y the y coordinate of the location - * @param maxDistance the maximum distance at which any element can be from a specified location - * and still be returned - * @return a vertex which is associated with the location {@code (x,y)} - * as given by {@code layout} - */ - public V getVertex(Layout layout, double x, double y, double maxDistance) { - double minDistance = maxDistance * maxDistance; - V closest = null; - while(true) { - try { - for(V v : layout.getGraph().getVertices()) { + Point2D p = layout.apply(v); + if (rectangle.contains(p)) { + pickedVertices.add(v); + } + } + break; + } catch (ConcurrentModificationException cme) { + } + } + return pickedVertices; + } - Point2D p = layout.apply(v); - double dx = p.getX() - x; - double dy = p.getY() - y; - double dist = dx * dx + dy * dy; - if (dist < minDistance) { - minDistance = dist; - closest = v; - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } - - public Collection getVertices(Layout layout, 
Shape rectangle) { - Set pickedVertices = new HashSet(); - while(true) { - try { - for(V v : layout.getGraph().getVertices()) { + public E getEdge(Layout layout, double x, double y) { + return getEdge(layout, x, y, this.maxDistance); + } - Point2D p = layout.apply(v); - if(rectangle.contains(p)) { - pickedVertices.add(v); - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return pickedVertices; - } - - public E getEdge(Layout layout, double x, double y) { - return getEdge(layout, x, y, this.maxDistance); - } + /** + * Gets the vertex nearest to the location of the (x,y) location selected, whose endpoints are + * < {@code maxDistance}. Iterates through all visible vertices and checks their distance from + * the location. Override this method to provide a more efficient implementation. + * + * @param layout the context in which the location is defined + * @param x the x coordinate of the location + * @param y the y coordinate of the location + * @param maxDistance the maximum distance at which any element can be from a specified location + * and still be returned + * @return an edge which is associated with the location {@code (x,y)} as given by {@code layout} + */ + public E getEdge(Layout layout, double x, double y, double maxDistance) { + double minDistance = maxDistance * maxDistance; + E closest = null; + while (true) { + try { + for (E e : layout.getGraph().getEdges()) { - /** - * Gets the vertex nearest to the location of the (x,y) location selected, - * whose endpoints are < {@code maxDistance}. Iterates through all - * visible vertices and checks their distance from the location. Override this - * method to provide a more efficient implementation. 
- * - * @param layout the context in which the location is defined - * @param x the x coordinate of the location - * @param y the y coordinate of the location - * @param maxDistance the maximum distance at which any element can be from a specified location - * and still be returned - * @return an edge which is associated with the location {@code (x,y)} - * as given by {@code layout} - */ - public E getEdge(Layout layout, double x, double y, double maxDistance) { - double minDistance = maxDistance * maxDistance; - E closest = null; - while(true) { - try { - for(E e : layout.getGraph().getEdges()) { + // Could replace all this set stuff with getFrom_internal() etc. + Graph graph = layout.getGraph(); + Collection vertices = graph.getIncidentVertices(e); + Iterator vertexIterator = vertices.iterator(); + V v1 = vertexIterator.next(); + V v2 = vertexIterator.next(); + // Get coords + Point2D p1 = layout.apply(v1); + Point2D p2 = layout.apply(v2); + double x1 = p1.getX(); + double y1 = p1.getY(); + double x2 = p2.getX(); + double y2 = p2.getY(); + // Calculate location on line closest to (x,y) + // First, check that v1 and v2 are not coincident. + if (x1 == x2 && y1 == y2) { + continue; + } + double b = + ((y - y1) * (y2 - y1) + (x - x1) * (x2 - x1)) + / ((x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1)); + // + double distance2; // square of the distance + if (b <= 0) distance2 = (x - x1) * (x - x1) + (y - y1) * (y - y1); + else if (b >= 1) distance2 = (x - x2) * (x - x2) + (y - y2) * (y - y2); + else { + double x3 = x1 + b * (x2 - x1); + double y3 = y1 + b * (y2 - y1); + distance2 = (x - x3) * (x - x3) + (y - y3) * (y - y3); + } - // Could replace all this set stuff with getFrom_internal() etc. 
- Graph graph = layout.getGraph(); - Collection vertices = graph.getIncidentVertices(e); - Iterator vertexIterator = vertices.iterator(); - V v1 = vertexIterator.next(); - V v2 = vertexIterator.next(); - // Get coords - Point2D p1 = layout.apply(v1); - Point2D p2 = layout.apply(v2); - double x1 = p1.getX(); - double y1 = p1.getY(); - double x2 = p2.getX(); - double y2 = p2.getY(); - // Calculate location on line closest to (x,y) - // First, check that v1 and v2 are not coincident. - if (x1 == x2 && y1 == y2) - continue; - double b = - ((y - y1) * (y2 - y1) + (x - x1) * (x2 - x1)) - / ((x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1)); - // - double distance2; // square of the distance - if (b <= 0) - distance2 = (x - x1) * (x - x1) + (y - y1) * (y - y1); - else if (b >= 1) - distance2 = (x - x2) * (x - x2) + (y - y2) * (y - y2); - else { - double x3 = x1 + b * (x2 - x1); - double y3 = y1 + b * (y2 - y1); - distance2 = (x - x3) * (x - x3) + (y - y3) * (y - y3); - } - - if (distance2 < minDistance) { - minDistance = distance2; - closest = e; - } - } - break; - } catch(ConcurrentModificationException cme) {} - } - return closest; - } + if (distance2 < minDistance) { + minDistance = distance2; + closest = e; + } + } + break; + } catch (ConcurrentModificationException cme) { + } + } + return closest; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout.java index 865bc578..9b7a9901 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout.java @@ -7,324 +7,317 @@ */ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.event.ComponentAdapter; -import java.awt.event.ComponentEvent; -import java.awt.geom.Point2D; -import java.util.ConcurrentModificationException; - import 
com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; - import edu.uci.ics.jung.algorithms.layout.util.RandomLocationTransformer; import edu.uci.ics.jung.algorithms.util.IterativeContext; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.Pair; +import java.awt.Dimension; +import java.awt.event.ComponentAdapter; +import java.awt.event.ComponentEvent; +import java.awt.geom.Point2D; +import java.util.ConcurrentModificationException; /** - * The SpringLayout package represents a visualization of a set of nodes. The - * SpringLayout, which is initialized with a Graph, assigns X/Y locations to - * each node. When called relax(), the SpringLayout moves the - * visualization forward one step. + * The SpringLayout package represents a visualization of a set of nodes. The SpringLayout, which is + * initialized with a Graph, assigns X/Y locations to each node. When called relax(), + * the SpringLayout moves the visualization forward one step. * * @author Danyel Fisher * @author Joshua O'Madadhain */ -public class SpringLayout extends AbstractLayout implements IterativeContext { - - protected double stretch = 0.70; - protected Function lengthFunction; - protected int repulsion_range_sq = 100 * 100; - protected double force_multiplier = 1.0 / 3.0; - - protected LoadingCache springVertexData = - CacheBuilder.newBuilder().build(new CacheLoader() { - public SpringVertexData load(V vertex) { - return new SpringVertexData(); - } - }); -// protected Map springVertexData = -// new MapMaker().makeComputingMap(new Function(){ -// public SpringVertexData apply(V arg0) { -// return new SpringVertexData(); -// }}); - - /** - * Constructor for a SpringLayout for a raw graph with associated - * dimension--the input knows how big the graph is. Defaults to the unit - * length function. 
- * @param g the graph on which the layout algorithm is to operate - */ - @SuppressWarnings("unchecked") - public SpringLayout(Graph g) { - this(g, (Function)Functions.constant(30)); - } - - /** - * Constructor for a SpringLayout for a raw graph with associated component. - * - * @param g the graph on which the layout algorithm is to operate - * @param length_function provides a length for each edge - */ - public SpringLayout(Graph g, Function length_function) - { - super(g); - this.lengthFunction = length_function; - } - - /** - * @return the current value for the stretch parameter - */ - public double getStretch() { - return stretch; - } - - @Override - public void setSize(Dimension size) { - if(initialized == false) - setInitializer(new RandomLocationTransformer(size)); - super.setSize(size); - } - - /** - *

Sets the stretch parameter for this instance. This value - * specifies how much the degrees of an edge's incident vertices - * should influence how easily the endpoints of that edge - * can move (that is, that edge's tendency to change its length). - * - *

The default value is 0.70. Positive values less than 1 cause - * high-degree vertices to move less than low-degree vertices, and - * values > 1 cause high-degree vertices to move more than - * low-degree vertices. Negative values will have unpredictable - * and inconsistent results. - * @param stretch the stretch parameter - */ - public void setStretch(double stretch) { - this.stretch = stretch; - } - - public int getRepulsionRange() { - return (int)(Math.sqrt(repulsion_range_sq)); - } - - /** - * Sets the node repulsion range (in drawing area units) for this instance. - * Outside this range, nodes do not repel each other. The default value - * is 100. Negative values are treated as their positive equivalents. - * @param range the maximum repulsion range - */ - public void setRepulsionRange(int range) { - this.repulsion_range_sq = range * range; - } - - public double getForceMultiplier() { - return force_multiplier; - } - - /** - * Sets the force multiplier for this instance. This value is used to - * specify how strongly an edge "wants" to be its default length - * (higher values indicate a greater attraction for the default length), - * which affects how much its endpoints move at each timestep. - * The default value is 1/3. A value of 0 turns off any attempt by the - * layout to cause edges to conform to the default length. Negative - * values cause long edges to get longer and short edges to get shorter; use - * at your own risk. - * @param force an energy field created by all living things that binds the galaxy together - */ - public void setForceMultiplier(double force) { - this.force_multiplier = force; - } - - public void initialize() { - } - - /** - * Relaxation step. Moves all nodes a smidge. 
- */ - public void step() { - try { - for(V v : getGraph().getVertices()) { - SpringVertexData svd = springVertexData.getUnchecked(v); - if (svd == null) { - continue; - } - svd.dx /= 4; - svd.dy /= 4; - svd.edgedx = svd.edgedy = 0; - svd.repulsiondx = svd.repulsiondy = 0; - } - } catch(ConcurrentModificationException cme) { - step(); - } - - relaxEdges(); - calculateRepulsion(); - moveNodes(); - } - - protected void relaxEdges() { - try { - for(E e : getGraph().getEdges()) { - Pair endpoints = getGraph().getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - - Point2D p1 = apply(v1); - Point2D p2 = apply(v2); - if(p1 == null || p2 == null) continue; - double vx = p1.getX() - p2.getX(); - double vy = p1.getY() - p2.getY(); - double len = Math.sqrt(vx * vx + vy * vy); - - double desiredLen = lengthFunction.apply(e); - - // round from zero, if needed [zero would be Bad.]. - len = (len == 0) ? .0001 : len; - - double f = force_multiplier * (desiredLen - len) / len; - - f = f * Math.pow(stretch, (getGraph().degree(v1) + getGraph().degree(v2) - 2)); - - // the actual movement distance 'dx' is the force multiplied by the - // distance to go. 
- double dx = f * vx; - double dy = f * vy; - SpringVertexData v1D, v2D; - v1D = springVertexData.getUnchecked(v1); - v2D = springVertexData.getUnchecked(v2); - - v1D.edgedx += dx; - v1D.edgedy += dy; - v2D.edgedx += -dx; - v2D.edgedy += -dy; - } - } catch(ConcurrentModificationException cme) { - relaxEdges(); - } - } - - protected void calculateRepulsion() { - try { - for (V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - - SpringVertexData svd = springVertexData.getUnchecked(v); - if(svd == null) continue; - double dx = 0, dy = 0; - - for (V v2 : getGraph().getVertices()) { - if (v == v2) continue; - Point2D p = apply(v); - Point2D p2 = apply(v2); - if(p == null || p2 == null) continue; - double vx = p.getX() - p2.getX(); - double vy = p.getY() - p2.getY(); - double distanceSq = p.distanceSq(p2); - if (distanceSq == 0) { - dx += Math.random(); - dy += Math.random(); - } else if (distanceSq < repulsion_range_sq) { - double factor = 1; - dx += factor * vx / distanceSq; - dy += factor * vy / distanceSq; +public class SpringLayout extends AbstractLayout implements IterativeContext { + + protected double stretch = 0.70; + protected Function lengthFunction; + protected int repulsion_range_sq = 100 * 100; + protected double force_multiplier = 1.0 / 3.0; + + protected LoadingCache springVertexData = + CacheBuilder.newBuilder() + .build( + new CacheLoader() { + public SpringVertexData load(V vertex) { + return new SpringVertexData(); } - } - double dlen = dx * dx + dy * dy; - if (dlen > 0) { - dlen = Math.sqrt(dlen) / 2; - svd.repulsiondx += dx / dlen; - svd.repulsiondy += dy / dlen; - } - } - } catch(ConcurrentModificationException cme) { - calculateRepulsion(); + }); + // protected Map springVertexData = + // new MapMaker().makeComputingMap(new Function(){ + // public SpringVertexData apply(V arg0) { + // return new SpringVertexData(); + // }}); + + /** + * Constructor for a SpringLayout for a raw graph with associated dimension--the input knows how + * 
big the graph is. Defaults to the unit length function. + * + * @param g the graph on which the layout algorithm is to operate + */ + @SuppressWarnings("unchecked") + public SpringLayout(Graph g) { + this(g, (Function) Functions.constant(30)); + } + + /** + * Constructor for a SpringLayout for a raw graph with associated component. + * + * @param g the graph on which the layout algorithm is to operate + * @param length_function provides a length for each edge + */ + public SpringLayout(Graph g, Function length_function) { + super(g); + this.lengthFunction = length_function; + } + + /** @return the current value for the stretch parameter */ + public double getStretch() { + return stretch; + } + + @Override + public void setSize(Dimension size) { + if (initialized == false) setInitializer(new RandomLocationTransformer(size)); + super.setSize(size); + } + + /** + * Sets the stretch parameter for this instance. This value specifies how much the degrees of an + * edge's incident vertices should influence how easily the endpoints of that edge can move (that + * is, that edge's tendency to change its length). + * + *

The default value is 0.70. Positive values less than 1 cause high-degree vertices to move + * less than low-degree vertices, and values > 1 cause high-degree vertices to move more than + * low-degree vertices. Negative values will have unpredictable and inconsistent results. + * + * @param stretch the stretch parameter + */ + public void setStretch(double stretch) { + this.stretch = stretch; + } + + public int getRepulsionRange() { + return (int) (Math.sqrt(repulsion_range_sq)); + } + + /** + * Sets the node repulsion range (in drawing area units) for this instance. Outside this range, + * nodes do not repel each other. The default value is 100. Negative values are treated as their + * positive equivalents. + * + * @param range the maximum repulsion range + */ + public void setRepulsionRange(int range) { + this.repulsion_range_sq = range * range; + } + + public double getForceMultiplier() { + return force_multiplier; + } + + /** + * Sets the force multiplier for this instance. This value is used to specify how strongly an edge + * "wants" to be its default length (higher values indicate a greater attraction for the default + * length), which affects how much its endpoints move at each timestep. The default value is 1/3. + * A value of 0 turns off any attempt by the layout to cause edges to conform to the default + * length. Negative values cause long edges to get longer and short edges to get shorter; use at + * your own risk. + * + * @param force an energy field created by all living things that binds the galaxy together + */ + public void setForceMultiplier(double force) { + this.force_multiplier = force; + } + + public void initialize() {} + + /** Relaxation step. Moves all nodes a smidge. 
*/ + public void step() { + try { + for (V v : getGraph().getVertices()) { + SpringVertexData svd = springVertexData.getUnchecked(v); + if (svd == null) { + continue; } + svd.dx /= 4; + svd.dy /= 4; + svd.edgedx = svd.edgedy = 0; + svd.repulsiondx = svd.repulsiondy = 0; + } + } catch (ConcurrentModificationException cme) { + step(); } - protected void moveNodes() - { - synchronized (getSize()) { - try { - for (V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - SpringVertexData vd = springVertexData.getUnchecked(v); - if(vd == null) continue; - Point2D xyd = apply(v); - - vd.dx += vd.repulsiondx + vd.edgedx; - vd.dy += vd.repulsiondy + vd.edgedy; - - // keeps nodes from moving any faster than 5 per time unit - xyd.setLocation(xyd.getX()+Math.max(-5, Math.min(5, vd.dx)), - xyd.getY()+Math.max(-5, Math.min(5, vd.dy))); - - Dimension d = getSize(); - int width = d.width; - int height = d.height; - - if (xyd.getX() < 0) { - xyd.setLocation(0, xyd.getY()); - } else if (xyd.getX() > width) { - xyd.setLocation(width, xyd.getY()); - } - if (xyd.getY() < 0) { - xyd.setLocation(xyd.getX(), 0); - } else if (xyd.getY() > height) { - xyd.setLocation(xyd.getX(), height); - } - - } - } catch(ConcurrentModificationException cme) { - moveNodes(); - } + relaxEdges(); + calculateRepulsion(); + moveNodes(); + } + + protected void relaxEdges() { + try { + for (E e : getGraph().getEdges()) { + Pair endpoints = getGraph().getEndpoints(e); + V v1 = endpoints.getFirst(); + V v2 = endpoints.getSecond(); + + Point2D p1 = apply(v1); + Point2D p2 = apply(v2); + if (p1 == null || p2 == null) { + continue; } + double vx = p1.getX() - p2.getX(); + double vy = p1.getY() - p2.getY(); + double len = Math.sqrt(vx * vx + vy * vy); + + double desiredLen = lengthFunction.apply(e); + + // round from zero, if needed [zero would be Bad.]. + len = (len == 0) ? 
.0001 : len; + + double f = force_multiplier * (desiredLen - len) / len; + + f = f * Math.pow(stretch, (getGraph().degree(v1) + getGraph().degree(v2) - 2)); + + // the actual movement distance 'dx' is the force multiplied by the + // distance to go. + double dx = f * vx; + double dy = f * vy; + SpringVertexData v1D, v2D; + v1D = springVertexData.getUnchecked(v1); + v2D = springVertexData.getUnchecked(v2); + + v1D.edgedx += dx; + v1D.edgedy += dy; + v2D.edgedx += -dx; + v2D.edgedy += -dy; + } + } catch (ConcurrentModificationException cme) { + relaxEdges(); } + } - protected static class SpringVertexData { - protected double edgedx; - protected double edgedy; - protected double repulsiondx; - protected double repulsiondy; - - /** movement speed, x */ - protected double dx; + protected void calculateRepulsion() { + try { + for (V v : getGraph().getVertices()) { + if (isLocked(v)) { + continue; + } - /** movement speed, y */ - protected double dy; + SpringVertexData svd = springVertexData.getUnchecked(v); + if (svd == null) { + continue; + } + double dx = 0, dy = 0; + + for (V v2 : getGraph().getVertices()) { + if (v == v2) { + continue; + } + Point2D p = apply(v); + Point2D p2 = apply(v2); + if (p == null || p2 == null) { + continue; + } + double vx = p.getX() - p2.getX(); + double vy = p.getY() - p2.getY(); + double distanceSq = p.distanceSq(p2); + if (distanceSq == 0) { + dx += Math.random(); + dy += Math.random(); + } else if (distanceSq < repulsion_range_sq) { + double factor = 1; + dx += factor * vx / distanceSq; + dy += factor * vy / distanceSq; + } + } + double dlen = dx * dx + dy * dy; + if (dlen > 0) { + dlen = Math.sqrt(dlen) / 2; + svd.repulsiondx += dx / dlen; + svd.repulsiondy += dy / dlen; + } + } + } catch (ConcurrentModificationException cme) { + calculateRepulsion(); } + } - - /** - * Used for changing the size of the layout in response to a component's size. 
- */ - public class SpringDimensionChecker extends ComponentAdapter { - @Override - public void componentResized(ComponentEvent e) { - setSize(e.getComponent().getSize()); + protected void moveNodes() { + synchronized (getSize()) { + try { + for (V v : getGraph().getVertices()) { + if (isLocked(v)) { + continue; + } + SpringVertexData vd = springVertexData.getUnchecked(v); + if (vd == null) { + continue; + } + Point2D xyd = apply(v); + + vd.dx += vd.repulsiondx + vd.edgedx; + vd.dy += vd.repulsiondy + vd.edgedy; + + // keeps nodes from moving any faster than 5 per time unit + xyd.setLocation( + xyd.getX() + Math.max(-5, Math.min(5, vd.dx)), + xyd.getY() + Math.max(-5, Math.min(5, vd.dy))); + + Dimension d = getSize(); + int width = d.width; + int height = d.height; + + if (xyd.getX() < 0) { + xyd.setLocation(0, xyd.getY()); + } else if (xyd.getX() > width) { + xyd.setLocation(width, xyd.getY()); + } + if (xyd.getY() < 0) { + xyd.setLocation(xyd.getX(), 0); + } else if (xyd.getY() > height) { + xyd.setLocation(xyd.getX(), height); + } } + } catch (ConcurrentModificationException cme) { + moveNodes(); + } } - - /** - * @return true - */ - public boolean isIncremental() { - return true; + } + + protected static class SpringVertexData { + protected double edgedx; + protected double edgedy; + protected double repulsiondx; + protected double repulsiondy; + + /** movement speed, x */ + protected double dx; + + /** movement speed, y */ + protected double dy; + } + + /** Used for changing the size of the layout in response to a component's size. */ + public class SpringDimensionChecker extends ComponentAdapter { + @Override + public void componentResized(ComponentEvent e) { + setSize(e.getComponent().getSize()); } + } - /** - * @return false - */ - public boolean done() { - return false; - } + /** @return true */ + public boolean isIncremental() { + return true; + } + + /** @return false */ + public boolean done() { + return false; + } - /** - * No effect. 
- */ - public void reset() { - } -} \ No newline at end of file + /** No effect. */ + public void reset() {} +} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java index be7dbcd3..d0a90f10 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/SpringLayout2.java @@ -1,139 +1,132 @@ /* * Copyright (c) 2003, The JUNG Authors * All rights reserved. - * + * * This software is open-source under the BSD license; see either "license.txt" * or https://github.com/jrtom/jung/blob/master/LICENSE for a description. */ package edu.uci.ics.jung.algorithms.layout; +import com.google.common.base.Function; +import edu.uci.ics.jung.graph.Graph; import java.awt.Dimension; import java.awt.geom.Point2D; import java.util.ConcurrentModificationException; -import com.google.common.base.Function; - -import edu.uci.ics.jung.graph.Graph; - /** - * The SpringLayout package represents a visualization of a set of nodes. The - * SpringLayout, which is initialized with a Graph, assigns X/Y locations to - * each node. When called relax(), the SpringLayout moves the - * visualization forward one step. - * - * - * + * The SpringLayout package represents a visualization of a set of nodes. The SpringLayout, which is + * initialized with a Graph, assigns X/Y locations to each node. When called relax(), + * the SpringLayout moves the visualization forward one step. 
+ * * @author Danyel Fisher * @author Joshua O'Madadhain */ -public class SpringLayout2 extends SpringLayout -{ - protected int currentIteration; - protected int averageCounter; - protected int loopCountMax = 4; - protected boolean done; - - protected Point2D averageDelta = new Point2D.Double(); - - /** - * Constructor for a SpringLayout for a raw graph with associated - * dimension--the input knows how big the graph is. Defaults to the unit - * length function. - * @param g the graph on which the layout algorithm is to operate - */ - public SpringLayout2(Graph g) { - super(g); - } +public class SpringLayout2 extends SpringLayout { + protected int currentIteration; + protected int averageCounter; + protected int loopCountMax = 4; + protected boolean done; - /** - * Constructor for a SpringLayout for a raw graph with associated component. - * - * @param g the {@code Graph} to lay out - * @param length_function provides a length for each edge - */ - public SpringLayout2(Graph g, Function length_function) - { - super(g, length_function); - } + protected Point2D averageDelta = new Point2D.Double(); + + /** + * Constructor for a SpringLayout for a raw graph with associated dimension--the input knows how + * big the graph is. Defaults to the unit length function. + * + * @param g the graph on which the layout algorithm is to operate + */ + public SpringLayout2(Graph g) { + super(g); + } + + /** + * Constructor for a SpringLayout for a raw graph with associated component. + * + * @param g the {@code Graph} to lay out + * @param length_function provides a length for each edge + */ + public SpringLayout2(Graph g, Function length_function) { + super(g, length_function); + } - /** - * Relaxation step. Moves all nodes a smidge. - */ - @Override - public void step() { - super.step(); - currentIteration++; - testAverageDeltas(); + /** Relaxation step. Moves all nodes a smidge. 
*/ + @Override + public void step() { + super.step(); + currentIteration++; + testAverageDeltas(); + } + + private void testAverageDeltas() { + double dx = this.averageDelta.getX(); + double dy = this.averageDelta.getY(); + if (Math.abs(dx) < .001 && Math.abs(dy) < .001) { + done = true; + System.err.println("done, dx=" + dx + ", dy=" + dy); } - - private void testAverageDeltas() { - double dx = this.averageDelta.getX(); - double dy = this.averageDelta.getY(); - if(Math.abs(dx) < .001 && Math.abs(dy) < .001) { - done = true; - System.err.println("done, dx="+dx+", dy="+dy); - } - if(currentIteration > loopCountMax) { - this.averageDelta.setLocation(0,0); - averageCounter = 0; - currentIteration = 0; - } + if (currentIteration > loopCountMax) { + this.averageDelta.setLocation(0, 0); + averageCounter = 0; + currentIteration = 0; } + } - @Override - protected void moveNodes() { - synchronized (getSize()) { - try { - for (V v : getGraph().getVertices()) { - if (isLocked(v)) continue; - SpringVertexData vd = springVertexData.getUnchecked(v); - if(vd == null) continue; - Point2D xyd = apply(v); - - vd.dx += vd.repulsiondx + vd.edgedx; - vd.dy += vd.repulsiondy + vd.edgedy; - -// int currentCount = currentIteration % this.loopCountMax; -// System.err.println(averageCounter+" --- vd.dx="+vd.dx+", vd.dy="+vd.dy); -// System.err.println("averageDelta was "+averageDelta); + @Override + protected void moveNodes() { + synchronized (getSize()) { + try { + for (V v : getGraph().getVertices()) { + if (isLocked(v)) { + continue; + } + SpringVertexData vd = springVertexData.getUnchecked(v); + if (vd == null) { + continue; + } + Point2D xyd = apply(v); - averageDelta.setLocation( - ((averageDelta.getX() * averageCounter) + vd.dx) / (averageCounter+1), - ((averageDelta.getY() * averageCounter) + vd.dy) / (averageCounter+1) - ); -// System.err.println("averageDelta now "+averageDelta); -// System.err.println(); - averageCounter++; - - // keeps nodes from moving any faster than 5 per 
time unit - xyd.setLocation(xyd.getX()+Math.max(-5, Math.min(5, vd.dx)), - xyd.getY()+Math.max(-5, Math.min(5, vd.dy))); - - Dimension d = getSize(); - int width = d.width; - int height = d.height; - - if (xyd.getX() < 0) { - xyd.setLocation(0, xyd.getY());// setX(0); - } else if (xyd.getX() > width) { - xyd.setLocation(width, xyd.getY()); //setX(width); - } - if (xyd.getY() < 0) { - xyd.setLocation(xyd.getX(),0);//setY(0); - } else if (xyd.getY() > height) { - xyd.setLocation(xyd.getX(), height); //setY(height); - } - - } - } catch(ConcurrentModificationException cme) { - moveNodes(); - } - } - } + vd.dx += vd.repulsiondx + vd.edgedx; + vd.dy += vd.repulsiondy + vd.edgedy; + + // int currentCount = currentIteration % this.loopCountMax; + // System.err.println(averageCounter+" --- vd.dx="+vd.dx+", vd.dy="+vd.dy); + // System.err.println("averageDelta was "+averageDelta); - @Override - public boolean done() { - return done; + averageDelta.setLocation( + ((averageDelta.getX() * averageCounter) + vd.dx) / (averageCounter + 1), + ((averageDelta.getY() * averageCounter) + vd.dy) / (averageCounter + 1)); + // System.err.println("averageDelta now "+averageDelta); + // System.err.println(); + averageCounter++; + + // keeps nodes from moving any faster than 5 per time unit + xyd.setLocation( + xyd.getX() + Math.max(-5, Math.min(5, vd.dx)), + xyd.getY() + Math.max(-5, Math.min(5, vd.dy))); + + Dimension d = getSize(); + int width = d.width; + int height = d.height; + + if (xyd.getX() < 0) { + xyd.setLocation(0, xyd.getY()); // setX(0); + } else if (xyd.getX() > width) { + xyd.setLocation(width, xyd.getY()); //setX(width); + } + if (xyd.getY() < 0) { + xyd.setLocation(xyd.getX(), 0); //setY(0); + } else if (xyd.getY() > height) { + xyd.setLocation(xyd.getX(), height); //setY(height); + } + } + } catch (ConcurrentModificationException cme) { + moveNodes(); + } } + } -} \ No newline at end of file + @Override + public boolean done() { + return done; + } +} diff --git 
a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/StaticLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/StaticLayout.java index a9958331..c625b0d9 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/StaticLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/StaticLayout.java @@ -1,7 +1,7 @@ /* * Created on Jul 21, 2005 * - * Copyright (c) 2005, The JUNG Authors + * Copyright (c) 2005, The JUNG Authors * * All rights reserved. * @@ -11,39 +11,37 @@ */ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.geom.Point2D; - import com.google.common.base.Function; - import edu.uci.ics.jung.graph.Graph; +import java.awt.Dimension; +import java.awt.geom.Point2D; /** - * StaticLayout places the vertices in the locations specified by its initializer, - * and has no other behavior. - * Vertex locations can be placed in a {@code Map} and then supplied to - * this layout as follows: {@code Function vertexLocations = Functions.forMap(map);} + * StaticLayout places the vertices in the locations specified by its initializer, and has no other + * behavior. 
Vertex locations can be placed in a {@code Map} and then supplied to this + * layout as follows: {@code Function vertexLocations = Functions.forMap(map);} + * * @author Tom Nelson - tomnelson@dev.java.net */ -public class StaticLayout extends AbstractLayout { - - public StaticLayout(Graph graph, Function initializer, Dimension size) { - super(graph, initializer, size); - } - - public StaticLayout(Graph graph, Function initializer) { - super(graph, initializer); - } - - public StaticLayout(Graph graph) { - super(graph); - } - - public StaticLayout(Graph graph, Dimension size) { - super(graph, size); - } - - public void initialize() {} - - public void reset() {} +public class StaticLayout extends AbstractLayout { + + public StaticLayout(Graph graph, Function initializer, Dimension size) { + super(graph, initializer, size); + } + + public StaticLayout(Graph graph, Function initializer) { + super(graph, initializer); + } + + public StaticLayout(Graph graph) { + super(graph); + } + + public StaticLayout(Graph graph, Dimension size) { + super(graph, size); + } + + public void initialize() {} + + public void reset() {} } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/TreeLayout.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/TreeLayout.java index 62f3d3c3..6e368105 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/TreeLayout.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/TreeLayout.java @@ -9,244 +9,228 @@ */ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.awt.Point; -import java.awt.geom.Point2D; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; import com.google.common.base.Function; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; - import 
edu.uci.ics.jung.graph.Forest; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.TreeUtils; +import java.awt.Dimension; +import java.awt.Point; +import java.awt.geom.Point2D; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; /** * @author Karlheinz Toni * @author Tom Nelson - converted to jung2 */ -public class TreeLayout implements Layout { - - protected Dimension size = new Dimension(600,600); - protected Forest graph; - protected Map basePositions = new HashMap(); - - protected LoadingCache locations = - CacheBuilder.newBuilder().build(new CacheLoader() { - public Point2D load(V vertex) { - return new Point2D.Double(); - } - }); - - protected transient Set alreadyDone = new HashSet(); - - /** - * The default horizontal vertex spacing. Initialized to 50. - */ - public static int DEFAULT_DISTX = 50; - - /** - * The default vertical vertex spacing. Initialized to 50. - */ - public static int DEFAULT_DISTY = 50; - - /** - * The horizontal vertex spacing. Defaults to {@code DEFAULT_XDIST}. - */ - protected int distX = 50; - - /** - * The vertical vertex spacing. Defaults to {@code DEFAULT_YDIST}. - */ - protected int distY = 50; - - protected transient Point m_currentPoint = new Point(); - - /** - * Creates an instance for the specified graph with default X and Y distances. - * @param g the graph on which the layout algorithm is to operate - */ - public TreeLayout(Forest g) { - this(g, DEFAULT_DISTX, DEFAULT_DISTY); +public class TreeLayout implements Layout { + + protected Dimension size = new Dimension(600, 600); + protected Forest graph; + protected Map basePositions = new HashMap(); + + protected LoadingCache locations = + CacheBuilder.newBuilder() + .build( + new CacheLoader() { + public Point2D load(V vertex) { + return new Point2D.Double(); + } + }); + + protected transient Set alreadyDone = new HashSet(); + + /** The default horizontal vertex spacing. 
Initialized to 50. */ + public static int DEFAULT_DISTX = 50; + + /** The default vertical vertex spacing. Initialized to 50. */ + public static int DEFAULT_DISTY = 50; + + /** The horizontal vertex spacing. Defaults to {@code DEFAULT_XDIST}. */ + protected int distX = 50; + + /** The vertical vertex spacing. Defaults to {@code DEFAULT_YDIST}. */ + protected int distY = 50; + + protected transient Point m_currentPoint = new Point(); + + /** + * Creates an instance for the specified graph with default X and Y distances. + * + * @param g the graph on which the layout algorithm is to operate + */ + public TreeLayout(Forest g) { + this(g, DEFAULT_DISTX, DEFAULT_DISTY); + } + + /** + * Creates an instance for the specified graph and X distance with default Y distance. + * + * @param g the graph on which the layout algorithm is to operate + * @param distx the horizontal spacing between adjacent siblings + */ + public TreeLayout(Forest g, int distx) { + this(g, distx, DEFAULT_DISTY); + } + + /** + * Creates an instance for the specified graph, X distance, and Y distance. 
+ * + * @param g the graph on which the layout algorithm is to operate + * @param distx the horizontal spacing between adjacent siblings + * @param disty the vertical spacing between adjacent siblings + */ + public TreeLayout(Forest g, int distx, int disty) { + if (g == null) throw new IllegalArgumentException("Graph must be non-null"); + if (distx < 1 || disty < 1) + throw new IllegalArgumentException("X and Y distances must each be positive"); + this.graph = g; + this.distX = distx; + this.distY = disty; + buildTree(); + } + + protected void buildTree() { + this.m_currentPoint = new Point(0, 20); + Collection roots = TreeUtils.getRoots(graph); + if (roots.size() > 0 && graph != null) { + calculateDimensionX(roots); + for (V v : roots) { + calculateDimensionX(v); + m_currentPoint.x += this.basePositions.get(v) / 2 + this.distX; + buildTree(v, this.m_currentPoint.x); + } } + } - /** - * Creates an instance for the specified graph and X distance with - * default Y distance. - * @param g the graph on which the layout algorithm is to operate - * @param distx the horizontal spacing between adjacent siblings - */ - public TreeLayout(Forest g, int distx) { - this(g, distx, DEFAULT_DISTY); - } + protected void buildTree(V v, int x) { - /** - * Creates an instance for the specified graph, X distance, and Y distance. 
- * @param g the graph on which the layout algorithm is to operate - * @param distx the horizontal spacing between adjacent siblings - * @param disty the vertical spacing between adjacent siblings - */ - public TreeLayout(Forest g, int distx, int disty) { - if (g == null) - throw new IllegalArgumentException("Graph must be non-null"); - if (distx < 1 || disty < 1) - throw new IllegalArgumentException("X and Y distances must each be positive"); - this.graph = g; - this.distX = distx; - this.distY = disty; - buildTree(); - } - - protected void buildTree() { - this.m_currentPoint = new Point(0, 20); - Collection roots = TreeUtils.getRoots(graph); - if (roots.size() > 0 && graph != null) { - calculateDimensionX(roots); - for(V v : roots) { - calculateDimensionX(v); - m_currentPoint.x += this.basePositions.get(v)/2 + this.distX; - buildTree(v, this.m_currentPoint.x); - } - } + if (alreadyDone.add(v)) { + //go one level further down + this.m_currentPoint.y += this.distY; + this.m_currentPoint.x = x; + + this.setCurrentPositionFor(v); + + int sizeXofCurrent = basePositions.get(v); + + int lastX = x - sizeXofCurrent / 2; + + int sizeXofChild; + int startXofChild; + + for (V element : graph.getSuccessors(v)) { + sizeXofChild = this.basePositions.get(element); + startXofChild = lastX + sizeXofChild / 2; + buildTree(element, startXofChild); + lastX = lastX + sizeXofChild + distX; + } + this.m_currentPoint.y -= this.distY; } + } - protected void buildTree(V v, int x) { + private int calculateDimensionX(V v) { - if (alreadyDone.add(v)) { - //go one level further down - this.m_currentPoint.y += this.distY; - this.m_currentPoint.x = x; + int size = 0; + int childrenNum = graph.getSuccessors(v).size(); - this.setCurrentPositionFor(v); + if (childrenNum != 0) { + for (V element : graph.getSuccessors(v)) { + size += calculateDimensionX(element) + distX; + } + } + size = Math.max(0, size - distX); + basePositions.put(v, size); - int sizeXofCurrent = basePositions.get(v); + return 
size; + } - int lastX = x - sizeXofCurrent / 2; + private int calculateDimensionX(Collection roots) { - int sizeXofChild; - int startXofChild; + int size = 0; + for (V v : roots) { + int childrenNum = graph.getSuccessors(v).size(); - for (V element : graph.getSuccessors(v)) { - sizeXofChild = this.basePositions.get(element); - startXofChild = lastX + sizeXofChild / 2; - buildTree(element, startXofChild); - lastX = lastX + sizeXofChild + distX; - } - this.m_currentPoint.y -= this.distY; + if (childrenNum != 0) { + for (V element : graph.getSuccessors(v)) { + size += calculateDimensionX(element) + distX; } + } + size = Math.max(0, size - distX); + basePositions.put(v, size); } - - private int calculateDimensionX(V v) { - int size = 0; - int childrenNum = graph.getSuccessors(v).size(); + return size; + } - if (childrenNum != 0) { - for (V element : graph.getSuccessors(v)) { - size += calculateDimensionX(element) + distX; - } - } - size = Math.max(0, size - distX); - basePositions.put(v, size); + /** + * This method is not supported by this class. The size of the layout is determined by the + * topology of the tree, and by the horizontal and vertical spacing (optionally set by the + * constructor). 
+ */ + public void setSize(Dimension size) { + throw new UnsupportedOperationException( + "Size of TreeLayout is set" + " by vertex spacing in constructor"); + } - return size; - } + protected void setCurrentPositionFor(V vertex) { + int x = m_currentPoint.x; + int y = m_currentPoint.y; + if (x < 0) size.width -= x; - private int calculateDimensionX(Collection roots) { + if (x > size.width - distX) size.width = x + distX; - int size = 0; - for(V v : roots) { - int childrenNum = graph.getSuccessors(v).size(); + if (y < 0) size.height -= y; + if (y > size.height - distY) size.height = y + distY; + locations.getUnchecked(vertex).setLocation(m_currentPoint); + } - if (childrenNum != 0) { - for (V element : graph.getSuccessors(v)) { - size += calculateDimensionX(element) + distX; - } - } - size = Math.max(0, size - distX); - basePositions.put(v, size); - } + public Graph getGraph() { + return graph; + } - return size; - } - - /** - * This method is not supported by this class. The size of the layout - * is determined by the topology of the tree, and by the horizontal - * and vertical spacing (optionally set by the constructor). 
- */ - public void setSize(Dimension size) { - throw new UnsupportedOperationException("Size of TreeLayout is set" + - " by vertex spacing in constructor"); - } + public Dimension getSize() { + return size; + } + + public void initialize() {} + + public boolean isLocked(V v) { + return false; + } - protected void setCurrentPositionFor(V vertex) { - int x = m_currentPoint.x; - int y = m_currentPoint.y; - if(x < 0) size.width -= x; - - if(x > size.width-distX) - size.width = x + distX; - - if(y < 0) size.height -= y; - if(y > size.height-distY) - size.height = y + distY; - locations.getUnchecked(vertex).setLocation(m_currentPoint); + public void lock(V v, boolean state) {} + public void reset() {} + + public void setGraph(Graph graph) { + if (graph instanceof Forest) { + this.graph = (Forest) graph; + buildTree(); + } else { + throw new IllegalArgumentException("graph must be a Forest"); } + } + + public void setInitializer(Function initializer) {} + + /** @return the center of this layout's area. */ + public Point2D getCenter() { + return new Point2D.Double(size.getWidth() / 2, size.getHeight() / 2); + } + + public void setLocation(V v, Point2D location) { + locations.getUnchecked(v).setLocation(location); + } - public Graph getGraph() { - return graph; - } - - public Dimension getSize() { - return size; - } - - public void initialize() { - - } - - public boolean isLocked(V v) { - return false; - } - - public void lock(V v, boolean state) { - } - - public void reset() { - } - - public void setGraph(Graph graph) { - if(graph instanceof Forest) { - this.graph = (Forest)graph; - buildTree(); - } else { - throw new IllegalArgumentException("graph must be a Forest"); - } - } - - public void setInitializer(Function initializer) { - } - - /** - * @return the center of this layout's area. 
- */ - public Point2D getCenter() { - return new Point2D.Double(size.getWidth()/2,size.getHeight()/2); - } - - public void setLocation(V v, Point2D location) { - locations.getUnchecked(v).setLocation(location); - } - - public Point2D apply(V v) { - return locations.getUnchecked(v); - } + public Point2D apply(V v) { + return locations.getUnchecked(v); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java index 310bc6d4..6b789033 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/RandomLocationTransformer.java @@ -1,7 +1,7 @@ /* * Created on Jul 19, 2005 * - * Copyright (c) 2005, The JUNG Authors + * Copyright (c) 2005, The JUNG Authors * * All rights reserved. * @@ -11,52 +11,49 @@ */ package edu.uci.ics.jung.algorithms.layout.util; +import com.google.common.base.Function; +import edu.uci.ics.jung.algorithms.layout.StaticLayout; import java.awt.Dimension; import java.awt.geom.Point2D; import java.util.Date; import java.util.Random; -import com.google.common.base.Function; - -import edu.uci.ics.jung.algorithms.layout.StaticLayout; - /** - * Provides a random vertex location within the bounds of the Dimension property. - * This provides a random location for unmapped vertices - * the first time they are accessed. - * + * Provides a random vertex location within the bounds of the Dimension property. This provides a + * random location for unmapped vertices the first time they are accessed. + * *

Note: the generated values are not cached, so apply() will generate a new random - * location for the passed vertex every time it is called. If you want a consistent value, - * wrap this layout's generated values in a {@link StaticLayout} instance. - * - * @author Tom Nelson + * location for the passed vertex every time it is called. If you want a consistent value, wrap this + * layout's generated values in a {@link StaticLayout} instance. * + * @author Tom Nelson * @param the vertex type */ -public class RandomLocationTransformer implements Function { - Dimension d; - Random random; - - /** - * Creates an instance with the specified size which uses the current time - * as the random seed. - * @param d the size of the layout area - */ - public RandomLocationTransformer(Dimension d) { - this(d, new Date().getTime()); - } - - /** - * Creates an instance with the specified dimension and random seed. - * @param d the size of the layout area - * @param seed the seed for the internal random number generator - */ - public RandomLocationTransformer(final Dimension d, long seed) { - this.d = d; - this.random = new Random(seed); - } - - public Point2D apply(V v) { - return new Point2D.Double(random.nextDouble() * d.width, random.nextDouble() * d.height); - } +public class RandomLocationTransformer implements Function { + Dimension d; + Random random; + + /** + * Creates an instance with the specified size which uses the current time as the random seed. + * + * @param d the size of the layout area + */ + public RandomLocationTransformer(Dimension d) { + this(d, new Date().getTime()); + } + + /** + * Creates an instance with the specified dimension and random seed. 
+ * + * @param d the size of the layout area + * @param seed the seed for the internal random number generator + */ + public RandomLocationTransformer(final Dimension d, long seed) { + this.d = d; + this.random = new Random(seed); + } + + public Point2D apply(V v) { + return new Point2D.Double(random.nextDouble() * d.width, random.nextDouble() * d.height); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java index 79dba9f8..6f37a4c5 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/Relaxer.java @@ -2,42 +2,26 @@ /** * Interface for operating the relax iterations on a layout. - * - * @author Tom Nelson - tomnelson@dev.java.net * + * @author Tom Nelson - tomnelson@dev.java.net */ public interface Relaxer { - - /** - * Execute a loop of steps in a new Thread, - * firing an event after each step. - */ - void relax(); - - /** - * Execute a loop of steps in the calling - * thread, firing no events. - */ - void prerelax(); - - /** - * Make the relaxer thread wait. - */ - void pause(); - - /** - * Make the relaxer thread resume. - * - */ - void resume(); - - /** - * Set flags to stop the relaxer thread. - */ - void stop(); - /** - * @param i the sleep time between iterations, in milliseconds - */ - void setSleepTime(long i); + /** Execute a loop of steps in a new Thread, firing an event after each step. */ + void relax(); + + /** Execute a loop of steps in the calling thread, firing no events. */ + void prerelax(); + + /** Make the relaxer thread wait. */ + void pause(); + + /** Make the relaxer thread resume. */ + void resume(); + + /** Set flags to stop the relaxer thread. 
*/ + void stop(); + + /** @param i the sleep time between iterations, in milliseconds */ + void setSleepTime(long i); } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java index 4fbb9051..095157f3 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/layout/util/VisRunner.java @@ -5,142 +5,135 @@ * This software is open-source under the BSD license; see either "license.txt" * or https://github.com/jrtom/jung/blob/master/LICENSE for a description. * - * + * */ package edu.uci.ics.jung.algorithms.layout.util; import edu.uci.ics.jung.algorithms.util.IterativeContext; /** - * - * Implementation of a relaxer thread for layouts. - * Extracted from the {@code VisualizationModel} in previous - * versions of JUNG. - * - * @author Tom Nelson - tomnelson@dev.java.net + * Implementation of a relaxer thread for layouts. Extracted from the {@code VisualizationModel} in + * previous versions of JUNG. * + * @author Tom Nelson - tomnelson@dev.java.net */ public class VisRunner implements Relaxer, Runnable { - - protected boolean running; - protected IterativeContext process; - protected boolean stop; - protected boolean manualSuspend; - protected Thread thread; - - /** - * how long the relaxer thread pauses between iteration loops. - */ - protected long sleepTime = 100L; - - - /** - * Creates an instance for the specified process. 
- * @param process the process (generally a layout) for which this instance is created - */ - public VisRunner(IterativeContext process) { - this.process = process; - } - - /** - * @return the relaxerThreadSleepTime - */ - public long getSleepTime() { - return sleepTime; - } - - /** - * @param sleepTime the sleep time to set for this thread - */ - public void setSleepTime(long sleepTime) { - this.sleepTime = sleepTime; - } - - public void prerelax() { - manualSuspend = true; - long timeNow = System.currentTimeMillis(); - while (System.currentTimeMillis() - timeNow < 500 && !process.done()) { - process.step(); - } - manualSuspend = false; - } - - public void pause() { - manualSuspend = true; - } - - public void relax() { - // in case its running - stop(); - stop = false; - thread = new Thread(this); - thread.setPriority(Thread.MIN_PRIORITY); - thread.start(); - } - - /** - * Used for synchronization. - */ - public Object pauseObject = new String("PAUSE OBJECT"); - - public void resume() { - manualSuspend = false; - if(running == false) { - prerelax(); - relax(); - } else { - synchronized(pauseObject) { - pauseObject.notifyAll(); - } - } - } - - public synchronized void stop() { - if(thread != null) { - manualSuspend = false; - stop = true; - // interrupt the relaxer, in case it is paused or sleeping - // this should ensure that visRunnerIsRunning gets set to false - try { thread.interrupt(); } - catch(Exception ex) { - // the applet security manager may have prevented this. 
- // just sleep for a second to let the thread stop on its own - try { Thread.sleep(1000); } - catch(InterruptedException ie) {} // ignore - } - synchronized (pauseObject) { - pauseObject.notifyAll(); - } - } - } - - public void run() { - running = true; - try { - while (!process.done() && !stop) { - synchronized (pauseObject) { - while (manualSuspend && !stop) { - try { - pauseObject.wait(); - } catch (InterruptedException e) { - // ignore - } - } - } - process.step(); - - if (stop) - return; - - try { - Thread.sleep(sleepTime); - } catch (InterruptedException ie) { - // ignore - } - } - - } finally { - running = false; - } - } + + protected boolean running; + protected IterativeContext process; + protected boolean stop; + protected boolean manualSuspend; + protected Thread thread; + + /** how long the relaxer thread pauses between iteration loops. */ + protected long sleepTime = 100L; + + /** + * Creates an instance for the specified process. + * + * @param process the process (generally a layout) for which this instance is created + */ + public VisRunner(IterativeContext process) { + this.process = process; + } + + /** @return the relaxerThreadSleepTime */ + public long getSleepTime() { + return sleepTime; + } + + /** @param sleepTime the sleep time to set for this thread */ + public void setSleepTime(long sleepTime) { + this.sleepTime = sleepTime; + } + + public void prerelax() { + manualSuspend = true; + long timeNow = System.currentTimeMillis(); + while (System.currentTimeMillis() - timeNow < 500 && !process.done()) { + process.step(); + } + manualSuspend = false; + } + + public void pause() { + manualSuspend = true; + } + + public void relax() { + // in case its running + stop(); + stop = false; + thread = new Thread(this); + thread.setPriority(Thread.MIN_PRIORITY); + thread.start(); + } + + /** Used for synchronization. 
*/ + public Object pauseObject = new String("PAUSE OBJECT"); + + public void resume() { + manualSuspend = false; + if (running == false) { + prerelax(); + relax(); + } else { + synchronized (pauseObject) { + pauseObject.notifyAll(); + } + } + } + + public synchronized void stop() { + if (thread != null) { + manualSuspend = false; + stop = true; + // interrupt the relaxer, in case it is paused or sleeping + // this should ensure that visRunnerIsRunning gets set to false + try { + thread.interrupt(); + } catch (Exception ex) { + // the applet security manager may have prevented this. + // just sleep for a second to let the thread stop on its own + try { + Thread.sleep(1000); + } catch (InterruptedException ie) { + } // ignore + } + synchronized (pauseObject) { + pauseObject.notifyAll(); + } + } + } + + public void run() { + running = true; + try { + while (!process.done() && !stop) { + synchronized (pauseObject) { + while (manualSuspend && !stop) { + try { + pauseObject.wait(); + } catch (InterruptedException e) { + // ignore + } + } + } + process.step(); + + if (stop) { + return; + } + + try { + Thread.sleep(sleepTime); + } catch (InterruptedException ie) { + // ignore + } + } + + } finally { + running = false; + } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/Metrics.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/Metrics.java index ab11118a..cb3d5d16 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/Metrics.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/Metrics.java @@ -1,74 +1,64 @@ /** - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * - * All rights reserved. + *

All rights reserved. * - * This software is open-source under the BSD license; see either - * "license.txt" or - * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * Created on Jun 7, 2008 - * + *

This software is open-source under the BSD license; see either "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. Created on Jun 7, 2008 */ package edu.uci.ics.jung.algorithms.metrics; +import edu.uci.ics.jung.graph.Graph; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; -import edu.uci.ics.jung.graph.Graph; +/** A class consisting of static methods for calculating graph metrics. */ +public class Metrics { + /** + * Returns a Map of vertices to their clustering coefficients. The clustering + * coefficient cc(v) of a vertex v is defined as follows: + * + *

    + *
  • degree(v) == {0,1}: 0 + *
  • degree(v) == n, n >= 2: given S, the set of neighbors of v: + * cc(v) = (the sum over all w in S of the number of other elements of w that are neighbors + * of w) / ((|S| * (|S| - 1) / 2). Less formally, the fraction of v's neighbors + * that are also neighbors of each other. + *
+ * + *

Note: This algorithm treats its argument as an undirected graph; edge direction is + * ignored. + * + * @param graph the graph whose clustering coefficients are to be calculated + * @param the vertex type + * @param the edge type + * @return the clustering coefficient for each vertex + * @see "The structure and function of complex networks, M.E.J. Newman, + * aps.arxiv.org/abs/cond-mat/0303516" + */ + public static Map clusteringCoefficients(Graph graph) { + Map coefficients = new HashMap(); -/** - * A class consisting of static methods for calculating graph metrics. - */ -public class Metrics -{ - /** - * Returns a Map of vertices to their clustering coefficients. - * The clustering coefficient cc(v) of a vertex v is defined as follows: - *

    - *
  • degree(v) == {0,1}: 0 - *
  • degree(v) == n, n >= 2: given S, the set of neighbors - * of v: cc(v) = (the sum over all w in S of the number of - * other elements of w that are neighbors of w) / ((|S| * (|S| - 1) / 2). - * Less formally, the fraction of v's neighbors that are also - * neighbors of each other. - *
- *

Note: This algorithm treats its argument as an undirected graph; - * edge direction is ignored. - * @param graph the graph whose clustering coefficients are to be calculated - * @param the vertex type - * @param the edge type - * @return the clustering coefficient for each vertex - * @see "The structure and function of complex networks, M.E.J. Newman, aps.arxiv.org/abs/cond-mat/0303516" - */ - public static Map clusteringCoefficients(Graph graph) - { - Map coefficients = new HashMap(); - - for (V v : graph.getVertices()) - { - int n = graph.getNeighborCount(v); - if (n < 2) - coefficients.put(v, new Double(0)); - else - { - // how many of v's neighbors are connected to each other? - ArrayList neighbors = new ArrayList(graph.getNeighbors(v)); - double edge_count = 0; - for (int i = 0; i < n; i++) - { - V w = neighbors.get(i); - for (int j = i+1; j < n; j++ ) - { - V x = neighbors.get(j); - edge_count += graph.isNeighbor(w, x) ? 1 : 0; - } - } - double possible_edges = (n * (n - 1))/2.0; - coefficients.put(v, new Double(edge_count / possible_edges)); - } + for (V v : graph.getVertices()) { + int n = graph.getNeighborCount(v); + if (n < 2) coefficients.put(v, new Double(0)); + else { + // how many of v's neighbors are connected to each other? + ArrayList neighbors = new ArrayList(graph.getNeighbors(v)); + double edge_count = 0; + for (int i = 0; i < n; i++) { + V w = neighbors.get(i); + for (int j = i + 1; j < n; j++) { + V x = neighbors.get(j); + edge_count += graph.isNeighbor(w, x) ? 
1 : 0; + } } - - return coefficients; + double possible_edges = (n * (n - 1)) / 2.0; + coefficients.put(v, new Double(edge_count / possible_edges)); + } } + + return coefficients; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java index bc4dd7c1..7028520b 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/StructuralHoles.java @@ -1,7 +1,7 @@ /* * Created on Sep 19, 2005 * - * Copyright (c) 2005, The JUNG Authors + * Copyright (c) 2005, The JUNG Authors * * All rights reserved. * @@ -12,332 +12,334 @@ package edu.uci.ics.jung.algorithms.metrics; import com.google.common.base.Function; - import edu.uci.ics.jung.graph.Graph; /** - * Calculates some of the measures from Burt's text "Structural Holes: - * The Social Structure of Competition". - * - *

Notes: + * Calculates some of the measures from Burt's text "Structural Holes: The Social Structure of + * Competition". + * + *

Notes: + * *

    - *
  • Each of these measures assumes that each edge has an associated - * non-null weight whose value is accessed through the specified - * Transformer instance. - *
  • Nonexistent edges are treated as edges with weight 0 for purposes - * of edge weight calculations. + *
  • Each of these measures assumes that each edge has an associated non-null weight whose value + * is accessed through the specified Transformer instance. + *
  • Nonexistent edges are treated as edges with weight 0 for purposes of edge weight + * calculations. *
- * - *

Based on code donated by Jasper Voskuilen and - * Diederik van Liere of the Department of Information and Decision Sciences - * at Erasmus University. - * + * + *

Based on code donated by Jasper Voskuilen and Diederik van Liere of the Department of + * Information and Decision Sciences at Erasmus University. + * * @author Joshua O'Madadhain * @author Jasper Voskuilen * @see "Ronald Burt, Structural Holes: The Social Structure of Competition" * @author Tom Nelson - converted to jung2 */ -public class StructuralHoles { - - protected Function edge_weight; - protected Graph g; - - /** - * @param graph the graph for which the metrics are to be calculated - * @param nev the edge weights - */ - public StructuralHoles(Graph graph, Function nev) - { - this.g = graph; - this.edge_weight = nev; +public class StructuralHoles { + + protected Function edge_weight; + protected Graph g; + + /** + * @param graph the graph for which the metrics are to be calculated + * @param nev the edge weights + */ + public StructuralHoles(Graph graph, Function nev) { + this.g = graph; + this.edge_weight = nev; + } + + /** + * Burt's measure of the effective size of a vertex's network. Essentially, the number of + * neighbors minus the average degree of those in v's neighbor set, not counting ties + * to v. Formally: + * + *

+   * effectiveSize(v) = v.degree() - (sum_{u in N(v)} sum_{w in N(u), w !=u,v} p(v,w)*m(u,w))
+   * 
+ * + * where + * + *
    + *
  • N(a) = a.getNeighbors() + *
  • p(v,w) = normalized mutual edge weight of v and w + *
  • m(u,w) = maximum-scaled mutual edge weight of u and w + *
+ * + * @param v the vertex whose properties are being measured + * @return the effective size of the vertex's network + * @see #normalizedMutualEdgeWeight(Object, Object) + * @see #maxScaledMutualEdgeWeight(Object, Object) + */ + public double effectiveSize(V v) { + double result = g.degree(v); + for (V u : g.getNeighbors(v)) { + + for (V w : g.getNeighbors(u)) { + + if (w != v && w != u) + result -= normalizedMutualEdgeWeight(v, w) * maxScaledMutualEdgeWeight(u, w); + } } + return result; + } + + /** + * Returns the effective size of v divided by the number of alters in v + * 's network. (In other words, effectiveSize(v) / v.degree().) If + * v.degree() == 0, returns 0. + * + * @param v the vertex whose properties are being measured + * @return the effective size of the vertex divided by its degree + */ + public double efficiency(V v) { + double degree = g.degree(v); - /** - * Burt's measure of the effective size of a vertex's network. Essentially, the - * number of neighbors minus the average degree of those in v's neighbor set, - * not counting ties to v. Formally: - *
-     * effectiveSize(v) = v.degree() - (sum_{u in N(v)} sum_{w in N(u), w !=u,v} p(v,w)*m(u,w))
-     * 
- * where - *
    - *
  • N(a) = a.getNeighbors() - *
  • p(v,w) = normalized mutual edge weight of v and w - *
  • m(u,w) = maximum-scaled mutual edge weight of u and w - *
- * @param v the vertex whose properties are being measured - * @return the effective size of the vertex's network - * - * @see #normalizedMutualEdgeWeight(Object, Object) - * @see #maxScaledMutualEdgeWeight(Object, Object) - */ - public double effectiveSize(V v) - { - double result = g.degree(v); - for(V u : g.getNeighbors(v)) { - - for(V w : g.getNeighbors(u)) { - - if (w != v && w != u) - result -= normalizedMutualEdgeWeight(v,w) * - maxScaledMutualEdgeWeight(u,w); - } - } - return result; + if (degree == 0) { + return 0; + } else { + return effectiveSize(v) / degree; } - - /** - * Returns the effective size of v divided by the number of - * alters in v's network. (In other words, - * effectiveSize(v) / v.degree().) - * If v.degree() == 0, returns 0. - * - * @param v the vertex whose properties are being measured - * @return the effective size of the vertex divided by its degree - */ - public double efficiency(V v) { - double degree = g.degree(v); - - if (degree == 0) - return 0; - else - return effectiveSize(v) / degree; + } + + /** + * Burt's constraint measure (equation 2.4, page 55 of Burt, 1992). Essentially a measure of the + * extent to which v is invested in people who are invested in other of v + * 's alters (neighbors). The "constraint" is characterized by a lack of primary holes + * around each neighbor. Formally: + * + *
+   * constraint(v) = sum_{w in MP(v), w != v} localConstraint(v,w)
+   * 
+ * + * where MP(v) is the subset of v's neighbors that are both predecessors and successors of v. + * + * @see #localConstraint(Object, Object) + * @param v the vertex whose properties are being measured + * @return the constraint of the vertex + */ + public double constraint(V v) { + double result = 0; + for (V w : g.getSuccessors(v)) { + + if (v != w && g.isPredecessor(v, w)) { + result += localConstraint(v, w); + } } - /** - * Burt's constraint measure (equation 2.4, page 55 of Burt, 1992). Essentially a - * measure of the extent to which v is invested in people who are invested in - * other of v's alters (neighbors). The "constraint" is characterized - * by a lack of primary holes around each neighbor. Formally: - *
-     * constraint(v) = sum_{w in MP(v), w != v} localConstraint(v,w)
-     * 
- * where MP(v) is the subset of v's neighbors that are both predecessors and successors of v. - * @see #localConstraint(Object, Object) - * - * @param v the vertex whose properties are being measured - * @return the constraint of the vertex - */ - public double constraint(V v) { - double result = 0; - for(V w : g.getSuccessors(v)) { - - if (v != w && g.isPredecessor(v,w)) - { - result += localConstraint(v, w); - } - } - - return result; + return result; + } + + /** + * Calculates the hierarchy value for a given vertex. Returns NaN when v + * 's degree is 0, and 1 when v's degree is 1. Formally: + * + *
+   * hierarchy(v) = (sum_{v in N(v), w != v} s(v,w) * log(s(v,w))}) / (v.degree() * Math.log(v.degree())
+   * 
+ * + * where + * + *
    + *
  • N(v) = v.getNeighbors() + *
  • s(v,w) = localConstraint(v,w) / (aggregateConstraint(v) / v.degree()) + *
+ * + * @see #localConstraint(Object, Object) + * @see #aggregateConstraint(Object) + * @param v the vertex whose properties are being measured + * @return the hierarchy value for a given vertex + */ + public double hierarchy(V v) { + double v_degree = g.degree(v); + + if (v_degree == 0) { + return Double.NaN; + } + if (v_degree == 1) { + return 1; } - - /** - * Calculates the hierarchy value for a given vertex. Returns NaN when - * v's degree is 0, and 1 when v's degree is 1. - * Formally: - *
-     * hierarchy(v) = (sum_{v in N(v), w != v} s(v,w) * log(s(v,w))}) / (v.degree() * Math.log(v.degree()) 
-     * 
- * where - *
    - *
  • N(v) = v.getNeighbors() - *
  • s(v,w) = localConstraint(v,w) / (aggregateConstraint(v) / v.degree()) - *
- * @see #localConstraint(Object, Object) - * @see #aggregateConstraint(Object) - * - * @param v the vertex whose properties are being measured - * @return the hierarchy value for a given vertex - */ - public double hierarchy(V v) - { - double v_degree = g.degree(v); - - if (v_degree == 0) - return Double.NaN; - if (v_degree == 1) - return 1; - - double v_constraint = aggregateConstraint(v); - - double numerator = 0; - for (V w : g.getNeighbors(v)) { - - if (v != w) - { - double sl_constraint = localConstraint(v, w) / (v_constraint / v_degree); - numerator += sl_constraint * Math.log(sl_constraint); - } - } - - return numerator / (v_degree * Math.log(v_degree)); + double v_constraint = aggregateConstraint(v); + + double numerator = 0; + for (V w : g.getNeighbors(v)) { + + if (v != w) { + double sl_constraint = localConstraint(v, w) / (v_constraint / v_degree); + numerator += sl_constraint * Math.log(sl_constraint); + } } - /** - * Returns the local constraint on v1 from a lack of primary holes - * around its neighbor v2. - * Based on Burt's equation 2.4. Formally: - *
-     * localConstraint(v1, v2) = ( p(v1,v2) + ( sum_{w in N(v)} p(v1,w) * p(w, v2) ) )^2
-     * 
- * where - *
    - *
  • N(v) = v.getNeighbors() - *
  • p(v,w) = normalized mutual edge weight of v and w - *
- * @param v1 the first vertex whose local constraint is desired - * @param v2 the second vertex whose local constraint is desired - * @return the local constraint on (v1, v2) - * @see #normalizedMutualEdgeWeight(Object, Object) - */ - public double localConstraint(V v1, V v2) - { - double nmew_vw = normalizedMutualEdgeWeight(v1, v2); - double inner_result = 0; - for (V w : g.getNeighbors(v1)) { - - inner_result += normalizedMutualEdgeWeight(v1,w) * - normalizedMutualEdgeWeight(w,v2); - } - return (nmew_vw + inner_result) * (nmew_vw + inner_result); + return numerator / (v_degree * Math.log(v_degree)); + } + + /** + * Returns the local constraint on v1 from a lack of primary holes around its + * neighbor v2. Based on Burt's equation 2.4. Formally: + * + *
+   * localConstraint(v1, v2) = ( p(v1,v2) + ( sum_{w in N(v)} p(v1,w) * p(w, v2) ) )^2
+   * 
+ * + * where + * + *
    + *
  • N(v) = v.getNeighbors() + *
  • p(v,w) = normalized mutual edge weight of v and w + *
+ * + * @param v1 the first vertex whose local constraint is desired + * @param v2 the second vertex whose local constraint is desired + * @return the local constraint on (v1, v2) + * @see #normalizedMutualEdgeWeight(Object, Object) + */ + public double localConstraint(V v1, V v2) { + double nmew_vw = normalizedMutualEdgeWeight(v1, v2); + double inner_result = 0; + for (V w : g.getNeighbors(v1)) { + + inner_result += normalizedMutualEdgeWeight(v1, w) * normalizedMutualEdgeWeight(w, v2); } - - /** - * The aggregate constraint on v. Based on Burt's equation 2.7. - * Formally: - *
-     * aggregateConstraint(v) = sum_{w in N(v)} localConstraint(v,w) * O(w)
-     * 
- * where - *
    - *
  • N(v) = v.getNeighbors() - *
  • O(w) = organizationalMeasure(w) - *
- * - * @param v the vertex whose properties are being measured - * @return the aggregate constraint on v - */ - public double aggregateConstraint(V v) - { - double result = 0; - for (V w : g.getNeighbors(v)) { - - result += localConstraint(v, w) * organizationalMeasure(g, w); - } - return result; + return (nmew_vw + inner_result) * (nmew_vw + inner_result); + } + + /** + * The aggregate constraint on v. Based on Burt's equation 2.7. Formally: + * + *
+   * aggregateConstraint(v) = sum_{w in N(v)} localConstraint(v,w) * O(w)
+   * 
+ * + * where + * + *
    + *
  • N(v) = v.getNeighbors() + *
  • O(w) = organizationalMeasure(w) + *
+ * + * @param v the vertex whose properties are being measured + * @return the aggregate constraint on v + */ + public double aggregateConstraint(V v) { + double result = 0; + for (V w : g.getNeighbors(v)) { + + result += localConstraint(v, w) * organizationalMeasure(g, w); + } + return result; + } + + /** + * A measure of the organization of individuals within the subgraph centered on v. + * Burt's text suggests that this is in some sense a measure of how "replaceable" v + * is by some other element of this subgraph. Should be a number in the closed interval [0,1]. + * + *

This implementation returns 1. Users may wish to override this method in order to define + * their own behavior. + * + * @param g the subgraph centered on v + * @param v the vertex whose properties are being measured + * @return 1.0 (in this implementation) + */ + protected double organizationalMeasure(Graph g, V v) { + return 1.0; + } + + /** + * Returns the proportion of v1's network time and energy invested in the + * relationship with v2. Formally: + * + *

+   * normalizedMutualEdgeWeight(a,b) = mutual_weight(a,b) / (sum_c mutual_weight(a,c))
+   * 
+ * + * Returns 0 if either numerator or denominator = 0, or if v1 == v2. + * + * @see #mutualWeight(Object, Object) + * @param v1 the first vertex of the pair whose property is being measured + * @param v2 the second vertex of the pair whose property is being measured + * @return the normalized mutual edge weight between v1 and v2 + */ + protected double normalizedMutualEdgeWeight(V v1, V v2) { + if (v1 == v2) { + return 0; + } + + double numerator = mutualWeight(v1, v2); + + if (numerator == 0) { + return 0; + } + + double denominator = 0; + for (V v : g.getNeighbors(v1)) { + denominator += mutualWeight(v1, v); + } + if (denominator == 0) { + return 0; } - - /** - * A measure of the organization of individuals within the subgraph - * centered on v. Burt's text suggests that this is - * in some sense a measure of how "replaceable" v is by - * some other element of this subgraph. Should be a number in the - * closed interval [0,1]. - * - *

This implementation returns 1. Users may wish to override this - * method in order to define their own behavior. - * @param g the subgraph centered on v - * @param v the vertex whose properties are being measured - * @return 1.0 (in this implementation) - */ - protected double organizationalMeasure(Graph g, V v) { - return 1.0; + + return numerator / denominator; + } + + /** + * Returns the weight of the edge from v1 to v2 plus the weight of the + * edge from v2 to v1; if either edge does not exist, it is treated as + * an edge with weight 0. Undirected edges are treated as two antiparallel directed edges (that + * is, if there is one undirected edge with weight w connecting v1 to v2 + * , the value returned is 2w). Ignores parallel edges; if there are any such, one + * is chosen at random. Throws NullPointerException if either edge is present but not + * assigned a weight by the constructor-specified NumberEdgeValue. + * + * @param v1 the first vertex of the pair whose property is being measured + * @param v2 the second vertex of the pair whose property is being measured + * @return the weights of the edges {@code} and {@code } + */ + protected double mutualWeight(V v1, V v2) { + E e12 = g.findEdge(v1, v2); + E e21 = g.findEdge(v2, v1); + double w12 = (e12 != null ? edge_weight.apply(e12).doubleValue() : 0); + double w21 = (e21 != null ? edge_weight.apply(e21).doubleValue() : 0); + + return w12 + w21; + } + + /** + * The marginal strength of v1's relation with contact v2. Formally: + * + *

+   * normalized_mutual_weight = mutual_weight(a,b) / (max_c mutual_weight(a,c))
+   * 
+ * + * Returns 0 if either numerator or denominator is 0, or if v1 == v2. + * + * @param v1 the first vertex of the pair whose property is being measured + * @param v2 the second vertex of the pair whose property is being measured + * @return the marginal strength of v1's relation with v2 + * @see #mutualWeight(Object, Object) + */ + protected double maxScaledMutualEdgeWeight(V v1, V v2) { + if (v1 == v2) { + return 0; } - - - /** - * Returns the proportion of v1's network time and energy invested - * in the relationship with v2. Formally: - *
-     * normalizedMutualEdgeWeight(a,b) = mutual_weight(a,b) / (sum_c mutual_weight(a,c))
-     * 
- * Returns 0 if either numerator or denominator = 0, or if v1 == v2. - * @see #mutualWeight(Object, Object) - * @param v1 the first vertex of the pair whose property is being measured - * @param v2 the second vertex of the pair whose property is being measured - * @return the normalized mutual edge weight between v1 and v2 - */ - protected double normalizedMutualEdgeWeight(V v1, V v2) - { - if (v1 == v2) - return 0; - - double numerator = mutualWeight(v1, v2); - - if (numerator == 0) - return 0; - - double denominator = 0; - for (V v : g.getNeighbors(v1)) { - denominator += mutualWeight(v1, v); - } - if (denominator == 0) - return 0; - - return numerator / denominator; + + double numerator = mutualWeight(v1, v2); + + if (numerator == 0) { + return 0; } - - /** - * Returns the weight of the edge from v1 to v2 - * plus the weight of the edge from v2 to v1; - * if either edge does not exist, it is treated as an edge with weight 0. - * Undirected edges are treated as two antiparallel directed edges (that - * is, if there is one undirected edge with weight w connecting - * v1 to v2, the value returned is 2w). - * Ignores parallel edges; if there are any such, one is chosen at random. - * Throws NullPointerException if either edge is - * present but not assigned a weight by the constructor-specified - * NumberEdgeValue. - * - * @param v1 the first vertex of the pair whose property is being measured - * @param v2 the second vertex of the pair whose property is being measured - * @return the weights of the edges {@code} and {@code } - */ - protected double mutualWeight(V v1, V v2) - { - E e12 = g.findEdge(v1,v2); - E e21 = g.findEdge(v2,v1); - double w12 = (e12 != null ? edge_weight.apply(e12).doubleValue() : 0); - double w21 = (e21 != null ? 
edge_weight.apply(e21).doubleValue() : 0); - - return w12 + w21; + + double denominator = 0; + for (V w : g.getNeighbors(v1)) { + + if (v2 != w) denominator = Math.max(numerator, mutualWeight(v1, w)); } - - /** - * The marginal strength of v1's relation with contact v2. - * Formally: - *
-     * normalized_mutual_weight = mutual_weight(a,b) / (max_c mutual_weight(a,c))
-     * 
- * Returns 0 if either numerator or denominator is 0, or if v1 == v2. - * - * @param v1 the first vertex of the pair whose property is being measured - * @param v2 the second vertex of the pair whose property is being measured - * @return the marginal strength of v1's relation with v2 - * - * @see #mutualWeight(Object, Object) - */ - protected double maxScaledMutualEdgeWeight(V v1, V v2) - { - if (v1 == v2) - return 0; - - double numerator = mutualWeight(v1, v2); - - if (numerator == 0) - return 0; - - double denominator = 0; - for (V w : g.getNeighbors(v1)) { - - if (v2 != w) - denominator = Math.max(numerator, mutualWeight(v1, w)); - } - - if (denominator == 0) - return 0; - - return numerator / denominator; + + if (denominator == 0) { + return 0; } + + return numerator / denominator; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java index e6732e81..2f4873a1 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/metrics/TriadicCensus.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. * @@ -9,42 +9,37 @@ */ package edu.uci.ics.jung.algorithms.metrics; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.Graph; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.Graph; - - /** - * TriadicCensus is a standard social network tool that counts, for each of the - * different possible configurations of three vertices, the number of times - * that that configuration occurs in the given graph. 
- * This may then be compared to the set of expected counts for this particular - * graph or to an expected sample. This is often used in p* modeling. - *

- * To use this class, + * TriadicCensus is a standard social network tool that counts, for each of the different possible + * configurations of three vertices, the number of times that that configuration occurs in the given + * graph. This may then be compared to the set of expected counts for this particular graph or to an + * expected sample. This is often used in p* modeling. + * + *

To use this class, + * *

  * long[] triad_counts = TriadicCensus(dg);
  * 
- * where dg is a DirectedGraph. - * ith element of the array (for i in [1,16]) is the number of - * occurrences of the corresponding triad type. - * (The 0th element is not meaningful; this array is effectively 1-based.) - * To get the name of the ith triad (e.g. "003"), - * look at the global constant array c.TRIAD_NAMES[i] - *

- * Triads are named as - * (number of pairs that are mutually tied) - * (number of pairs that are one-way tied) - * (number of non-tied pairs) - * in the triple. Since there are be only three pairs, there is a finite - * set of these possible triads. - *

- * In fact, there are exactly 16, conventionally sorted by the number of - * realized edges in the triad: + * + * where dg is a DirectedGraph. ith element of the array (for i in [1,16]) + * is the number of occurrences of the corresponding triad type. (The 0th element is not meaningful; + * this array is effectively 1-based.) To get the name of the ith triad (e.g. "003"), look at the + * global constant array c.TRIAD_NAMES[i] + * + *

Triads are named as (number of pairs that are mutually tied) (number of pairs that are one-way + * tied) (number of non-tied pairs) in the triple. Since there are be only three pairs, there is a + * finite set of these possible triads. + * + *

In fact, there are exactly 16, conventionally sorted by the number of realized edges in the + * triad: + * * * * @@ -65,145 +60,145 @@ * * *
Descriptions of the different types of triads
Number Configuration Notes
15210
16300The complete
- *

- * This implementation takes O( m ), m is the number of edges in the graph. - *
- * It is based on - * - * A subquadratic triad census algorithm for large sparse networks - * with small maximum degree - * Vladimir Batagelj and Andrej Mrvar, University of Ljubljana - * Published in Social Networks. + * + *

This implementation takes O( m ), m is the number of edges in the graph.
+ * It is based on A + * subquadratic triad census algorithm for large sparse networks with small maximum degree + * Vladimir Batagelj and Andrej Mrvar, University of Ljubljana Published in Social Networks. + * * @author Danyel Fisher * @author Tom Nelson - converted to jung2 - * */ public class TriadicCensus { - // NOTE THAT THIS RETURNS STANDARD 1-16 COUNT! + // NOTE THAT THIS RETURNS STANDARD 1-16 COUNT! + + // and their types + public static final String[] TRIAD_NAMES = { + "N/A", "003", "012", "102", "021D", "021U", "021C", "111D", "111U", "030T", "030C", "201", + "120D", "120U", "120C", "210", "300" + }; - // and their types - public static final String[] TRIAD_NAMES = { "N/A", "003", "012", "102", "021D", - "021U", "021C", "111D", "111U", "030T", "030C", "201", "120D", - "120U", "120C", "210", "300" }; + public static final int MAX_TRIADS = TRIAD_NAMES.length; - public static final int MAX_TRIADS = TRIAD_NAMES.length; + /** + * Returns an array whose ith element (for i in [1,16]) is the number of occurrences of the + * corresponding triad type in g. (The 0th element is not meaningful; this array is + * effectively 1-based.) + * + * @param g the graph whose properties are being measured + * @param the vertex type + * @param the edge type + * @return an array encoding the number of occurrences of each triad type + */ + public static long[] getCounts(DirectedGraph g) { + long[] count = new long[MAX_TRIADS]; - /** - * Returns an array whose ith element (for i in [1,16]) is the number of - * occurrences of the corresponding triad type in g. - * (The 0th element is not meaningful; this array is effectively 1-based.) 
- * - * @param g the graph whose properties are being measured - * @param the vertex type - * @param the edge type - * @return an array encoding the number of occurrences of each triad type - */ - public static long[] getCounts(DirectedGraph g) { - long[] count = new long[MAX_TRIADS]; + List id = new ArrayList(g.getVertices()); - List id = new ArrayList(g.getVertices()); + // apply algorithm to each edge, one at at time + for (int i_v = 0; i_v < g.getVertexCount(); i_v++) { + V v = id.get(i_v); + for (V u : g.getNeighbors(v)) { + int triType = -1; + if (id.indexOf(u) <= i_v) { + continue; + } + Set neighbors = new HashSet(g.getNeighbors(u)); + neighbors.addAll(g.getNeighbors(v)); + neighbors.remove(u); + neighbors.remove(v); + if (g.isSuccessor(v, u) && g.isSuccessor(u, v)) { + triType = 3; + } else { + triType = 2; + } + count[triType] += g.getVertexCount() - neighbors.size() - 2; + for (V w : neighbors) { + if (shouldCount(g, id, u, v, w)) { + count[triType(triCode(g, u, v, w))]++; + } + } + } + } + int sum = 0; + for (int i = 2; i <= 16; i++) { + sum += count[i]; + } + int n = g.getVertexCount(); + count[1] = n * (n - 1) * (n - 2) / 6 - sum; + return count; + } - // apply algorithm to each edge, one at at time - for (int i_v = 0; i_v < g.getVertexCount(); i_v++) { - V v = id.get(i_v); - for(V u : g.getNeighbors(v)) { - int triType = -1; - if (id.indexOf(u) <= i_v) - continue; - Set neighbors = new HashSet(g.getNeighbors(u)); - neighbors.addAll(g.getNeighbors(v)); - neighbors.remove(u); - neighbors.remove(v); - if (g.isSuccessor(v,u) && g.isSuccessor(u,v)) { - triType = 3; - } else { - triType = 2; - } - count[triType] += g.getVertexCount() - neighbors.size() - 2; - for (V w : neighbors) { - if (shouldCount(g, id, u, v, w)) { - count [ triType ( triCode(g, u, v, w) ) ] ++; - } - } - } - } - int sum = 0; - for (int i = 2; i <= 16; i++) { - sum += count[i]; - } - int n = g.getVertexCount(); - count[1] = n * (n-1) * (n-2) / 6 - sum; - return count; - } + /** + * 
This is the core of the technique in the paper. Returns an int from 0 to 63 which encodes the + * presence of all possible links between u, v, and w as bit flags: WU = 32, UW = 16, WV = 8, VW = + * 4, UV = 2, VU = 1 + * + * @param g the graph for which the calculation is being made + * @param u a vertex in g + * @param v a vertex in g + * @param w a vertex in g + * @param the vertex type + * @param the edge type + * @return an int encoding the presence of all links between u, v, and w + */ + public static int triCode(Graph g, V u, V v, V w) { + int i = 0; + i += link(g, v, u) ? 1 : 0; + i += link(g, u, v) ? 2 : 0; + i += link(g, v, w) ? 4 : 0; + i += link(g, w, v) ? 8 : 0; + i += link(g, u, w) ? 16 : 0; + i += link(g, w, u) ? 32 : 0; + return i; + } - /** - * This is the core of the technique in the paper. Returns an int from 0 to - * 63 which encodes the presence of all possible links between u, v, and w - * as bit flags: WU = 32, UW = 16, WV = 8, VW = 4, UV = 2, VU = 1 - * - * @param g the graph for which the calculation is being made - * @param u a vertex in g - * @param v a vertex in g - * @param w a vertex in g - * @param the vertex type - * @param the edge type - * @return an int encoding the presence of all links between u, v, and w - */ - public static int triCode(Graph g, V u, V v, V w) { - int i = 0; - i += link(g, v, u ) ? 1 : 0; - i += link(g, u, v ) ? 2 : 0; - i += link(g, v, w ) ? 4 : 0; - i += link(g, w, v ) ? 8 : 0; - i += link(g, u, w ) ? 16 : 0; - i += link(g, w, u ) ? 
32 : 0; - return i; - } + protected static boolean link(Graph g, V a, V b) { + return g.isPredecessor(b, a); + } - protected static boolean link(Graph g, V a, V b) { - return g.isPredecessor(b, a); - } - - - /** - * @param triCode the code returned by {@code triCode()} - * @return the string code associated with the numeric type - */ - public static int triType( int triCode ) { - return codeToType[ triCode ]; - } + /** + * @param triCode the code returned by {@code triCode()} + * @return the string code associated with the numeric type + */ + public static int triType(int triCode) { + return codeToType[triCode]; + } - /** - * For debugging purposes, this is copied straight out of the paper which - * means that they refer to triad types 1-16. - */ - protected static final int[] codeToType = { 1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, - 7, 11, 2, 6, 4, 8, 5, 9, 9, 13, 6, 10, 9, 14, 7, 14, 12, 15, 2, 5, - 6, 7, 6, 9, 10, 14, 4, 9, 9, 12, 8, 13, 14, 15, 3, 7, 8, 11, 7, 12, - 14, 15, 8, 14, 13, 15, 11, 15, 15, 16 }; + /** + * For debugging purposes, this is copied straight out of the paper which means that they refer to + * triad types 1-16. + */ + protected static final int[] codeToType = { + 1, 2, 2, 3, 2, 4, 6, 8, 2, 6, 5, 7, 3, 8, 7, 11, 2, 6, 4, 8, 5, 9, 9, 13, 6, 10, 9, 14, 7, 14, + 12, 15, 2, 5, 6, 7, 6, 9, 10, 14, 4, 9, 9, 12, 8, 13, 14, 15, 3, 7, 8, 11, 7, 12, 14, 15, 8, 14, + 13, 15, 11, 15, 15, 16 + }; - /** - * Return true iff this ordering is canonical and therefore we should build statistics for it. 
- * - * @param g the graph whose properties are being examined - * @param id a list of the vertices in g; used to assign an index to each - * @param u a vertex in g - * @param v a vertex in g - * @param w a vertex in g - * @param the vertex type - * @param the edge type - * @return true if index(u) < index(w), or if index(v) < index(w) < index(u) - * and v doesn't link to w; false otherwise - */ - protected static boolean shouldCount(Graph g, List id, V u, V v, V w) { - int i_u = id.indexOf(u); - int i_w = id.indexOf(w); - if (i_u < i_w) - return true; - int i_v = id.indexOf(v); - if ((i_v < i_w) && (i_w < i_u) && (!g.isNeighbor(w,v))) - return true; - return false; - } + /** + * Return true iff this ordering is canonical and therefore we should build statistics for it. + * + * @param g the graph whose properties are being examined + * @param id a list of the vertices in g; used to assign an index to each + * @param u a vertex in g + * @param v a vertex in g + * @param w a vertex in g + * @param the vertex type + * @param the edge type + * @return true if index(u) < index(w), or if index(v) < index(w) < index(u) and v + * doesn't link to w; false otherwise + */ + protected static boolean shouldCount(Graph g, List id, V u, V v, V w) { + int i_u = id.indexOf(u); + int i_w = id.indexOf(w); + if (i_u < i_w) { + return true; + } + int i_v = id.indexOf(v); + if ((i_v < i_w) && (i_w < i_u) && (!g.isNeighbor(w, v))) { + return true; + } + return false; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java index 26c89ab4..c84823e6 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorer.java @@ -1,7 +1,7 @@ /* * Created on Jul 6, 2007 * - * Copyright (c) 2007, The JUNG 
Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -11,360 +11,326 @@ */ package edu.uci.ics.jung.algorithms.scoring; -import java.util.HashMap; -import java.util.Map; - import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.scoring.util.DelegateToEdgeTransformer; import edu.uci.ics.jung.algorithms.scoring.util.VEPair; import edu.uci.ics.jung.algorithms.util.IterativeContext; import edu.uci.ics.jung.graph.Hypergraph; +import java.util.HashMap; +import java.util.Map; /** * An abstract class for algorithms that assign scores to vertices based on iterative methods. * Generally, any (concrete) subclass will function by creating an instance, and then either calling - * evaluate (if the user wants to iterate until the algorithms is 'done') or - * repeatedly call step (if the user wants to observe the values at each step). + * evaluate (if the user wants to iterate until the algorithms is 'done') or repeatedly + * call step (if the user wants to observe the values at each step). */ -public abstract class AbstractIterativeScorer implements IterativeContext, VertexScorer -{ - /** - * Maximum number of iterations to use before terminating. Defaults to 100. - */ - protected int max_iterations; - - /** - * Minimum change from one step to the next; if all changes are ≤ tolerance, - * no further updates will occur. - * Defaults to 0.001. - */ - protected double tolerance; - - /** - * The graph on which the calculations are to be made. - */ - protected Hypergraph graph; - - /** - * The total number of iterations used so far. - */ - protected int total_iterations; - - /** - * The edge weights used by this algorithm. - */ - protected Function, ? extends Number> edge_weights; - - /** - * Indicates whether the output and current values are in a 'swapped' state. - * Intended for internal use only. - */ - protected boolean output_reversed; - - /** - * The map in which the output values are stored. 
- */ - private Map output; - - /** - * The map in which the current values are stored. - */ - private Map current_values; - - /** - * A flag representing whether this instance tolerates disconnected graphs. - * Instances that do not accept disconnected graphs may have unexpected behavior - * on disconnected graphs; they are not guaranteed to do an explicit check. - * Defaults to true. - */ - private boolean accept_disconnected_graph; - - - protected boolean hyperedges_are_self_loops = false; - - /** - * Sets the output value for this vertex. - * @param v the vertex whose output value is to be set - * @param value the value to set - */ - protected void setOutputValue(V v, T value) - { - output.put(v, value); - } - - /** - * Gets the output value for this vertex. - * @param v the vertex whose output value is to be retrieved - * @return the output value for this vertex - */ - protected T getOutputValue(V v) - { - return output.get(v); - } - - /** - * Gets the current value for this vertex - * @param v the vertex whose current value is to be retrieved - * @return the current value for this vertex - */ - protected T getCurrentValue(V v) - { - return current_values.get(v); - } - - /** - * Sets the current value for this vertex. - * @param v the vertex whose current value is to be set - * @param value the current value to set - */ - protected void setCurrentValue(V v, T value) - { - current_values.put(v, value); - } - - /** - * The largest change seen so far among all vertex scores. - */ - protected double max_delta; - - /** - * Creates an instance for the specified graph and edge weights. 
- * @param g the graph for which the instance is to be created - * @param edge_weights the edge weights for this instance - */ - public AbstractIterativeScorer(Hypergraph g, - Function edge_weights) - { - this.graph = g; - this.max_iterations = 100; - this.tolerance = 0.001; - this.accept_disconnected_graph = true; - setEdgeWeights(edge_weights); - } - - /** - * Creates an instance for the specified graph g. - * NOTE: This constructor does not set the internal - * edge_weights variable. If this variable is used by - * the subclass which invoked this constructor, it must be initialized - * by that subclass. - * @param g the graph for which the instance is to be created - */ - public AbstractIterativeScorer(Hypergraph g) - { - this.graph = g; - this.max_iterations = 100; - this.tolerance = 0.001; - this.accept_disconnected_graph = true; - } - - /** - * Initializes the internal state for this instance. - */ - protected void initialize() - { - this.total_iterations = 0; - this.max_delta = Double.MIN_VALUE; - this.output_reversed = true; - this.current_values = new HashMap(); - this.output = new HashMap(); - } - - /** - * Steps through this scoring algorithm until a termination condition is reached. - */ - public void evaluate() - { - do - step(); - while (!done()); - } - - /** - * Returns true if the total number of iterations is greater than or equal to - * max_iterations - * or if the maximum value change observed is less than tolerance. - */ - public boolean done() - { - return total_iterations >= max_iterations || max_delta < tolerance; - } +public abstract class AbstractIterativeScorer + implements IterativeContext, VertexScorer { + /** Maximum number of iterations to use before terminating. Defaults to 100. */ + protected int max_iterations; - /** - * Performs one step of this algorithm; updates the state (value) for each vertex. 
- */ - public void step() - { - swapOutputForCurrent(); - - for (V v : graph.getVertices()) - { - double diff = update(v); - updateMaxDelta(v, diff); - } - total_iterations++; - afterStep(); - } + /** + * Minimum change from one step to the next; if all changes are ≤ tolerance, no further updates + * will occur. Defaults to 0.001. + */ + protected double tolerance; - /** - * - */ - protected void swapOutputForCurrent() - { - Map tmp = output; - output = current_values; - current_values = tmp; - output_reversed = !output_reversed; - } + /** The graph on which the calculations are to be made. */ + protected Hypergraph graph; - /** - * Updates the value for v. - * @param v the vertex whose value is to be updated - * @return the updated value - */ - protected abstract double update(V v); + /** The total number of iterations used so far. */ + protected int total_iterations; - protected void updateMaxDelta(V v, double diff) - { - max_delta = Math.max(max_delta, diff); - } - - protected void afterStep() {} - - public T getVertexScore(V v) - { - if (!graph.containsVertex(v)) - throw new IllegalArgumentException("Vertex " + v + " not an element of this graph"); - - return output.get(v); - } + /** The edge weights used by this algorithm. */ + protected Function, ? extends Number> edge_weights; - /** - * Returns the maximum number of iterations that this instance will use. - * @return the maximum number of iterations that evaluate will use - * prior to terminating - */ - public int getMaxIterations() - { - return max_iterations; - } + /** + * Indicates whether the output and current values are in a 'swapped' state. Intended for internal + * use only. + */ + protected boolean output_reversed; - /** - * Returns the number of iterations that this instance has used so far. 
- * @return the number of iterations that this instance has used so far - */ - public int getIterations() - { - return total_iterations; - } - - /** - * Sets the maximum number of times that evaluate will call step. - * @param max_iterations the maximum - */ - public void setMaxIterations(int max_iterations) - { - this.max_iterations = max_iterations; - } + /** The map in which the output values are stored. */ + private Map output; - /** - * Gets the size of the largest change (difference between the current and previous values) - * for any vertex that can be tolerated. Once all changes are less than this value, - * evaluate will terminate. - * @return the size of the largest change that evaluate() will permit - */ - public double getTolerance() - { - return tolerance; - } + /** The map in which the current values are stored. */ + private Map current_values; - /** - * Sets the size of the largest change (difference between the current and previous values) - * for any vertex that can be tolerated. - * @param tolerance the size of the largest change that evaluate() will permit - */ - public void setTolerance(double tolerance) - { - this.tolerance = tolerance; - } - - /** - * Returns the Function that this instance uses to associate edge weights with each edge. - * @return the Function that associates an edge weight with each edge - */ - public Function, ? extends Number> getEdgeWeights() - { - return edge_weights; - } + /** + * A flag representing whether this instance tolerates disconnected graphs. Instances that do not + * accept disconnected graphs may have unexpected behavior on disconnected graphs; they are not + * guaranteed to do an explicit check. Defaults to true. 
+ */ + private boolean accept_disconnected_graph; - /** - * Sets the Function that this instance uses to associate edge weights with each edge - * @param edge_weights the Function to use to associate an edge weight with each edge - * @see edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight - */ - public void setEdgeWeights(Function edge_weights) - { - this.edge_weights = new DelegateToEdgeTransformer(edge_weights); - } - - /** - * Gets the edge weight for e in the context of its (incident) vertex v. - * @param v the vertex incident to e as a context in which the edge weight is to be calculated - * @param e the edge whose weight is to be returned - * @return the edge weight for e in the context of its (incident) vertex v - */ - protected Number getEdgeWeight(V v, E e) - { - return edge_weights.apply(new VEPair(v,e)); - } - - /** - * Collects the 'potential' from v (its current value) if it has no outgoing edges; this - * can then be redistributed among the other vertices as a means of normalization. - * @param v the vertex whose potential is being collected - */ - protected void collectDisappearingPotential(V v) {} - - /** - * Specifies whether this instance should accept vertices with no outgoing edges. - * @param accept true if this instance should accept vertices with no outgoing edges, false otherwise - */ - public void acceptDisconnectedGraph(boolean accept) - { - this.accept_disconnected_graph = accept; - } - - /** - * Returns true if this instance accepts vertices with no outgoing edges, and false otherwise. - * @return true if this instance accepts vertices with no outgoing edges, otherwise false - */ - public boolean isDisconnectedGraphOK() - { - return this.accept_disconnected_graph; - } - - /** - * Specifies whether hyperedges are to be treated as self-loops. If they - * are, then potential will flow along a hyperedge a vertex to itself, - * just as it does to all other vertices incident to that hyperedge. 
- * @param arg if {@code true}, hyperedges are treated as self-loops - */ - public void setHyperedgesAreSelfLoops(boolean arg) - { - this.hyperedges_are_self_loops = arg; + protected boolean hyperedges_are_self_loops = false; + + /** + * Sets the output value for this vertex. + * + * @param v the vertex whose output value is to be set + * @param value the value to set + */ + protected void setOutputValue(V v, T value) { + output.put(v, value); + } + + /** + * Gets the output value for this vertex. + * + * @param v the vertex whose output value is to be retrieved + * @return the output value for this vertex + */ + protected T getOutputValue(V v) { + return output.get(v); + } + + /** + * Gets the current value for this vertex + * + * @param v the vertex whose current value is to be retrieved + * @return the current value for this vertex + */ + protected T getCurrentValue(V v) { + return current_values.get(v); + } + + /** + * Sets the current value for this vertex. + * + * @param v the vertex whose current value is to be set + * @param value the current value to set + */ + protected void setCurrentValue(V v, T value) { + current_values.put(v, value); + } + + /** The largest change seen so far among all vertex scores. */ + protected double max_delta; + + /** + * Creates an instance for the specified graph and edge weights. + * + * @param g the graph for which the instance is to be created + * @param edge_weights the edge weights for this instance + */ + public AbstractIterativeScorer( + Hypergraph g, Function edge_weights) { + this.graph = g; + this.max_iterations = 100; + this.tolerance = 0.001; + this.accept_disconnected_graph = true; + setEdgeWeights(edge_weights); + } + + /** + * Creates an instance for the specified graph g. NOTE: This constructor does not set + * the internal edge_weights variable. If this variable is used by the subclass which + * invoked this constructor, it must be initialized by that subclass. 
+ * + * @param g the graph for which the instance is to be created + */ + public AbstractIterativeScorer(Hypergraph g) { + this.graph = g; + this.max_iterations = 100; + this.tolerance = 0.001; + this.accept_disconnected_graph = true; + } + + /** Initializes the internal state for this instance. */ + protected void initialize() { + this.total_iterations = 0; + this.max_delta = Double.MIN_VALUE; + this.output_reversed = true; + this.current_values = new HashMap(); + this.output = new HashMap(); + } + + /** Steps through this scoring algorithm until a termination condition is reached. */ + public void evaluate() { + do { + step(); + } while (!done()); + } + + /** + * Returns true if the total number of iterations is greater than or equal to max_iterations + * or if the maximum value change observed is less than tolerance. + */ + public boolean done() { + return total_iterations >= max_iterations || max_delta < tolerance; + } + + /** Performs one step of this algorithm; updates the state (value) for each vertex. */ + public void step() { + swapOutputForCurrent(); + + for (V v : graph.getVertices()) { + double diff = update(v); + updateMaxDelta(v, diff); } + total_iterations++; + afterStep(); + } + + /** */ + protected void swapOutputForCurrent() { + Map tmp = output; + output = current_values; + current_values = tmp; + output_reversed = !output_reversed; + } + + /** + * Updates the value for v. + * + * @param v the vertex whose value is to be updated + * @return the updated value + */ + protected abstract double update(V v); + + protected void updateMaxDelta(V v, double diff) { + max_delta = Math.max(max_delta, diff); + } + + protected void afterStep() {} - /** - * Returns the effective number of vertices incident to this edge. If - * the graph is a binary relation or if hyperedges are treated as self-loops, - * the value returned is {@code graph.getIncidentCount(e)}; otherwise it is - * {@code graph.getIncidentCount(e) - 1}. 
- * @param e the edge whose incident edge count is requested - * @return the edge count, adjusted based on how hyperedges are treated - */ - protected int getAdjustedIncidentCount(E e) - { - return graph.getIncidentCount(e) - (hyperedges_are_self_loops ? 0 : 1); + public T getVertexScore(V v) { + if (!graph.containsVertex(v)) { + throw new IllegalArgumentException("Vertex " + v + " not an element of this graph"); } + + return output.get(v); + } + + /** + * Returns the maximum number of iterations that this instance will use. + * + * @return the maximum number of iterations that evaluate will use prior to + * terminating + */ + public int getMaxIterations() { + return max_iterations; + } + + /** + * Returns the number of iterations that this instance has used so far. + * + * @return the number of iterations that this instance has used so far + */ + public int getIterations() { + return total_iterations; + } + + /** + * Sets the maximum number of times that evaluate will call step. + * + * @param max_iterations the maximum + */ + public void setMaxIterations(int max_iterations) { + this.max_iterations = max_iterations; + } + + /** + * Gets the size of the largest change (difference between the current and previous values) for + * any vertex that can be tolerated. Once all changes are less than this value, evaluate + * will terminate. + * + * @return the size of the largest change that evaluate() will permit + */ + public double getTolerance() { + return tolerance; + } + + /** + * Sets the size of the largest change (difference between the current and previous values) for + * any vertex that can be tolerated. + * + * @param tolerance the size of the largest change that evaluate() will permit + */ + public void setTolerance(double tolerance) { + this.tolerance = tolerance; + } + + /** + * Returns the Function that this instance uses to associate edge weights with each edge. 
+ * + * @return the Function that associates an edge weight with each edge + */ + public Function, ? extends Number> getEdgeWeights() { + return edge_weights; + } + + /** + * Sets the Function that this instance uses to associate edge weights with each edge + * + * @param edge_weights the Function to use to associate an edge weight with each edge + * @see edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight + */ + public void setEdgeWeights(Function edge_weights) { + this.edge_weights = new DelegateToEdgeTransformer(edge_weights); + } + + /** + * Gets the edge weight for e in the context of its (incident) vertex v. + * + * @param v the vertex incident to e as a context in which the edge weight is to be calculated + * @param e the edge whose weight is to be returned + * @return the edge weight for e in the context of its (incident) vertex v + * + */ + protected Number getEdgeWeight(V v, E e) { + return edge_weights.apply(new VEPair(v, e)); + } + + /** + * Collects the 'potential' from v (its current value) if it has no outgoing edges; this can then + * be redistributed among the other vertices as a means of normalization. + * + * @param v the vertex whose potential is being collected + */ + protected void collectDisappearingPotential(V v) {} + + /** + * Specifies whether this instance should accept vertices with no outgoing edges. + * + * @param accept true if this instance should accept vertices with no outgoing edges, false + * otherwise + */ + public void acceptDisconnectedGraph(boolean accept) { + this.accept_disconnected_graph = accept; + } + + /** + * Returns true if this instance accepts vertices with no outgoing edges, and false otherwise. + * + * @return true if this instance accepts vertices with no outgoing edges, otherwise false + */ + public boolean isDisconnectedGraphOK() { + return this.accept_disconnected_graph; + } + + /** + * Specifies whether hyperedges are to be treated as self-loops. 
If they are, then potential will + * flow along a hyperedge a vertex to itself, just as it does to all other vertices incident to + * that hyperedge. + * + * @param arg if {@code true}, hyperedges are treated as self-loops + */ + public void setHyperedgesAreSelfLoops(boolean arg) { + this.hyperedges_are_self_loops = arg; + } + + /** + * Returns the effective number of vertices incident to this edge. If the graph is a binary + * relation or if hyperedges are treated as self-loops, the value returned is {@code + * graph.getIncidentCount(e)}; otherwise it is {@code graph.getIncidentCount(e) - 1}. + * + * @param e the edge whose incident edge count is requested + * @return the edge count, adjusted based on how hyperedges are treated + */ + protected int getAdjustedIncidentCount(E e) { + return graph.getIncidentCount(e) - (hyperedges_are_self_loops ? 0 : 1); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java index 35c5b23a..7464087f 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/AbstractIterativeScorerWithPriors.java @@ -1,7 +1,7 @@ /* * Created on Jul 14, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -12,106 +12,97 @@ package edu.uci.ics.jung.algorithms.scoring; import com.google.common.base.Function; - import edu.uci.ics.jung.graph.Hypergraph; /** - * An abstract class for iterative random-walk-based vertex scoring algorithms - * that have a - * fixed probability, for each vertex, of 'jumping' to that vertex at each - * step in the algorithm (rather than following a link out of that vertex). 
+ * An abstract class for iterative random-walk-based vertex scoring algorithms that have a fixed + * probability, for each vertex, of 'jumping' to that vertex at each step in the algorithm (rather + * than following a link out of that vertex). * * @param the vertex type * @param the edge type * @param the score type */ -public abstract class AbstractIterativeScorerWithPriors extends - AbstractIterativeScorer implements VertexScorer -{ - /** - * The prior probability of each vertex being visited on a given - * 'jump' (non-link-following) step. - */ - protected Function vertex_priors; +public abstract class AbstractIterativeScorerWithPriors + extends AbstractIterativeScorer implements VertexScorer { + /** + * The prior probability of each vertex being visited on a given 'jump' (non-link-following) step. + */ + protected Function vertex_priors; + + /** The probability of making a 'jump' at each step. */ + protected double alpha; - /** - * The probability of making a 'jump' at each step. - */ - protected double alpha; + /** + * Creates an instance for the specified graph, edge weights, vertex priors, and jump probability. + * + * @param g the graph whose vertices are to be assigned scores + * @param edge_weights the edge weights to use in the score assignment + * @param vertex_priors the prior probabilities of each vertex being 'jumped' to + * @param alpha the probability of making a 'jump' at each step + */ + public AbstractIterativeScorerWithPriors( + Hypergraph g, + Function edge_weights, + Function vertex_priors, + double alpha) { + super(g, edge_weights); + this.vertex_priors = vertex_priors; + this.alpha = alpha; + initialize(); + } - /** - * Creates an instance for the specified graph, edge weights, vertex - * priors, and jump probability. 
- * @param g the graph whose vertices are to be assigned scores - * @param edge_weights the edge weights to use in the score assignment - * @param vertex_priors the prior probabilities of each vertex being 'jumped' to - * @param alpha the probability of making a 'jump' at each step - */ - public AbstractIterativeScorerWithPriors(Hypergraph g, - Function edge_weights, - Function vertex_priors, double alpha) - { - super(g, edge_weights); - this.vertex_priors = vertex_priors; - this.alpha = alpha; - initialize(); - } + /** + * Creates an instance for the specified graph, vertex priors, and jump probability, with edge + * weights specified by the subclass. + * + * @param g the graph whose vertices are to be assigned scores + * @param vertex_priors the prior probabilities of each vertex being 'jumped' to + * @param alpha the probability of making a 'jump' at each step + */ + public AbstractIterativeScorerWithPriors( + Hypergraph g, Function vertex_priors, double alpha) { + super(g); + this.vertex_priors = vertex_priors; + this.alpha = alpha; + initialize(); + } - /** - * Creates an instance for the specified graph, vertex priors, and jump - * probability, with edge weights specified by the subclass. - * @param g the graph whose vertices are to be assigned scores - * @param vertex_priors the prior probabilities of each vertex being 'jumped' to - * @param alpha the probability of making a 'jump' at each step - */ - public AbstractIterativeScorerWithPriors(Hypergraph g, - Function vertex_priors, double alpha) - { - super(g); - this.vertex_priors = vertex_priors; - this.alpha = alpha; - initialize(); - } + /** Initializes the state of this instance. 
*/ + @Override + public void initialize() { + super.initialize(); + // initialize output values to priors + // (output and current are swapped before each step(), so current will + // have priors when update()s start happening) + for (V v : graph.getVertices()) setOutputValue(v, getVertexPrior(v)); + } - /** - * Initializes the state of this instance. - */ - @Override - public void initialize() - { - super.initialize(); - // initialize output values to priors - // (output and current are swapped before each step(), so current will - // have priors when update()s start happening) - for (V v : graph.getVertices()) - setOutputValue(v, getVertexPrior(v)); - } - - /** - * Returns the prior probability for v. - * @param v the vertex whose prior probability is being queried - * @return the prior probability for v - */ - protected S getVertexPrior(V v) - { - return vertex_priors.apply(v); - } + /** + * Returns the prior probability for v. + * + * @param v the vertex whose prior probability is being queried + * @return the prior probability for v + */ + protected S getVertexPrior(V v) { + return vertex_priors.apply(v); + } - /** - * Returns a Function which maps each vertex to its prior probability. - * @return a Function which maps each vertex to its prior probability - */ - public Function getVertexPriors() - { - return vertex_priors; - } + /** + * Returns a Function which maps each vertex to its prior probability. + * + * @return a Function which maps each vertex to its prior probability + */ + public Function getVertexPriors() { + return vertex_priors; + } - /** - * Returns the probability of making a 'jump' (non-link-following step). - * @return the probability of making a 'jump' (non-link-following step) - */ - public double getAlpha() - { - return alpha; - } + /** + * Returns the probability of making a 'jump' (non-link-following step). 
+ * + * @return the probability of making a 'jump' (non-link-following step) + */ + public double getAlpha() { + return alpha; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java index 28a1f1db..b4422d9e 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/BarycenterScorer.java @@ -1,7 +1,7 @@ /* * Created on Jul 12, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -12,44 +12,39 @@ package edu.uci.ics.jung.algorithms.scoring; import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.shortestpath.Distance; import edu.uci.ics.jung.graph.Hypergraph; -/** - * Assigns scores to each vertex according to the sum of its distances to all other vertices. - */ -public class BarycenterScorer extends DistanceCentralityScorer -{ - /** - * Creates an instance with the specified graph and distance metric. - * @param graph the input graph - * @param distance the distance metric to use - */ - public BarycenterScorer(Hypergraph graph, Distance distance) - { - super(graph, distance, false); - } - - /** - * Creates an instance with the specified graph and edge weights. - * Will generate a Distance metric internally based on the edge weights. - * @param graph the input graph - * @param edge_weights the edge weights to use to calculate vertex/vertex distances - */ - public BarycenterScorer(Hypergraph graph, Function edge_weights) - { - super(graph, edge_weights, false); - } +/** Assigns scores to each vertex according to the sum of its distances to all other vertices. */ +public class BarycenterScorer extends DistanceCentralityScorer { + /** + * Creates an instance with the specified graph and distance metric. 
+ * + * @param graph the input graph + * @param distance the distance metric to use + */ + public BarycenterScorer(Hypergraph graph, Distance distance) { + super(graph, distance, false); + } + + /** + * Creates an instance with the specified graph and edge weights. Will generate a Distance + * metric internally based on the edge weights. + * + * @param graph the input graph + * @param edge_weights the edge weights to use to calculate vertex/vertex distances + */ + public BarycenterScorer(Hypergraph graph, Function edge_weights) { + super(graph, edge_weights, false); + } - /** - * Creates an instance with the specified graph. - * Will generate a Distance metric internally assuming that the - * graph is unweighted. - * @param graph the input graph - */ - public BarycenterScorer(Hypergraph graph) - { - super(graph, false); - } + /** + * Creates an instance with the specified graph. Will generate a Distance metric + * internally assuming that the graph is unweighted. + * + * @param graph the input graph + */ + public BarycenterScorer(Hypergraph graph) { + super(graph, false); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java index b6162f5d..a34e013f 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/BetweennessCentrality.java @@ -1,16 +1,18 @@ /** - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * - * All rights reserved. + *

All rights reserved. * - * This software is open-source under the BSD license; see either - * "license.txt" or - * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * Created on Sep 16, 2008 - * + *

This software is open-source under the BSD license; see either "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. Created on Sep 16, 2008 */ package edu.uci.ics.jung.algorithms.scoring; +import com.google.common.base.Function; +import com.google.common.base.Functions; +import edu.uci.ics.jung.algorithms.util.MapBinaryHeap; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.UndirectedGraph; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; @@ -20,331 +22,305 @@ import java.util.Queue; import java.util.Stack; -import com.google.common.base.Function; -import com.google.common.base.Functions; - -import edu.uci.ics.jung.algorithms.util.MapBinaryHeap; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; - /** * Computes betweenness centrality for each vertex and edge in the graph. - * - * @see "Ulrik Brandes: A Faster Algorithm for Betweenness Centrality. Journal of Mathematical Sociology 25(2):163-177, 2001." + * + * @see "Ulrik Brandes: A Faster Algorithm for Betweenness Centrality. Journal of Mathematical + * Sociology 25(2):163-177, 2001." */ -public class BetweennessCentrality - implements VertexScorer, EdgeScorer -{ - protected Graph graph; - protected Map vertex_scores; - protected Map edge_scores; - protected Map vertex_data; - - /** - * Calculates betweenness scores based on the all-pairs unweighted shortest paths - * in the graph. - * @param graph the graph for which the scores are to be calculated - */ - public BetweennessCentrality(Graph graph) - { - initialize(graph); - computeBetweenness(new LinkedList(), Functions.constant(1)); - } +public class BetweennessCentrality implements VertexScorer, EdgeScorer { + protected Graph graph; + protected Map vertex_scores; + protected Map edge_scores; + protected Map vertex_data; - /** - * Calculates betweenness scores based on the all-pairs weighted shortest paths in the - * graph. - * - *

NOTE: This version of the algorithm may not work correctly on all graphs; we're still - * working out the bugs. Use at your own risk. - * @param graph the graph for which the scores are to be calculated - * @param edge_weights the edge weights to be used in the path length calculations - */ - public BetweennessCentrality(Graph graph, - Function edge_weights) - { - // reject negative-weight edges up front - for (E e : graph.getEdges()) - { - double e_weight = edge_weights.apply(e).doubleValue(); - if (e_weight < 0) - throw new IllegalArgumentException(String.format( - "Weight for edge '%s' is < 0: %d", e, e_weight)); - } - - initialize(graph); - computeBetweenness(new MapBinaryHeap(new BetweennessComparator()), - edge_weights); - } + /** + * Calculates betweenness scores based on the all-pairs unweighted shortest paths in the graph. + * + * @param graph the graph for which the scores are to be calculated + */ + public BetweennessCentrality(Graph graph) { + initialize(graph); + computeBetweenness(new LinkedList(), Functions.constant(1)); + } - protected void initialize(Graph graph) - { - this.graph = graph; - this.vertex_scores = new HashMap(); - this.edge_scores = new HashMap(); - this.vertex_data = new HashMap(); - - for (V v : graph.getVertices()) - this.vertex_scores.put(v, 0.0); - - for (E e : graph.getEdges()) - this.edge_scores.put(e, 0.0); - } - - protected void computeBetweenness(Queue queue, - Function edge_weights) - { - for (V v : graph.getVertices()) - { - // initialize the betweenness data for this new vertex - for (V s : graph.getVertices()) - this.vertex_data.put(s, new BetweennessData()); + /** + * Calculates betweenness scores based on the all-pairs weighted shortest paths in the graph. + * + *

NOTE: This version of the algorithm may not work correctly on all graphs; we're still + * working out the bugs. Use at your own risk. + * + * @param graph the graph for which the scores are to be calculated + * @param edge_weights the edge weights to be used in the path length calculations + */ + public BetweennessCentrality( + Graph graph, Function edge_weights) { + // reject negative-weight edges up front + for (E e : graph.getEdges()) { + double e_weight = edge_weights.apply(e).doubleValue(); + if (e_weight < 0) + throw new IllegalArgumentException( + String.format("Weight for edge '%s' is < 0: %d", e, e_weight)); + } -// if (v.equals(new Integer(0))) -// System.out.println("pause"); - - vertex_data.get(v).numSPs = 1; - vertex_data.get(v).distance = 0; + initialize(graph); + computeBetweenness(new MapBinaryHeap(new BetweennessComparator()), edge_weights); + } - Stack stack = new Stack(); -// Buffer queue = new UnboundedFifoBuffer(); -// queue.add(v); - queue.offer(v); + protected void initialize(Graph graph) { + this.graph = graph; + this.vertex_scores = new HashMap(); + this.edge_scores = new HashMap(); + this.vertex_data = new HashMap(); - while (!queue.isEmpty()) - { -// V w = queue.remove(); - V w = queue.poll(); - stack.push(w); - BetweennessData w_data = vertex_data.get(w); - - for (E e : graph.getOutEdges(w)) - { - // TODO (jrtom): change this to getOtherVertices(w, e) - V x = graph.getOpposite(w, e); - if (x.equals(w)) - continue; - double wx_weight = edge_weights.apply(e).doubleValue(); - - -// for(V x : graph.getSuccessors(w)) -// { -// if (x.equals(w)) -// continue; - - // FIXME: the other problem is that I need to - // keep putting the neighbors of things we've just - // discovered in the queue, if they're undiscovered or - // at greater distance. - - // FIXME: this is the problem, right here, I think: - // need to update position in queue if distance changes - // (which can only happen with weighted edges). 
- // for each outgoing edge e from w, get other end x - // if x not already visited (dist x < 0) - // set x's distance to w's dist + edge weight - // add x to queue; pri in queue is x's dist - // if w's dist + edge weight < x's dist - // update x's dist - // update x in queue (MapBinaryHeap) - // clear x's incoming edge list - // if w's dist + edge weight = x's dist - // add e to x's incoming edge list - - BetweennessData x_data = vertex_data.get(x); - double x_potential_dist = w_data.distance + wx_weight; - - if (x_data.distance < 0) - { -// queue.add(x); -// vertex_data.get(x).distance = vertex_data.get(w).distance + 1; - x_data.distance = x_potential_dist; - queue.offer(x); - } - - // note: - // (1) this can only happen with weighted edges - // (2) x's SP count and incoming edges are updated below - if (x_data.distance > x_potential_dist) - { - x_data.distance = x_potential_dist; - // invalidate previously identified incoming edges - // (we have a new shortest path distance to x) - x_data.incomingEdges.clear(); - // update x's position in queue - ((MapBinaryHeap)queue).update(x); - } -// if (vertex_data.get(x).distance == vertex_data.get(w).distance + 1) - // -// if (x_data.distance == x_potential_dist) -// { -// x_data.numSPs += w_data.numSPs; -//// vertex_data.get(x).predecessors.add(w); -// x_data.incomingEdges.add(e); -// } - } - for (E e: graph.getOutEdges(w)) - { - V x = graph.getOpposite(w, e); - if (x.equals(w)) - continue; - double e_weight = edge_weights.apply(e).doubleValue(); - BetweennessData x_data = vertex_data.get(x); - double x_potential_dist = w_data.distance + e_weight; - if (x_data.distance == x_potential_dist) - { - x_data.numSPs += w_data.numSPs; -// vertex_data.get(x).predecessors.add(w); - x_data.incomingEdges.add(e); - } - } - } - while (!stack.isEmpty()) - { - V x = stack.pop(); + for (V v : graph.getVertices()) this.vertex_scores.put(v, 0.0); -// for (V w : vertex_data.get(x).predecessors) - for (E e : vertex_data.get(x).incomingEdges) 
- { - V w = graph.getOpposite(x, e); - double partialDependency = - vertex_data.get(w).numSPs / vertex_data.get(x).numSPs * - (1.0 + vertex_data.get(x).dependency); - vertex_data.get(w).dependency += partialDependency; -// E w_x = graph.findEdge(w, x); -// double w_x_score = edge_scores.get(w_x).doubleValue(); -// w_x_score += partialDependency; -// edge_scores.put(w_x, w_x_score); - double e_score = edge_scores.get(e).doubleValue(); - edge_scores.put(e, e_score + partialDependency); - } - if (!x.equals(v)) - { - double x_score = vertex_scores.get(x).doubleValue(); - x_score += vertex_data.get(x).dependency; - vertex_scores.put(x, x_score); - } - } - } + for (E e : graph.getEdges()) this.edge_scores.put(e, 0.0); + } - if(graph instanceof UndirectedGraph) - { - for (V v : graph.getVertices()) { - double v_score = vertex_scores.get(v).doubleValue(); - v_score /= 2.0; - vertex_scores.put(v, v_score); - } - for (E e : graph.getEdges()) { - double e_score = edge_scores.get(e).doubleValue(); - e_score /= 2.0; - edge_scores.put(e, e_score); - } - } + protected void computeBetweenness( + Queue queue, Function edge_weights) { + for (V v : graph.getVertices()) { + // initialize the betweenness data for this new vertex + for (V s : graph.getVertices()) this.vertex_data.put(s, new BetweennessData()); + + // if (v.equals(new Integer(0))) + // System.out.println("pause"); + + vertex_data.get(v).numSPs = 1; + vertex_data.get(v).distance = 0; + + Stack stack = new Stack(); + // Buffer queue = new UnboundedFifoBuffer(); + // queue.add(v); + queue.offer(v); - vertex_data.clear(); - } + while (!queue.isEmpty()) { + // V w = queue.remove(); + V w = queue.poll(); + stack.push(w); + BetweennessData w_data = vertex_data.get(w); -// protected void computeWeightedBetweenness(Function edge_weights) -// { -// for (V v : graph.getVertices()) -// { -// // initialize the betweenness data for this new vertex -// for (V s : graph.getVertices()) -// this.vertex_data.put(s, new BetweennessData()); 
-// vertex_data.get(v).numSPs = 1; -// vertex_data.get(v).distance = 0; -// -// Stack stack = new Stack(); -//// Buffer queue = new UnboundedFifoBuffer(); -// SortedSet pqueue = new TreeSet(new BetweennessComparator()); -//// queue.add(v); -// pqueue.add(v); -// -//// while (!queue.isEmpty()) -// while (!pqueue.isEmpty()) -// { -//// V w = queue.remove(); -// V w = pqueue.first(); -// pqueue.remove(w); -// stack.push(w); -// -//// for(V x : graph.getSuccessors(w)) -// for (E e : graph.getOutEdges(w)) -// { -// // TODO (jrtom): change this to getOtherVertices(w, e) -// V x = graph.getOpposite(w, e); -// if (x.equals(w)) -// continue; -// double e_weight = edge_weights.transform(e).doubleValue(); -// -// if (vertex_data.get(x).distance < 0) -// { -//// queue.add(x); -// pqueue.add(v); -//// vertex_data.get(x).distance = vertex_data.get(w).distance + 1; -// vertex_data.get(x).distance = -// vertex_data.get(w).distance + e_weight; -// } -// -//// if (vertex_data.get(x).distance == vertex_data.get(w).distance + 1) -// if (vertex_data.get(x).distance == -// vertex_data.get(w).distance + e_weight) -// { -// vertex_data.get(x).numSPs += vertex_data.get(w).numSPs; -// vertex_data.get(x).predecessors.add(w); -// } -// } -// } -// updateScores(v, stack); -// } -// -// if(graph instanceof UndirectedGraph) -// adjustUndirectedScores(); -// -// vertex_data.clear(); -// } - - public Double getVertexScore(V v) - { - return vertex_scores.get(v); - } + for (E e : graph.getOutEdges(w)) { + // TODO (jrtom): change this to getOtherVertices(w, e) + V x = graph.getOpposite(w, e); + if (x.equals(w)) { + continue; + } + double wx_weight = edge_weights.apply(e).doubleValue(); - public Double getEdgeScore(E e) - { - return edge_scores.get(e); - } + // for(V x : graph.getSuccessors(w)) + // { + // if (x.equals(w)) + // continue; - private class BetweennessData - { - double distance; - double numSPs; -// List predecessors; - List incomingEdges; - double dependency; + // FIXME: the other 
problem is that I need to + // keep putting the neighbors of things we've just + // discovered in the queue, if they're undiscovered or + // at greater distance. - BetweennessData() - { - distance = -1; - numSPs = 0; -// predecessors = new ArrayList(); - incomingEdges = new ArrayList(); - dependency = 0; + // FIXME: this is the problem, right here, I think: + // need to update position in queue if distance changes + // (which can only happen with weighted edges). + // for each outgoing edge e from w, get other end x + // if x not already visited (dist x < 0) + // set x's distance to w's dist + edge weight + // add x to queue; pri in queue is x's dist + // if w's dist + edge weight < x's dist + // update x's dist + // update x in queue (MapBinaryHeap) + // clear x's incoming edge list + // if w's dist + edge weight = x's dist + // add e to x's incoming edge list + + BetweennessData x_data = vertex_data.get(x); + double x_potential_dist = w_data.distance + wx_weight; + + if (x_data.distance < 0) { + // queue.add(x); + // vertex_data.get(x).distance = vertex_data.get(w).distance + 1; + x_data.distance = x_potential_dist; + queue.offer(x); + } + + // note: + // (1) this can only happen with weighted edges + // (2) x's SP count and incoming edges are updated below + if (x_data.distance > x_potential_dist) { + x_data.distance = x_potential_dist; + // invalidate previously identified incoming edges + // (we have a new shortest path distance to x) + x_data.incomingEdges.clear(); + // update x's position in queue + ((MapBinaryHeap) queue).update(x); + } + // if (vertex_data.get(x).distance == vertex_data.get(w).distance + 1) + // + // if (x_data.distance == x_potential_dist) + // { + // x_data.numSPs += w_data.numSPs; + //// vertex_data.get(x).predecessors.add(w); + // x_data.incomingEdges.add(e); + // } } - - @Override - public String toString() - { - return "[d:" + distance + ", sp:" + numSPs + - ", p:" + incomingEdges + ", d:" + dependency + "]\n"; -// ", p:" + 
predecessors + ", d:" + dependency + "]\n"; + for (E e : graph.getOutEdges(w)) { + V x = graph.getOpposite(w, e); + if (x.equals(w)) { + continue; + } + double e_weight = edge_weights.apply(e).doubleValue(); + BetweennessData x_data = vertex_data.get(x); + double x_potential_dist = w_data.distance + e_weight; + if (x_data.distance == x_potential_dist) { + x_data.numSPs += w_data.numSPs; + // vertex_data.get(x).predecessors.add(w); + x_data.incomingEdges.add(e); + } + } + } + while (!stack.isEmpty()) { + V x = stack.pop(); + + // for (V w : vertex_data.get(x).predecessors) + for (E e : vertex_data.get(x).incomingEdges) { + V w = graph.getOpposite(x, e); + double partialDependency = + vertex_data.get(w).numSPs + / vertex_data.get(x).numSPs + * (1.0 + vertex_data.get(x).dependency); + vertex_data.get(w).dependency += partialDependency; + // E w_x = graph.findEdge(w, x); + // double w_x_score = edge_scores.get(w_x).doubleValue(); + // w_x_score += partialDependency; + // edge_scores.put(w_x, w_x_score); + double e_score = edge_scores.get(e).doubleValue(); + edge_scores.put(e, e_score + partialDependency); } + if (!x.equals(v)) { + double x_score = vertex_scores.get(x).doubleValue(); + x_score += vertex_data.get(x).dependency; + vertex_scores.put(x, x_score); + } + } + } + + if (graph instanceof UndirectedGraph) { + for (V v : graph.getVertices()) { + double v_score = vertex_scores.get(v).doubleValue(); + v_score /= 2.0; + vertex_scores.put(v, v_score); + } + for (E e : graph.getEdges()) { + double e_score = edge_scores.get(e).doubleValue(); + e_score /= 2.0; + edge_scores.put(e, e_score); + } + } + + vertex_data.clear(); + } + + // protected void computeWeightedBetweenness(Function edge_weights) + // { + // for (V v : graph.getVertices()) + // { + // // initialize the betweenness data for this new vertex + // for (V s : graph.getVertices()) + // this.vertex_data.put(s, new BetweennessData()); + // vertex_data.get(v).numSPs = 1; + // vertex_data.get(v).distance = 0; + 
// + // Stack stack = new Stack(); + //// Buffer queue = new UnboundedFifoBuffer(); + // SortedSet pqueue = new TreeSet(new BetweennessComparator()); + //// queue.add(v); + // pqueue.add(v); + // + //// while (!queue.isEmpty()) + // while (!pqueue.isEmpty()) + // { + //// V w = queue.remove(); + // V w = pqueue.first(); + // pqueue.remove(w); + // stack.push(w); + // + //// for(V x : graph.getSuccessors(w)) + // for (E e : graph.getOutEdges(w)) + // { + // // TODO (jrtom): change this to getOtherVertices(w, e) + // V x = graph.getOpposite(w, e); + // if (x.equals(w)) + // continue; + // double e_weight = edge_weights.transform(e).doubleValue(); + // + // if (vertex_data.get(x).distance < 0) + // { + //// queue.add(x); + // pqueue.add(v); + //// vertex_data.get(x).distance = vertex_data.get(w).distance + 1; + // vertex_data.get(x).distance = + // vertex_data.get(w).distance + e_weight; + // } + // + //// if (vertex_data.get(x).distance == vertex_data.get(w).distance + 1) + // if (vertex_data.get(x).distance == + // vertex_data.get(w).distance + e_weight) + // { + // vertex_data.get(x).numSPs += vertex_data.get(w).numSPs; + // vertex_data.get(x).predecessors.add(w); + // } + // } + // } + // updateScores(v, stack); + // } + // + // if(graph instanceof UndirectedGraph) + // adjustUndirectedScores(); + // + // vertex_data.clear(); + // } + + public Double getVertexScore(V v) { + return vertex_scores.get(v); + } + + public Double getEdgeScore(E e) { + return edge_scores.get(e); + } + + private class BetweennessData { + double distance; + double numSPs; + // List predecessors; + List incomingEdges; + double dependency; + + BetweennessData() { + distance = -1; + numSPs = 0; + // predecessors = new ArrayList(); + incomingEdges = new ArrayList(); + dependency = 0; + } + + @Override + public String toString() { + return "[d:" + + distance + + ", sp:" + + numSPs + + ", p:" + + incomingEdges + + ", d:" + + dependency + + "]\n"; + // ", p:" + predecessors + ", d:" + dependency 
+ "]\n"; } - - private class BetweennessComparator implements Comparator - { - public int compare(V v1, V v2) - { - return vertex_data.get(v1).distance > vertex_data.get(v2).distance ? 1 : -1; - } + } + + private class BetweennessComparator implements Comparator { + public int compare(V v1, V v2) { + return vertex_data.get(v1).distance > vertex_data.get(v2).distance ? 1 : -1; } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java index bfcfe33e..604ba961 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/ClosenessCentrality.java @@ -1,7 +1,7 @@ /* * Created on Jul 12, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -12,44 +12,42 @@ package edu.uci.ics.jung.algorithms.scoring; import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.shortestpath.Distance; import edu.uci.ics.jung.graph.Hypergraph; /** * Assigns scores to each vertex based on the mean distance to each other vertex. - * + * * @author Joshua O'Madadhain */ -public class ClosenessCentrality extends DistanceCentralityScorer -{ +public class ClosenessCentrality extends DistanceCentralityScorer { - /** - * Creates an instance using the specified vertex/vertex distance metric. - * @param graph the input - * @param distance the vertex/vertex distance metric. - */ - public ClosenessCentrality(Hypergraph graph, Distance distance) - { - super(graph, distance, true); - } + /** + * Creates an instance using the specified vertex/vertex distance metric. + * + * @param graph the input + * @param distance the vertex/vertex distance metric. 
+ */ + public ClosenessCentrality(Hypergraph graph, Distance distance) { + super(graph, distance, true); + } - /** - * Creates an instance which measures distance using the specified edge weights. - * @param graph the input graph - * @param edge_weights the edge weights to be used to determine vertex/vertex distances - */ - public ClosenessCentrality(Hypergraph graph, Function edge_weights) - { - super(graph, edge_weights, true); - } + /** + * Creates an instance which measures distance using the specified edge weights. + * + * @param graph the input graph + * @param edge_weights the edge weights to be used to determine vertex/vertex distances + */ + public ClosenessCentrality(Hypergraph graph, Function edge_weights) { + super(graph, edge_weights, true); + } - /** - * Creates an instance which measures distance on the graph without edge weights. - * @param graph the graph whose vertices' centrality scores will be calculated - */ - public ClosenessCentrality(Hypergraph graph) - { - super(graph, true); - } + /** + * Creates an instance which measures distance on the graph without edge weights. + * + * @param graph the graph whose vertices' centrality scores will be calculated + */ + public ClosenessCentrality(Hypergraph graph) { + super(graph, true); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java index 838a5c57..e1af59e7 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/DegreeScorer.java @@ -1,7 +1,7 @@ /* * Created on Jul 6, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -18,28 +18,25 @@ * * @param the vertex type */ -public class DegreeScorer implements VertexScorer -{ - /** - * The graph for which scores are to be generated. 
- */ - protected Hypergraph graph; - - /** - * Creates an instance for the specified graph. - * @param graph the input graph - */ - public DegreeScorer(Hypergraph graph) - { - this.graph = graph; - } - - /** - * Returns the degree of the vertex. - * @return the degree of the vertex - */ - public Integer getVertexScore(V v) - { - return graph.degree(v); - } +public class DegreeScorer implements VertexScorer { + /** The graph for which scores are to be generated. */ + protected Hypergraph graph; + + /** + * Creates an instance for the specified graph. + * + * @param graph the input graph + */ + public DegreeScorer(Hypergraph graph) { + this.graph = graph; + } + + /** + * Returns the degree of the vertex. + * + * @return the degree of the vertex + */ + public Integer getVertexScore(V v) { + return graph.degree(v); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java index 9ac488b2..d7b9cea7 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/DistanceCentralityScorer.java @@ -1,7 +1,7 @@ /* * Created on Jul 10, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -11,239 +11,224 @@ */ package edu.uci.ics.jung.algorithms.scoring; -import java.util.HashMap; -import java.util.Map; - import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance; import edu.uci.ics.jung.algorithms.shortestpath.Distance; import edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath; import edu.uci.ics.jung.graph.Hypergraph; +import java.util.HashMap; +import java.util.Map; /** - * Assigns scores to vertices based on their distances to each other vertex - * in the graph. 
- * - * This class optionally normalizes its results based on the value of its - * 'averaging' constructor parameter. If it is true, - * then the value returned for vertex v is 1 / (_average_ distance from v to all other vertices); - * this is sometimes called closeness centrality. - * If it is false, then the value returned is 1 / (_total_ distance from - * v to all other vertices); this is sometimes referred to as barycenter centrality. - * (If the average/total distance is 0, the value returned is {@code Double.POSITIVE_INFINITY}.) - * + * Assigns scores to vertices based on their distances to each other vertex in the graph. + * + *

This class optionally normalizes its results based on the value of its 'averaging' constructor + * parameter. If it is true, then the value returned for vertex v is 1 / (_average_ + * distance from v to all other vertices); this is sometimes called closeness centrality. If + * it is false, then the value returned is 1 / (_total_ distance from v to all other + * vertices); this is sometimes referred to as barycenter centrality. (If the average/total + * distance is 0, the value returned is {@code Double.POSITIVE_INFINITY}.) + * * @see BarycenterScorer * @see ClosenessCentrality */ -public class DistanceCentralityScorer implements VertexScorer -{ - /** - * The graph on which the vertex scores are to be calculated. - */ - protected Hypergraph graph; - - /** - * The metric to use for specifying the distance between pairs of vertices. - */ - protected Distance distance; - - /** - * The cache for the output results. Null encodes "not yet calculated", - * < 0 encodes "no such distance exists". - */ - protected Map output; - - /** - * Specifies whether the values returned are the sum of the v-distances - * or the mean v-distance. - */ - protected boolean averaging; - - /** - * Specifies whether, for a vertex v with missing (null) distances, - * v's score should ignore the missing values or be set to 'null'. - * Defaults to 'true'. - */ - protected boolean ignore_missing; - - /** - * Specifies whether the values returned should ignore self-distances - * (distances from v to itself). - * Defaults to 'true'. - */ - protected boolean ignore_self_distances; - - /** - * Creates an instance with the specified graph, distance metric, and - * averaging behavior. - * - * @param graph The graph on which the vertex scores are to be calculated. - * @param distance The metric to use for specifying the distance between - * pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of all - * v-distances or the mean v-distance. 
- * @param ignore_missing Specifies whether scores for missing distances - * are to ignore missing distances or be set to null. - * @param ignore_self_distances Specifies whether distances from a vertex - * to itself should be included in its score. - */ - public DistanceCentralityScorer(Hypergraph graph, Distance distance, - boolean averaging, boolean ignore_missing, - boolean ignore_self_distances) - { - this.graph = graph; - this.distance = distance; - this.averaging = averaging; - this.ignore_missing = ignore_missing; - this.ignore_self_distances = ignore_self_distances; - this.output = new HashMap(); - } +public class DistanceCentralityScorer implements VertexScorer { + /** The graph on which the vertex scores are to be calculated. */ + protected Hypergraph graph; + + /** The metric to use for specifying the distance between pairs of vertices. */ + protected Distance distance; + + /** + * The cache for the output results. Null encodes "not yet calculated", < 0 encodes "no such + * distance exists". + */ + protected Map output; + + /** + * Specifies whether the values returned are the sum of the v-distances or the mean v-distance. + */ + protected boolean averaging; - /** - * Equivalent to this(graph, distance, averaging, true, true). - * - * @param graph The graph on which the vertex scores are to be calculated. - * @param distance The metric to use for specifying the distance between - * pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of all - * v-distances or the mean v-distance. - */ - public DistanceCentralityScorer(Hypergraph graph, Distance distance, - boolean averaging) - { - this(graph, distance, averaging, true, true); + /** + * Specifies whether, for a vertex v with missing (null) distances, v's + * score should ignore the missing values or be set to 'null'. Defaults to 'true'. 
+ */ + protected boolean ignore_missing; + + /** + * Specifies whether the values returned should ignore self-distances (distances from v + * to itself). Defaults to 'true'. + */ + protected boolean ignore_self_distances; + + /** + * Creates an instance with the specified graph, distance metric, and averaging behavior. + * + * @param graph The graph on which the vertex scores are to be calculated. + * @param distance The metric to use for specifying the distance between pairs of vertices. + * @param averaging Specifies whether the values returned is the sum of all v-distances or the + * mean v-distance. + * @param ignore_missing Specifies whether scores for missing distances are to ignore missing + * distances or be set to null. + * @param ignore_self_distances Specifies whether distances from a vertex to itself should be + * included in its score. + */ + public DistanceCentralityScorer( + Hypergraph graph, + Distance distance, + boolean averaging, + boolean ignore_missing, + boolean ignore_self_distances) { + this.graph = graph; + this.distance = distance; + this.averaging = averaging; + this.ignore_missing = ignore_missing; + this.ignore_self_distances = ignore_self_distances; + this.output = new HashMap(); + } + + /** + * Equivalent to this(graph, distance, averaging, true, true). + * + * @param graph The graph on which the vertex scores are to be calculated. + * @param distance The metric to use for specifying the distance between pairs of vertices. + * @param averaging Specifies whether the values returned is the sum of all v-distances or the + * mean v-distance. + */ + public DistanceCentralityScorer(Hypergraph graph, Distance distance, boolean averaging) { + this(graph, distance, averaging, true, true); + } + + /** + * Creates an instance with the specified graph and averaging behavior whose vertex distances are + * calculated based on the specified edge weights. + * + * @param graph The graph on which the vertex scores are to be calculated. 
+ * @param edge_weights The edge weights to use for specifying the distance between pairs of + * vertices. + * @param averaging Specifies whether the values returned is the sum of all v-distances or the + * mean v-distance. + * @param ignore_missing Specifies whether scores for missing distances are to ignore missing + * distances or be set to null. + * @param ignore_self_distances Specifies whether distances from a vertex to itself should be + * included in its score. + */ + public DistanceCentralityScorer( + Hypergraph graph, + Function edge_weights, + boolean averaging, + boolean ignore_missing, + boolean ignore_self_distances) { + this( + graph, + new DijkstraDistance(graph, edge_weights), + averaging, + ignore_missing, + ignore_self_distances); + } + + /** + * Equivalent to this(graph, edge_weights, averaging, true, true). + * + * @param graph The graph on which the vertex scores are to be calculated. + * @param edge_weights The edge weights to use for specifying the distance between pairs of + * vertices. + * @param averaging Specifies whether the values returned is the sum of all v-distances or the + * mean v-distance. + */ + public DistanceCentralityScorer( + Hypergraph graph, Function edge_weights, boolean averaging) { + this(graph, new DijkstraDistance(graph, edge_weights), averaging, true, true); + } + + /** + * Creates an instance with the specified graph and averaging behavior whose vertex distances are + * calculated on the unweighted graph. + * + * @param graph The graph on which the vertex scores are to be calculated. + * @param averaging Specifies whether the values returned is the sum of all v-distances or the + * mean v-distance. + * @param ignore_missing Specifies whether scores for missing distances are to ignore missing + * distances or be set to null. + * @param ignore_self_distances Specifies whether distances from a vertex to itself should be + * included in its score. 
+ */ + public DistanceCentralityScorer( + Hypergraph graph, + boolean averaging, + boolean ignore_missing, + boolean ignore_self_distances) { + this( + graph, + new UnweightedShortestPath(graph), + averaging, + ignore_missing, + ignore_self_distances); + } + + /** + * Equivalent to this(graph, averaging, true, true). + * + * @param graph The graph on which the vertex scores are to be calculated. + * @param averaging Specifies whether the values returned is the sum of all v-distances or the + * mean v-distance. + */ + public DistanceCentralityScorer(Hypergraph graph, boolean averaging) { + this(graph, new UnweightedShortestPath(graph), averaging, true, true); + } + + /** + * Calculates the score for the specified vertex. Returns {@code null} if there are missing + * distances and such are not ignored by this instance. + */ + public Double getVertexScore(V v) { + Double value = output.get(v); + if (value != null) { + if (value < 0) { + return null; + } + return value; } - - /** - * Creates an instance with the specified graph and averaging behavior - * whose vertex distances are calculated based on the specified edge - * weights. - * - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param edge_weights The edge weights to use for specifying the distance - * between pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - * @param ignore_missing Specifies whether scores for missing distances - * are to ignore missing distances or be set to null. - * @param ignore_self_distances Specifies whether distances from a vertex - * to itself should be included in its score. 
- */ - public DistanceCentralityScorer(Hypergraph graph, - Function edge_weights, boolean averaging, - boolean ignore_missing, boolean ignore_self_distances) - { - this(graph, new DijkstraDistance(graph, edge_weights), averaging, - ignore_missing, ignore_self_distances); + + Map v_distances = new HashMap(distance.getDistanceMap(v)); + if (ignore_self_distances) v_distances.remove(v); + + // if we don't ignore missing distances and there aren't enough + // distances, output null (shortcut) + if (!ignore_missing) { + int num_dests = graph.getVertexCount() - (ignore_self_distances ? 1 : 0); + if (v_distances.size() != num_dests) { + output.put(v, -1.0); + return null; + } } - - /** - * Equivalent to this(graph, edge_weights, averaging, true, true). - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param edge_weights The edge weights to use for specifying the distance - * between pairs of vertices. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - */ - public DistanceCentralityScorer(Hypergraph graph, - Function edge_weights, boolean averaging) - { - this(graph, new DijkstraDistance(graph, edge_weights), averaging, - true, true); + + Double sum = 0.0; + for (V w : graph.getVertices()) { + if (w.equals(v) && ignore_self_distances) { + continue; + } + Number w_distance = v_distances.get(w); + if (w_distance == null) + if (ignore_missing) { + continue; + } else { + output.put(v, -1.0); + return null; + } + else { + sum += w_distance.doubleValue(); + } } - - /** - * Creates an instance with the specified graph and averaging behavior - * whose vertex distances are calculated on the unweighted graph. - * - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. 
- * @param ignore_missing Specifies whether scores for missing distances - * are to ignore missing distances or be set to null. - * @param ignore_self_distances Specifies whether distances from a vertex - * to itself should be included in its score. - */ - public DistanceCentralityScorer(Hypergraph graph, boolean averaging, - boolean ignore_missing, boolean ignore_self_distances) - { - this(graph, new UnweightedShortestPath(graph), averaging, - ignore_missing, ignore_self_distances); + value = sum; + if (averaging) { + value /= v_distances.size(); } - /** - * Equivalent to this(graph, averaging, true, true). - * @param graph The graph on which the vertex scores are to be - * calculated. - * @param averaging Specifies whether the values returned is the sum of - * all v-distances or the mean v-distance. - */ - public DistanceCentralityScorer(Hypergraph graph, boolean averaging) - { - this(graph, new UnweightedShortestPath(graph), averaging, true, true); - } + double score = value == 0 ? Double.POSITIVE_INFINITY : 1.0 / value; + output.put(v, score); - /** - * Calculates the score for the specified vertex. Returns {@code null} if - * there are missing distances and such are not ignored by this instance. - */ - public Double getVertexScore(V v) - { - Double value = output.get(v); - if (value != null) - { - if (value < 0) - return null; - return value; - } - - Map v_distances = new HashMap(distance.getDistanceMap(v)); - if (ignore_self_distances) - v_distances.remove(v); - - // if we don't ignore missing distances and there aren't enough - // distances, output null (shortcut) - if (!ignore_missing) - { - int num_dests = graph.getVertexCount() - - (ignore_self_distances ? 
1 : 0); - if (v_distances.size() != num_dests) - { - output.put(v, -1.0); - return null; - } - } - - Double sum = 0.0; - for (V w : graph.getVertices()) - { - if (w.equals(v) && ignore_self_distances) - continue; - Number w_distance = v_distances.get(w); - if (w_distance == null) - if (ignore_missing) - continue; - else - { - output.put(v, -1.0); - return null; - } - else - sum += w_distance.doubleValue(); - } - value = sum; - if (averaging) - value /= v_distances.size(); - - double score = value == 0 ? - Double.POSITIVE_INFINITY : - 1.0 / value; - output.put(v, score); - - return score; - } + return score; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java index 1a4fcf39..3b7bff04 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/EdgeScorer.java @@ -1,7 +1,7 @@ /* * Created on Jul 6, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -11,18 +11,16 @@ */ package edu.uci.ics.jung.algorithms.scoring; - /** * An interface for algorithms that assign scores to edges. 
* * @param the edge type * @param the score type */ -public interface EdgeScorer -{ - /** - * @param e the edge whose score is requested - * @return the algorithm's score for this edge - */ - public S getEdgeScore(E e); +public interface EdgeScorer { + /** + * @param e the edge whose score is requested + * @return the algorithm's score for this edge + */ + public S getEdgeScore(E e); } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java index ff303af9..345752dd 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/EigenvectorCentrality.java @@ -1,7 +1,7 @@ /* * Created on Jul 12, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -12,41 +12,35 @@ package edu.uci.ics.jung.algorithms.scoring; import com.google.common.base.Function; - import edu.uci.ics.jung.graph.Hypergraph; /** - * Calculates eigenvector centrality for each vertex in the graph. - * The 'eigenvector centrality' for a vertex is defined as the fraction of - * time that a random walk(er) will spend at that vertex over an infinite - * time horizon. - * Assumes that the graph is strongly connected. + * Calculates eigenvector centrality for each vertex in the graph. The 'eigenvector centrality' for + * a vertex is defined as the fraction of time that a random walk(er) will spend at that vertex over + * an infinite time horizon. Assumes that the graph is strongly connected. */ -public class EigenvectorCentrality extends PageRank -{ - /** - * Creates an instance with the specified graph and edge weights. - * The outgoing edge weights for each edge must sum to 1. - * (See UniformDegreeWeight for one way to handle this for - * undirected graphs.) 
- * @param graph the graph for which the centrality is to be calculated - * @param edge_weights the edge weights - */ - public EigenvectorCentrality(Hypergraph graph, - Function edge_weights) - { - super(graph, edge_weights, 0); - acceptDisconnectedGraph(false); - } +public class EigenvectorCentrality extends PageRank { + /** + * Creates an instance with the specified graph and edge weights. The outgoing edge weights for + * each edge must sum to 1. (See UniformDegreeWeight for one way to handle this for + * undirected graphs.) + * + * @param graph the graph for which the centrality is to be calculated + * @param edge_weights the edge weights + */ + public EigenvectorCentrality(Hypergraph graph, Function edge_weights) { + super(graph, edge_weights, 0); + acceptDisconnectedGraph(false); + } - /** - * Creates an instance with the specified graph and default edge weights. - * (Default edge weights: UniformDegreeWeight.) - * @param graph the graph for which the centrality is to be calculated. - */ - public EigenvectorCentrality(Hypergraph graph) - { - super(graph, 0); - acceptDisconnectedGraph(false); - } + /** + * Creates an instance with the specified graph and default edge weights. (Default edge weights: + * UniformDegreeWeight.) + * + * @param graph the graph for which the centrality is to be calculated. + */ + public EigenvectorCentrality(Hypergraph graph) { + super(graph, 0); + acceptDisconnectedGraph(false); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITS.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITS.java index 957f3b4a..651daf05 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITS.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITS.java @@ -1,7 +1,7 @@ /* * Created on Jul 15, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. 
* @@ -12,17 +12,16 @@ package edu.uci.ics.jung.algorithms.scoring; import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; import edu.uci.ics.jung.graph.Graph; /** - * Assigns hub and authority scores to each vertex depending on the topology of - * the network. The essential idea is that a vertex is a hub to the extent - * that it links to authoritative vertices, and is an authority to the extent - * that it links to 'hub' vertices. - * + * Assigns hub and authority scores to each vertex depending on the topology of the network. The + * essential idea is that a vertex is a hub to the extent that it links to authoritative vertices, + * and is an authority to the extent that it links to 'hub' vertices. + * *

The classic HITS algorithm essentially proceeds as follows: + * *

  * assign equal initial hub and authority values to each vertex
  * repeat
@@ -32,116 +31,101 @@
  *   normalize hub and authority scores so that the sum of the squares of each = 1
  * until scores converge
  * 
- * - * HITS is somewhat different from random walk/eigenvector-based algorithms - * such as PageRank in that: + * + * HITS is somewhat different from random walk/eigenvector-based algorithms such as PageRank in + * that: + * *
    - *
  • there are two mutually recursive scores being calculated, rather than - * a single value - *
  • the edge weights are effectively all 1, i.e., they can't be interpreted - * as transition probabilities. This means that the more inlinks and outlinks - * that a vertex has, the better, since adding an inlink (or outlink) does - * not dilute the influence of the other inlinks (or outlinks) as in - * random walk-based algorithms. - *
  • the scores cannot be interpreted as posterior probabilities (due to the different - * normalization) + *
  • there are two mutually recursive scores being calculated, rather than a single value + *
  • the edge weights are effectively all 1, i.e., they can't be interpreted as transition + * probabilities. This means that the more inlinks and outlinks that a vertex has, the better, + * since adding an inlink (or outlink) does not dilute the influence of the other inlinks (or + * outlinks) as in random walk-based algorithms. + *
  • the scores cannot be interpreted as posterior probabilities (due to the different + * normalization) *
- * - * This implementation has the classic behavior by default. However, it has - * been generalized somewhat so that it can act in a more "PageRank-like" fashion: + * + * This implementation has the classic behavior by default. However, it has been generalized + * somewhat so that it can act in a more "PageRank-like" fashion: + * *
    - *
  • this implementation has an optional 'random jump probability' parameter analogous - * to the 'alpha' parameter used by PageRank. Varying this value between 0 and 1 - * allows the user to vary between the classic HITS behavior and one in which the - * scores are smoothed to a uniform distribution. - * The default value for this parameter is 0 (no random jumps possible). - *
  • the edge weights can be set to anything the user likes, and in - * particular they can be set up (e.g. using UniformDegreeWeight) - * so that the weights of the relevant edges incident to a vertex sum to 1. - *
  • The vertex score normalization has been factored into its own method - * so that it can be overridden by a subclass. Thus, for example, - * since the vertices' values are set to sum to 1 initially, if the weights of the - * relevant edges incident to a vertex sum to 1, then the vertices' values - * will continue to sum to 1 if the "sum-of-squares" normalization code - * is overridden to a no-op. (Other normalization methods may also be employed.) + *
  • this implementation has an optional 'random jump probability' parameter analogous to the + * 'alpha' parameter used by PageRank. Varying this value between 0 and 1 allows the user to + * vary between the classic HITS behavior and one in which the scores are smoothed to a + * uniform distribution. The default value for this parameter is 0 (no random jumps possible). + *
  • the edge weights can be set to anything the user likes, and in particular they can be set + * up (e.g. using UniformDegreeWeight) so that the weights of the relevant edges + * incident to a vertex sum to 1. + *
  • The vertex score normalization has been factored into its own method so that it can be + * overridden by a subclass. Thus, for example, since the vertices' values are set to sum to 1 + * initially, if the weights of the relevant edges incident to a vertex sum to 1, then the + * vertices' values will continue to sum to 1 if the "sum-of-squares" normalization code is + * overridden to a no-op. (Other normalization methods may also be employed.) *
- * + * * @param the vertex type * @param the edge type - * * @see "'Authoritative sources in a hyperlinked environment' by Jon Kleinberg, 1997" */ -public class HITS extends HITSWithPriors -{ +public class HITS extends HITSWithPriors { - /** - * Creates an instance for the specified graph, edge weights, and alpha - * (random jump probability) parameter. - * @param g the input graph - * @param edge_weights the weights to use for each edge - * @param alpha the probability of a hub giving some authority to all vertices, - * and of an authority increasing the score of all hubs (not just those connected - * via links) - */ - public HITS(Graph g, Function edge_weights, double alpha) - { - super(g, edge_weights, ScoringUtils.getHITSUniformRootPrior(g.getVertices()), alpha); - } + /** + * Creates an instance for the specified graph, edge weights, and alpha (random jump probability) + * parameter. + * + * @param g the input graph + * @param edge_weights the weights to use for each edge + * @param alpha the probability of a hub giving some authority to all vertices, and of an + * authority increasing the score of all hubs (not just those connected via links) + */ + public HITS(Graph g, Function edge_weights, double alpha) { + super(g, edge_weights, ScoringUtils.getHITSUniformRootPrior(g.getVertices()), alpha); + } - /** - * Creates an instance for the specified graph and alpha (random jump probability) - * parameter. The edge weights are all set to 1. - * @param g the input graph - * @param alpha the probability of a hub giving some authority to all vertices, - * and of an authority increasing the score of all hubs (not just those connected - * via links) - */ - public HITS(Graph g, double alpha) - { - super(g, ScoringUtils.getHITSUniformRootPrior(g.getVertices()), alpha); - } + /** + * Creates an instance for the specified graph and alpha (random jump probability) parameter. The + * edge weights are all set to 1. 
+ * + * @param g the input graph + * @param alpha the probability of a hub giving some authority to all vertices, and of an + * authority increasing the score of all hubs (not just those connected via links) + */ + public HITS(Graph g, double alpha) { + super(g, ScoringUtils.getHITSUniformRootPrior(g.getVertices()), alpha); + } + + /** + * Creates an instance for the specified graph. The edge weights are all set to 1 and alpha is set + * to 0. + * + * @param g the input graph + */ + public HITS(Graph g) { + this(g, 0.0); + } + + /** Maintains hub and authority score information for a vertex. */ + public static class Scores { + /** The hub score for a vertex. */ + public double hub; + + /** The authority score for a vertex. */ + public double authority; /** - * Creates an instance for the specified graph. The edge weights are all set to 1 - * and alpha is set to 0. - * @param g the input graph + * Creates an instance with the specified hub and authority score. + * + * @param hub the hub score + * @param authority the authority score */ - public HITS(Graph g) - { - this(g, 0.0); + public Scores(double hub, double authority) { + this.hub = hub; + this.authority = authority; } - - /** - * Maintains hub and authority score information for a vertex. - */ - public static class Scores - { - /** - * The hub score for a vertex. - */ - public double hub; - - /** - * The authority score for a vertex. - */ - public double authority; - - /** - * Creates an instance with the specified hub and authority score. 
- * @param hub the hub score - * @param authority the authority score - */ - public Scores(double hub, double authority) - { - this.hub = hub; - this.authority = authority; - } - - @Override - public String toString() - { - return String.format("[h:%.4f,a:%.4f]", this.hub, this.authority); - } + @Override + public String toString() { + return String.format("[h:%.4f,a:%.4f]", this.hub, this.authority); } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java index b7cc64a8..2c430b83 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/HITSWithPriors.java @@ -1,7 +1,7 @@ /* * Created on Jul 14, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -13,187 +13,162 @@ import com.google.common.base.Function; import com.google.common.base.Functions; - import edu.uci.ics.jung.graph.Hypergraph; /** - * A generalization of HITS that permits non-uniformly-distributed random jumps. - * The 'vertex_priors' (that is, prior probabilities for each vertex) may be - * thought of as the fraction of the total 'potential' (hub or authority score) - * that is assigned to that vertex out of the portion that is assigned according - * to random jumps. - * - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" + * A generalization of HITS that permits non-uniformly-distributed random jumps. The 'vertex_priors' + * (that is, prior probabilities for each vertex) may be thought of as the fraction of the total + * 'potential' (hub or authority score) that is assigned to that vertex out of the portion that is + * assigned according to random jumps. 
+ * + * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, + * 2003" */ -public class HITSWithPriors - extends AbstractIterativeScorerWithPriors -{ - /** - * The sum of the potential, at each step, associated with vertices with no outedges (authority) - * or no inedges (hub). - */ - protected HITS.Scores disappearing_potential; - - /** - * Creates an instance for the specified graph, edge weights, vertex prior probabilities, - * and random jump probability (alpha). - * @param g the input graph - * @param edge_weights the edge weights - * @param vertex_priors the prior probability for each vertex - * @param alpha the probability of a random jump at each step - */ - public HITSWithPriors(Hypergraph g, - Function edge_weights, - Function vertex_priors, double alpha) - { - super(g, edge_weights, vertex_priors, alpha); - disappearing_potential = new HITS.Scores(0,0); +public class HITSWithPriors extends AbstractIterativeScorerWithPriors { + /** + * The sum of the potential, at each step, associated with vertices with no outedges (authority) + * or no inedges (hub). + */ + protected HITS.Scores disappearing_potential; + + /** + * Creates an instance for the specified graph, edge weights, vertex prior probabilities, and + * random jump probability (alpha). + * + * @param g the input graph + * @param edge_weights the edge weights + * @param vertex_priors the prior probability for each vertex + * @param alpha the probability of a random jump at each step + */ + public HITSWithPriors( + Hypergraph g, + Function edge_weights, + Function vertex_priors, + double alpha) { + super(g, edge_weights, vertex_priors, alpha); + disappearing_potential = new HITS.Scores(0, 0); + } + + /** + * Creates an instance for the specified graph, vertex priors, and random jump probability + * (alpha). The edge weights default to 1.0. 
+ * + * @param g the input graph + * @param vertex_priors the prior probability for each vertex + * @param alpha the probability of a random jump at each step + */ + public HITSWithPriors(Hypergraph g, Function vertex_priors, double alpha) { + super(g, Functions.constant(1.0), vertex_priors, alpha); + disappearing_potential = new HITS.Scores(0, 0); + } + + /** Updates the value for this vertex. */ + @Override + protected double update(V v) { + collectDisappearingPotential(v); + + double v_auth = 0; + for (E e : graph.getInEdges(v)) { + int incident_count = getAdjustedIncidentCount(e); + for (V w : graph.getIncidentVertices(e)) { + if (!w.equals(v) || hyperedges_are_self_loops) + v_auth += (getCurrentValue(w).hub * getEdgeWeight(w, e).doubleValue() / incident_count); + } + // V w = graph.getOpposite(v, e); + // auth += (getCurrentValue(w).hub * getEdgeWeight(w, e).doubleValue()); + } + + double v_hub = 0; + for (E e : graph.getOutEdges(v)) { + int incident_count = getAdjustedIncidentCount(e); + for (V w : graph.getIncidentVertices(e)) { + if (!w.equals(v) || hyperedges_are_self_loops) + v_hub += + (getCurrentValue(w).authority * getEdgeWeight(w, e).doubleValue() / incident_count); + } + // V x = graph.getOpposite(v,e); + // hub += (getCurrentValue(x).authority * getEdgeWeight(x, e).doubleValue()); } - /** - * Creates an instance for the specified graph, vertex priors, and random - * jump probability (alpha). The edge weights default to 1.0. 
- * @param g the input graph - * @param vertex_priors the prior probability for each vertex - * @param alpha the probability of a random jump at each step - */ - public HITSWithPriors(Hypergraph g, - Function vertex_priors, double alpha) - { - super(g, Functions.constant(1.0), vertex_priors, alpha); - disappearing_potential = new HITS.Scores(0,0); + // modify total_input according to alpha + if (alpha > 0) { + v_auth = v_auth * (1 - alpha) + getVertexPrior(v).authority * alpha; + v_hub = v_hub * (1 - alpha) + getVertexPrior(v).hub * alpha; } + setOutputValue(v, new HITS.Scores(v_hub, v_auth)); + + return Math.max( + Math.abs(getCurrentValue(v).hub - v_hub), Math.abs(getCurrentValue(v).authority - v_auth)); + } - /** - * Updates the value for this vertex. - */ - @Override - protected double update(V v) - { - collectDisappearingPotential(v); - - double v_auth = 0; - for (E e : graph.getInEdges(v)) - { - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_auth += (getCurrentValue(w).hub * - getEdgeWeight(w,e).doubleValue() / incident_count); - } -// V w = graph.getOpposite(v, e); -// auth += (getCurrentValue(w).hub * getEdgeWeight(w, e).doubleValue()); - } - - double v_hub = 0; - for (E e : graph.getOutEdges(v)) - { - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_hub += (getCurrentValue(w).authority * - getEdgeWeight(w,e).doubleValue() / incident_count); - } -// V x = graph.getOpposite(v,e); -// hub += (getCurrentValue(x).authority * getEdgeWeight(x, e).doubleValue()); - } - - // modify total_input according to alpha - if (alpha > 0) - { - v_auth = v_auth * (1 - alpha) + getVertexPrior(v).authority * alpha; - v_hub = v_hub * (1 - alpha) + getVertexPrior(v).hub * alpha; - } - setOutputValue(v, new HITS.Scores(v_hub, v_auth)); - - return 
Math.max(Math.abs(getCurrentValue(v).hub - v_hub), - Math.abs(getCurrentValue(v).authority - v_auth)); + /** + * Code which is executed after each step. In this case, deals with the 'disappearing potential', + * normalizes the scores, and then calls super.afterStep(). + * + * @see #collectDisappearingPotential(Object) + */ + @Override + protected void afterStep() { + if (disappearing_potential.hub > 0 || disappearing_potential.authority > 0) { + for (V v : graph.getVertices()) { + double new_hub = + getOutputValue(v).hub + + (1 - alpha) * (disappearing_potential.hub * getVertexPrior(v).hub); + double new_auth = + getOutputValue(v).authority + + (1 - alpha) * (disappearing_potential.authority * getVertexPrior(v).authority); + setOutputValue(v, new HITS.Scores(new_hub, new_auth)); + } + disappearing_potential.hub = 0; + disappearing_potential.authority = 0; } - /** - * Code which is executed after each step. In this case, deals with the - * 'disappearing potential', normalizes the scores, and then calls - * super.afterStep(). - * @see #collectDisappearingPotential(Object) - */ - @Override - protected void afterStep() - { - if (disappearing_potential.hub > 0 || disappearing_potential.authority > 0) - { - for (V v : graph.getVertices()) - { - double new_hub = getOutputValue(v).hub + - (1 - alpha) * (disappearing_potential.hub * getVertexPrior(v).hub); - double new_auth = getOutputValue(v).authority + - (1 - alpha) * (disappearing_potential.authority * getVertexPrior(v).authority); - setOutputValue(v, new HITS.Scores(new_hub, new_auth)); - } - disappearing_potential.hub = 0; - disappearing_potential.authority = 0; - } - - normalizeScores(); - - super.afterStep(); + normalizeScores(); + + super.afterStep(); + } + + /** + * Normalizes scores so that sum of their squares = 1. This method may be overridden so as to + * yield different normalizations. 
+ */ + protected void normalizeScores() { + double hub_ssum = 0; + double auth_ssum = 0; + for (V v : graph.getVertices()) { + double hub_val = getOutputValue(v).hub; + double auth_val = getOutputValue(v).authority; + hub_ssum += (hub_val * hub_val); + auth_ssum += (auth_val * auth_val); } - /** - * Normalizes scores so that sum of their squares = 1. - * This method may be overridden so as to yield different - * normalizations. - */ - protected void normalizeScores() { - double hub_ssum = 0; - double auth_ssum = 0; - for (V v : graph.getVertices()) - { - double hub_val = getOutputValue(v).hub; - double auth_val = getOutputValue(v).authority; - hub_ssum += (hub_val * hub_val); - auth_ssum += (auth_val * auth_val); - } - - hub_ssum = Math.sqrt(hub_ssum); - auth_ssum = Math.sqrt(auth_ssum); - - for (V v : graph.getVertices()) - { - HITS.Scores values = getOutputValue(v); - setOutputValue(v, new HITS.Scores( - values.hub / hub_ssum, - values.authority / auth_ssum)); - } - } - - /** - * Collects the "disappearing potential" associated with vertices that have either - * no incoming edges, no outgoing edges, or both. Vertices that have no incoming edges - * do not directly contribute to the hub scores of other vertices; similarly, vertices - * that have no outgoing edges do not directly contribute to the authority scores of - * other vertices. These values are collected at each step and then distributed across all vertices - * as a part of the normalization process. (This process is not required for, and does - * not affect, the 'sum-of-squares'-style normalization.) 
- */ - @Override - protected void collectDisappearingPotential(V v) - { - if (graph.outDegree(v) == 0) - { - if (isDisconnectedGraphOK()) - disappearing_potential.hub += getCurrentValue(v).authority; - else - throw new IllegalArgumentException("Outdegree of " + v + " must be > 0"); - } - if (graph.inDegree(v) == 0) - { - if (isDisconnectedGraphOK()) - disappearing_potential.authority += getCurrentValue(v).hub; - else - throw new IllegalArgumentException("Indegree of " + v + " must be > 0"); - } + hub_ssum = Math.sqrt(hub_ssum); + auth_ssum = Math.sqrt(auth_ssum); + + for (V v : graph.getVertices()) { + HITS.Scores values = getOutputValue(v); + setOutputValue(v, new HITS.Scores(values.hub / hub_ssum, values.authority / auth_ssum)); } + } + /** + * Collects the "disappearing potential" associated with vertices that have either no incoming + * edges, no outgoing edges, or both. Vertices that have no incoming edges do not directly + * contribute to the hub scores of other vertices; similarly, vertices that have no outgoing edges + * do not directly contribute to the authority scores of other vertices. These values are + * collected at each step and then distributed across all vertices as a part of the normalization + * process. (This process is not required for, and does not affect, the 'sum-of-squares'-style + * normalization.) 
+ */ + @Override + protected void collectDisappearingPotential(V v) { + if (graph.outDegree(v) == 0) { + if (isDisconnectedGraphOK()) disappearing_potential.hub += getCurrentValue(v).authority; + else throw new IllegalArgumentException("Outdegree of " + v + " must be > 0"); + } + if (graph.inDegree(v) == 0) { + if (isDisconnectedGraphOK()) disappearing_potential.authority += getCurrentValue(v).hub; + else throw new IllegalArgumentException("Indegree of " + v + " must be > 0"); + } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java index e591c578..03201735 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/KStepMarkov.java @@ -1,157 +1,143 @@ /** - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * - * All rights reserved. + *

All rights reserved. * - * This software is open-source under the BSD license; see either - * "license.txt" or - * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * Created on Aug 22, 2008 - * + *

This software is open-source under the BSD license; see either "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. Created on Aug 22, 2008 */ package edu.uci.ics.jung.algorithms.scoring; import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; import edu.uci.ics.jung.graph.Hypergraph; /** - * A special case of {@code PageRankWithPriors} in which the final scores - * represent a probability distribution over position assuming a random (Markovian) - * walk of exactly k steps, based on the initial distribution specified by the priors. - * - *

NOTE: The version of {@code KStepMarkov} in {@code algorithms.importance} - * (and in JUNG 1.x) is believed to be incorrect: rather than returning - * a score which represents a probability distribution over position assuming - * a k-step random walk, it returns a score which represents the sum over all steps - * of the probability for each step. If you want that behavior, set the + * A special case of {@code PageRankWithPriors} in which the final scores represent a probability + * distribution over position assuming a random (Markovian) walk of exactly k steps, based on the + * initial distribution specified by the priors. + * + *

NOTE: The version of {@code KStepMarkov} in {@code algorithms.importance} (and in JUNG + * 1.x) is believed to be incorrect: rather than returning a score which represents a probability + * distribution over position assuming a k-step random walk, it returns a score which represents the + * sum over all steps of the probability for each step. If you want that behavior, set the * 'cumulative' flag as follows before calling {@code evaluate()}: + * *

  *     KStepMarkov ksm = new KStepMarkov(...);
  *     ksm.setCumulative(true);
  *     ksm.evaluate();
  * 
- * + * * By default, the 'cumulative' flag is set to false. - * - * NOTE: THIS CLASS IS NOT YET COMPLETE. USE AT YOUR OWN RISK. (The original behavior - * is captured by the version still available in {@code algorithms.importance}.) - * - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" + * + *

NOTE: THIS CLASS IS NOT YET COMPLETE. USE AT YOUR OWN RISK. (The original behavior is captured + * by the version still available in {@code algorithms.importance}.) + * + * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, + * 2003" * @see PageRank * @see PageRankWithPriors */ -public class KStepMarkov extends PageRankWithPriors -{ - private boolean cumulative; - - /** - * Creates an instance based on the specified graph, edge weights, vertex - * priors (initial scores), and number of steps to take. - * @param graph the input graph - * @param edge_weights the edge weights (transition probabilities) - * @param vertex_priors the initial probability distribution (score assignment) - * @param steps the number of times that {@code step()} will be called by {@code evaluate} - */ - public KStepMarkov(Hypergraph graph, Function edge_weights, - Function vertex_priors, int steps) - { - super(graph, edge_weights, vertex_priors, 0); - initialize(steps); - } - - /** - * Creates an instance based on the specified graph, vertex - * priors (initial scores), and number of steps to take. The edge - * weights (transition probabilities) are set to default values (a uniform - * distribution over all outgoing edges). - * @param graph the input graph - * @param vertex_priors the initial probability distribution (score assignment) - * @param steps the number of times that {@code step()} will be called by {@code evaluate} - */ - public KStepMarkov(Hypergraph graph, Function vertex_priors, int steps) - { - super(graph, vertex_priors, 0); - initialize(steps); - } - - /** - * Creates an instance based on the specified graph and number of steps to - * take. The edge weights (transition probabilities) and vertex initial scores - * (prior probabilities) are set to default values (a uniform - * distribution over all outgoing edges, and a uniform distribution over - * all vertices, respectively). 
- * @param graph the input graph - * @param steps the number of times that {@code step()} will be called by {@code evaluate} - */ - public KStepMarkov(Hypergraph graph, int steps) - { - super(graph, ScoringUtils.getUniformRootPrior(graph.getVertices()), 0); - initialize(steps); - } - - private void initialize(int steps) - { - this.acceptDisconnectedGraph(false); - - if (steps <= 0) - throw new IllegalArgumentException("Number of steps must be > 0"); - - this.max_iterations = steps; - this.tolerance = -1.0; - - this.cumulative = false; - } - - /** - * Specifies whether this instance should assign a score to each vertex - * based on the sum over all steps of the probability for each step. - * See the class-level documentation for details. - * @param cumulative true if this instance should assign a cumulative score to each vertex - */ - public void setCumulative(boolean cumulative) - { - this.cumulative = cumulative; - } - - /** - * Updates the value for this vertex. Called by step(). - */ - @Override - public double update(V v) - { - if (!cumulative) - return super.update(v); - - collectDisappearingPotential(v); - - double v_input = 0; - for (E e : graph.getInEdges(v)) - { - // For graphs, the code below is equivalent to -// V w = graph.getOpposite(v, e); -// total_input += (getCurrentValue(w) * getEdgeWeight(w,e).doubleValue()); - // For hypergraphs, this divides the potential coming from w - // by the number of vertices in the connecting edge e. - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_input += (getCurrentValue(w) * - getEdgeWeight(w,e).doubleValue() / incident_count); - } - } - - // modify total_input according to alpha - double new_value = alpha > 0 ? - v_input * (1 - alpha) + getVertexPrior(v) * alpha : - v_input; - setOutputValue(v, new_value + getCurrentValue(v)); - - // FIXME: DO WE NEED TO CHANGE HOW DISAPPEARING IS COUNTED? NORMALIZE? 
- - return Math.abs(getCurrentValue(v) - new_value); +public class KStepMarkov extends PageRankWithPriors { + private boolean cumulative; + + /** + * Creates an instance based on the specified graph, edge weights, vertex priors (initial scores), + * and number of steps to take. + * + * @param graph the input graph + * @param edge_weights the edge weights (transition probabilities) + * @param vertex_priors the initial probability distribution (score assignment) + * @param steps the number of times that {@code step()} will be called by {@code evaluate} + */ + public KStepMarkov( + Hypergraph graph, + Function edge_weights, + Function vertex_priors, + int steps) { + super(graph, edge_weights, vertex_priors, 0); + initialize(steps); + } + + /** + * Creates an instance based on the specified graph, vertex priors (initial scores), and number of + * steps to take. The edge weights (transition probabilities) are set to default values (a uniform + * distribution over all outgoing edges). + * + * @param graph the input graph + * @param vertex_priors the initial probability distribution (score assignment) + * @param steps the number of times that {@code step()} will be called by {@code evaluate} + */ + public KStepMarkov(Hypergraph graph, Function vertex_priors, int steps) { + super(graph, vertex_priors, 0); + initialize(steps); + } + + /** + * Creates an instance based on the specified graph and number of steps to take. The edge weights + * (transition probabilities) and vertex initial scores (prior probabilities) are set to default + * values (a uniform distribution over all outgoing edges, and a uniform distribution over all + * vertices, respectively). 
+ * + * @param graph the input graph + * @param steps the number of times that {@code step()} will be called by {@code evaluate} + */ + public KStepMarkov(Hypergraph graph, int steps) { + super(graph, ScoringUtils.getUniformRootPrior(graph.getVertices()), 0); + initialize(steps); + } + + private void initialize(int steps) { + this.acceptDisconnectedGraph(false); + + if (steps <= 0) throw new IllegalArgumentException("Number of steps must be > 0"); + + this.max_iterations = steps; + this.tolerance = -1.0; + + this.cumulative = false; + } + + /** + * Specifies whether this instance should assign a score to each vertex based on the sum over all + * steps of the probability for each step. See the class-level documentation for details. + * + * @param cumulative true if this instance should assign a cumulative score to each vertex + */ + public void setCumulative(boolean cumulative) { + this.cumulative = cumulative; + } + + /** Updates the value for this vertex. Called by step(). */ + @Override + public double update(V v) { + if (!cumulative) { + return super.update(v); } + collectDisappearingPotential(v); + + double v_input = 0; + for (E e : graph.getInEdges(v)) { + // For graphs, the code below is equivalent to + // V w = graph.getOpposite(v, e); + // total_input += (getCurrentValue(w) * getEdgeWeight(w,e).doubleValue()); + // For hypergraphs, this divides the potential coming from w + // by the number of vertices in the connecting edge e. + int incident_count = getAdjustedIncidentCount(e); + for (V w : graph.getIncidentVertices(e)) { + if (!w.equals(v) || hyperedges_are_self_loops) + v_input += (getCurrentValue(w) * getEdgeWeight(w, e).doubleValue() / incident_count); + } + } + + // modify total_input according to alpha + double new_value = alpha > 0 ? v_input * (1 - alpha) + getVertexPrior(v) * alpha : v_input; + setOutputValue(v, new_value + getCurrentValue(v)); + + // FIXME: DO WE NEED TO CHANGE HOW DISAPPEARING IS COUNTED? NORMALIZE? 
+ + return Math.abs(getCurrentValue(v) - new_value); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRank.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRank.java index baefab92..0e6b1674 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRank.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRank.java @@ -1,7 +1,7 @@ /* * Created on Jul 12, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -12,59 +12,56 @@ package edu.uci.ics.jung.algorithms.scoring; import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; import edu.uci.ics.jung.graph.Hypergraph; /** - * Assigns scores to each vertex according to the PageRank algorithm. - * - *

PageRank is an eigenvector-based algorithm. The score for a given vertex may be thought of - * as the fraction of time spent 'visiting' that vertex (measured over all time) - * in a random walk over the vertices (following outgoing edges from each vertex). - * PageRank modifies this random walk by adding to the model a probability (specified as 'alpha' - * in the constructor) of jumping to any vertex. If alpha is 0, this is equivalent to the - * eigenvector centrality algorithm; if alpha is 1, all vertices will receive the same score - * (1/|V|). Thus, alpha acts as a sort of score smoothing parameter. - * - *

The original algorithm assumed that, for a given vertex, the probability of following any - * outgoing edge was the same; this is the default if edge weights are not specified. - * This implementation generalizes the original by permitting - * the user to specify edge weights; in order to maintain the original semantics, however, - * the weights on the outgoing edges for a given vertex must represent transition probabilities; - * that is, they must sum to 1. - * + * Assigns scores to each vertex according to the PageRank algorithm. + * + *

PageRank is an eigenvector-based algorithm. The score for a given vertex may be thought of as + * the fraction of time spent 'visiting' that vertex (measured over all time) in a random walk over + * the vertices (following outgoing edges from each vertex). PageRank modifies this random walk by + * adding to the model a probability (specified as 'alpha' in the constructor) of jumping to any + * vertex. If alpha is 0, this is equivalent to the eigenvector centrality algorithm; if alpha is 1, + * all vertices will receive the same score (1/|V|). Thus, alpha acts as a sort of score smoothing + * parameter. + * + *

The original algorithm assumed that, for a given vertex, the probability of following any + * outgoing edge was the same; this is the default if edge weights are not specified. This + * implementation generalizes the original by permitting the user to specify edge weights; in order + * to maintain the original semantics, however, the weights on the outgoing edges for a given vertex + * must represent transition probabilities; that is, they must sum to 1. + * *

If a vertex has no outgoing edges, then the probability of taking a random jump from that - * vertex is (by default) effectively 1. If the user wishes to instead throw an exception when this happens, - * call acceptDisconnectedGraph(false) on this instance. - * - *

Typical values for alpha (according to the original paper) are in the range [0.1, 0.2] - * but may be any value between 0 and 1 inclusive. - * + * vertex is (by default) effectively 1. If the user wishes to instead throw an exception when this + * happens, call acceptDisconnectedGraph(false) on this instance. + * + *

Typical values for alpha (according to the original paper) are in the range [0.1, 0.2] but may + * be any value between 0 and 1 inclusive. + * * @see "The Anatomy of a Large-Scale Hypertextual Web Search Engine by L. Page and S. Brin, 1999" */ -public class PageRank extends PageRankWithPriors -{ +public class PageRank extends PageRankWithPriors { - /** - * Creates an instance for the specified graph, edge weights, and random jump probability. - * @param graph the input graph - * @param edge_weight the edge weights (transition probabilities) - * @param alpha the probability of taking a random jump to an arbitrary vertex - */ - public PageRank(Hypergraph graph, Function edge_weight, double alpha) - { - super(graph, edge_weight, ScoringUtils.getUniformRootPrior(graph.getVertices()), alpha); - } + /** + * Creates an instance for the specified graph, edge weights, and random jump probability. + * + * @param graph the input graph + * @param edge_weight the edge weights (transition probabilities) + * @param alpha the probability of taking a random jump to an arbitrary vertex + */ + public PageRank(Hypergraph graph, Function edge_weight, double alpha) { + super(graph, edge_weight, ScoringUtils.getUniformRootPrior(graph.getVertices()), alpha); + } - /** - * Creates an instance for the specified graph and random jump probability; the probability - * of following any outgoing edge from a given vertex is the same. - * @param graph the input graph - * @param alpha the probability of taking a random jump to an arbitrary vertex - */ - public PageRank(Hypergraph graph, double alpha) - { - super(graph, ScoringUtils.getUniformRootPrior(graph.getVertices()), alpha); - } + /** + * Creates an instance for the specified graph and random jump probability; the probability of + * following any outgoing edge from a given vertex is the same. 
+ * + * @param graph the input graph + * @param alpha the probability of taking a random jump to an arbitrary vertex + */ + public PageRank(Hypergraph graph, double alpha) { + super(graph, ScoringUtils.getUniformRootPrior(graph.getVertices()), alpha); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java index fa870a39..bdc94ab5 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/PageRankWithPriors.java @@ -1,7 +1,7 @@ /* * Created on Jul 6, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -12,131 +12,111 @@ package edu.uci.ics.jung.algorithms.scoring; import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight; import edu.uci.ics.jung.graph.Hypergraph; /** - * A generalization of PageRank that permits non-uniformly-distributed random jumps. - * The 'vertex_priors' (that is, prior probabilities for each vertex) may be - * thought of as the fraction of the total 'potential' that is assigned to that - * vertex at each step out of the portion that is assigned according - * to random jumps (this portion is specified by 'alpha'). - * - * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, 2003" + * A generalization of PageRank that permits non-uniformly-distributed random jumps. The + * 'vertex_priors' (that is, prior probabilities for each vertex) may be thought of as the fraction + * of the total 'potential' that is assigned to that vertex at each step out of the portion that is + * assigned according to random jumps (this portion is specified by 'alpha'). 
+ * + * @see "Algorithms for Estimating Relative Importance in Graphs by Scott White and Padhraic Smyth, + * 2003" * @see PageRank */ -public class PageRankWithPriors - extends AbstractIterativeScorerWithPriors -{ - /** - * Maintains the amount of potential associated with vertices with no out-edges. - */ - protected double disappearing_potential = 0.0; - - /** - * Creates an instance with the specified graph, edge weights, vertex priors, and - * 'random jump' probability (alpha). - * @param graph the input graph - * @param edge_weights the edge weights, denoting transition probabilities from source to destination - * @param vertex_priors the prior probabilities for each vertex - * @param alpha the probability of executing a 'random jump' at each step - */ - public PageRankWithPriors(Hypergraph graph, - Function edge_weights, - Function vertex_priors, double alpha) - { - super(graph, edge_weights, vertex_priors, alpha); - } - - /** - * Creates an instance with the specified graph, vertex priors, and - * 'random jump' probability (alpha). The outgoing edge weights for each - * vertex will be equal and sum to 1. - * @param graph the input graph - * @param vertex_priors the prior probabilities for each vertex - * @param alpha the probability of executing a 'random jump' at each step - */ - public PageRankWithPriors(Hypergraph graph, - Function vertex_priors, double alpha) - { - super(graph, vertex_priors, alpha); - this.edge_weights = new UniformDegreeWeight(graph); - } - - /** - * Updates the value for this vertex. Called by step(). - */ - @Override - public double update(V v) - { - collectDisappearingPotential(v); - - double v_input = 0; - for (E e : graph.getInEdges(v)) - { - // For graphs, the code below is equivalent to -// V w = graph.getOpposite(v, e); -// total_input += (getCurrentValue(w) * getEdgeWeight(w,e).doubleValue()); - // For hypergraphs, this divides the potential coming from w - // by the number of vertices in the connecting edge e. 
- int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - v_input += (getCurrentValue(w) * - getEdgeWeight(w,e).doubleValue() / incident_count); - } - } - - // modify total_input according to alpha - double new_value = alpha > 0 ? - v_input * (1 - alpha) + getVertexPrior(v) * alpha : - v_input; - setOutputValue(v, new_value); - - return Math.abs(getCurrentValue(v) - new_value); +public class PageRankWithPriors extends AbstractIterativeScorerWithPriors { + /** Maintains the amount of potential associated with vertices with no out-edges. */ + protected double disappearing_potential = 0.0; + + /** + * Creates an instance with the specified graph, edge weights, vertex priors, and 'random jump' + * probability (alpha). + * + * @param graph the input graph + * @param edge_weights the edge weights, denoting transition probabilities from source to + * destination + * @param vertex_priors the prior probabilities for each vertex + * @param alpha the probability of executing a 'random jump' at each step + */ + public PageRankWithPriors( + Hypergraph graph, + Function edge_weights, + Function vertex_priors, + double alpha) { + super(graph, edge_weights, vertex_priors, alpha); + } + + /** + * Creates an instance with the specified graph, vertex priors, and 'random jump' probability + * (alpha). The outgoing edge weights for each vertex will be equal and sum to 1. + * + * @param graph the input graph + * @param vertex_priors the prior probabilities for each vertex + * @param alpha the probability of executing a 'random jump' at each step + */ + public PageRankWithPriors( + Hypergraph graph, Function vertex_priors, double alpha) { + super(graph, vertex_priors, alpha); + this.edge_weights = new UniformDegreeWeight(graph); + } + + /** Updates the value for this vertex. Called by step(). 
*/ + @Override + public double update(V v) { + collectDisappearingPotential(v); + + double v_input = 0; + for (E e : graph.getInEdges(v)) { + // For graphs, the code below is equivalent to + // V w = graph.getOpposite(v, e); + // total_input += (getCurrentValue(w) * getEdgeWeight(w,e).doubleValue()); + // For hypergraphs, this divides the potential coming from w + // by the number of vertices in the connecting edge e. + int incident_count = getAdjustedIncidentCount(e); + for (V w : graph.getIncidentVertices(e)) { + if (!w.equals(v) || hyperedges_are_self_loops) + v_input += (getCurrentValue(w) * getEdgeWeight(w, e).doubleValue() / incident_count); + } } - /** - * Cleans up after each step. In this case that involves allocating the disappearing - * potential (thus maintaining normalization of the scores) according to the vertex - * probability priors, and then calling - * super.afterStep. - */ - @Override - protected void afterStep() - { - // distribute disappearing potential according to priors - if (disappearing_potential > 0) - { - for (V v : graph.getVertices()) - { - setOutputValue(v, getOutputValue(v) + - (1 - alpha) * (disappearing_potential * getVertexPrior(v))); - } - disappearing_potential = 0; - } - - super.afterStep(); + // modify total_input according to alpha + double new_value = alpha > 0 ? v_input * (1 - alpha) + getVertexPrior(v) * alpha : v_input; + setOutputValue(v, new_value); + + return Math.abs(getCurrentValue(v) - new_value); + } + + /** + * Cleans up after each step. In this case that involves allocating the disappearing potential + * (thus maintaining normalization of the scores) according to the vertex probability priors, and + * then calling super.afterStep. 
+ */ + @Override + protected void afterStep() { + // distribute disappearing potential according to priors + if (disappearing_potential > 0) { + for (V v : graph.getVertices()) { + setOutputValue( + v, getOutputValue(v) + (1 - alpha) * (disappearing_potential * getVertexPrior(v))); + } + disappearing_potential = 0; } - - /** - * Collects the "disappearing potential" associated with vertices that have - * no outgoing edges. Vertices that have no outgoing edges do not directly - * contribute to the scores of other vertices. These values are collected - * at each step and then distributed across all vertices - * as a part of the normalization process. - */ - @Override - protected void collectDisappearingPotential(V v) - { - if (graph.outDegree(v) == 0) - { - if (isDisconnectedGraphOK()) - disappearing_potential += getCurrentValue(v); - else - throw new IllegalArgumentException("Outdegree of " + v + " must be > 0"); - } + + super.afterStep(); + } + + /** + * Collects the "disappearing potential" associated with vertices that have no outgoing edges. + * Vertices that have no outgoing edges do not directly contribute to the scores of other + * vertices. These values are collected at each step and then distributed across all vertices as a + * part of the normalization process. 
+ */ + @Override + protected void collectDisappearingPotential(V v) { + if (graph.outDegree(v) == 0) { + if (isDisconnectedGraphOK()) disappearing_potential += getCurrentValue(v); + else throw new IllegalArgumentException("Outdegree of " + v + " must be > 0"); } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java index b18b4a08..7551cacb 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/VertexScorer.java @@ -1,7 +1,7 @@ /* * Created on Jul 6, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -11,18 +11,16 @@ */ package edu.uci.ics.jung.algorithms.scoring; - /** * An interface for algorithms that assign scores to vertices. * * @param the vertex type * @param the score type */ -public interface VertexScorer -{ - /** - * @param v the vertex whose score is requested - * @return the algorithm's score for this vertex - */ - public S getVertexScore(V v); +public interface VertexScorer { + /** + * @param v the vertex whose score is requested + * @return the algorithm's score for this vertex + */ + public S getVertexScore(V v); } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java index f312b433..3fe90fc8 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/VoltageScorer.java @@ -1,7 +1,7 @@ /* * Created on Jul 15, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. 
* @@ -11,240 +11,217 @@ */ package edu.uci.ics.jung.algorithms.scoring; +import com.google.common.base.Function; +import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight; +import edu.uci.ics.jung.graph.Hypergraph; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import com.google.common.base.Function; - -import edu.uci.ics.jung.algorithms.scoring.util.UniformDegreeWeight; -import edu.uci.ics.jung.graph.Hypergraph; - /** - * Assigns scores to vertices according to their 'voltage' in an approximate - * solution to the Kirchoff equations. This is accomplished by tying "source" - * vertices to specified positive voltages, "sink" vertices to 0 V, and - * iteratively updating the voltage of each other vertex to the (weighted) - * average of the voltages of its neighbors. - * - *

The resultant voltages will all be in the range [0, max] - * where max is the largest voltage of any source vertex (in the - * absence of negative source voltages; see below). - * - *

A few notes about this algorithm's interpretation of the graph data: + * Assigns scores to vertices according to their 'voltage' in an approximate solution to the + * Kirchoff equations. This is accomplished by tying "source" vertices to specified positive + * voltages, "sink" vertices to 0 V, and iteratively updating the voltage of each other vertex to + * the (weighted) average of the voltages of its neighbors. + * + *

The resultant voltages will all be in the range [0, max] where max + * is the largest voltage of any source vertex (in the absence of negative source voltages; see + * below). + * + *

A few notes about this algorithm's interpretation of the graph data: + * *

    - *
  • Higher edge weights are interpreted as indicative of greater - * influence/effect than lower edge weights. - *
  • Negative edge weights (and negative "source" voltages) invalidate - * the interpretation of the resultant values as voltages. However, this - * algorithm will not reject graphs with negative edge weights or source voltages. - *
  • Parallel edges are equivalent to a single edge whose weight is the - * sum of the weights on the parallel edges. - *
  • Current flows along undirected edges in both directions, - * but only flows along directed edges in the direction of the edge. + *
  • Higher edge weights are interpreted as indicative of greater influence/effect than lower + * edge weights. + *
  • Negative edge weights (and negative "source" voltages) invalidate the interpretation of the + * resultant values as voltages. However, this algorithm will not reject graphs with negative + * edge weights or source voltages. + *
  • Parallel edges are equivalent to a single edge whose weight is the sum of the weights on + * the parallel edges. + *
  • Current flows along undirected edges in both directions, but only flows along directed + * edges in the direction of the edge. *
- * */ public class VoltageScorer extends AbstractIterativeScorer - implements VertexScorer -{ - protected Map source_voltages; - protected Collection sinks; - - /** - * Creates an instance with the specified graph, edge weights, source voltages, - * and sinks. - * @param g the input graph - * @param edge_weights the edge weights, representing conductivity - * @param source_voltages the (fixed) voltage for each source - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Function edge_weights, - Map source_voltages, Collection sinks) - { - super(g, edge_weights); - this.source_voltages = source_voltages; - this.sinks = sinks; - initialize(); - } + implements VertexScorer { + protected Map source_voltages; + protected Collection sinks; - /** - * Creates an instance with the specified graph, edge weights, source vertices - * (each of whose 'voltages' are tied to 1), and sinks. - * @param g the input graph - * @param edge_weights the edge weights, representing conductivity - * @param sources the vertices whose voltages are tied to 1 - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Function edge_weights, - Collection sources, Collection sinks) - { - super(g, edge_weights); - - Map unit_voltages = new HashMap(); - for(V v : sources) - unit_voltages.put(v, new Double(1.0)); - this.source_voltages = unit_voltages; - this.sinks = sinks; - initialize(); - } + /** + * Creates an instance with the specified graph, edge weights, source voltages, and sinks. 
+ * + * @param g the input graph + * @param edge_weights the edge weights, representing conductivity + * @param source_voltages the (fixed) voltage for each source + * @param sinks the vertices whose voltages are tied to 0 + */ + public VoltageScorer( + Hypergraph g, + Function edge_weights, + Map source_voltages, + Collection sinks) { + super(g, edge_weights); + this.source_voltages = source_voltages; + this.sinks = sinks; + initialize(); + } - /** - * Creates an instance with the specified graph, source vertices - * (each of whose 'voltages' are tied to 1), and sinks. - * The outgoing edges for each vertex are assigned - * weights that sum to 1. - * @param g the input graph - * @param sources the vertices whose voltages are tied to 1 - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Collection sources, Collection sinks) - { - super(g); - - Map unit_voltages = new HashMap(); - for(V v : sources) - unit_voltages.put(v, new Double(1.0)); - this.source_voltages = unit_voltages; - this.sinks = sinks; - initialize(); - } - - /** - * Creates an instance with the specified graph, source voltages, - * and sinks. The outgoing edges for each vertex are assigned - * weights that sum to 1. - * @param g the input graph - * @param source_voltages the (fixed) voltage for each source - * @param sinks the vertices whose voltages are tied to 0 - */ - public VoltageScorer(Hypergraph g, Map source_voltages, - Collection sinks) - { - super(g); - this.source_voltages = source_voltages; - this.sinks = sinks; - this.edge_weights = new UniformDegreeWeight(g); - initialize(); + /** + * Creates an instance with the specified graph, edge weights, source vertices (each of whose + * 'voltages' are tied to 1), and sinks. 
+ * + * @param g the input graph + * @param edge_weights the edge weights, representing conductivity + * @param sources the vertices whose voltages are tied to 1 + * @param sinks the vertices whose voltages are tied to 0 + */ + public VoltageScorer( + Hypergraph g, + Function edge_weights, + Collection sources, + Collection sinks) { + super(g, edge_weights); + + Map unit_voltages = new HashMap(); + for (V v : sources) unit_voltages.put(v, new Double(1.0)); + this.source_voltages = unit_voltages; + this.sinks = sinks; + initialize(); + } + + /** + * Creates an instance with the specified graph, source vertices (each of whose 'voltages' are + * tied to 1), and sinks. The outgoing edges for each vertex are assigned weights that sum to 1. + * + * @param g the input graph + * @param sources the vertices whose voltages are tied to 1 + * @param sinks the vertices whose voltages are tied to 0 + */ + public VoltageScorer(Hypergraph g, Collection sources, Collection sinks) { + super(g); + + Map unit_voltages = new HashMap(); + for (V v : sources) unit_voltages.put(v, new Double(1.0)); + this.source_voltages = unit_voltages; + this.sinks = sinks; + initialize(); + } + + /** + * Creates an instance with the specified graph, source voltages, and sinks. The outgoing edges + * for each vertex are assigned weights that sum to 1. + * + * @param g the input graph + * @param source_voltages the (fixed) voltage for each source + * @param sinks the vertices whose voltages are tied to 0 + */ + public VoltageScorer( + Hypergraph g, Map source_voltages, Collection sinks) { + super(g); + this.source_voltages = source_voltages; + this.sinks = sinks; + this.edge_weights = new UniformDegreeWeight(g); + initialize(); + } + + /** + * Creates an instance with the specified graph, edge weights, source, and sink. The source vertex + * voltage is tied to 1. 
+ * + * @param g the input graph + * @param edge_weights the edge weights, representing conductivity + * @param source the vertex whose voltage is tied to 1 + * @param sink the vertex whose voltage is tied to 0 + */ + public VoltageScorer( + Hypergraph g, Function edge_weights, V source, V sink) { + this(g, edge_weights, Collections.singletonMap(source, 1.0), Collections.singletonList(sink)); + initialize(); + } + + /** + * Creates an instance with the specified graph, edge weights, source, and sink. The source vertex + * voltage is tied to 1. The outgoing edges for each vertex are assigned weights that sum to 1. + * + * @param g the input graph + * @param source the vertex whose voltage is tied to 1 + * @param sink the vertex whose voltage is tied to 0 + */ + public VoltageScorer(Hypergraph g, V source, V sink) { + this(g, Collections.singletonMap(source, 1.0), Collections.singletonList(sink)); + initialize(); + } + + /** Initializes the state of this instance. */ + @Override + public void initialize() { + super.initialize(); + + // sanity check + if (source_voltages.isEmpty() || sinks.isEmpty()) + throw new IllegalArgumentException("Both sources and sinks (grounds) must be defined"); + + if (source_voltages.size() + sinks.size() > graph.getVertexCount()) + throw new IllegalArgumentException( + "Source/sink sets overlap, or contain vertices not in graph"); + + for (Map.Entry entry : source_voltages.entrySet()) { + V v = entry.getKey(); + if (sinks.contains(v)) + throw new IllegalArgumentException( + "Vertex " + v + " is incorrectly specified as both source and sink"); + double value = entry.getValue().doubleValue(); + if (value <= 0) + throw new IllegalArgumentException("Source vertex " + v + " has negative voltage"); } - - /** - * Creates an instance with the specified graph, edge weights, source, and - * sink. The source vertex voltage is tied to 1. 
- * @param g the input graph - * @param edge_weights the edge weights, representing conductivity - * @param source the vertex whose voltage is tied to 1 - * @param sink the vertex whose voltage is tied to 0 - */ - public VoltageScorer(Hypergraph g, Function edge_weights, - V source, V sink) - { - this(g, edge_weights, Collections.singletonMap(source, 1.0), Collections.singletonList(sink)); - initialize(); + + // set up initial voltages + for (V v : graph.getVertices()) { + if (source_voltages.containsKey(v)) setOutputValue(v, source_voltages.get(v).doubleValue()); + else setOutputValue(v, 0.0); } + } - /** - * Creates an instance with the specified graph, edge weights, source, and - * sink. The source vertex voltage is tied to 1. - * The outgoing edges for each vertex are assigned - * weights that sum to 1. - * @param g the input graph - * @param source the vertex whose voltage is tied to 1 - * @param sink the vertex whose voltage is tied to 0 - */ - public VoltageScorer(Hypergraph g, V source, V sink) - { - this(g, Collections.singletonMap(source, 1.0), Collections.singletonList(sink)); - initialize(); + /** @see edu.uci.ics.jung.algorithms.scoring.AbstractIterativeScorer#update(Object) */ + @Override + public double update(V v) { + // if it's a voltage source or sink, we're done + Number source_volts = source_voltages.get(v); + if (source_volts != null) { + setOutputValue(v, source_volts.doubleValue()); + return 0.0; + } + if (sinks.contains(v)) { + setOutputValue(v, 0.0); + return 0.0; } - - /** - * Initializes the state of this instance. 
- */ - @Override - public void initialize() - { - super.initialize(); - - // sanity check - if (source_voltages.isEmpty() || sinks.isEmpty()) - throw new IllegalArgumentException("Both sources and sinks (grounds) must be defined"); - - if (source_voltages.size() + sinks.size() > graph.getVertexCount()) - throw new IllegalArgumentException("Source/sink sets overlap, or contain vertices not in graph"); - - for (Map.Entry entry : source_voltages.entrySet()) - { - V v = entry.getKey(); - if (sinks.contains(v)) - throw new IllegalArgumentException("Vertex " + v + " is incorrectly specified as both source and sink"); - double value = entry.getValue().doubleValue(); - if (value <= 0) - throw new IllegalArgumentException("Source vertex " + v + " has negative voltage"); - } - - // set up initial voltages - for (V v : graph.getVertices()) - { - if (source_voltages.containsKey(v)) - setOutputValue(v, source_voltages.get(v).doubleValue()); - else - setOutputValue(v, 0.0); + Collection edges = graph.getInEdges(v); + double voltage_sum = 0; + double weight_sum = 0; + for (E e : edges) { + int incident_count = getAdjustedIncidentCount(e); + for (V w : graph.getIncidentVertices(e)) { + if (!w.equals(v) || hyperedges_are_self_loops) { + double weight = getEdgeWeight(w, e).doubleValue() / incident_count; + voltage_sum += getCurrentValue(w).doubleValue() * weight; + weight_sum += weight; } + } + // V w = graph.getOpposite(v, e); + // double weight = getEdgeWeight(w,e).doubleValue(); + // voltage_sum += getCurrentValue(w).doubleValue() * weight; + // weight_sum += weight; } - - /** - * @see edu.uci.ics.jung.algorithms.scoring.AbstractIterativeScorer#update(Object) - */ - @Override - public double update(V v) - { - // if it's a voltage source or sink, we're done - Number source_volts = source_voltages.get(v); - if (source_volts != null) - { - setOutputValue(v, source_volts.doubleValue()); - return 0.0; - } - if (sinks.contains(v)) - { - setOutputValue(v, 0.0); - return 0.0; - } - - 
Collection edges = graph.getInEdges(v); - double voltage_sum = 0; - double weight_sum = 0; - for (E e: edges) - { - int incident_count = getAdjustedIncidentCount(e); - for (V w : graph.getIncidentVertices(e)) - { - if (!w.equals(v) || hyperedges_are_self_loops) - { - double weight = getEdgeWeight(w,e).doubleValue() / incident_count; - voltage_sum += getCurrentValue(w).doubleValue() * weight; - weight_sum += weight; - } - } -// V w = graph.getOpposite(v, e); -// double weight = getEdgeWeight(w,e).doubleValue(); -// voltage_sum += getCurrentValue(w).doubleValue() * weight; -// weight_sum += weight; - } - // if either is 0, new value is 0 - if (voltage_sum == 0 || weight_sum == 0) - { - setOutputValue(v, 0.0); - return getCurrentValue(v).doubleValue(); - } - - setOutputValue(v, voltage_sum / weight_sum); - return Math.abs(getCurrentValue(v).doubleValue() - voltage_sum / weight_sum); + // if either is 0, new value is 0 + if (voltage_sum == 0 || weight_sum == 0) { + setOutputValue(v, 0.0); + return getCurrentValue(v).doubleValue(); } + setOutputValue(v, voltage_sum / weight_sum); + return Math.abs(getCurrentValue(v).doubleValue() - voltage_sum / weight_sum); + } } - diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java index b31db4c6..85ad2c65 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/DelegateToEdgeTransformer.java @@ -1,7 +1,7 @@ /* * Created on Jul 11, 2008 * - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * * All rights reserved. * @@ -14,35 +14,26 @@ import com.google.common.base.Function; /** - * A {@code Transformer}. 
Mainly useful for technical reasons inside - * AbstractIterativeScorer; in essence it allows the edge weight instance - * variable to be of type VEPair,W even if the edge weight - * Transformer only operates on edges. + * A {@code Transformer}. Mainly useful for technical reasons inside AbstractIterativeScorer; in + * essence it allows the edge weight instance variable to be of type VEPair,W even if + * the edge weight Transformer only operates on edges. */ -public class DelegateToEdgeTransformer implements - Function,Number> -{ - /** - * The Function to which this instance delegates its function. - */ - protected Function delegate; - - /** - * Creates an instance with the specified delegate Function. - * @param delegate the Function to which this instance will delegate - */ - public DelegateToEdgeTransformer(Function delegate) - { - this.delegate = delegate; - } - - /** - * @see Function#apply(Object) - */ - public Number apply(VEPair arg0) - { - return delegate.apply(arg0.getE()); - } +public class DelegateToEdgeTransformer implements Function, Number> { + /** The Function to which this instance delegates its function. */ + protected Function delegate; + /** + * Creates an instance with the specified delegate Function. 
+ * + * @param delegate the Function to which this instance will delegate + */ + public DelegateToEdgeTransformer(Function delegate) { + this.delegate = delegate; + } + + /** @see Function#apply(Object) */ + public Number apply(VEPair arg0) { + return delegate.apply(arg0.getE()); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java index ed8c33e1..259ed9ca 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/ScoringUtils.java @@ -1,7 +1,7 @@ /* * Created on Jul 12, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -11,62 +11,57 @@ */ package edu.uci.ics.jung.algorithms.scoring.util; -import java.util.Collection; - import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.scoring.HITS; +import java.util.Collection; /** - * Methods for assigning values (to be interpreted as prior probabilities) to vertices in the context - * of random-walk-based scoring algorithms. + * Methods for assigning values (to be interpreted as prior probabilities) to vertices in the + * context of random-walk-based scoring algorithms. */ -public class ScoringUtils -{ - /** - * Assigns a probability of 1/roots.size() to each of the elements of roots. 
- * @param the vertex type - * @param roots the vertices to be assigned nonzero prior probabilities - * @return a Function assigning a uniform prior to each element in {@code roots} - */ - public static Function getUniformRootPrior(Collection roots) - { - final Collection inner_roots = roots; - Function distribution = new Function() - { - public Double apply(V input) - { - if (inner_roots.contains(input)) - return new Double(1.0 / inner_roots.size()); - else - return 0.0; +public class ScoringUtils { + /** + * Assigns a probability of 1/roots.size() to each of the elements of roots + * . + * + * @param the vertex type + * @param roots the vertices to be assigned nonzero prior probabilities + * @return a Function assigning a uniform prior to each element in {@code roots} + */ + public static Function getUniformRootPrior(Collection roots) { + final Collection inner_roots = roots; + Function distribution = + new Function() { + public Double apply(V input) { + if (inner_roots.contains(input)) { + return new Double(1.0 / inner_roots.size()); + } else { + return 0.0; } + } }; - - return distribution; - } - - /** - * Returns a Function that hub and authority values of 1/roots.size() to each - * element of roots. - * @param the vertex type - * @param roots the vertices to be assigned nonzero scores - * @return a Function that assigns uniform prior hub/authority probabilities to each root - */ - public static Function getHITSUniformRootPrior(Collection roots) - { - final Collection inner_roots = roots; - Function distribution = - new Function() - { - public HITS.Scores apply(V input) - { - if (inner_roots.contains(input)) - return new HITS.Scores(1.0 / inner_roots.size(), 1.0 / inner_roots.size()); - else - return new HITS.Scores(0.0, 0.0); - } + + return distribution; + } + + /** + * Returns a Function that hub and authority values of 1/roots.size() to each element + * of roots. 
+ * + * @param the vertex type + * @param roots the vertices to be assigned nonzero scores + * @return a Function that assigns uniform prior hub/authority probabilities to each root + */ + public static Function getHITSUniformRootPrior(Collection roots) { + final Collection inner_roots = roots; + Function distribution = + new Function() { + public HITS.Scores apply(V input) { + if (inner_roots.contains(input)) + return new HITS.Scores(1.0 / inner_roots.size(), 1.0 / inner_roots.size()); + else return new HITS.Scores(0.0, 0.0); + } }; - return distribution; - } + return distribution; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java index fd1f5270..c768253d 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformDegreeWeight.java @@ -1,55 +1,42 @@ /** - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * - * All rights reserved. + *

All rights reserved. * - * This software is open-source under the BSD license; see either - * "license.txt" or - * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * Created on Jul 14, 2008 - * + *

This software is open-source under the BSD license; see either "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. Created on Jul 14, 2008 */ package edu.uci.ics.jung.algorithms.scoring.util; import com.google.common.base.Function; - import edu.uci.ics.jung.graph.Hypergraph; import edu.uci.ics.jung.graph.util.EdgeType; /** - * An edge weight function that assigns weights as uniform - * transition probabilities. - * For undirected edges, returns 1/degree(v) (where 'v' is the - * vertex in the VEPair. - * For directed edges, returns 1/outdegree(source(e)) (where 'e' - * is the edge in the VEPair). - * Throws an IllegalArgumentException if the input - * edge is neither EdgeType.UNDIRECTED nor EdgeType.DIRECTED. - * + * An edge weight function that assigns weights as uniform transition probabilities. For undirected + * edges, returns 1/degree(v) (where 'v' is the vertex in the VEPair. For directed edges, returns + * 1/outdegree(source(e)) (where 'e' is the edge in the VEPair). Throws an + * IllegalArgumentException if the input edge is neither EdgeType.UNDIRECTED nor + * EdgeType.DIRECTED. 
*/ -public class UniformDegreeWeight implements - Function, Double> -{ - private Hypergraph graph; - - /** - * @param graph the graph for which an instance is being created - */ - public UniformDegreeWeight(Hypergraph graph) - { - this.graph = graph; - } +public class UniformDegreeWeight implements Function, Double> { + private Hypergraph graph; - public Double apply(VEPair ve_pair) - { - E e = ve_pair.getE(); - V v = ve_pair.getV(); - EdgeType edge_type = graph.getEdgeType(e); - if (edge_type == EdgeType.UNDIRECTED) - return 1.0 / graph.degree(v); - if (edge_type == EdgeType.DIRECTED) - return 1.0 / graph.outDegree(graph.getSource(e)); - throw new IllegalArgumentException("can't handle edge type: " + edge_type); - } + /** @param graph the graph for which an instance is being created */ + public UniformDegreeWeight(Hypergraph graph) { + this.graph = graph; + } + public Double apply(VEPair ve_pair) { + E e = ve_pair.getE(); + V v = ve_pair.getV(); + EdgeType edge_type = graph.getEdgeType(e); + if (edge_type == EdgeType.UNDIRECTED) { + return 1.0 / graph.degree(v); + } + if (edge_type == EdgeType.DIRECTED) { + return 1.0 / graph.outDegree(graph.getSource(e)); + } + throw new IllegalArgumentException("can't handle edge type: " + edge_type); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java index 9201c60e..e5755d19 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/UniformInOut.java @@ -1,7 +1,7 @@ /* * Created on Jul 11, 2008 * - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * * All rights reserved. 
* @@ -12,42 +12,33 @@ package edu.uci.ics.jung.algorithms.scoring.util; import com.google.common.base.Function; - import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.EdgeType; /** - * Assigns weights to directed edges (the edge of the vertex/edge pair) depending on - * whether the vertex is the edge's source or its destination. - * If the vertex v is the edge's source, assigns 1/outdegree(v). - * Otherwise, assigns 1/indegree(w). - * Throws IllegalArgumentException if the edge is not directed. + * Assigns weights to directed edges (the edge of the vertex/edge pair) depending on whether the + * vertex is the edge's source or its destination. If the vertex v is the edge's source, assigns + * 1/outdegree(v). Otherwise, assigns 1/indegree(w). Throws IllegalArgumentException if + * the edge is not directed. */ -public class UniformInOut implements Function, Double> -{ - /** - * The graph for which the edge weights are defined. - */ - protected Graph graph; - - /** - * Creates an instance for the specified graph. - * @param graph the graph for which the edge weights will be defined - */ - public UniformInOut(Graph graph) - { - this.graph = graph; - } - - public Double apply(VEPair ve_pair) - { - V v = ve_pair.getV(); - E e = ve_pair.getE(); - if (graph.getEdgeType(e) != EdgeType.DIRECTED) - throw new IllegalArgumentException("This Function only" + - " operates on directed edges"); - return 1.0 / (graph.isSource(v, e) ? - graph.outDegree(v) : - graph.inDegree(v)); - } +public class UniformInOut implements Function, Double> { + /** The graph for which the edge weights are defined. */ + protected Graph graph; + + /** + * Creates an instance for the specified graph. 
+ * + * @param graph the graph for which the edge weights will be defined + */ + public UniformInOut(Graph graph) { + this.graph = graph; + } + + public Double apply(VEPair ve_pair) { + V v = ve_pair.getV(); + E e = ve_pair.getE(); + if (graph.getEdgeType(e) != EdgeType.DIRECTED) + throw new IllegalArgumentException("This Function only" + " operates on directed edges"); + return 1.0 / (graph.isSource(v, e) ? graph.outDegree(v) : graph.inDegree(v)); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java index b1f2ee26..2e9a6bbb 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VEPair.java @@ -1,7 +1,7 @@ /* * Created on Jul 8, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -12,45 +12,37 @@ package edu.uci.ics.jung.algorithms.scoring.util; /** - * Convenience class for associating a vertex and an edge. Used, for example, - * in contexts in which it is necessary to know the origin for an edge traversal - * (that is, the direction in which an (undirected) edge is being traversed). + * Convenience class for associating a vertex and an edge. Used, for example, in contexts in which + * it is necessary to know the origin for an edge traversal (that is, the direction in which an + * (undirected) edge is being traversed). 
* * @param the vertex type * @param the edge type */ -public class VEPair -{ - private V v; - private E e; - - /** - * Creates an instance with the specified vertex and edge - * @param v the vertex to add - * @param e the edge to add - */ - public VEPair(V v, E e) - { - if (v == null || e == null) - throw new IllegalArgumentException("elements must be non-null"); - - this.v = v; - this.e = e; - } - - /** - * @return the vertex of this pair - */ - public V getV() - { - return v; - } - - /** - * @return the edge of this pair - */ - public E getE() - { - return e; - } +public class VEPair { + private V v; + private E e; + + /** + * Creates an instance with the specified vertex and edge + * + * @param v the vertex to add + * @param e the edge to add + */ + public VEPair(V v, E e) { + if (v == null || e == null) throw new IllegalArgumentException("elements must be non-null"); + + this.v = v; + this.e = e; + } + + /** @return the vertex of this pair */ + public V getV() { + return v; + } + + /** @return the edge of this pair */ + public E getE() { + return e; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java index c6bc9952..182be29e 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/scoring/util/VertexScoreTransformer.java @@ -1,7 +1,7 @@ /* * Created on Jul 18, 2008 * - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * * All rights reserved. * @@ -12,35 +12,27 @@ package edu.uci.ics.jung.algorithms.scoring.util; import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.scoring.VertexScorer; -/** - * A Function convenience wrapper around VertexScorer. 
- */ -public class VertexScoreTransformer implements Function -{ - /** - * The VertexScorer instance that provides the values returned by transform. - */ - protected VertexScorer vs; - - /** - * Creates an instance based on the specified VertexScorer. - * @param vs the VertexScorer which will retrieve the score for each vertex - */ - public VertexScoreTransformer(VertexScorer vs) - { - this.vs = vs; - } +/** A Function convenience wrapper around VertexScorer. */ +public class VertexScoreTransformer implements Function { + /** The VertexScorer instance that provides the values returned by transform. */ + protected VertexScorer vs; - /** - * @param v the vertex whose score is being returned - * @return the score for this vertex. - */ - public S apply(V v) - { - return vs.getVertexScore(v); - } + /** + * Creates an instance based on the specified VertexScorer. + * + * @param vs the VertexScorer which will retrieve the score for each vertex + */ + public VertexScoreTransformer(VertexScorer vs) { + this.vs = vs; + } + /** + * @param v the vertex whose score is being returned + * @return the score for this vertex. + */ + public S apply(V v) { + return vs.getVertexScore(v); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java index dbfe7503..d1e7686e 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/BFSDistanceLabeler.java @@ -1,15 +1,15 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. 
+ * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.shortestpath; - +import edu.uci.ics.jung.graph.Hypergraph; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -18,153 +18,162 @@ import java.util.Map; import java.util.Set; -import edu.uci.ics.jung.graph.Hypergraph; - /** - * Labels each node in the graph according to the BFS distance from the start node(s). If nodes are unreachable, then - * they are assigned a distance of -1. - * All nodes traversed at step k are marked as predecessors of their successors traversed at step k+1. - *

- * Running time is: O(m) + * Labels each node in the graph according to the BFS distance from the start node(s). If nodes are + * unreachable, then they are assigned a distance of -1. All nodes traversed at step k are marked as + * predecessors of their successors traversed at step k+1. + * + *

Running time is: O(m) + * * @author Scott White */ public class BFSDistanceLabeler { - private Map distanceDecorator = new HashMap(); - private List mCurrentList; - private Set mUnvisitedVertices; - private List mVerticesInOrderVisited; - private Map> mPredecessorMap; - - /** - * Creates a new BFS labeler for the specified graph and root set - * The distances are stored in the corresponding Vertex objects and are of type MutableInteger - */ - public BFSDistanceLabeler() { - mPredecessorMap = new HashMap>(); - } - - /** - * Returns the list of vertices visited in order of traversal - * @return the list of vertices - */ - public List getVerticesInOrderVisited() { - return mVerticesInOrderVisited; - } - - /** - * Returns the set of all vertices that were not visited - * @return the list of unvisited vertices - */ - public Set getUnvisitedVertices() { - return mUnvisitedVertices; + private Map distanceDecorator = new HashMap(); + private List mCurrentList; + private Set mUnvisitedVertices; + private List mVerticesInOrderVisited; + private Map> mPredecessorMap; + + /** + * Creates a new BFS labeler for the specified graph and root set The distances are stored in the + * corresponding Vertex objects and are of type MutableInteger + */ + public BFSDistanceLabeler() { + mPredecessorMap = new HashMap>(); + } + + /** + * Returns the list of vertices visited in order of traversal + * + * @return the list of vertices + */ + public List getVerticesInOrderVisited() { + return mVerticesInOrderVisited; + } + + /** + * Returns the set of all vertices that were not visited + * + * @return the list of unvisited vertices + */ + public Set getUnvisitedVertices() { + return mUnvisitedVertices; + } + + /** + * Given a vertex, returns the shortest distance from any node in the root set to v + * + * @param g the graph in which the distances are to be measured + * @param v the vertex whose distance is to be retrieved + * @return the shortest distance from any node in the root set to v + */ 
+ public int getDistance(Hypergraph g, V v) { + if (!g.getVertices().contains(v)) { + throw new IllegalArgumentException("Vertex is not contained in the graph."); } - /** - * Given a vertex, returns the shortest distance from any node in the root set to v - * @param g the graph in which the distances are to be measured - * @param v the vertex whose distance is to be retrieved - * @return the shortest distance from any node in the root set to v - */ - public int getDistance(Hypergraph g, V v) { - if (!g.getVertices().contains(v)) { - throw new IllegalArgumentException("Vertex is not contained in the graph."); - } - - return distanceDecorator.get(v).intValue(); + return distanceDecorator.get(v).intValue(); + } + + /** + * Returns set of predecessors of the given vertex + * + * @param v the vertex whose predecessors are to be retrieved + * @return the set of predecessors + */ + public Set getPredecessors(V v) { + return mPredecessorMap.get(v); + } + + protected void initialize(Hypergraph g, Set rootSet) { + mVerticesInOrderVisited = new ArrayList(); + mUnvisitedVertices = new HashSet(); + for (V currentVertex : g.getVertices()) { + mUnvisitedVertices.add(currentVertex); + mPredecessorMap.put(currentVertex, new HashSet()); } - /** - * Returns set of predecessors of the given vertex - * @param v the vertex whose predecessors are to be retrieved - * @return the set of predecessors - */ - public Set getPredecessors(V v) { - return mPredecessorMap.get(v); + mCurrentList = new ArrayList(); + for (V v : rootSet) { + distanceDecorator.put(v, new Integer(0)); + mCurrentList.add(v); + mUnvisitedVertices.remove(v); + mVerticesInOrderVisited.add(v); } - - protected void initialize(Hypergraph g, Set rootSet) { - mVerticesInOrderVisited = new ArrayList(); - mUnvisitedVertices = new HashSet(); - for(V currentVertex : g.getVertices()) { - mUnvisitedVertices.add(currentVertex); - mPredecessorMap.put(currentVertex,new HashSet()); - } - - mCurrentList = new ArrayList(); - for(V v : 
rootSet) { - distanceDecorator.put(v, new Integer(0)); - mCurrentList.add(v); - mUnvisitedVertices.remove(v); - mVerticesInOrderVisited.add(v); + } + + private void addPredecessor(V predecessor, V sucessor) { + HashSet predecessors = mPredecessorMap.get(sucessor); + predecessors.add(predecessor); + } + + /** + * Computes the distances of all the node from the starting root nodes. If there is more than one + * root node the minimum distance from each root node is used as the designated distance to a + * given node. Also keeps track of the predecessors of each node traversed as well as the order of + * nodes traversed. + * + * @param graph the graph to label + * @param rootSet the set of starting vertices to traverse from + */ + public void labelDistances(Hypergraph graph, Set rootSet) { + + initialize(graph, rootSet); + + int distance = 1; + while (true) { + List newList = new ArrayList(); + for (V currentVertex : mCurrentList) { + if (graph.containsVertex(currentVertex)) { + for (V next : graph.getSuccessors(currentVertex)) { + visitNewVertex(currentVertex, next, distance, newList); + } } + } + if (newList.size() == 0) { + break; + } + mCurrentList = newList; + distance++; } - private void addPredecessor(V predecessor,V sucessor) { - HashSet predecessors = mPredecessorMap.get(sucessor); - predecessors.add(predecessor); + for (V v : mUnvisitedVertices) { + distanceDecorator.put(v, new Integer(-1)); } - - /** - * Computes the distances of all the node from the starting root nodes. If there is more than one root node - * the minimum distance from each root node is used as the designated distance to a given node. Also keeps track - * of the predecessors of each node traversed as well as the order of nodes traversed. 
- * @param graph the graph to label - * @param rootSet the set of starting vertices to traverse from - */ - public void labelDistances(Hypergraph graph, Set rootSet) { - - initialize(graph,rootSet); - - int distance = 1; - while (true) { - List newList = new ArrayList(); - for(V currentVertex : mCurrentList) { - if(graph.containsVertex(currentVertex)) { - for(V next : graph.getSuccessors(currentVertex)) { - visitNewVertex(currentVertex,next, distance, newList); - } - } - } - if (newList.size() == 0) break; - mCurrentList = newList; - distance++; - } - - for(V v : mUnvisitedVertices) { - distanceDecorator.put(v,new Integer(-1)); - } - } - - /** - * Computes the distances of all the node from the specified root node. Also keeps track - * of the predecessors of each node traversed as well as the order of nodes traversed. - * @param graph the graph to label - * @param root the single starting vertex to traverse from - */ - public void labelDistances(Hypergraph graph, V root) { - labelDistances(graph, Collections.singleton(root)); + } + + /** + * Computes the distances of all the node from the specified root node. Also keeps track of the + * predecessors of each node traversed as well as the order of nodes traversed. 
+ * + * @param graph the graph to label + * @param root the single starting vertex to traverse from + */ + public void labelDistances(Hypergraph graph, V root) { + labelDistances(graph, Collections.singleton(root)); + } + + private void visitNewVertex(V predecessor, V neighbor, int distance, List newList) { + if (mUnvisitedVertices.contains(neighbor)) { + distanceDecorator.put(neighbor, new Integer(distance)); + newList.add(neighbor); + mVerticesInOrderVisited.add(neighbor); + mUnvisitedVertices.remove(neighbor); } - - private void visitNewVertex(V predecessor, V neighbor, int distance, List newList) { - if (mUnvisitedVertices.contains(neighbor)) { - distanceDecorator.put(neighbor, new Integer(distance)); - newList.add(neighbor); - mVerticesInOrderVisited.add(neighbor); - mUnvisitedVertices.remove(neighbor); - } - int predecessorDistance = distanceDecorator.get(predecessor).intValue(); - int successorDistance = distanceDecorator.get(neighbor).intValue(); - if (predecessorDistance < successorDistance) { - addPredecessor(predecessor,neighbor); - } - } - - /** - * Must be called after {@code labelDistances} in order to contain valid data. - * @return a map from vertices to minimum distances from the original source(s) - */ - public Map getDistanceDecorator() { - return distanceDecorator; + int predecessorDistance = distanceDecorator.get(predecessor).intValue(); + int successorDistance = distanceDecorator.get(neighbor).intValue(); + if (predecessorDistance < successorDistance) { + addPredecessor(predecessor, neighbor); } + } + + /** + * Must be called after {@code labelDistances} in order to contain valid data. 
+ * + * @return a map from vertices to minimum distances from the original source(s) + */ + public Map getDistanceDecorator() { + return distanceDecorator; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java index 01509d22..26c62c9a 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraDistance.java @@ -1,7 +1,7 @@ /* * Created on Jul 9, 2005 * - * Copyright (c) 2005, The JUNG Authors + * Copyright (c) 2005, The JUNG Authors * * All rights reserved. * @@ -11,6 +11,12 @@ */ package edu.uci.ics.jung.algorithms.shortestpath; +import com.google.common.base.Function; +import com.google.common.base.Functions; +import edu.uci.ics.jung.algorithms.util.BasicMapEntry; +import edu.uci.ics.jung.algorithms.util.MapBinaryHeap; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.Hypergraph; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; @@ -19,527 +25,447 @@ import java.util.Map; import java.util.Set; -import com.google.common.base.Function; -import com.google.common.base.Functions; - -import edu.uci.ics.jung.algorithms.util.BasicMapEntry; -import edu.uci.ics.jung.algorithms.util.MapBinaryHeap; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.Hypergraph; - /** - *

Calculates distances in a specified graph, using - * Dijkstra's single-source-shortest-path algorithm. All edge weights - * in the graph must be nonnegative; if any edge with negative weight is - * found in the course of calculating distances, an - * IllegalArgumentException will be thrown. - * (Note: this exception will only be thrown when such an edge would be - * used to update a given tentative distance; - * the algorithm does not check for negative-weight edges "up front".) - * - *

Distances and partial results are optionally cached (by this instance) - * for later reference. Thus, if the 10 closest vertices to a specified source - * vertex are known, calculating the 20 closest vertices does not require - * starting Dijkstra's algorithm over from scratch. - * - *

Distances are stored as double-precision values. - * If a vertex is not reachable from the specified source vertex, no - * distance is stored. This is new behavior with version 1.4; - * the previous behavior was to store a value of - * Double.POSITIVE_INFINITY. This change gives the algorithm - * an approximate complexity of O(kD log k), where k is either the number of - * requested targets or the number of reachable vertices (whichever is smaller), - * and D is the average degree of a vertex. - * - *

The elements in the maps returned by getDistanceMap - * are ordered (that is, returned - * by the iterator) by nondecreasing distance from source. - * - *

Users are cautioned that distances calculated should be assumed to - * be invalidated by changes to the graph, and should invoke reset() - * when appropriate so that the distances can be recalculated. - * + * Calculates distances in a specified graph, using Dijkstra's single-source-shortest-path + * algorithm. All edge weights in the graph must be nonnegative; if any edge with negative weight is + * found in the course of calculating distances, an IllegalArgumentException will be + * thrown. (Note: this exception will only be thrown when such an edge would be used to update a + * given tentative distance; the algorithm does not check for negative-weight edges "up front".) + * + *

Distances and partial results are optionally cached (by this instance) for later reference. + * Thus, if the 10 closest vertices to a specified source vertex are known, calculating the 20 + * closest vertices does not require starting Dijkstra's algorithm over from scratch. + * + *

Distances are stored as double-precision values. If a vertex is not reachable from the + * specified source vertex, no distance is stored. This is new behavior with version 1.4; the + * previous behavior was to store a value of Double.POSITIVE_INFINITY. This change + * gives the algorithm an approximate complexity of O(kD log k), where k is either the number of + * requested targets or the number of reachable vertices (whichever is smaller), and D is the + * average degree of a vertex. + * + *

The elements in the maps returned by getDistanceMap are ordered (that is, + * returned by the iterator) by nondecreasing distance from source. + * + *

Users are cautioned that distances calculated should be assumed to be invalidated by changes + * to the graph, and should invoke reset() when appropriate so that the distances can + * be recalculated. + * * @author Joshua O'Madadhain * @author Tom Nelson converted to jung2 */ -public class DijkstraDistance implements Distance -{ - protected Hypergraph g; - protected Function nev; - protected Map sourceMap; // a map of source vertices to an instance of SourceData - protected boolean cached; - protected double max_distance; - protected int max_targets; - - /** - *

Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally if and only if - * cached is true. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - * @param cached specifies whether the results are to be cached - */ - public DijkstraDistance(Hypergraph g, Function nev, boolean cached) { - this.g = g; - this.nev = nev; - this.sourceMap = new HashMap(); - this.cached = cached; - this.max_distance = Double.POSITIVE_INFINITY; - this.max_targets = Integer.MAX_VALUE; - } - - /** - *

Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - */ - public DijkstraDistance(Hypergraph g, Function nev) { - this(g, nev, true); - } - - /** - *

Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - */ - public DijkstraDistance(Graph g) { - this(g, Functions.constant(1), true); +public class DijkstraDistance implements Distance { + protected Hypergraph g; + protected Function nev; + protected Map sourceMap; // a map of source vertices to an instance of SourceData + protected boolean cached; + protected double max_distance; + protected int max_targets; + + /** + * Creates an instance of DijkstraShortestPath for the specified graph and the + * specified method of extracting weights from edges, which caches results locally if and only if + * cached is true. + * + * @param g the graph on which distances will be calculated + * @param nev the class responsible for returning weights for edges + * @param cached specifies whether the results are to be cached + */ + public DijkstraDistance( + Hypergraph g, Function nev, boolean cached) { + this.g = g; + this.nev = nev; + this.sourceMap = new HashMap(); + this.cached = cached; + this.max_distance = Double.POSITIVE_INFINITY; + this.max_targets = Integer.MAX_VALUE; + } + + /** + * Creates an instance of DijkstraShortestPath for the specified graph and the + * specified method of extracting weights from edges, which caches results locally. + * + * @param g the graph on which distances will be calculated + * @param nev the class responsible for returning weights for edges + */ + public DijkstraDistance(Hypergraph g, Function nev) { + this(g, nev, true); + } + + /** + * Creates an instance of DijkstraShortestPath for the specified unweighted graph + * (that is, all weights 1) which caches results locally. 
+ * + * @param g the graph on which distances will be calculated + */ + public DijkstraDistance(Graph g) { + this(g, Functions.constant(1), true); + } + + /** + * Creates an instance of DijkstraShortestPath for the specified unweighted graph + * (that is, all weights 1) which caches results locally. + * + * @param g the graph on which distances will be calculated + * @param cached specifies whether the results are to be cached + */ + public DijkstraDistance(Graph g, boolean cached) { + this(g, Functions.constant(1), cached); + } + + /** + * Implements Dijkstra's single-source shortest-path algorithm for weighted graphs. Uses a + * MapBinaryHeap as the priority queue, which gives this algorithm a time complexity of O(m + * lg n) (m = # of edges, n = # of vertices). This algorithm will terminate when any of the + * following have occurred (in order of priority): + * + *

    + *
  • the distance to the specified target (if any) has been found + *
  • no more vertices are reachable + *
  • the specified # of distances have been found, or the maximum distance desired has been + * exceeded + *
  • all distances have been found + *
+ * + * @param source the vertex from which distances are to be measured + * @param numDests the number of distances to measure + * @param targets the set of vertices to which distances are to be measured + * @return a mapping from vertex to the shortest distance from the source to each target + */ + protected LinkedHashMap singleSourceShortestPath( + V source, Collection targets, int numDests) { + SourceData sd = getSourceData(source); + + Set to_get = new HashSet(); + if (targets != null) { + to_get.addAll(targets); + Set existing_dists = sd.distances.keySet(); + for (V o : targets) { + if (existing_dists.contains(o)) to_get.remove(o); + } } - /** - *

Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - * @param cached specifies whether the results are to be cached - */ - public DijkstraDistance(Graph g, boolean cached) { - this(g, Functions.constant(1), cached); + // if we've exceeded the max distance or max # of distances we're willing to calculate, or + // if we already have all the distances we need, + // terminate + if (sd.reached_max + || (targets != null && to_get.isEmpty()) + || (sd.distances.size() >= numDests)) { + return sd.distances; } - - /** - * Implements Dijkstra's single-source shortest-path algorithm for - * weighted graphs. Uses a MapBinaryHeap as the priority queue, - * which gives this algorithm a time complexity of O(m lg n) (m = # of edges, n = - * # of vertices). - * This algorithm will terminate when any of the following have occurred (in order - * of priority): - *

    - *
  • the distance to the specified target (if any) has been found - *
  • no more vertices are reachable - *
  • the specified # of distances have been found, or the maximum distance - * desired has been exceeded - *
  • all distances have been found - *
- * - * @param source the vertex from which distances are to be measured - * @param numDests the number of distances to measure - * @param targets the set of vertices to which distances are to be measured - * @return a mapping from vertex to the shortest distance from the source to each target - */ - protected LinkedHashMap singleSourceShortestPath(V source, Collection targets, int numDests) - { - SourceData sd = getSourceData(source); - - Set to_get = new HashSet(); - if (targets != null) { - to_get.addAll(targets); - Set existing_dists = sd.distances.keySet(); - for(V o : targets) { - if (existing_dists.contains(o)) - to_get.remove(o); - } - } - - // if we've exceeded the max distance or max # of distances we're willing to calculate, or - // if we already have all the distances we need, - // terminate - if (sd.reached_max || - (targets != null && to_get.isEmpty()) || - (sd.distances.size() >= numDests)) - { - return sd.distances; - } - - while (!sd.unknownVertices.isEmpty() && (sd.distances.size() < numDests || !to_get.isEmpty())) - { - Map.Entry p = sd.getNextVertex(); - V v = p.getKey(); - double v_dist = p.getValue().doubleValue(); - to_get.remove(v); - if (v_dist > this.max_distance) - { - // we're done; put this vertex back in so that we're not including - // a distance beyond what we specified - sd.restoreVertex(v, v_dist); - sd.reached_max = true; - break; - } - sd.dist_reached = v_dist; - if (sd.distances.size() >= this.max_targets) - { - sd.reached_max = true; - break; - } - - for (E e : getEdgesToCheck(v) ) - { - for (V w : g.getIncidentVertices(e)) - { - if (!sd.distances.containsKey(w)) - { - double edge_weight = nev.apply(e).doubleValue(); - if (edge_weight < 0) - throw new IllegalArgumentException("Edges weights must be non-negative"); - double new_dist = v_dist + edge_weight; - if (!sd.estimatedDistances.containsKey(w)) - { - sd.createRecord(w, e, new_dist); - } - else - { - double w_dist = ((Double)sd.estimatedDistances.get(w)).doubleValue(); - if 
(new_dist < w_dist) // update tentative distance & path for w - sd.update(w, e, new_dist); - } - } - } + while (!sd.unknownVertices.isEmpty() && (sd.distances.size() < numDests || !to_get.isEmpty())) { + Map.Entry p = sd.getNextVertex(); + V v = p.getKey(); + double v_dist = p.getValue().doubleValue(); + to_get.remove(v); + if (v_dist > this.max_distance) { + // we're done; put this vertex back in so that we're not including + // a distance beyond what we specified + sd.restoreVertex(v, v_dist); + sd.reached_max = true; + break; + } + sd.dist_reached = v_dist; + + if (sd.distances.size() >= this.max_targets) { + sd.reached_max = true; + break; + } + + for (E e : getEdgesToCheck(v)) { + for (V w : g.getIncidentVertices(e)) { + if (!sd.distances.containsKey(w)) { + double edge_weight = nev.apply(e).doubleValue(); + if (edge_weight < 0) + throw new IllegalArgumentException("Edges weights must be non-negative"); + double new_dist = v_dist + edge_weight; + if (!sd.estimatedDistances.containsKey(w)) { + sd.createRecord(w, e, new_dist); + } else { + double w_dist = ((Double) sd.estimatedDistances.get(w)).doubleValue(); + if (new_dist < w_dist) // update tentative distance & path for w + sd.update(w, e, new_dist); } + } } - return sd.distances; + } } + return sd.distances; + } - protected SourceData getSourceData(V source) - { - SourceData sd = sourceMap.get(source); - if (sd == null) - sd = new SourceData(source); - return sd; - } - - /** - * Returns the set of edges incident to v that should be tested. - * By default, this is the set of outgoing edges for instances of Graph, - * the set of incident edges for instances of Hypergraph, - * and is otherwise undefined. 
- * @param v the vertex whose edges are to be checked - * @return the set of edges incident to {@code v} that should be tested - */ - protected Collection getEdgesToCheck(V v) - { - if (g instanceof Graph) - return ((Graph)g).getOutEdges(v); - else - return g.getIncidentEdges(v); + protected SourceData getSourceData(V source) { + SourceData sd = sourceMap.get(source); + if (sd == null) sd = new SourceData(source); + return sd; + } + /** + * Returns the set of edges incident to v that should be tested. By default, this is + * the set of outgoing edges for instances of Graph, the set of incident edges for + * instances of Hypergraph, and is otherwise undefined. + * + * @param v the vertex whose edges are to be checked + * @return the set of edges incident to {@code v} that should be tested + */ + protected Collection getEdgesToCheck(V v) { + if (g instanceof Graph) { + return ((Graph) g).getOutEdges(v); + } else { + return g.getIncidentEdges(v); } + } - - /** - * Returns the length of a shortest path from the source to the target vertex, - * or null if the target is not reachable from the source. - * If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException. 
- * - * @param source the vertex from which the distance to {@code target} is to be measured - * @param target the vertex to which the distance from {@code source} is to be measured - * @return the distance between {@code source} and {@code target} - * - * @see #getDistanceMap(Object) - * @see #getDistanceMap(Object,int) - */ - public Number getDistance(V source, V target) - { - if (g.containsVertex(target) == false) - throw new IllegalArgumentException("Specified target vertex " + - target + " is not part of graph " + g); - if (g.containsVertex(source) == false) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - Set targets = new HashSet(); - targets.add(target); - Map distanceMap = getDistanceMap(source, targets); - return distanceMap.get(target); - } - - - /** - * Returns a {@code Map} from each element {@code t} of {@code targets} to the - * shortest-path distance from {@code source} to {@code t}. - * @param source the vertex from which the distance to each target is to be measured - * @param targets the vertices to which the distance from the source is to be measured - * @return {@code Map} from each element of {@code targets} to its distance from {@code source} - */ - public Map getDistanceMap(V source, Collection targets) - { - if (g.containsVertex(source) == false) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - if (targets.size() > max_targets) - throw new IllegalArgumentException("size of target set exceeds maximum " + - "number of targets allowed: " + this.max_targets); - - Map distanceMap = - singleSourceShortestPath(source, targets, - Math.min(g.getVertexCount(), max_targets)); - if (!cached) - reset(source); - - return distanceMap; - } - - /** - *

Returns a LinkedHashMap which maps each vertex - * in the graph (including the source vertex) - * to its distance from the source vertex. - * The map's iterator will return the elements in order of - * increasing distance from source. - * - *

The size of the map returned will be the number of - * vertices reachable from source. - * - * @see #getDistanceMap(Object,int) - * @see #getDistance(Object,Object) - * @param source the vertex from which distances are measured - * @return a mapping from each vertex in the graph to its distance from {@code source} - */ - public Map getDistanceMap(V source) - { - return getDistanceMap(source, Math.min(g.getVertexCount(), max_targets)); + /** + * Returns the length of a shortest path from the source to the target vertex, or null if the + * target is not reachable from the source. If either vertex is not in the graph for which this + * instance was created, throws IllegalArgumentException. + * + * @param source the vertex from which the distance to {@code target} is to be measured + * @param target the vertex to which the distance from {@code source} is to be measured + * @return the distance between {@code source} and {@code target} + * @see #getDistanceMap(Object) + * @see #getDistanceMap(Object,int) + */ + public Number getDistance(V source, V target) { + if (g.containsVertex(target) == false) + throw new IllegalArgumentException( + "Specified target vertex " + target + " is not part of graph " + g); + if (g.containsVertex(source) == false) + throw new IllegalArgumentException( + "Specified source vertex " + source + " is not part of graph " + g); + + Set targets = new HashSet(); + targets.add(target); + Map distanceMap = getDistanceMap(source, targets); + return distanceMap.get(target); + } + + /** + * Returns a {@code Map} from each element {@code t} of {@code targets} to the shortest-path + * distance from {@code source} to {@code t}. 
+ * + * @param source the vertex from which the distance to each target is to be measured + * @param targets the vertices to which the distance from the source is to be measured + * @return {@code Map} from each element of {@code targets} to its distance from {@code source} + */ + public Map getDistanceMap(V source, Collection targets) { + if (g.containsVertex(source) == false) + throw new IllegalArgumentException( + "Specified source vertex " + source + " is not part of graph " + g); + if (targets.size() > max_targets) + throw new IllegalArgumentException( + "size of target set exceeds maximum " + "number of targets allowed: " + this.max_targets); + + Map distanceMap = + singleSourceShortestPath(source, targets, Math.min(g.getVertexCount(), max_targets)); + if (!cached) reset(source); + + return distanceMap; + } + + /** + * Returns a LinkedHashMap which maps each vertex in the graph (including the + * source vertex) to its distance from the source vertex. The map's iterator + * will return the elements in order of increasing distance from source. + * + *

The size of the map returned will be the number of vertices reachable from source + * . + * + * @see #getDistanceMap(Object,int) + * @see #getDistance(Object,Object) + * @param source the vertex from which distances are measured + * @return a mapping from each vertex in the graph to its distance from {@code source} + */ + public Map getDistanceMap(V source) { + return getDistanceMap(source, Math.min(g.getVertexCount(), max_targets)); + } + + /** + * Returns a LinkedHashMap which maps each of the closest numDist + * vertices to the source vertex in the graph (including the source + * vertex) to its distance from the source vertex. Throws an + * IllegalArgumentException if source is not in this instance's graph, or if + * numDests is either less than 1 or greater than the number of vertices in the + * graph. + * + *

The size of the map returned will be the smaller of numDests and the number of + * vertices reachable from source. + * + * @see #getDistanceMap(Object) + * @see #getDistance(Object,Object) + * @param source the vertex from which distances are measured + * @param numDests the number of vertices for which to measure distances + * @return a mapping from the {@code numDests} vertices in the graph closest to {@code source}, to + * their distance from {@code source} + */ + public LinkedHashMap getDistanceMap(V source, int numDests) { + + if (g.getVertices().contains(source) == false) { + throw new IllegalArgumentException( + "Specified source vertex " + source + " is not part of graph " + g); } - - - - /** - *

Returns a LinkedHashMap which maps each of the closest - * numDist vertices to the source vertex - * in the graph (including the source vertex) - * to its distance from the source vertex. Throws - * an IllegalArgumentException if source - * is not in this instance's graph, or if numDests is - * either less than 1 or greater than the number of vertices in the - * graph. - * - *

The size of the map returned will be the smaller of - * numDests and the number of vertices reachable from - * source. - * - * @see #getDistanceMap(Object) - * @see #getDistance(Object,Object) - * @param source the vertex from which distances are measured - * @param numDests the number of vertices for which to measure distances - * @return a mapping from the {@code numDests} vertices in the graph - * closest to {@code source}, to their distance from {@code source} - * - */ - public LinkedHashMap getDistanceMap(V source, int numDests) - { - - if(g.getVertices().contains(source) == false) { - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - } - if (numDests < 1 || numDests > g.getVertexCount()) - throw new IllegalArgumentException("numDests must be >= 1 " + - "and <= g.numVertices()"); - - if (numDests > max_targets) - throw new IllegalArgumentException("numDests must be <= the maximum " + - "number of targets allowed: " + this.max_targets); - - LinkedHashMap distanceMap = - singleSourceShortestPath(source, null, numDests); - - if (!cached) - reset(source); - - return distanceMap; + if (numDests < 1 || numDests > g.getVertexCount()) + throw new IllegalArgumentException("numDests must be >= 1 " + "and <= g.numVertices()"); + + if (numDests > max_targets) + throw new IllegalArgumentException( + "numDests must be <= the maximum " + "number of targets allowed: " + this.max_targets); + + LinkedHashMap distanceMap = singleSourceShortestPath(source, null, numDests); + + if (!cached) reset(source); + + return distanceMap; + } + + /** + * Allows the user to specify the maximum distance that this instance will calculate. Any vertices + * past this distance will effectively be unreachable from the source, in the sense that the + * algorithm will not calculate the distance to any vertices which are farther away than this + * distance. 
A negative value for max_dist will ensure that no further distances are + * calculated. + * + *

This can be useful for limiting the amount of time and space used by this algorithm if the + * graph is very large. + * + *

Note: if this instance has already calculated distances greater than max_dist, + * and the results are cached, those results will still be valid and available; this limit applies + * only to subsequent distance calculations. + * + * @param max_dist the maximum distance that this instance will calculate + * @see #setMaxTargets(int) + */ + public void setMaxDistance(double max_dist) { + this.max_distance = max_dist; + for (V v : sourceMap.keySet()) { + SourceData sd = sourceMap.get(v); + sd.reached_max = + (this.max_distance <= sd.dist_reached) || (sd.distances.size() >= max_targets); } - - /** - * Allows the user to specify the maximum distance that this instance will calculate. - * Any vertices past this distance will effectively be unreachable from the source, in - * the sense that the algorithm will not calculate the distance to any vertices which - * are farther away than this distance. A negative value for max_dist - * will ensure that no further distances are calculated. - * - *

This can be useful for limiting the amount of time and space used by this algorithm - * if the graph is very large. - * - *

Note: if this instance has already calculated distances greater than max_dist, - * and the results are cached, those results will still be valid and available; this limit - * applies only to subsequent distance calculations. - * - * @param max_dist the maximum distance that this instance will calculate - * - * @see #setMaxTargets(int) - */ - public void setMaxDistance(double max_dist) - { - this.max_distance = max_dist; - for (V v : sourceMap.keySet()) - { - SourceData sd = sourceMap.get(v); - sd.reached_max = (this.max_distance <= sd.dist_reached) || (sd.distances.size() >= max_targets); - } + } + + /** + * Allows the user to specify the maximum number of target vertices per source vertex for which + * this instance will calculate distances. Once this threshold is reached, any further vertices + * will effectively be unreachable from the source, in the sense that the algorithm will not + * calculate the distance to any more vertices. A negative value for max_targets will + * ensure that no further distances are calculated. + * + *

This can be useful for limiting the amount of time and space used by this algorithm if the + * graph is very large. + * + *

Note: if this instance has already calculated distances to a greater number of targets than + * max_targets, and the results are cached, those results will still be valid and + * available; this limit applies only to subsequent distance calculations. + * + * @param max_targets the maximum number of targets for which this instance will calculate + * distances + * @see #setMaxDistance(double) + */ + public void setMaxTargets(int max_targets) { + this.max_targets = max_targets; + for (V v : sourceMap.keySet()) { + SourceData sd = sourceMap.get(v); + sd.reached_max = + (this.max_distance <= sd.dist_reached) || (sd.distances.size() >= max_targets); } - - /** - * Allows the user to specify the maximum number of target vertices per source vertex - * for which this instance will calculate distances. Once this threshold is reached, - * any further vertices will effectively be unreachable from the source, in - * the sense that the algorithm will not calculate the distance to any more vertices. - * A negative value for max_targets will ensure that no further distances are calculated. - * - *

This can be useful for limiting the amount of time and space used by this algorithm - * if the graph is very large. - * - *

Note: if this instance has already calculated distances to a greater number of - * targets than max_targets, and the results are cached, those results - * will still be valid and available; this limit applies only to subsequent distance - * calculations. - * - * @param max_targets the maximum number of targets for which this instance will calculate - * distances - * - * @see #setMaxDistance(double) - */ - public void setMaxTargets(int max_targets) - { - this.max_targets = max_targets; - for (V v : sourceMap.keySet()) - { - SourceData sd = sourceMap.get(v); - sd.reached_max = (this.max_distance <= sd.dist_reached) || (sd.distances.size() >= max_targets); - } + } + + /** + * Clears all stored distances for this instance. Should be called whenever the graph is modified + * (edge weights changed or edges added/removed). If the user knows that some currently calculated + * distances are unaffected by a change, reset(V) may be appropriate instead. + * + * @see #reset(Object) + */ + public void reset() { + sourceMap = new HashMap(); + } + + /** + * Specifies whether or not this instance of DijkstraShortestPath should cache its + * results (final and partial) for future reference. + * + * @param enable true if the results are to be cached, and false + * otherwise + */ + public void enableCaching(boolean enable) { + this.cached = enable; + } + + /** + * Clears all stored distances for the specified source vertex source. Should be + * called whenever the stored distances from this vertex are invalidated by changes to the graph. + * + * @param source the vertex for which stored distances should be cleared + * @see #reset() + */ + public void reset(V source) { + sourceMap.put(source, null); + } + + /** Compares according to distances, so that the BinaryHeap knows how to order the tree. 
*/ + protected static class VertexComparator implements Comparator { + private Map distances; + + protected VertexComparator(Map distances) { + this.distances = distances; } - - /** - * Clears all stored distances for this instance. - * Should be called whenever the graph is modified (edge weights - * changed or edges added/removed). If the user knows that - * some currently calculated distances are unaffected by a - * change, reset(V) may be appropriate instead. - * - * @see #reset(Object) - */ - public void reset() - { - sourceMap = new HashMap(); + + public int compare(V o1, V o2) { + return ((Double) distances.get(o1)).compareTo((Double) distances.get(o2)); } - - /** - * Specifies whether or not this instance of DijkstraShortestPath - * should cache its results (final and partial) for future reference. - * - * @param enable true if the results are to be cached, and - * false otherwise - */ - public void enableCaching(boolean enable) - { - this.cached = enable; + } + + /** + * For a given source vertex, holds the estimated and final distances, tentative and final + * assignments of incoming edges on the shortest path from the source vertex, and a priority queue + * (ordered by estimated distance) of the vertices for which distances are unknown. 
+ * + * @author Joshua O'Madadhain + */ + protected class SourceData { + protected LinkedHashMap distances; + protected Map estimatedDistances; + protected MapBinaryHeap unknownVertices; + protected boolean reached_max = false; + protected double dist_reached = 0; + + protected SourceData(V source) { + distances = new LinkedHashMap(); + estimatedDistances = new HashMap(); + unknownVertices = new MapBinaryHeap(new VertexComparator(estimatedDistances)); + + sourceMap.put(source, this); + + // initialize priority queue + estimatedDistances.put(source, new Double(0)); // distance from source to itself is 0 + unknownVertices.add(source); + reached_max = false; + dist_reached = 0; } - - /** - * Clears all stored distances for the specified source vertex - * source. Should be called whenever the stored distances - * from this vertex are invalidated by changes to the graph. - * - * @param source the vertex for which stored distances should be cleared - * - * @see #reset() - */ - public void reset(V source) - { - sourceMap.put(source, null); + + protected Map.Entry getNextVertex() { + V v = unknownVertices.remove(); + Double dist = (Double) estimatedDistances.remove(v); + distances.put(v, dist); + return new BasicMapEntry(v, dist); } - /** - * Compares according to distances, so that the BinaryHeap knows how to - * order the tree. 
- */ - protected static class VertexComparator implements Comparator - { - private Map distances; - - protected VertexComparator(Map distances) - { - this.distances = distances; - } + protected void update(V dest, E tentative_edge, double new_dist) { + estimatedDistances.put(dest, new_dist); + unknownVertices.update(dest); + } - public int compare(V o1, V o2) - { - return ((Double) distances.get(o1)).compareTo((Double) distances.get(o2)); - } + protected void createRecord(V w, E e, double new_dist) { + estimatedDistances.put(w, new_dist); + unknownVertices.add(w); } - - /** - * For a given source vertex, holds the estimated and final distances, - * tentative and final assignments of incoming edges on the shortest path from - * the source vertex, and a priority queue (ordered by estimated distance) - * of the vertices for which distances are unknown. - * - * @author Joshua O'Madadhain - */ - protected class SourceData - { - protected LinkedHashMap distances; - protected Map estimatedDistances; - protected MapBinaryHeap unknownVertices; - protected boolean reached_max = false; - protected double dist_reached = 0; - - protected SourceData(V source) - { - distances = new LinkedHashMap(); - estimatedDistances = new HashMap(); - unknownVertices = new MapBinaryHeap(new VertexComparator(estimatedDistances)); - - sourceMap.put(source, this); - - // initialize priority queue - estimatedDistances.put(source, new Double(0)); // distance from source to itself is 0 - unknownVertices.add(source); - reached_max = false; - dist_reached = 0; - } - - protected Map.Entry getNextVertex() - { - V v = unknownVertices.remove(); - Double dist = (Double)estimatedDistances.remove(v); - distances.put(v, dist); - return new BasicMapEntry(v, dist); - } - - protected void update(V dest, E tentative_edge, double new_dist) - { - estimatedDistances.put(dest, new_dist); - unknownVertices.update(dest); - } - - protected void createRecord(V w, E e, double new_dist) - { - estimatedDistances.put(w, 
new_dist); - unknownVertices.add(w); - } - - protected void restoreVertex(V v, double dist) - { - estimatedDistances.put(v, dist); - unknownVertices.add(v); - distances.remove(v); - } + + protected void restoreVertex(V v, double dist) { + estimatedDistances.put(v, dist); + unknownVertices.add(v); + distances.remove(v); } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java index a3c61da0..f36b656f 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DijkstraShortestPath.java @@ -1,14 +1,16 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.shortestpath; +import com.google.common.base.Function; +import edu.uci.ics.jung.graph.Graph; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; @@ -17,281 +19,237 @@ import java.util.Map; import java.util.Set; -import com.google.common.base.Function; - -import edu.uci.ics.jung.graph.Graph; - /** - *

Calculates distances and shortest paths using Dijkstra's - * single-source-shortest-path algorithm. This is a lightweight - * extension of DijkstraDistance that also stores - * path information, so that the shortest paths can be reconstructed. - * - *

The elements in the maps returned by - * getIncomingEdgeMap are ordered (that is, returned - * by the iterator) by nondecreasing distance from source. - * + * Calculates distances and shortest paths using Dijkstra's single-source-shortest-path algorithm. + * This is a lightweight extension of DijkstraDistance that also stores path + * information, so that the shortest paths can be reconstructed. + * + *

The elements in the maps returned by getIncomingEdgeMap are ordered (that is, + * returned by the iterator) by nondecreasing distance from source. + * * @author Joshua O'Madadhain * @author Tom Nelson converted to jung2 * @see DijkstraDistance */ -public class DijkstraShortestPath extends DijkstraDistance implements ShortestPath -{ - /** - *

Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally if and only if - * cached is true. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - * @param cached specifies whether the results are to be cached - */ - public DijkstraShortestPath(Graph g, Function nev, boolean cached) - { - super(g, nev, cached); - } - - /** - *

Creates an instance of DijkstraShortestPath for - * the specified graph and the specified method of extracting weights - * from edges, which caches results locally. - * - * @param g the graph on which distances will be calculated - * @param nev the class responsible for returning weights for edges - */ - public DijkstraShortestPath(Graph g, Function nev) - { - super(g, nev); - } - - /** - *

Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - */ - public DijkstraShortestPath(Graph g) - { - super(g); - } +public class DijkstraShortestPath extends DijkstraDistance + implements ShortestPath { + /** + * Creates an instance of DijkstraShortestPath for the specified graph and the + * specified method of extracting weights from edges, which caches results locally if and only if + * cached is true. + * + * @param g the graph on which distances will be calculated + * @param nev the class responsible for returning weights for edges + * @param cached specifies whether the results are to be cached + */ + public DijkstraShortestPath(Graph g, Function nev, boolean cached) { + super(g, nev, cached); + } + + /** + * Creates an instance of DijkstraShortestPath for the specified graph and the + * specified method of extracting weights from edges, which caches results locally. + * + * @param g the graph on which distances will be calculated + * @param nev the class responsible for returning weights for edges + */ + public DijkstraShortestPath(Graph g, Function nev) { + super(g, nev); + } + + /** + * Creates an instance of DijkstraShortestPath for the specified unweighted graph + * (that is, all weights 1) which caches results locally. + * + * @param g the graph on which distances will be calculated + */ + public DijkstraShortestPath(Graph g) { + super(g); + } + + /** + * Creates an instance of DijkstraShortestPath for the specified unweighted graph + * (that is, all weights 1) which caches results locally. 
+ * + * @param g the graph on which distances will be calculated + * @param cached specifies whether the results are to be cached + */ + public DijkstraShortestPath(Graph g, boolean cached) { + super(g, cached); + } + + @Override + protected SourceData getSourceData(V source) { + SourceData sd = sourceMap.get(source); + if (sd == null) sd = new SourcePathData(source); + return sd; + } + + /** + * Returns the last edge on a shortest path from source to target, or + * null if target is not reachable from source. + * + *

If either vertex is not in the graph for which this instance was created, throws + * IllegalArgumentException. + * + * @param source the vertex where the shortest path starts + * @param target the vertex where the shortest path ends + * @return the last edge on a shortest path from {@code source} to {@code target} or null if + * {@code target} is not reachable from {@code source} + */ + public E getIncomingEdge(V source, V target) { + if (!g.containsVertex(source)) + throw new IllegalArgumentException( + "Specified source vertex " + source + " is not part of graph " + g); + + if (!g.containsVertex(target)) + throw new IllegalArgumentException( + "Specified target vertex " + target + " is not part of graph " + g); + + Set targets = new HashSet(); + targets.add(target); + singleSourceShortestPath(source, targets, g.getVertexCount()); + @SuppressWarnings("unchecked") + Map incomingEdgeMap = ((SourcePathData) sourceMap.get(source)).incomingEdges; + E incomingEdge = incomingEdgeMap.get(target); + + if (!cached) reset(source); - /** - *

Creates an instance of DijkstraShortestPath for - * the specified unweighted graph (that is, all weights 1) which - * caches results locally. - * - * @param g the graph on which distances will be calculated - * @param cached specifies whether the results are to be cached - */ - public DijkstraShortestPath(Graph g, boolean cached) - { - super(g, cached); + return incomingEdge; + } + + /** + * Returns a LinkedHashMap which maps each vertex in the graph (including the + * source vertex) to the last edge on the shortest path from the source + * vertex. The map's iterator will return the elements in order of increasing distance from + * source. + * + * @see DijkstraDistance#getDistanceMap(Object,int) + * @see DijkstraDistance#getDistance(Object,Object) + * @param source the vertex from which distances are measured + */ + public Map getIncomingEdgeMap(V source) { + return getIncomingEdgeMap(source, g.getVertexCount()); + } + + /** + * Returns a List of the edges on the shortest path from source to + * target, in order of their occurrence on this path. If either vertex is not in the + * graph for which this instance was created, throws IllegalArgumentException. 
+ * + * @param source the starting vertex for the path to generate + * @param target the ending vertex for the path to generate + * @return the edges on the shortest path from {@code source} to {@code target}, in order of their + * occurrence + */ + public List getPath(V source, V target) { + if (!g.containsVertex(source)) + throw new IllegalArgumentException( + "Specified source vertex " + source + " is not part of graph " + g); + + if (!g.containsVertex(target)) + throw new IllegalArgumentException( + "Specified target vertex " + target + " is not part of graph " + g); + + LinkedList path = new LinkedList(); + + // collect path data; must use internal method rather than + // calling getIncomingEdge() because getIncomingEdge() may + // wipe out results if results are not cached + Set targets = new HashSet(); + targets.add(target); + singleSourceShortestPath(source, targets, g.getVertexCount()); + @SuppressWarnings("unchecked") + Map incomingEdges = ((SourcePathData) sourceMap.get(source)).incomingEdges; + + if (incomingEdges.isEmpty() || incomingEdges.get(target) == null) { + return path; } - - @Override - protected SourceData getSourceData(V source) - { - SourceData sd = sourceMap.get(source); - if (sd == null) - sd = new SourcePathData(source); - return sd; + V current = target; + while (!current.equals(source)) { + E incoming = incomingEdges.get(current); + path.addFirst(incoming); + current = ((Graph) g).getOpposite(current, incoming); } - - /** - *

Returns the last edge on a shortest path from source - * to target, or null if target is not - * reachable from source. - * - *

If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException. - * - * @param source the vertex where the shortest path starts - * @param target the vertex where the shortest path ends - * @return the last edge on a shortest path from {@code source} to {@code target} - * or null if {@code target} is not reachable from {@code source} - */ - public E getIncomingEdge(V source, V target) - { - if (!g.containsVertex(source)) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - if (!g.containsVertex(target)) - throw new IllegalArgumentException("Specified target vertex " + - target + " is not part of graph " + g); + return path; + } + + /** + * Returns a LinkedHashMap which maps each of the closest numDests + * vertices to the source vertex in the graph (including the source + * vertex) to the incoming edge along the path from that vertex. Throws an + * IllegalArgumentException if source is not in this instance's graph, or if + * numDests is either less than 1 or greater than the number of vertices in the + * graph. 
+ * + * @see #getIncomingEdgeMap(Object) + * @see #getPath(Object,Object) + * @param source the vertex from which distances are measured + * @param numDests the number of vertices for which to measure distances + * @return a map from each of the closest {@code numDests} vertices to the last edge on the + * shortest path to that vertex starting from {@code source} + */ + public LinkedHashMap getIncomingEdgeMap(V source, int numDests) { + if (g.getVertices().contains(source) == false) + throw new IllegalArgumentException( + "Specified source vertex " + source + " is not part of graph " + g); + + if (numDests < 1 || numDests > g.getVertexCount()) + throw new IllegalArgumentException("numDests must be >= 1 " + "and <= g.numVertices()"); - Set targets = new HashSet(); - targets.add(target); - singleSourceShortestPath(source, targets, g.getVertexCount()); - @SuppressWarnings("unchecked") - Map incomingEdgeMap = - ((SourcePathData)sourceMap.get(source)).incomingEdges; - E incomingEdge = incomingEdgeMap.get(target); - - if (!cached) - reset(source); - - return incomingEdge; - } + singleSourceShortestPath(source, null, numDests); - /** - *

Returns a LinkedHashMap which maps each vertex - * in the graph (including the source vertex) - * to the last edge on the shortest path from the - * source vertex. - * The map's iterator will return the elements in order of - * increasing distance from source. - * - * @see DijkstraDistance#getDistanceMap(Object,int) - * @see DijkstraDistance#getDistance(Object,Object) - * @param source the vertex from which distances are measured - */ - public Map getIncomingEdgeMap(V source) - { - return getIncomingEdgeMap(source, g.getVertexCount()); - } + @SuppressWarnings("unchecked") + LinkedHashMap incomingEdgeMap = ((SourcePathData) sourceMap.get(source)).incomingEdges; - /** - * Returns a List of the edges on the shortest path from - * source to target, in order of their - * occurrence on this path. - * If either vertex is not in the graph for which this instance - * was created, throws IllegalArgumentException. - * - * @param source the starting vertex for the path to generate - * @param target the ending vertex for the path to generate - * @return the edges on the shortest path from {@code source} to {@code target}, - * in order of their occurrence - */ - public List getPath(V source, V target) - { - if(!g.containsVertex(source)) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); - - if(!g.containsVertex(target)) - throw new IllegalArgumentException("Specified target vertex " + - target + " is not part of graph " + g); - - LinkedList path = new LinkedList(); + if (!cached) reset(source); - // collect path data; must use internal method rather than - // calling getIncomingEdge() because getIncomingEdge() may - // wipe out results if results are not cached - Set targets = new HashSet(); - targets.add(target); - singleSourceShortestPath(source, targets, g.getVertexCount()); - @SuppressWarnings("unchecked") - Map incomingEdges = - ((SourcePathData)sourceMap.get(source)).incomingEdges; - - if (incomingEdges.isEmpty() || 
incomingEdges.get(target) == null) - return path; - V current = target; - while (!current.equals(source)) - { - E incoming = incomingEdges.get(current); - path.addFirst(incoming); - current = ((Graph)g).getOpposite(current, incoming); - } - return path; - } + return incomingEdgeMap; + } - - /** - *

Returns a LinkedHashMap which maps each of the closest - * numDests vertices to the source vertex - * in the graph (including the source vertex) - * to the incoming edge along the path from that vertex. Throws - * an IllegalArgumentException if source - * is not in this instance's graph, or if numDests is - * either less than 1 or greater than the number of vertices in the - * graph. - * - * @see #getIncomingEdgeMap(Object) - * @see #getPath(Object,Object) - * @param source the vertex from which distances are measured - * @param numDests the number of vertices for which to measure distances - * @return a map from each of the closest {@code numDests} vertices - * to the last edge on the shortest path to that vertex starting from {@code source} - */ - public LinkedHashMap getIncomingEdgeMap(V source, int numDests) - { - if (g.getVertices().contains(source) == false) - throw new IllegalArgumentException("Specified source vertex " + - source + " is not part of graph " + g); + /** + * For a given source vertex, holds the estimated and final distances, tentative and final + * assignments of incoming edges on the shortest path from the source vertex, and a priority queue + * (ordered by estimaed distance) of the vertices for which distances are unknown. 
+ * + * @author Joshua O'Madadhain + */ + protected class SourcePathData extends SourceData { + protected Map tentativeIncomingEdges; + protected LinkedHashMap incomingEdges; - if (numDests < 1 || numDests > g.getVertexCount()) - throw new IllegalArgumentException("numDests must be >= 1 " + - "and <= g.numVertices()"); + protected SourcePathData(V source) { + super(source); + incomingEdges = new LinkedHashMap(); + tentativeIncomingEdges = new HashMap(); + } - singleSourceShortestPath(source, null, numDests); - - @SuppressWarnings("unchecked") - LinkedHashMap incomingEdgeMap = - ((SourcePathData)sourceMap.get(source)).incomingEdges; - - if (!cached) - reset(source); - - return incomingEdgeMap; - } - - - /** - * For a given source vertex, holds the estimated and final distances, - * tentative and final assignments of incoming edges on the shortest path from - * the source vertex, and a priority queue (ordered by estimaed distance) - * of the vertices for which distances are unknown. - * - * @author Joshua O'Madadhain - */ - protected class SourcePathData extends SourceData - { - protected Map tentativeIncomingEdges; - protected LinkedHashMap incomingEdges; + @Override + public void update(V dest, E tentative_edge, double new_dist) { + super.update(dest, tentative_edge, new_dist); + tentativeIncomingEdges.put(dest, tentative_edge); + } - protected SourcePathData(V source) - { - super(source); - incomingEdges = new LinkedHashMap(); - tentativeIncomingEdges = new HashMap(); - } - - @Override - public void update(V dest, E tentative_edge, double new_dist) - { - super.update(dest, tentative_edge, new_dist); - tentativeIncomingEdges.put(dest, tentative_edge); - } - - @Override - public Map.Entry getNextVertex() - { - Map.Entry p = super.getNextVertex(); - V v = p.getKey(); - E incoming = tentativeIncomingEdges.remove(v); - incomingEdges.put(v, incoming); - return p; - } - - @Override - public void restoreVertex(V v, double dist) - { - super.restoreVertex(v, dist); - E 
incoming = incomingEdges.get(v); - tentativeIncomingEdges.put(v, incoming); - } - - @Override - public void createRecord(V w, E e, double new_dist) - { - super.createRecord(w, e, new_dist); - tentativeIncomingEdges.put(w, e); - } - + @Override + public Map.Entry getNextVertex() { + Map.Entry p = super.getNextVertex(); + V v = p.getKey(); + E incoming = tentativeIncomingEdges.remove(v); + incomingEdges.put(v, incoming); + return p; } + @Override + public void restoreVertex(V v, double dist) { + super.restoreVertex(v, dist); + E incoming = incomingEdges.get(v); + tentativeIncomingEdges.put(v, incoming); + } + + @Override + public void createRecord(V w, E e, double new_dist) { + super.createRecord(w, e, new_dist); + tentativeIncomingEdges.put(w, e); + } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/Distance.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/Distance.java index 15e8e830..ef25dce6 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/Distance.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/Distance.java @@ -1,7 +1,7 @@ /* * Created on Apr 2, 2004 * - * Copyright (c) 2004, The JUNG Authors + * Copyright (c) 2004, The JUNG Authors * * All rights reserved. * @@ -13,34 +13,29 @@ import java.util.Map; - /** - * An interface for classes which calculate the distance between - * one vertex and another. - * + * An interface for classes which calculate the distance between one vertex and another. + * * @author Joshua O'Madadhain */ -public interface Distance -{ - /** - * Returns the distance from the source vertex to the - * target vertex. If target is not reachable from - * source, returns null. 
- * - * @param source the vertex from which distance is to be measured - * @param target the vertex to which distance is to be measured - * @return the distance from {@code source} to {@code target} - */ - Number getDistance(V source, V target); +public interface Distance { + /** + * Returns the distance from the source vertex to the target vertex. If + * target is not reachable from source, returns null. + * + * @param source the vertex from which distance is to be measured + * @param target the vertex to which distance is to be measured + * @return the distance from {@code source} to {@code target} + */ + Number getDistance(V source, V target); - /** - * Returns a Map which maps each vertex in the graph (including - * the source vertex) to its distance (represented as a Number) - * from source. If any vertex is not reachable from - * source, no distance is stored for that vertex. - * - * @param source the vertex from which distances are to be measured - * @return a {@code Map} of the distances from {@code source} to other vertices in the graph - */ - Map getDistanceMap(V source); + /** + * Returns a Map which maps each vertex in the graph (including the source + * vertex) to its distance (represented as a Number) from source. If any + * vertex is not reachable from source, no distance is stored for that vertex. 
+ * + * @param source the vertex from which distances are to be measured + * @return a {@code Map} of the distances from {@code source} to other vertices in the graph + */ + Map getDistanceMap(V source); } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java index e4e96241..f9e7e6f9 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/DistanceStatistics.java @@ -1,165 +1,151 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.shortestpath; -import java.util.Collection; import com.google.common.base.Function; - import edu.uci.ics.jung.algorithms.scoring.ClosenessCentrality; import edu.uci.ics.jung.algorithms.scoring.util.VertexScoreTransformer; import edu.uci.ics.jung.graph.Hypergraph; +import java.util.Collection; /** * Statistics relating to vertex-vertex distances in a graph. - * + * *

Formerly known as GraphStatistics in JUNG 1.x. - * + * * @author Scott White * @author Joshua O'Madadhain */ -public class DistanceStatistics -{ - /** - * For each vertex v in graph, - * calculates the average shortest path length from v - * to all other vertices in graph using the metric - * specified by d, and returns the results in a - * Map from vertices to Double values. - * If there exists an ordered pair <u,v> - * for which d.getDistance(u,v) returns null, - * then the average distance value for u will be stored - * as Double.POSITIVE_INFINITY). - * - *

Does not include self-distances (path lengths from v - * to v). - * - *

To calculate the average distances, ignoring edge weights if any: - *

-     * Map distances = DistanceStatistics.averageDistances(g, new UnweightedShortestPath(g));
-     * 
- * To calculate the average distances respecting edge weights: - *
-     * DijkstraShortestPath dsp = new DijkstraShortestPath(g, nev);
-     * Map distances = DistanceStatistics.averageDistances(g, dsp);
-     * 
- * where nev is an instance of Transformer that - * is used to fetch the weight for each edge. - * - * @see edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath - * @see edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance - * - * @param graph the graph for which distances are to be calculated - * @param d the distance metric to use for the calculation - * @param the vertex type - * @param the edge type - * @return a map from each vertex to the mean distance to each other (reachable) vertex - */ - public static Function averageDistances(Hypergraph graph, Distance d) - { - final ClosenessCentrality cc = new ClosenessCentrality(graph, d); - return new VertexScoreTransformer(cc); - } - - /** - * For each vertex v in g, - * calculates the average shortest path length from v - * to all other vertices in g, ignoring edge weights. - * @see #diameter(Hypergraph) - * @see edu.uci.ics.jung.algorithms.scoring.ClosenessCentrality - * - * @param g the graph for which distances are to be calculated - * @param the vertex type - * @param the edge type - * @return a map from each vertex to the mean distance to each other (reachable) vertex - */ - public static Function averageDistances(Hypergraph g) - { - final ClosenessCentrality cc = new ClosenessCentrality(g, - new UnweightedShortestPath(g)); - return new VertexScoreTransformer(cc); - } - - /** - * Returns the diameter of g using the metric - * specified by d. The diameter is defined to be - * the maximum, over all pairs of vertices u,v, - * of the length of the shortest path from u to - * v. If the graph is disconnected (that is, not - * all pairs of vertices are reachable from one another), the - * value returned will depend on use_max: - * if use_max == true, the value returned - * will be the the maximum shortest path length over all pairs of connected - * vertices; otherwise it will be Double.POSITIVE_INFINITY. 
- * - * @param g the graph for which distances are to be calculated - * @param d the distance metric to use for the calculation - * @param use_max if {@code true}, return the maximum shortest path length for all graphs; - * otherwise, return {@code Double.POSITIVE_INFINITY} for disconnected graphs - * @param the vertex type - * @param the edge type - * @return the longest distance from any vertex to any other - */ - public static double diameter(Hypergraph g, Distance d, boolean use_max) - { - double diameter = 0; - Collection vertices = g.getVertices(); - for(V v : vertices) { - for(V w : vertices) { +public class DistanceStatistics { + /** + * For each vertex v in graph, calculates the average shortest path + * length from v to all other vertices in graph using the metric + * specified by d, and returns the results in a Map from vertices to + * Double values. If there exists an ordered pair <u,v> for which + * d.getDistance(u,v) returns null, then the average distance value for + * u will be stored as Double.POSITIVE_INFINITY). + * + *

Does not include self-distances (path lengths from v to v). + * + *

To calculate the average distances, ignoring edge weights if any: + * + *

+   * Map distances = DistanceStatistics.averageDistances(g, new UnweightedShortestPath(g));
+   * 
+ * + * To calculate the average distances respecting edge weights: + * + *
+   * DijkstraShortestPath dsp = new DijkstraShortestPath(g, nev);
+   * Map distances = DistanceStatistics.averageDistances(g, dsp);
+   * 
+ * + * where nev is an instance of Transformer that is used to fetch the + * weight for each edge. + * + * @see edu.uci.ics.jung.algorithms.shortestpath.UnweightedShortestPath + * @see edu.uci.ics.jung.algorithms.shortestpath.DijkstraDistance + * @param graph the graph for which distances are to be calculated + * @param d the distance metric to use for the calculation + * @param the vertex type + * @param the edge type + * @return a map from each vertex to the mean distance to each other (reachable) vertex + */ + public static Function averageDistances(Hypergraph graph, Distance d) { + final ClosenessCentrality cc = new ClosenessCentrality(graph, d); + return new VertexScoreTransformer(cc); + } - if (v.equals(w) == false) // don't include self-distances - { - Number dist = d.getDistance(v, w); - if (dist == null) - { - if (!use_max) - return Double.POSITIVE_INFINITY; - } - else - diameter = Math.max(diameter, dist.doubleValue()); - } + /** + * For each vertex v in g, calculates the average shortest path length + * from v to all other vertices in g, ignoring edge weights. + * + * @see #diameter(Hypergraph) + * @see edu.uci.ics.jung.algorithms.scoring.ClosenessCentrality + * @param g the graph for which distances are to be calculated + * @param the vertex type + * @param the edge type + * @return a map from each vertex to the mean distance to each other (reachable) vertex + */ + public static Function averageDistances(Hypergraph g) { + final ClosenessCentrality cc = + new ClosenessCentrality(g, new UnweightedShortestPath(g)); + return new VertexScoreTransformer(cc); + } + + /** + * Returns the diameter of g using the metric specified by d. The + * diameter is defined to be the maximum, over all pairs of vertices u,v, of the + * length of the shortest path from u to v. 
If the graph is disconnected + * (that is, not all pairs of vertices are reachable from one another), the value returned will + * depend on use_max: if use_max == true, the value returned will be the + * the maximum shortest path length over all pairs of connected vertices; otherwise it will + * be Double.POSITIVE_INFINITY. + * + * @param g the graph for which distances are to be calculated + * @param d the distance metric to use for the calculation + * @param use_max if {@code true}, return the maximum shortest path length for all graphs; + * otherwise, return {@code Double.POSITIVE_INFINITY} for disconnected graphs + * @param the vertex type + * @param the edge type + * @return the longest distance from any vertex to any other + */ + public static double diameter(Hypergraph g, Distance d, boolean use_max) { + double diameter = 0; + Collection vertices = g.getVertices(); + for (V v : vertices) { + for (V w : vertices) { + + if (v.equals(w) == false) // don't include self-distances + { + Number dist = d.getDistance(v, w); + if (dist == null) { + if (!use_max) { + return Double.POSITIVE_INFINITY; } + } else { + diameter = Math.max(diameter, dist.doubleValue()); + } } - return diameter; - } - - /** - * Returns the diameter of g using the metric - * specified by d. The diameter is defined to be - * the maximum, over all pairs of vertices u,v, - * of the length of the shortest path from u to - * v, or Double.POSITIVE_INFINITY - * if any of these distances do not exist. - * @see #diameter(Hypergraph, Distance, boolean) - * - * @param g the graph for which distances are to be calculated - * @param d the distance metric to use for the calculation - * @param the vertex type - * @param the edge type - * @return the longest distance from any vertex to any other - */ - public static double diameter(Hypergraph g, Distance d) - { - return diameter(g, d, false); - } - - /** - * Returns the diameter of g, ignoring edge weights. 
- * @see #diameter(Hypergraph, Distance, boolean) - * - * @param g the graph for which distances are to be calculated - * @param the vertex type - * @param the edge type - * @return the longest distance from any vertex to any other - */ - public static double diameter(Hypergraph g) - { - return diameter(g, new UnweightedShortestPath(g)); + } } + return diameter; + } + + /** + * Returns the diameter of g using the metric specified by d. The + * diameter is defined to be the maximum, over all pairs of vertices u,v, of the + * length of the shortest path from u to v, or + * Double.POSITIVE_INFINITY if any of these distances do not exist. + * + * @see #diameter(Hypergraph, Distance, boolean) + * @param g the graph for which distances are to be calculated + * @param d the distance metric to use for the calculation + * @param the vertex type + * @param the edge type + * @return the longest distance from any vertex to any other + */ + public static double diameter(Hypergraph g, Distance d) { + return diameter(g, d, false); + } + + /** + * Returns the diameter of g, ignoring edge weights. 
+ * + * @see #diameter(Hypergraph, Distance, boolean) + * @param g the graph for which distances are to be calculated + * @param the vertex type + * @param the edge type + * @return the longest distance from any vertex to any other + */ + public static double diameter(Hypergraph g) { + return diameter(g, new UnweightedShortestPath(g)); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java index f6e30fbe..16912a74 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest.java @@ -1,163 +1,155 @@ package edu.uci.ics.jung.algorithms.shortestpath; -import java.util.Collection; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Supplier; - import edu.uci.ics.jung.graph.Forest; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.EdgeType; import edu.uci.ics.jung.graph.util.Pair; +import java.util.Collection; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; /** - * For the input Graph, creates a MinimumSpanningTree - * using a variation of Prim's algorithm. - * - * @author Tom Nelson - tomnelson@dev.java.net + * For the input Graph, creates a MinimumSpanningTree using a variation of Prim's algorithm. * + * @author Tom Nelson - tomnelson@dev.java.net * @param the vertex type * @param the edge type */ -public class MinimumSpanningForest { - - protected Graph graph; - protected Forest forest; - protected Function weights; - - /** - * Creates a Forest from the supplied Graph and supplied Supplier, which - * is used to create a new, empty Forest. 
If non-null, the supplied root - * will be used as the root of the tree/forest. If the supplied root is - * null, or not present in the Graph, then an arbitrary Graph vertex - * will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created. - * @param graph the input graph - * @param Supplier the Supplier to use to create the new forest - * @param root the vertex of the graph to be used as the root of the forest - * @param weights edge weights - */ - public MinimumSpanningForest(Graph graph, Supplier> Supplier, - V root, Map weights) { - this(graph, Supplier.get(), root, weights); - } - - /** - * Creates a minimum spanning forest from the supplied graph, populating the - * supplied Forest, which must be empty. - * If the supplied root is null, or not present in the Graph, - * then an arbitrary Graph vertex will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the Graph to find MST in - * @param forest the Forest to populate. Must be empty - * @param root first Tree root, may be null - * @param weights edge weights, may be null - */ - public MinimumSpanningForest(Graph graph, Forest forest, - V root, Map weights) { - - if(forest.getVertexCount() != 0) { - throw new IllegalArgumentException("Supplied Forest must be empty"); - } - this.graph = graph; - this.forest = forest; - if(weights != null) { - this.weights = Functions.forMap(weights); - } - Set unfinishedEdges = new HashSet(graph.getEdges()); - if(graph.getVertices().contains(root)) { - this.forest.addVertex(root); - } - updateForest(forest.getVertices(), unfinishedEdges); - } - - /** - * Creates a minimum spanning forest from the supplied graph, populating the - * supplied Forest, which must be empty. 
- * If the supplied root is null, or not present in the Graph, - * then an arbitrary Graph vertex will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the Graph to find MST in - * @param forest the Forest to populate. Must be empty - * @param root first Tree root, may be null - */ - @SuppressWarnings("unchecked") - public MinimumSpanningForest(Graph graph, Forest forest, - V root) { - - if(forest.getVertexCount() != 0) { - throw new IllegalArgumentException("Supplied Forest must be empty"); +public class MinimumSpanningForest { + + protected Graph graph; + protected Forest forest; + protected Function weights; + + /** + * Creates a Forest from the supplied Graph and supplied Supplier, which is used to create a new, + * empty Forest. If non-null, the supplied root will be used as the root of the tree/forest. If + * the supplied root is null, or not present in the Graph, then an arbitrary Graph vertex will be + * selected as the root. If the Minimum Spanning Tree does not include all vertices of the Graph, + * then a leftover vertex is selected as a root, and another tree is created. + * + * @param graph the input graph + * @param Supplier the Supplier to use to create the new forest + * @param root the vertex of the graph to be used as the root of the forest + * @param weights edge weights + */ + public MinimumSpanningForest( + Graph graph, Supplier> Supplier, V root, Map weights) { + this(graph, Supplier.get(), root, weights); + } + + /** + * Creates a minimum spanning forest from the supplied graph, populating the supplied Forest, + * which must be empty. If the supplied root is null, or not present in the Graph, then an + * arbitrary Graph vertex will be selected as the root. 
If the Minimum Spanning Tree does not + * include all vertices of the Graph, then a leftover vertex is selected as a root, and another + * tree is created + * + * @param graph the Graph to find MST in + * @param forest the Forest to populate. Must be empty + * @param root first Tree root, may be null + * @param weights edge weights, may be null + */ + public MinimumSpanningForest( + Graph graph, Forest forest, V root, Map weights) { + + if (forest.getVertexCount() != 0) { + throw new IllegalArgumentException("Supplied Forest must be empty"); + } + this.graph = graph; + this.forest = forest; + if (weights != null) { + this.weights = Functions.forMap(weights); + } + Set unfinishedEdges = new HashSet(graph.getEdges()); + if (graph.getVertices().contains(root)) { + this.forest.addVertex(root); + } + updateForest(forest.getVertices(), unfinishedEdges); + } + + /** + * Creates a minimum spanning forest from the supplied graph, populating the supplied Forest, + * which must be empty. If the supplied root is null, or not present in the Graph, then an + * arbitrary Graph vertex will be selected as the root. If the Minimum Spanning Tree does not + * include all vertices of the Graph, then a leftover vertex is selected as a root, and another + * tree is created + * + * @param graph the Graph to find MST in + * @param forest the Forest to populate. 
Must be empty + * @param root first Tree root, may be null + */ + @SuppressWarnings("unchecked") + public MinimumSpanningForest(Graph graph, Forest forest, V root) { + + if (forest.getVertexCount() != 0) { + throw new IllegalArgumentException("Supplied Forest must be empty"); + } + this.graph = graph; + this.forest = forest; + this.weights = (Function) Functions.constant(1.0); + Set unfinishedEdges = new HashSet(graph.getEdges()); + if (graph.getVertices().contains(root)) { + this.forest.addVertex(root); + } + updateForest(forest.getVertices(), unfinishedEdges); + } + + /** @return the generated forest */ + public Forest getForest() { + return forest; + } + + protected void updateForest(Collection tv, Collection unfinishedEdges) { + double minCost = Double.MAX_VALUE; + E nextEdge = null; + V nextVertex = null; + V currentVertex = null; + for (E e : unfinishedEdges) { + + if (forest.getEdges().contains(e)) { + continue; + } + // find the lowest cost edge, get its opposite endpoint, + // and then update forest from its Successors + Pair endpoints = graph.getEndpoints(e); + V first = endpoints.getFirst(); + V second = endpoints.getSecond(); + if (tv.contains(first) == true && tv.contains(second) == false) { + if (weights.apply(e) < minCost) { + minCost = weights.apply(e); + nextEdge = e; + currentVertex = first; + nextVertex = second; } - this.graph = graph; - this.forest = forest; - this.weights = (Function) Functions.constant(1.0); - Set unfinishedEdges = new HashSet(graph.getEdges()); - if(graph.getVertices().contains(root)) { - this.forest.addVertex(root); + } + if (graph.getEdgeType(e) == EdgeType.UNDIRECTED + && tv.contains(second) == true + && tv.contains(first) == false) { + if (weights.apply(e) < minCost) { + minCost = weights.apply(e); + nextEdge = e; + currentVertex = second; + nextVertex = first; } - updateForest(forest.getVertices(), unfinishedEdges); + } + } + + if (nextVertex != null && nextEdge != null) { + unfinishedEdges.remove(nextEdge); + 
forest.addEdge(nextEdge, currentVertex, nextVertex); + updateForest(forest.getVertices(), unfinishedEdges); + } + Collection leftovers = new HashSet(graph.getVertices()); + leftovers.removeAll(forest.getVertices()); + if (leftovers.size() > 0) { + V anotherRoot = leftovers.iterator().next(); + forest.addVertex(anotherRoot); + updateForest(forest.getVertices(), unfinishedEdges); } - - /** - * @return the generated forest - */ - public Forest getForest() { - return forest; - } - - protected void updateForest(Collection tv, Collection unfinishedEdges) { - double minCost = Double.MAX_VALUE; - E nextEdge = null; - V nextVertex = null; - V currentVertex = null; - for(E e : unfinishedEdges) { - - if(forest.getEdges().contains(e)) continue; - // find the lowest cost edge, get its opposite endpoint, - // and then update forest from its Successors - Pair endpoints = graph.getEndpoints(e); - V first = endpoints.getFirst(); - V second = endpoints.getSecond(); - if(tv.contains(first) == true && tv.contains(second) == false) { - if(weights.apply(e) < minCost) { - minCost = weights.apply(e); - nextEdge = e; - currentVertex = first; - nextVertex = second; - } - } - if(graph.getEdgeType(e) == EdgeType.UNDIRECTED && - tv.contains(second) == true && tv.contains(first) == false) { - if(weights.apply(e) < minCost) { - minCost = weights.apply(e); - nextEdge = e; - currentVertex = second; - nextVertex = first; - } - } - } - - if(nextVertex != null && nextEdge != null) { - unfinishedEdges.remove(nextEdge); - forest.addEdge(nextEdge, currentVertex, nextVertex); - updateForest(forest.getVertices(), unfinishedEdges); - } - Collection leftovers = new HashSet(graph.getVertices()); - leftovers.removeAll(forest.getVertices()); - if(leftovers.size() > 0) { - V anotherRoot = leftovers.iterator().next(); - forest.addVertex(anotherRoot); - updateForest(forest.getVertices(), unfinishedEdges); - } - } + } } diff --git 
a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java index e054902a..c9515989 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/MinimumSpanningForest2.java @@ -1,106 +1,95 @@ package edu.uci.ics.jung.algorithms.shortestpath; -import java.util.Collection; -import java.util.Set; - import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Supplier; - import edu.uci.ics.jung.algorithms.cluster.WeakComponentClusterer; import edu.uci.ics.jung.algorithms.filters.FilterUtils; import edu.uci.ics.jung.graph.Forest; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.Tree; import edu.uci.ics.jung.graph.util.TreeUtils; +import java.util.Collection; +import java.util.Set; /** - * For the input Graph, creates a MinimumSpanningTree - * using a variation of Prim's algorithm. - * - * @author Tom Nelson - tomnelson@dev.java.net + * For the input Graph, creates a MinimumSpanningTree using a variation of Prim's algorithm. * + * @author Tom Nelson - tomnelson@dev.java.net * @param the vertex type * @param the edge type */ @SuppressWarnings("unchecked") -public class MinimumSpanningForest2 { - - protected Graph graph; - protected Forest forest; - protected Function weights = - (Function)Functions.constant(1.0); - - /** - * Create a Forest from the supplied Graph and supplied Supplier, which - * is used to create a new, empty Forest. If non-null, the supplied root - * will be used as the root of the tree/forest. If the supplied root is - * null, or not present in the Graph, then an arbitary Graph vertex - * will be selected as the root. 
- * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the graph for which the minimum spanning forest will be generated - * @param supplier a factory for the type of forest to build - * @param treeFactory a factory for the type of tree to build - * @param weights edge weights; may be null - */ - public MinimumSpanningForest2(Graph graph, - Supplier> supplier, - Supplier> treeFactory, - Function weights) { - this(graph, supplier.get(), - treeFactory, - weights); - } - - /** - * Create a forest from the supplied graph, populating the - * supplied Forest, which must be empty. - * If the supplied root is null, or not present in the Graph, - * then an arbitary Graph vertex will be selected as the root. - * If the Minimum Spanning Tree does not include all vertices of the - * Graph, then a leftover vertex is selected as a root, and another - * tree is created - * @param graph the Graph to find MST in - * @param forest the Forest to populate. 
Must be empty - * @param treeFactory a factory for the type of tree to build - * @param weights edge weights, may be null - */ - public MinimumSpanningForest2(Graph graph, - Forest forest, - Supplier> treeFactory, - Function weights) { - - if(forest.getVertexCount() != 0) { - throw new IllegalArgumentException("Supplied Forest must be empty"); - } - this.graph = graph; - this.forest = forest; - if(weights != null) { - this.weights = weights; - } - - WeakComponentClusterer wcc = - new WeakComponentClusterer(); - Set> component_vertices = wcc.apply(graph); - Collection> components = - FilterUtils.createAllInducedSubgraphs(component_vertices, graph); - - for(Graph component : components) { - PrimMinimumSpanningTree mst = - new PrimMinimumSpanningTree(treeFactory, this.weights); - Graph subTree = mst.apply(component); - if(subTree instanceof Tree) { - TreeUtils.addSubTree(forest, (Tree)subTree, null, null); - } - } - } - - /** - * @return the generated forest - */ - public Forest getForest() { - return forest; - } +public class MinimumSpanningForest2 { + + protected Graph graph; + protected Forest forest; + protected Function weights = + (Function) Functions.constant(1.0); + + /** + * Create a Forest from the supplied Graph and supplied Supplier, which is used to create a new, + * empty Forest. If non-null, the supplied root will be used as the root of the tree/forest. If + * the supplied root is null, or not present in the Graph, then an arbitary Graph vertex will be + * selected as the root. 
If the Minimum Spanning Tree does not include all vertices of the Graph, + * then a leftover vertex is selected as a root, and another tree is created + * + * @param graph the graph for which the minimum spanning forest will be generated + * @param supplier a factory for the type of forest to build + * @param treeFactory a factory for the type of tree to build + * @param weights edge weights; may be null + */ + public MinimumSpanningForest2( + Graph graph, + Supplier> supplier, + Supplier> treeFactory, + Function weights) { + this(graph, supplier.get(), treeFactory, weights); + } + + /** + * Create a forest from the supplied graph, populating the supplied Forest, which must be empty. + * If the supplied root is null, or not present in the Graph, then an arbitary Graph vertex will + * be selected as the root. If the Minimum Spanning Tree does not include all vertices of the + * Graph, then a leftover vertex is selected as a root, and another tree is created + * + * @param graph the Graph to find MST in + * @param forest the Forest to populate. 
Must be empty + * @param treeFactory a factory for the type of tree to build + * @param weights edge weights, may be null + */ + public MinimumSpanningForest2( + Graph graph, + Forest forest, + Supplier> treeFactory, + Function weights) { + + if (forest.getVertexCount() != 0) { + throw new IllegalArgumentException("Supplied Forest must be empty"); + } + this.graph = graph; + this.forest = forest; + if (weights != null) { + this.weights = weights; + } + + WeakComponentClusterer wcc = new WeakComponentClusterer(); + Set> component_vertices = wcc.apply(graph); + Collection> components = + FilterUtils.createAllInducedSubgraphs(component_vertices, graph); + + for (Graph component : components) { + PrimMinimumSpanningTree mst = + new PrimMinimumSpanningTree(treeFactory, this.weights); + Graph subTree = mst.apply(component); + if (subTree instanceof Tree) { + TreeUtils.addSubTree(forest, (Tree) subTree, null, null); + } + } + } + + /** @return the generated forest */ + public Forest getForest() { + return forest; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java index 9395024f..17ec0de0 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/PrimMinimumSpanningTree.java @@ -1,118 +1,116 @@ package edu.uci.ics.jung.algorithms.shortestpath; -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Supplier; - import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.Pair; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; /** - * For the input Graph, creates a MinimumSpanningTree - * using a 
variation of Prim's algorithm. - * - * @author Tom Nelson - tomnelson@dev.java.net + * For the input Graph, creates a MinimumSpanningTree using a variation of Prim's algorithm. * + * @author Tom Nelson - tomnelson@dev.java.net * @param the vertex type * @param the edge type */ -public class PrimMinimumSpanningTree implements Function,Graph> { - - protected Supplier> treeFactory; - protected Function weights; - - /** - * Creates an instance which generates a minimum spanning tree assuming constant edge weights. - * @param supplier used to create the tree instances - */ - public PrimMinimumSpanningTree(Supplier> supplier) { - this(supplier, Functions.constant(1.0)); - } +public class PrimMinimumSpanningTree implements Function, Graph> { + + protected Supplier> treeFactory; + protected Function weights; + + /** + * Creates an instance which generates a minimum spanning tree assuming constant edge weights. + * + * @param supplier used to create the tree instances + */ + public PrimMinimumSpanningTree(Supplier> supplier) { + this(supplier, Functions.constant(1.0)); + } + + /** + * Creates an instance which generates a minimum spanning tree using the input edge weights. 
+ * + * @param supplier used to create the tree instances + * @param weights the edge weights to use for defining the MST + */ + public PrimMinimumSpanningTree( + Supplier> supplier, Function weights) { + this.treeFactory = supplier; + if (weights != null) { + this.weights = weights; + } + } + + /** @param graph the Graph to find MST in */ + public Graph apply(Graph graph) { + Set unfinishedEdges = new HashSet(graph.getEdges()); + Graph tree = treeFactory.get(); + V root = findRoot(graph); + if (graph.getVertices().contains(root)) { + tree.addVertex(root); + } else if (graph.getVertexCount() > 0) { + // pick an arbitrary vertex to make root + tree.addVertex(graph.getVertices().iterator().next()); + } + updateTree(tree, graph, unfinishedEdges); + + return tree; + } + + protected V findRoot(Graph graph) { + for (V v : graph.getVertices()) { + if (graph.getInEdges(v).size() == 0) { + return v; + } + } + // if there is no obvious root, pick any vertex + if (graph.getVertexCount() > 0) { + return graph.getVertices().iterator().next(); + } + // this graph has no vertices + return null; + } + + protected void updateTree(Graph tree, Graph graph, Collection unfinishedEdges) { + Collection tv = tree.getVertices(); + double minCost = Double.MAX_VALUE; + E nextEdge = null; + V nextVertex = null; + V currentVertex = null; + for (E e : unfinishedEdges) { + + if (tree.getEdges().contains(e)) { + continue; + } + // find the lowest cost edge, get its opposite endpoint, + // and then update forest from its Successors + Pair endpoints = graph.getEndpoints(e); + V first = endpoints.getFirst(); + V second = endpoints.getSecond(); + if ((tv.contains(first) == true && tv.contains(second) == false)) { + if (weights.apply(e) < minCost) { + minCost = weights.apply(e); + nextEdge = e; + currentVertex = first; + nextVertex = second; + } + } else if ((tv.contains(second) == true && tv.contains(first) == false)) { + if (weights.apply(e) < minCost) { + minCost = weights.apply(e); + nextEdge = e; 
+ currentVertex = second; + nextVertex = first; + } + } + } - /** - * Creates an instance which generates a minimum spanning tree using the input edge weights. - * @param supplier used to create the tree instances - * @param weights the edge weights to use for defining the MST - */ - public PrimMinimumSpanningTree(Supplier> supplier, - Function weights) { - this.treeFactory = supplier; - if(weights != null) { - this.weights = weights; - } - } - - /** - * @param graph the Graph to find MST in - */ - public Graph apply(Graph graph) { - Set unfinishedEdges = new HashSet(graph.getEdges()); - Graph tree = treeFactory.get(); - V root = findRoot(graph); - if(graph.getVertices().contains(root)) { - tree.addVertex(root); - } else if(graph.getVertexCount() > 0) { - // pick an arbitrary vertex to make root - tree.addVertex(graph.getVertices().iterator().next()); - } - updateTree(tree, graph, unfinishedEdges); - - return tree; - } - - protected V findRoot(Graph graph) { - for(V v : graph.getVertices()) { - if(graph.getInEdges(v).size() == 0) { - return v; - } - } - // if there is no obvious root, pick any vertex - if(graph.getVertexCount() > 0) { - return graph.getVertices().iterator().next(); - } - // this graph has no vertices - return null; + if (nextVertex != null && nextEdge != null) { + unfinishedEdges.remove(nextEdge); + tree.addEdge(nextEdge, currentVertex, nextVertex); + updateTree(tree, graph, unfinishedEdges); } - - protected void updateTree(Graph tree, Graph graph, Collection unfinishedEdges) { - Collection tv = tree.getVertices(); - double minCost = Double.MAX_VALUE; - E nextEdge = null; - V nextVertex = null; - V currentVertex = null; - for(E e : unfinishedEdges) { - - if(tree.getEdges().contains(e)) continue; - // find the lowest cost edge, get its opposite endpoint, - // and then update forest from its Successors - Pair endpoints = graph.getEndpoints(e); - V first = endpoints.getFirst(); - V second = endpoints.getSecond(); - if((tv.contains(first) == true && 
tv.contains(second) == false)) { - if(weights.apply(e) < minCost) { - minCost = weights.apply(e); - nextEdge = e; - currentVertex = first; - nextVertex = second; - } - } else if((tv.contains(second) == true && tv.contains(first) == false)) { - if(weights.apply(e) < minCost) { - minCost = weights.apply(e); - nextEdge = e; - currentVertex = second; - nextVertex = first; - } - } - } - - if(nextVertex != null && nextEdge != null) { - unfinishedEdges.remove(nextEdge); - tree.addEdge(nextEdge, currentVertex, nextVertex); - updateTree(tree, graph, unfinishedEdges); - } - } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java index 104ace5f..fcc88a64 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPath.java @@ -1,31 +1,27 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -* -* Created on Feb 12, 2004 -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + * + * Created on Feb 12, 2004 + */ package edu.uci.ics.jung.algorithms.shortestpath; import java.util.Map; - -/** - * An interface for algorithms that calculate shortest paths. - */ -public interface ShortestPath -{ - /** - * Returns a map from vertices to the last edge on the shortest path to that vertex - * starting from {@code source}. 
- * - * @param source the starting point for the shortest paths - * @return a map from vertices to the last edge on the shortest path to that vertex - * starting from {@code source} - */ - Map getIncomingEdgeMap(V source); +/** An interface for algorithms that calculate shortest paths. */ +public interface ShortestPath { + /** + * Returns a map from vertices to the last edge on the shortest path to that vertex starting from + * {@code source}. + * + * @param source the starting point for the shortest paths + * @return a map from vertices to the last edge on the shortest path to that vertex starting from + * {@code source} + */ + Map getIncomingEdgeMap(V source); } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java index 8764543a..2eedffcd 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/ShortestPathUtils.java @@ -1,7 +1,7 @@ /* * Created on Jul 10, 2005 * - * Copyright (c) 2005, The JUNG Authors + * Copyright (c) 2005, The JUNG Authors * * All rights reserved. * @@ -11,52 +11,47 @@ */ package edu.uci.ics.jung.algorithms.shortestpath; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.util.Pair; import java.util.LinkedList; import java.util.List; import java.util.Map; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.util.Pair; +/** Utilities relating to the shortest paths in a graph. */ +public class ShortestPathUtils { + /** + * Returns a List of the edges on the shortest path from source to + * target, in order of their occurrence on this path. 
+ * + * @param graph the graph for which the shortest path is defined + * @param sp holder of the shortest path information + * @param source the vertex from which the shortest path is measured + * @param target the vertex to which the shortest path is measured + * @param the vertex type + * @param the edge type + * @return the edges on the shortest path from {@code source} to {@code target}, in the order + * traversed + */ + public static List getPath( + Graph graph, ShortestPath sp, V source, V target) { + LinkedList path = new LinkedList(); -/** - * Utilities relating to the shortest paths in a graph. - */ -public class ShortestPathUtils -{ - /** - * Returns a List of the edges on the shortest path from - * source to target, in order of their - * occurrence on this path. - * - * @param graph the graph for which the shortest path is defined - * @param sp holder of the shortest path information - * @param source the vertex from which the shortest path is measured - * @param target the vertex to which the shortest path is measured - * @param the vertex type - * @param the edge type - * @return the edges on the shortest path from {@code source} to {@code target}, - * in the order traversed - */ - public static List getPath(Graph graph, ShortestPath sp, V source, V target) - { - LinkedList path = new LinkedList(); - - Map incomingEdges = sp.getIncomingEdgeMap(source); - - if (incomingEdges.isEmpty() || incomingEdges.get(target) == null) - return path; - V current = target; - while (!current.equals(source)) - { - E incoming = incomingEdges.get(current); - path.addFirst(incoming); - Pair endpoints = graph.getEndpoints(incoming); - if(endpoints.getFirst().equals(current)) { - current = endpoints.getSecond(); - } else { - current = endpoints.getFirst(); - } - } - return path; + Map incomingEdges = sp.getIncomingEdgeMap(source); + + if (incomingEdges.isEmpty() || incomingEdges.get(target) == null) { + return path; + } + V current = target; + while 
(!current.equals(source)) { + E incoming = incomingEdges.get(current); + path.addFirst(incoming); + Pair endpoints = graph.getEndpoints(incoming); + if (endpoints.getFirst().equals(current)) { + current = endpoints.getSecond(); + } else { + current = endpoints.getFirst(); + } } + return path; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java index af1cd8d7..fddb545c 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/shortestpath/UnweightedShortestPath.java @@ -1,152 +1,128 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.shortestpath; +import edu.uci.ics.jung.graph.Hypergraph; import java.util.HashMap; import java.util.Map; -import edu.uci.ics.jung.graph.Hypergraph; - /** * Computes the shortest path distances for graphs whose edges are not weighted (using BFS). 
- * + * * @author Scott White */ -public class UnweightedShortestPath - implements ShortestPath, Distance -{ - private Map> mDistanceMap; - private Map> mIncomingEdgeMap; - private Hypergraph mGraph; - private Map distances = new HashMap(); +public class UnweightedShortestPath implements ShortestPath, Distance { + private Map> mDistanceMap; + private Map> mIncomingEdgeMap; + private Hypergraph mGraph; + private Map distances = new HashMap(); + + /** + * Constructs and initializes algorithm + * + * @param g the graph + */ + public UnweightedShortestPath(Hypergraph g) { + mDistanceMap = new HashMap>(); + mIncomingEdgeMap = new HashMap>(); + mGraph = g; + } + + /** @see edu.uci.ics.jung.algorithms.shortestpath.Distance#getDistance(Object, Object) */ + public Number getDistance(V source, V target) { + Map sourceSPMap = getDistanceMap(source); + return sourceSPMap.get(target); + } - /** - * Constructs and initializes algorithm - * @param g the graph - */ - public UnweightedShortestPath(Hypergraph g) - { - mDistanceMap = new HashMap>(); - mIncomingEdgeMap = new HashMap>(); - mGraph = g; - } + /** @see edu.uci.ics.jung.algorithms.shortestpath.Distance#getDistanceMap(Object) */ + public Map getDistanceMap(V source) { + Map sourceSPMap = mDistanceMap.get(source); + if (sourceSPMap == null) { + computeShortestPathsFromSource(source); + sourceSPMap = mDistanceMap.get(source); + } + return sourceSPMap; + } - /** - * @see edu.uci.ics.jung.algorithms.shortestpath.Distance#getDistance(Object, Object) - */ - public Number getDistance(V source, V target) - { - Map sourceSPMap = getDistanceMap(source); - return sourceSPMap.get(target); - } + /** @see edu.uci.ics.jung.algorithms.shortestpath.ShortestPath#getIncomingEdgeMap(Object) */ + public Map getIncomingEdgeMap(V source) { + Map sourceIEMap = mIncomingEdgeMap.get(source); + if (sourceIEMap == null) { + computeShortestPathsFromSource(source); + sourceIEMap = mIncomingEdgeMap.get(source); + } + return sourceIEMap; + } - /** - * 
@see edu.uci.ics.jung.algorithms.shortestpath.Distance#getDistanceMap(Object) - */ - public Map getDistanceMap(V source) - { - Map sourceSPMap = mDistanceMap.get(source); - if (sourceSPMap == null) - { - computeShortestPathsFromSource(source); - sourceSPMap = mDistanceMap.get(source); - } - return sourceSPMap; - } + /** + * Computes the shortest path distances from a given node to all other nodes. + * + * @param source the source node + */ + private void computeShortestPathsFromSource(V source) { + BFSDistanceLabeler labeler = new BFSDistanceLabeler(); + labeler.labelDistances(mGraph, source); + distances = labeler.getDistanceDecorator(); + Map currentSourceSPMap = new HashMap(); + Map currentSourceEdgeMap = new HashMap(); - /** - * @see edu.uci.ics.jung.algorithms.shortestpath.ShortestPath#getIncomingEdgeMap(Object) - */ - public Map getIncomingEdgeMap(V source) - { - Map sourceIEMap = mIncomingEdgeMap.get(source); - if (sourceIEMap == null) - { - computeShortestPathsFromSource(source); - sourceIEMap = mIncomingEdgeMap.get(source); - } - return sourceIEMap; - } + for (V vertex : mGraph.getVertices()) { + Number distanceVal = distances.get(vertex); + // BFSDistanceLabeler uses -1 to indicate unreachable vertices; + // don't bother to store unreachable vertices + if (distanceVal != null && distanceVal.intValue() >= 0) { + currentSourceSPMap.put(vertex, distanceVal); + int minDistance = distanceVal.intValue(); + for (E incomingEdge : mGraph.getInEdges(vertex)) { + for (V neighbor : mGraph.getIncidentVertices(incomingEdge)) { + if (neighbor.equals(vertex)) { + continue; + } - /** - * Computes the shortest path distances from a given node to all other nodes. 
- * @param source the source node - */ - private void computeShortestPathsFromSource(V source) - { - BFSDistanceLabeler labeler = new BFSDistanceLabeler(); - labeler.labelDistances(mGraph, source); - distances = labeler.getDistanceDecorator(); - Map currentSourceSPMap = new HashMap(); - Map currentSourceEdgeMap = new HashMap(); + Number predDistanceVal = distances.get(neighbor); - for(V vertex : mGraph.getVertices()) { - - Number distanceVal = distances.get(vertex); - // BFSDistanceLabeler uses -1 to indicate unreachable vertices; - // don't bother to store unreachable vertices - if (distanceVal != null && distanceVal.intValue() >= 0) - { - currentSourceSPMap.put(vertex, distanceVal); - int minDistance = distanceVal.intValue(); - for(E incomingEdge : mGraph.getInEdges(vertex)) - { - for (V neighbor : mGraph.getIncidentVertices(incomingEdge)) - { - if (neighbor.equals(vertex)) - continue; - - Number predDistanceVal = distances.get(neighbor); - - int pred_distance = predDistanceVal.intValue(); - if (pred_distance < minDistance && pred_distance >= 0) - { - minDistance = predDistanceVal.intValue(); - currentSourceEdgeMap.put(vertex, incomingEdge); - } - } - } + int pred_distance = predDistanceVal.intValue(); + if (pred_distance < minDistance && pred_distance >= 0) { + minDistance = predDistanceVal.intValue(); + currentSourceEdgeMap.put(vertex, incomingEdge); } - } - mDistanceMap.put(source, currentSourceSPMap); - mIncomingEdgeMap.put(source, currentSourceEdgeMap); - } - - /** - * Clears all stored distances for this instance. - * Should be called whenever the graph is modified (edge weights - * changed or edges added/removed). If the user knows that - * some currently calculated distances are unaffected by a - * change, reset(V) may be appropriate instead. - * - * @see #reset(Object) - */ - public void reset() - { - mDistanceMap.clear(); - mIncomingEdgeMap.clear(); - } - - /** - * Clears all stored distances for the specified source vertex - * source. 
Should be called whenever the stored distances - * from this vertex are invalidated by changes to the graph. - * - * @see #reset() - * - * @param v the vertex for which distances should be cleared - */ - public void reset(V v) - { - mDistanceMap.remove(v); - mIncomingEdgeMap.remove(v); + } + } + } } + mDistanceMap.put(source, currentSourceSPMap); + mIncomingEdgeMap.put(source, currentSourceEdgeMap); + } + + /** + * Clears all stored distances for this instance. Should be called whenever the graph is modified + * (edge weights changed or edges added/removed). If the user knows that some currently calculated + * distances are unaffected by a change, reset(V) may be appropriate instead. + * + * @see #reset(Object) + */ + public void reset() { + mDistanceMap.clear(); + mIncomingEdgeMap.clear(); + } + + /** + * Clears all stored distances for the specified source vertex source. Should be + * called whenever the stored distances from this vertex are invalidated by changes to the graph. + * + * @see #reset() + * @param v the vertex for which distances should be cleared + */ + public void reset(V v) { + mDistanceMap.remove(v); + mIncomingEdgeMap.remove(v); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java index 490558fe..9825d551 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/DirectionTransformer.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. 
* @@ -13,7 +13,6 @@ package edu.uci.ics.jung.algorithms.transformation; import com.google.common.base.Supplier; - import edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.UndirectedGraph; @@ -21,105 +20,99 @@ import edu.uci.ics.jung.graph.util.Pair; /** - *

Functions for transforming graphs into directed or undirected graphs. - * - * + * Functions for transforming graphs into directed or undirected graphs. + * * @author Danyel Fisher * @author Joshua O'Madadhain */ -public class DirectionTransformer -{ +public class DirectionTransformer { + + /** + * Transforms graph (which may be of any directionality) into an undirected graph. + * (This may be useful for visualization tasks). Specifically: + * + *
+   *
+   * <ul>
+   *   <li>Vertices are copied from <code>graph</code>.
+   *   <li>Directed edges are 'converted' into a single new undirected edge in the new graph.
+   *   <li>Each undirected edge (if any) in <code>graph</code> is 'recreated' with a new undirected
+   *       edge in the new graph if <code>create_new</code> is true, or copied from
+   *       <code>graph</code> otherwise.
+   * </ul>
+ * + * @param graph the graph to be transformed + * @param create_new specifies whether existing undirected edges are to be copied or recreated + * @param graph_factory used to create the new graph object + * @param edge_factory used to create new edges + * @param the vertex type + * @param the edge type + * @return the transformed Graph + */ + public static UndirectedGraph toUndirected( + Graph graph, + Supplier> graph_factory, + Supplier edge_factory, + boolean create_new) { + UndirectedGraph out = graph_factory.get(); + + for (V v : graph.getVertices()) out.addVertex(v); - /** - * Transforms graph (which may be of any directionality) - * into an undirected graph. (This may be useful for - * visualization tasks). - * Specifically: - *
    - *
  • Vertices are copied from graph. - *
  • Directed edges are 'converted' into a single new undirected edge in the new graph. - *
  • Each undirected edge (if any) in graph is 'recreated' with a new undirected edge in the new - * graph if create_new is true, or copied from graph otherwise. - *
- * - * @param graph the graph to be transformed - * @param create_new specifies whether existing undirected edges are to be copied or recreated - * @param graph_factory used to create the new graph object - * @param edge_factory used to create new edges - * @param the vertex type - * @param the edge type - * @return the transformed Graph - */ - public static UndirectedGraph toUndirected(Graph graph, - Supplier> graph_factory, - Supplier edge_factory, boolean create_new) - { - UndirectedGraph out = graph_factory.get(); - - for (V v : graph.getVertices()) - out.addVertex(v); - - for (E e : graph.getEdges()) - { - Pair endpoints = graph.getEndpoints(e); - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - E to_add; - if (graph.getEdgeType(e) == EdgeType.DIRECTED || create_new) - to_add = edge_factory.get(); - else - to_add = e; - out.addEdge(to_add, v1, v2, EdgeType.UNDIRECTED); - } - return out; + for (E e : graph.getEdges()) { + Pair endpoints = graph.getEndpoints(e); + V v1 = endpoints.getFirst(); + V v2 = endpoints.getSecond(); + E to_add; + if (graph.getEdgeType(e) == EdgeType.DIRECTED || create_new) to_add = edge_factory.get(); + else to_add = e; + out.addEdge(to_add, v1, v2, EdgeType.UNDIRECTED); } - - /** - * Transforms graph (which may be of any directionality) - * into a directed graph. - * Specifically: - *
    - *
  • Vertices are copied from graph. - *
  • Undirected edges are 'converted' into two new antiparallel directed edges in the new graph. - *
  • Each directed edge (if any) in graph is 'recreated' with a new edge in the new - * graph if create_new is true, or copied from graph otherwise. - *
- * - * @param graph the graph to be transformed - * @param create_new specifies whether existing directed edges are to be copied or recreated - * @param graph_factory used to create the new graph object - * @param edge_factory used to create new edges - * @param the vertex type - * @param the edge type - * @return the transformed Graph - */ - public static Graph toDirected(Graph graph, Supplier> graph_factory, - Supplier edge_factory, boolean create_new) - { - DirectedGraph out = graph_factory.get(); - - for (V v : graph.getVertices()) - out.addVertex(v); - - for (E e : graph.getEdges()) - { - Pair endpoints = graph.getEndpoints(e); - if (graph.getEdgeType(e) == EdgeType.UNDIRECTED) - { - V v1 = endpoints.getFirst(); - V v2 = endpoints.getSecond(); - out.addEdge(edge_factory.get(), v1, v2, EdgeType.DIRECTED); - out.addEdge(edge_factory.get(), v2, v1, EdgeType.DIRECTED); - } - else // if the edge is directed, just add it - { - V source = graph.getSource(e); - V dest = graph.getDest(e); - E to_add = create_new ? edge_factory.get() : e; - out.addEdge(to_add, source, dest, EdgeType.DIRECTED); - } - - } - return out; + return out; + } + + /** + * Transforms graph (which may be of any directionality) into a directed graph. + * Specifically: + * + *
+   *
+   * <ul>
+   *   <li>Vertices are copied from <code>graph</code>.
+   *   <li>Undirected edges are 'converted' into two new antiparallel directed edges in the new
+   *       graph.
+   *   <li>Each directed edge (if any) in <code>graph</code> is 'recreated' with a new edge in the
+   *       new graph if <code>create_new</code> is true, or copied from <code>graph</code>
+   *       otherwise.
+   * </ul>
+ * + * @param graph the graph to be transformed + * @param create_new specifies whether existing directed edges are to be copied or recreated + * @param graph_factory used to create the new graph object + * @param edge_factory used to create new edges + * @param the vertex type + * @param the edge type + * @return the transformed Graph + */ + public static Graph toDirected( + Graph graph, + Supplier> graph_factory, + Supplier edge_factory, + boolean create_new) { + DirectedGraph out = graph_factory.get(); + + for (V v : graph.getVertices()) out.addVertex(v); + + for (E e : graph.getEdges()) { + Pair endpoints = graph.getEndpoints(e); + if (graph.getEdgeType(e) == EdgeType.UNDIRECTED) { + V v1 = endpoints.getFirst(); + V v2 = endpoints.getSecond(); + out.addEdge(edge_factory.get(), v1, v2, EdgeType.DIRECTED); + out.addEdge(edge_factory.get(), v2, v1, EdgeType.DIRECTED); + } else // if the edge is directed, just add it + { + V source = graph.getSource(e); + V dest = graph.getDest(e); + E to_add = create_new ? edge_factory.get() : e; + out.addEdge(to_add, source, dest, EdgeType.DIRECTED); + } } -} \ No newline at end of file + return out; + } +} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java index e0909afb..95b78775 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/FoldingTransformer.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. 
* @@ -12,314 +12,275 @@ */ package edu.uci.ics.jung.algorithms.transformation; -import java.util.ArrayList; -import java.util.Collection; - import com.google.common.base.Predicate; import com.google.common.base.Supplier; - import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.Hypergraph; import edu.uci.ics.jung.graph.KPartiteGraph; +import java.util.ArrayList; +import java.util.Collection; /** - * Methods for creating a "folded" graph based on a k-partite graph or a - * hypergraph. - * - *

A "folded" graph is derived from a k-partite graph by identifying - * a partition of vertices which will become the vertices of the new graph, copying - * these vertices into the new graph, and then connecting those vertices whose - * original analogues were connected indirectly through elements - * of other partitions. - * - *

A "folded" graph is derived from a hypergraph by creating vertices based on - * either the vertices or the hyperedges of the original graph, and connecting - * vertices in the new graph if their corresponding vertices/hyperedges share a - * connection with a common hyperedge/vertex. - * + * Methods for creating a "folded" graph based on a k-partite graph or a hypergraph. + * + *

A "folded" graph is derived from a k-partite graph by identifying a partition of vertices + * which will become the vertices of the new graph, copying these vertices into the new graph, and + * then connecting those vertices whose original analogues were connected indirectly through + * elements of other partitions. + * + *

A "folded" graph is derived from a hypergraph by creating vertices based on either the + * vertices or the hyperedges of the original graph, and connecting vertices in the new graph if + * their corresponding vertices/hyperedges share a connection with a common hyperedge/vertex. + * * @author Danyel Fisher * @author Joshua O'Madadhain */ -public class FoldingTransformer -{ - - /** - * Converts g into a unipartite graph whose vertex set is the - * vertices of g's partition p. For vertices - * a and b in this partition, the resultant - * graph will include the edge (a,b) if the original graph - * contains edges (a,c) and (c,b) for at least - * one vertex c. - * - *

The vertices of the new graph are the same as the vertices of the - * appropriate partition in the old graph; the edges in the new graph are - * created by the input edge Factory. - * - *

If there is more than 1 such vertex c for a given pair - * (a,b), the type of the output graph will determine whether - * it will contain parallel edges or not. - * - *

This function will not create self-loops. - * - * @param vertex type - * @param input edge type - * @param g input k-partite graph - * @param p predicate specifying vertex partition - * @param graph_factory Supplier used to create the output graph - * @param edge_factory Supplier used to create the edges in the new graph - * @return a copy of the input graph folded with respect to the input partition - */ - public static Graph foldKPartiteGraph(KPartiteGraph g, Predicate p, - Supplier> graph_factory, Supplier edge_factory) - { - Graph newGraph = graph_factory.get(); +public class FoldingTransformer { - // get vertices for the specified partition - Collection vertices = g.getVertices(p); - for (V v : vertices) - { - newGraph.addVertex(v); - for (V s : g.getSuccessors(v)) - { - for (V t : g.getSuccessors(s)) - { - if (!vertices.contains(t) || t.equals(v)) - continue; - newGraph.addVertex(t); - newGraph.addEdge(edge_factory.get(), v, t); - } - } + /** + * Converts g into a unipartite graph whose vertex set is the vertices of g + * 's partition p. For vertices a and b in this + * partition, the resultant graph will include the edge (a,b) if the original graph + * contains edges (a,c) and (c,b) for at least one vertex c + * . + * + *

The vertices of the new graph are the same as the vertices of the appropriate partition in + * the old graph; the edges in the new graph are created by the input edge Factory. + * + *

If there is more than 1 such vertex c for a given pair (a,b), the + * type of the output graph will determine whether it will contain parallel edges or not. + * + *

This function will not create self-loops. + * + * @param vertex type + * @param input edge type + * @param g input k-partite graph + * @param p predicate specifying vertex partition + * @param graph_factory Supplier used to create the output graph + * @param edge_factory Supplier used to create the edges in the new graph + * @return a copy of the input graph folded with respect to the input partition + */ + public static Graph foldKPartiteGraph( + KPartiteGraph g, + Predicate p, + Supplier> graph_factory, + Supplier edge_factory) { + Graph newGraph = graph_factory.get(); + + // get vertices for the specified partition + Collection vertices = g.getVertices(p); + for (V v : vertices) { + newGraph.addVertex(v); + for (V s : g.getSuccessors(v)) { + for (V t : g.getSuccessors(s)) { + if (!vertices.contains(t) || t.equals(v)) { + continue; + } + newGraph.addVertex(t); + newGraph.addEdge(edge_factory.get(), v, t); } - return newGraph; + } } + return newGraph; + } - /** - * Converts g into a unipartite graph whose vertices are the - * vertices of g's partition p, and whose edges - * consist of collections of the intermediate vertices from other partitions. - * For vertices - * a and b in this partition, the resultant - * graph will include the edge (a,b) if the original graph - * contains edges (a,c) and (c,b) for at least - * one vertex c. - * - *

The vertices of the new graph are the same as the vertices of the - * appropriate partition in the old graph; the edges in the new graph are - * collections of the intermediate vertices c. - * - *

This function will not create self-loops. - * - * @param vertex type - * @param input edge type - * @param g input k-partite graph - * @param p predicate specifying vertex partition - * @param graph_factory Supplier used to create the output graph - * @return the result of folding g into unipartite graph whose vertices - * are those of the p partition of g - */ - public static Graph> foldKPartiteGraph(KPartiteGraph g, Predicate p, - Supplier>> graph_factory) - { - Graph> newGraph = graph_factory.get(); + /** + * Converts g into a unipartite graph whose vertices are the vertices of g + * 's partition p, and whose edges consist of collections of the intermediate + * vertices from other partitions. For vertices a and b in this + * partition, the resultant graph will include the edge (a,b) if the original graph + * contains edges (a,c) and (c,b) for at least one vertex c + * . + * + *

The vertices of the new graph are the same as the vertices of the appropriate partition in + * the old graph; the edges in the new graph are collections of the intermediate vertices c + * . + * + *

This function will not create self-loops. + * + * @param vertex type + * @param input edge type + * @param g input k-partite graph + * @param p predicate specifying vertex partition + * @param graph_factory Supplier used to create the output graph + * @return the result of folding g into unipartite graph whose vertices are those of the p + * partition of g + */ + public static Graph> foldKPartiteGraph( + KPartiteGraph g, Predicate p, Supplier>> graph_factory) { + Graph> newGraph = graph_factory.get(); - // get vertices for the specified partition, copy into new graph - Collection vertices = g.getVertices(p); + // get vertices for the specified partition, copy into new graph + Collection vertices = g.getVertices(p); - for (V v : vertices) - { - newGraph.addVertex(v); - for (V s : g.getSuccessors(v)) - { - for (V t : g.getSuccessors(s)) - { - if (!vertices.contains(t) || t.equals(v)) - continue; - newGraph.addVertex(t); - Collection v_coll = newGraph.findEdge(v, t); - if (v_coll == null) - { - v_coll = new ArrayList(); - newGraph.addEdge(v_coll, v, t); - } - v_coll.add(s); - } - } + for (V v : vertices) { + newGraph.addVertex(v); + for (V s : g.getSuccessors(v)) { + for (V t : g.getSuccessors(s)) { + if (!vertices.contains(t) || t.equals(v)) { + continue; + } + newGraph.addVertex(t); + Collection v_coll = newGraph.findEdge(v, t); + if (v_coll == null) { + v_coll = new ArrayList(); + newGraph.addEdge(v_coll, v, t); + } + v_coll.add(s); } - return newGraph; + } } - - /** - * Creates a Graph which is an edge-folded version of h, where - * hyperedges are replaced by k-cliques in the output graph. - * - *

The vertices of the new graph are the same objects as the vertices of - * h, and a - * is connected to b in the new graph if the corresponding vertices - * in h are connected by a hyperedge. Thus, each hyperedge with - * k vertices in h induces a k-clique in the new graph. - * - *

The edges of the new graph consist of collections of each hyperedge that connected - * the corresponding vertex pair in the original graph. - * - * @param vertex type - * @param input edge type - * @param h hypergraph to be folded - * @param graph_factory Supplier used to generate the output graph - * @return a copy of the input graph where hyperedges are replaced by cliques - */ - public static Graph> foldHypergraphEdges(Hypergraph h, - Supplier>> graph_factory) - { - Graph> target = graph_factory.get(); + return newGraph; + } - for (V v : h.getVertices()) - target.addVertex(v); - - for (E e : h.getEdges()) - { - ArrayList incident = new ArrayList(h.getIncidentVertices(e)); - populateTarget(target, e, incident); - } - return target; + /** + * Creates a Graph which is an edge-folded version of h, where + * hyperedges are replaced by k-cliques in the output graph. + * + *

The vertices of the new graph are the same objects as the vertices of h, and + * a is connected to b in the new graph if the corresponding vertices in + * h are connected by a hyperedge. Thus, each hyperedge with k vertices in + * h induces a k-clique in the new graph. + * + *

The edges of the new graph consist of collections of each hyperedge that connected the + * corresponding vertex pair in the original graph. + * + * @param vertex type + * @param input edge type + * @param h hypergraph to be folded + * @param graph_factory Supplier used to generate the output graph + * @return a copy of the input graph where hyperedges are replaced by cliques + */ + public static Graph> foldHypergraphEdges( + Hypergraph h, Supplier>> graph_factory) { + Graph> target = graph_factory.get(); + + for (V v : h.getVertices()) target.addVertex(v); + + for (E e : h.getEdges()) { + ArrayList incident = new ArrayList(h.getIncidentVertices(e)); + populateTarget(target, e, incident); } + return target; + } + /** + * Creates a Graph which is an edge-folded version of h, where + * hyperedges are replaced by k-cliques in the output graph. + * + *

The vertices of the new graph are the same objects as the vertices of h, and + * a is connected to b in the new graph if the corresponding vertices in + * h are connected by a hyperedge. Thus, each hyperedge with k vertices in + * h induces a k-clique in the new graph. + * + *

The edges of the new graph are generated by the specified edge Supplier. + * + * @param vertex type + * @param input edge type + * @param h hypergraph to be folded + * @param graph_factory Supplier used to generate the output graph + * @param edge_factory Supplier used to create the new edges + * @return a copy of the input graph where hyperedges are replaced by cliques + */ + public static Graph foldHypergraphEdges( + Hypergraph h, Supplier> graph_factory, Supplier edge_factory) { + Graph target = graph_factory.get(); - /** - * Creates a Graph which is an edge-folded version of h, where - * hyperedges are replaced by k-cliques in the output graph. - * - *

The vertices of the new graph are the same objects as the vertices of - * h, and a - * is connected to b in the new graph if the corresponding vertices - * in h are connected by a hyperedge. Thus, each hyperedge with - * k vertices in h induces a k-clique in the new graph. - * - *

The edges of the new graph are generated by the specified edge Supplier. - * - * @param vertex type - * @param input edge type - * @param h hypergraph to be folded - * @param graph_factory Supplier used to generate the output graph - * @param edge_factory Supplier used to create the new edges - * @return a copy of the input graph where hyperedges are replaced by cliques - */ - public static Graph foldHypergraphEdges(Hypergraph h, - Supplier> graph_factory, Supplier edge_factory) - { - Graph target = graph_factory.get(); + for (V v : h.getVertices()) target.addVertex(v); - for (V v : h.getVertices()) - target.addVertex(v); - - for (E e : h.getEdges()) - { - ArrayList incident = new ArrayList(h.getIncidentVertices(e)); - for (int i = 0; i < incident.size(); i++) - for (int j = i+1; j < incident.size(); j++) - target.addEdge(edge_factory.get(), incident.get(i), incident.get(j)); - } - return target; + for (E e : h.getEdges()) { + ArrayList incident = new ArrayList(h.getIncidentVertices(e)); + for (int i = 0; i < incident.size(); i++) + for (int j = i + 1; j < incident.size(); j++) + target.addEdge(edge_factory.get(), incident.get(i), incident.get(j)); } + return target; + } - /** - * Creates a Graph which is a vertex-folded version of h, whose - * vertices are the input's hyperedges and whose edges are induced by adjacent hyperedges - * in the input. - * - *

The vertices of the new graph are the same objects as the hyperedges of - * h, and a - * is connected to b in the new graph if the corresponding edges - * in h have a vertex in common. Thus, each vertex incident to - * k edges in h induces a k-clique in the new graph. - * - *

The edges of the new graph are created by the specified Supplier. - * - * @param vertex type - * @param input edge type - * @param output edge type - * @param h hypergraph to be folded - * @param graph_factory Supplier used to generate the output graph - * @param edge_factory Supplier used to generate the output edges - * @return a transformation of the input graph whose vertices correspond to the input's hyperedges - * and edges are induced by hyperedges sharing vertices in the input - */ - public static Graph foldHypergraphVertices(Hypergraph h, - Supplier> graph_factory, Supplier edge_factory) - { - Graph target = graph_factory.get(); - - for (E e : h.getEdges()) - target.addVertex(e); - - for (V v : h.getVertices()) - { - ArrayList incident = new ArrayList(h.getIncidentEdges(v)); - for (int i = 0; i < incident.size(); i++) - for (int j = i+1; j < incident.size(); j++) - target.addEdge(edge_factory.get(), incident.get(i), incident.get(j)); - } - - return target; + /** + * Creates a Graph which is a vertex-folded version of h, whose vertices + * are the input's hyperedges and whose edges are induced by adjacent hyperedges in the input. + * + *

The vertices of the new graph are the same objects as the hyperedges of h, and + * a is connected to b in the new graph if the corresponding edges in + * h have a vertex in common. Thus, each vertex incident to k edges in h + * induces a k-clique in the new graph. + * + *

The edges of the new graph are created by the specified Supplier. + * + * @param vertex type + * @param input edge type + * @param output edge type + * @param h hypergraph to be folded + * @param graph_factory Supplier used to generate the output graph + * @param edge_factory Supplier used to generate the output edges + * @return a transformation of the input graph whose vertices correspond to the input's hyperedges + * and edges are induced by hyperedges sharing vertices in the input + */ + public static Graph foldHypergraphVertices( + Hypergraph h, Supplier> graph_factory, Supplier edge_factory) { + Graph target = graph_factory.get(); + + for (E e : h.getEdges()) target.addVertex(e); + + for (V v : h.getVertices()) { + ArrayList incident = new ArrayList(h.getIncidentEdges(v)); + for (int i = 0; i < incident.size(); i++) + for (int j = i + 1; j < incident.size(); j++) + target.addEdge(edge_factory.get(), incident.get(i), incident.get(j)); } - /** - * Creates a Graph which is a vertex-folded version of h, whose - * vertices are the input's hyperedges and whose edges are induced by adjacent hyperedges - * in the input. - * - *

The vertices of the new graph are the same objects as the hyperedges of - * h, and a - * is connected to b in the new graph if the corresponding edges - * in h have a vertex in common. Thus, each vertex incident to - * k edges in h induces a k-clique in the new graph. - * - *

The edges of the new graph consist of collections of each vertex incident to - * the corresponding hyperedge pair in the original graph. - * - * @param h hypergraph to be folded - * @param graph_factory Supplier used to generate the output graph - * @return a transformation of the input graph whose vertices correspond to the input's hyperedges - * and edges are induced by hyperedges sharing vertices in the input - */ - public Graph> foldHypergraphVertices(Hypergraph h, - Supplier>> graph_factory) - { - Graph> target = graph_factory.get(); + return target; + } - for (E e : h.getEdges()) - target.addVertex(e); - - for (V v : h.getVertices()) - { - ArrayList incident = new ArrayList(h.getIncidentEdges(v)); - populateTarget(target, v, incident); - } - return target; + /** + * Creates a Graph which is a vertex-folded version of h, whose vertices + * are the input's hyperedges and whose edges are induced by adjacent hyperedges in the input. + * + *

The vertices of the new graph are the same objects as the hyperedges of h, and + * a is connected to b in the new graph if the corresponding edges in + * h have a vertex in common. Thus, each vertex incident to k edges in h + * induces a k-clique in the new graph. + * + *

The edges of the new graph consist of collections of each vertex incident to the + * corresponding hyperedge pair in the original graph. + * + * @param h hypergraph to be folded + * @param graph_factory Supplier used to generate the output graph + * @return a transformation of the input graph whose vertices correspond to the input's hyperedges + * and edges are induced by hyperedges sharing vertices in the input + */ + public Graph> foldHypergraphVertices( + Hypergraph h, Supplier>> graph_factory) { + Graph> target = graph_factory.get(); + + for (E e : h.getEdges()) target.addVertex(e); + + for (V v : h.getVertices()) { + ArrayList incident = new ArrayList(h.getIncidentEdges(v)); + populateTarget(target, v, incident); } - - /** - * @param target - * @param e - * @param incident - */ - private static void populateTarget(Graph> target, T e, - ArrayList incident) - { - for (int i = 0; i < incident.size(); i++) - { - S v1 = incident.get(i); - for (int j = i+1; j < incident.size(); j++) - { - S v2 = incident.get(j); - Collection e_coll = target.findEdge(v1, v2); - if (e_coll == null) - { - e_coll = new ArrayList(); - target.addEdge(e_coll, v1, v2); - } - e_coll.add(e); - } + return target; + } + + /** + * @param target + * @param e + * @param incident + */ + private static void populateTarget( + Graph> target, T e, ArrayList incident) { + for (int i = 0; i < incident.size(); i++) { + S v1 = incident.get(i); + for (int j = i + 1; j < incident.size(); j++) { + S v2 = incident.get(j); + Collection e_coll = target.findEdge(v1, v2); + if (e_coll == null) { + e_coll = new ArrayList(); + target.addEdge(e_coll, v1, v2); } + e_coll.add(e); + } } - -} \ No newline at end of file + } +} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java index dea8444c..aa4c67ce 100644 --- 
a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/transformation/VertexPartitionCollapser.java @@ -1,103 +1,95 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.transformation; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Supplier; - import edu.uci.ics.jung.algorithms.blockmodel.VertexPartition; import edu.uci.ics.jung.graph.Graph; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; /** - * This class transforms a graph with a known vertex partitioning into a graph whose - * vertices correspond to the input graph's partitions. Two vertices in the output graph - * are connected if and only if there exists at least one edge between vertices in the - * corresponding partitions of the input graph. If the output graph permits parallel edges, - * there will be an edge connecting two vertices in the new graph for each such - * edge connecting constituent vertices in the input graph. - * + * This class transforms a graph with a known vertex partitioning into a graph whose vertices + * correspond to the input graph's partitions. 
Two vertices in the output graph are connected if and + * only if there exists at least one edge between vertices in the corresponding partitions of the + * input graph. If the output graph permits parallel edges, there will be an edge connecting two + * vertices in the new graph for each such edge connecting constituent vertices in the input graph. + * *

Concept based on Danyel Fisher's GraphCollapser in JUNG 1.x. - * */ -public class VertexPartitionCollapser -{ - protected Supplier> graph_factory; - protected Supplier vertex_factory; - protected Supplier edge_factory; - protected Map, CV> set_collapsedv; - - /** - * Creates an instance with the specified graph and element factories. - * @param vertex_factory used to construct the vertices of the new graph - * @param edge_factory used to construct the edges of the new graph - * @param graph_factory used to construct the new graph - */ - public VertexPartitionCollapser(Supplier> graph_factory, - Supplier vertex_factory, Supplier edge_factory) - { - this.graph_factory = graph_factory; - this.vertex_factory = vertex_factory; - this.edge_factory = edge_factory; - this.set_collapsedv = new HashMap, CV>(); - } +public class VertexPartitionCollapser { + protected Supplier> graph_factory; + protected Supplier vertex_factory; + protected Supplier edge_factory; + protected Map, CV> set_collapsedv; + + /** + * Creates an instance with the specified graph and element factories. + * + * @param vertex_factory used to construct the vertices of the new graph + * @param edge_factory used to construct the edges of the new graph + * @param graph_factory used to construct the new graph + */ + public VertexPartitionCollapser( + Supplier> graph_factory, + Supplier vertex_factory, + Supplier edge_factory) { + this.graph_factory = graph_factory; + this.vertex_factory = vertex_factory; + this.edge_factory = edge_factory; + this.set_collapsedv = new HashMap, CV>(); + } - /** - * Creates a new graph whose vertices correspond to the partitions of the supplied graph. 
- * @param partitioning a vertex partition of a graph - * @return a new graph whose vertices correspond to the partitions of the supplied graph - */ - public Graph collapseVertexPartitions(VertexPartition partitioning) - { - Graph original = partitioning.getGraph(); - Graph collapsed = graph_factory.get(); - - // create vertices in new graph corresponding to equivalence sets in the original graph - for (Set set : partitioning.getVertexPartitions()) - { - CV cv = vertex_factory.get(); - collapsed.addVertex(vertex_factory.get()); - set_collapsedv.put(set, cv); - } + /** + * Creates a new graph whose vertices correspond to the partitions of the supplied graph. + * + * @param partitioning a vertex partition of a graph + * @return a new graph whose vertices correspond to the partitions of the supplied graph + */ + public Graph collapseVertexPartitions(VertexPartition partitioning) { + Graph original = partitioning.getGraph(); + Graph collapsed = graph_factory.get(); - // create edges in new graph corresponding to edges in original graph - for (E e : original.getEdges()) - { - Collection incident = original.getIncidentVertices(e); - Collection collapsed_vertices = new HashSet(); - Map> vertex_partitions = partitioning.getVertexToPartitionMap(); - // collect the collapsed vertices corresponding to the original incident vertices - for (V v : incident) - collapsed_vertices.add(set_collapsedv.get(vertex_partitions.get(v))); - // if there's only one collapsed vertex, continue (no edges to create) - if (collapsed_vertices.size() > 1) - { - CE ce = edge_factory.get(); - collapsed.addEdge(ce, collapsed_vertices); - } - } - return collapsed; + // create vertices in new graph corresponding to equivalence sets in the original graph + for (Set set : partitioning.getVertexPartitions()) { + CV cv = vertex_factory.get(); + collapsed.addVertex(vertex_factory.get()); + set_collapsedv.put(set, cv); } - - /** - * @return a Function from vertex sets in the original graph to collapsed 
vertices - * in the transformed graph. - */ - public Function, CV> getSetToCollapsedVertexTransformer() - { - return Functions.forMap(set_collapsedv); + + // create edges in new graph corresponding to edges in original graph + for (E e : original.getEdges()) { + Collection incident = original.getIncidentVertices(e); + Collection collapsed_vertices = new HashSet(); + Map> vertex_partitions = partitioning.getVertexToPartitionMap(); + // collect the collapsed vertices corresponding to the original incident vertices + for (V v : incident) collapsed_vertices.add(set_collapsedv.get(vertex_partitions.get(v))); + // if there's only one collapsed vertex, continue (no edges to create) + if (collapsed_vertices.size() > 1) { + CE ce = edge_factory.get(); + collapsed.addEdge(ce, collapsed_vertices); + } } + return collapsed; + } + + /** + * @return a Function from vertex sets in the original graph to collapsed vertices in the + * transformed graph. + */ + public Function, CV> getSetToCollapsedVertexTransformer() { + return Functions.forMap(set_collapsedv); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java index c59ed9b4..c30d2593 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/BasicMapEntry.java @@ -9,73 +9,68 @@ * @param the key type * @param the value type */ -public class BasicMapEntry implements Map.Entry { - final K key; - V value; - - /** - * @param k the key - * @param v the value - */ - public BasicMapEntry(K k, V v) { - value = v; - key = k; - } +public class BasicMapEntry implements Map.Entry { + final K key; + V value; - public K getKey() { - return key; - } + /** + * @param k the key + * @param v the value + */ + public BasicMapEntry(K k, V v) { + value = v; + key = k; + } - public V getValue() { - return value; - } + 
public K getKey() { + return key; + } - public V setValue(V newValue) { + public V getValue() { + return value; + } + + public V setValue(V newValue) { V oldValue = value; - value = newValue; - return oldValue; - } + value = newValue; + return oldValue; + } - @Override - public boolean equals(Object o) { - if (!(o instanceof Map.Entry)) - return false; - @SuppressWarnings("rawtypes") - Map.Entry e = (Map.Entry)o; - Object k1 = getKey(); - Object k2 = e.getKey(); - if (k1 == k2 || (k1 != null && k1.equals(k2))) { - Object v1 = getValue(); - Object v2 = e.getValue(); - if (v1 == v2 || (v1 != null && v1.equals(v2))) - return true; - } - return false; + @Override + public boolean equals(Object o) { + if (!(o instanceof Map.Entry)) { + return false; } - - @Override - public int hashCode() { - return (key==null ? 0 : key.hashCode()) ^ - (value==null ? 0 : value.hashCode()); + @SuppressWarnings("rawtypes") + Map.Entry e = (Map.Entry) o; + Object k1 = getKey(); + Object k2 = e.getKey(); + if (k1 == k2 || (k1 != null && k1.equals(k2))) { + Object v1 = getValue(); + Object v2 = e.getValue(); + if (v1 == v2 || (v1 != null && v1.equals(v2))) { + return true; + } } + return false; + } - @Override - public String toString() { - return getKey() + "=" + getValue(); - } + @Override + public int hashCode() { + return (key == null ? 0 : key.hashCode()) ^ (value == null ? 0 : value.hashCode()); + } - /** - * This method is invoked whenever the value in an entry is - * overwritten by an invocation of put(k,v) for a key k that's already - * in the HashMap. - */ - void recordAccess(HashMap m) { - } + @Override + public String toString() { + return getKey() + "=" + getValue(); + } - /** - * This method is invoked whenever the entry is - * removed from the table. - */ - void recordRemoval(HashMap m) { - } + /** + * This method is invoked whenever the value in an entry is overwritten by an invocation of + * put(k,v) for a key k that's already in the HashMap. 
+ */ + void recordAccess(HashMap m) {} + + /** This method is invoked whenever the entry is removed from the table. */ + void recordRemoval(HashMap m) {} } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java index d0ed62ac..14c2bc06 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/DiscreteDistribution.java @@ -1,219 +1,191 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. * * This software is open-source under the BSD license; see either * "license.txt" or * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * + * * Created on Feb 18, 2004 */ package edu.uci.ics.jung.algorithms.util; +import com.google.common.base.Preconditions; import java.util.Collection; import java.util.Iterator; -import com.google.common.base.Preconditions; - /** - * A utility class for calculating properties of discrete distributions. - * Generally, these distributions are represented as arrays of - * double values, which are assumed to be normalized - * such that the entries in a single array sum to 1. - * + * A utility class for calculating properties of discrete distributions. Generally, these + * distributions are represented as arrays of double values, which are assumed to be + * normalized such that the entries in a single array sum to 1. + * * @author Joshua O'Madadhain */ -public class DiscreteDistribution -{ - - /** - * Returns the Kullback-Leibler divergence between the - * two specified distributions, which must have the same - * number of elements. This is defined as - * the sum over all i of - * dist[i] * Math.log(dist[i] / reference[i]). - * Note that this value is not symmetric; see - * symmetricKL for a symmetric variant. 
- * @see #symmetricKL(double[], double[]) - * @param dist the distribution whose divergence from {@code reference} is being measured - * @param reference the reference distribution - * @return sum_i of {@code dist[i] * Math.log(dist[i] / reference[i])} - */ - public static double KullbackLeibler(double[] dist, double[] reference) - { - double distance = 0; - - Preconditions.checkArgument(dist.length == reference.length, - "input arrays must be of the same length"); - - for (int i = 0; i < dist.length; i++) - { - if (dist[i] > 0 && reference[i] > 0) - distance += dist[i] * Math.log(dist[i] / reference[i]); - } - return distance; - } - - /** - * @param dist the distribution whose divergence from {@code reference} is being measured - * @param reference the reference distribution - * @return KullbackLeibler(dist, reference) + KullbackLeibler(reference, dist) - * @see #KullbackLeibler(double[], double[]) - */ - public static double symmetricKL(double[] dist, double[] reference) - { - return KullbackLeibler(dist, reference) - + KullbackLeibler(reference, dist); +public class DiscreteDistribution { + + /** + * Returns the Kullback-Leibler divergence between the two specified distributions, which must + * have the same number of elements. This is defined as the sum over all i of + * dist[i] * Math.log(dist[i] / reference[i]). Note that this value is not symmetric; see + * symmetricKL for a symmetric variant. 
+ * + * @see #symmetricKL(double[], double[]) + * @param dist the distribution whose divergence from {@code reference} is being measured + * @param reference the reference distribution + * @return sum_i of {@code dist[i] * Math.log(dist[i] / reference[i])} + */ + public static double KullbackLeibler(double[] dist, double[] reference) { + double distance = 0; + + Preconditions.checkArgument( + dist.length == reference.length, "input arrays must be of the same length"); + + for (int i = 0; i < dist.length; i++) { + if (dist[i] > 0 && reference[i] > 0) distance += dist[i] * Math.log(dist[i] / reference[i]); } - - /** - * Returns the squared difference between the - * two specified distributions, which must have the same - * number of elements. This is defined as - * the sum over all i of the square of - * (dist[i] - reference[i]). - * @param dist the distribution whose distance from {@code reference} is being measured - * @param reference the reference distribution - * @return sum_i {@code (dist[i] - reference[i])^2} - */ - public static double squaredError(double[] dist, double[] reference) - { - double error = 0; - - Preconditions.checkArgument(dist.length == reference.length, - "input arrays must be of the same length"); - - for (int i = 0; i < dist.length; i++) - { - double difference = dist[i] - reference[i]; - error += difference * difference; - } - return error; + return distance; + } + + /** + * @param dist the distribution whose divergence from {@code reference} is being measured + * @param reference the reference distribution + * @return KullbackLeibler(dist, reference) + KullbackLeibler(reference, dist) + * @see #KullbackLeibler(double[], double[]) + */ + public static double symmetricKL(double[] dist, double[] reference) { + return KullbackLeibler(dist, reference) + KullbackLeibler(reference, dist); + } + + /** + * Returns the squared difference between the two specified distributions, which must have the + * same number of elements. 
This is defined as the sum over all i of the square of + * (dist[i] - reference[i]). + * + * @param dist the distribution whose distance from {@code reference} is being measured + * @param reference the reference distribution + * @return sum_i {@code (dist[i] - reference[i])^2} + */ + public static double squaredError(double[] dist, double[] reference) { + double error = 0; + + Preconditions.checkArgument( + dist.length == reference.length, "input arrays must be of the same length"); + + for (int i = 0; i < dist.length; i++) { + double difference = dist[i] - reference[i]; + error += difference * difference; } - - /** - * Returns the cosine distance between the two - * specified distributions, which must have the same number - * of elements. The distributions are treated as vectors - * in dist.length-dimensional space. - * Given the following definitions - *

    - *
  • v = the sum over all i of dist[i] * dist[i] - *
  • w = the sum over all i of reference[i] * reference[i] - *
  • vw = the sum over all i of dist[i] * reference[i] - *
- * the value returned is defined as vw / (Math.sqrt(v) * Math.sqrt(w)). - * @param dist the distribution whose distance from {@code reference} is being measured - * @param reference the reference distribution - * @return the cosine distance between {@code dist} and {@code reference}, considered as vectors - */ - public static double cosine(double[] dist, double[] reference) - { - double v_prod = 0; // dot product x*x - double w_prod = 0; // dot product y*y - double vw_prod = 0; // dot product x*y - - Preconditions.checkArgument(dist.length == reference.length, - "input arrays must be of the same length"); - - for (int i = 0; i < dist.length; i++) - { - vw_prod += dist[i] * reference[i]; - v_prod += dist[i] * dist[i]; - w_prod += reference[i] * reference[i]; - } - // cosine distance between v and w - return vw_prod / (Math.sqrt(v_prod) * Math.sqrt(w_prod)); - } - - /** - * Returns the entropy of this distribution. - * High entropy indicates that the distribution is - * close to uniform; low entropy indicates that the - * distribution is close to a Dirac delta (i.e., if - * the probability mass is concentrated at a single - * point, this method returns 0). Entropy is defined as - * the sum over all i of - * -(dist[i] * Math.log(dist[i])) - * - * @param dist the distribution whose entropy is being measured - * @return sum_i {@code -(dist[i] * Math.log(dist[i]))} - */ - public static double entropy(double[] dist) - { - double total = 0; - - for (int i = 0; i < dist.length; i++) - { - if (dist[i] > 0) - total += dist[i] * Math.log(dist[i]); - } - return -total; - } - - /** - * Normalizes, with Lagrangian smoothing, the specified double - * array, so that the values sum to 1 (i.e., can be treated as probabilities). - * The effect of the Lagrangian smoothing is to ensure that all entries - * are nonzero; effectively, a value of alpha is added to each - * entry in the original array prior to normalization. 
- * @param counts the array to be converted into a probability distribution - * @param alpha the value to add to each entry prior to normalization - */ - public static void normalize(double[] counts, double alpha) - { - double total_count = 0; - - for (int i = 0; i < counts.length; i++) - total_count += counts[i]; - - for (int i = 0; i < counts.length; i++) - counts[i] = (counts[i] + alpha) - / (total_count + counts.length * alpha); - } - - /** - * Returns the mean of the specified Collection of - * distributions, which are assumed to be normalized arrays of - * double values. - * @see #mean(double[][]) - * @param distributions the distributions whose mean is to be calculated - * @return the mean of the distributions - */ - public static double[] mean(Collection distributions) - { - if (distributions.isEmpty()) - throw new IllegalArgumentException("Distribution collection must be non-empty"); - Iterator iter = distributions.iterator(); - double[] first = iter.next(); - double[][] d_array = new double[distributions.size()][first.length]; - d_array[0] = first; - for (int i = 1; i < d_array.length; i++) - d_array[i] = iter.next(); - - return mean(d_array); + return error; + } + + /** + * Returns the cosine distance between the two specified distributions, which must have the same + * number of elements. The distributions are treated as vectors in dist.length + * -dimensional space. Given the following definitions + * + *
    + *
  • v = the sum over all i of dist[i] * dist[i] + *
  • w = the sum over all i of reference[i] * reference[i] + * + *
  • vw = the sum over all i of dist[i] * reference[i] + *
+ * + * the value returned is defined as vw / (Math.sqrt(v) * Math.sqrt(w)). + * + * @param dist the distribution whose distance from {@code reference} is being measured + * @param reference the reference distribution + * @return the cosine distance between {@code dist} and {@code reference}, considered as vectors + */ + public static double cosine(double[] dist, double[] reference) { + double v_prod = 0; // dot product x*x + double w_prod = 0; // dot product y*y + double vw_prod = 0; // dot product x*y + + Preconditions.checkArgument( + dist.length == reference.length, "input arrays must be of the same length"); + + for (int i = 0; i < dist.length; i++) { + vw_prod += dist[i] * reference[i]; + v_prod += dist[i] * dist[i]; + w_prod += reference[i] * reference[i]; } - - /** - * Returns the mean of the specified array of distributions, - * represented as normalized arrays of double values. - * Will throw an "index out of bounds" exception if the - * distribution arrays are not all of the same length. - * @param distributions the distributions whose mean is to be calculated - * @return the mean of the distributions - */ - public static double[] mean(double[][] distributions) - { - double[] d_mean = new double[distributions[0].length]; - for (int j = 0; j < d_mean.length; j++) - d_mean[j] = 0; - - for (int i = 0; i < distributions.length; i++) - for (int j = 0; j < d_mean.length; j++) - d_mean[j] += distributions[i][j] / distributions.length; - - return d_mean; + // cosine distance between v and w + return vw_prod / (Math.sqrt(v_prod) * Math.sqrt(w_prod)); + } + + /** + * Returns the entropy of this distribution. High entropy indicates that the distribution is close + * to uniform; low entropy indicates that the distribution is close to a Dirac delta (i.e., if the + * probability mass is concentrated at a single point, this method returns 0). 
Entropy is defined + * as the sum over all i of -(dist[i] * Math.log(dist[i])) + * + * @param dist the distribution whose entropy is being measured + * @return sum_i {@code -(dist[i] * Math.log(dist[i]))} + */ + public static double entropy(double[] dist) { + double total = 0; + + for (int i = 0; i < dist.length; i++) { + if (dist[i] > 0) total += dist[i] * Math.log(dist[i]); } - -} \ No newline at end of file + return -total; + } + + /** + * Normalizes, with Lagrangian smoothing, the specified double array, so that the + * values sum to 1 (i.e., can be treated as probabilities). The effect of the Lagrangian smoothing + * is to ensure that all entries are nonzero; effectively, a value of alpha is added + * to each entry in the original array prior to normalization. + * + * @param counts the array to be converted into a probability distribution + * @param alpha the value to add to each entry prior to normalization + */ + public static void normalize(double[] counts, double alpha) { + double total_count = 0; + + for (int i = 0; i < counts.length; i++) total_count += counts[i]; + + for (int i = 0; i < counts.length; i++) + counts[i] = (counts[i] + alpha) / (total_count + counts.length * alpha); + } + + /** + * Returns the mean of the specified Collection of distributions, which are assumed + * to be normalized arrays of double values. 
+ * + * @see #mean(double[][]) + * @param distributions the distributions whose mean is to be calculated + * @return the mean of the distributions + */ + public static double[] mean(Collection distributions) { + if (distributions.isEmpty()) + throw new IllegalArgumentException("Distribution collection must be non-empty"); + Iterator iter = distributions.iterator(); + double[] first = iter.next(); + double[][] d_array = new double[distributions.size()][first.length]; + d_array[0] = first; + for (int i = 1; i < d_array.length; i++) d_array[i] = iter.next(); + + return mean(d_array); + } + + /** + * Returns the mean of the specified array of distributions, represented as normalized arrays of + * double values. Will throw an "index out of bounds" exception if the distribution + * arrays are not all of the same length. + * + * @param distributions the distributions whose mean is to be calculated + * @return the mean of the distributions + */ + public static double[] mean(double[][] distributions) { + double[] d_mean = new double[distributions[0].length]; + for (int j = 0; j < d_mean.length; j++) d_mean[j] = 0; + + for (int i = 0; i < distributions.length; i++) + for (int j = 0; j < d_mean.length; j++) + d_mean[j] += distributions[i][j] / distributions.length; + + return d_mean; + } +} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/Indexer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/Indexer.java index c41ed453..53c56a91 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/Indexer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/Indexer.java @@ -1,56 +1,53 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. 
+ * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.util; -import java.util.Collection; - import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; +import java.util.Collection; /** - * A class providing static methods useful for improving the - * performance of graph algorithms. - * - * @author Tom Nelson + * A class providing static methods useful for improving the performance of graph algorithms. * + * @author Tom Nelson */ public class Indexer { - - /** - * Returns a BiMap mapping each element of the collection to its - * index as encountered while iterating over the collection. The purpose - * of the index operation is to supply an O(1) replacement operation for the - * O(n) indexOf(element) method of a List - * @param the type of the collection elements - * @param collection the collection whose indices are to be generated - * @return a bidirectional map from collection elements to 0-based indices - */ - public static BiMap create(Collection collection) { - return create(collection, 0); - } - /** - * Returns a BiMap mapping each element of the collection to its - * index as encountered while iterating over the collection. 
The purpose - * of the index operation is to supply an O(1) replacement operation for the - * O(n) indexOf(element) method of a List - * @param the type of the collection elements - * @param collection the collection whose indices are to be generated - * @param start start index - * @return a bidirectional map from collection elements to start-based indices - */ - public static BiMap create(Collection collection, int start) { - BiMap map = HashBiMap.create(); - int i=start; - for(T t : collection) { - map.put(t,i++); - } - return map; - } + + /** + * Returns a BiMap mapping each element of the collection to its index as encountered + * while iterating over the collection. The purpose of the index operation is to supply an O(1) + * replacement operation for the O(n) indexOf(element) method of a List + * + * @param the type of the collection elements + * @param collection the collection whose indices are to be generated + * @return a bidirectional map from collection elements to 0-based indices + */ + public static BiMap create(Collection collection) { + return create(collection, 0); + } + /** + * Returns a BiMap mapping each element of the collection to its index as encountered + * while iterating over the collection. 
The purpose of the index operation is to supply an O(1) + * replacement operation for the O(n) indexOf(element) method of a List + * + * @param the type of the collection elements + * @param collection the collection whose indices are to be generated + * @param start start index + * @return a bidirectional map from collection elements to start-based indices + */ + public static BiMap create(Collection collection, int start) { + BiMap map = HashBiMap.create(); + int i = start; + for (T t : collection) { + map.put(t, i++); + } + return map; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeContext.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeContext.java index de719627..010280e4 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeContext.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeContext.java @@ -1,28 +1,19 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ -package edu.uci.ics.jung.algorithms.util; - - -/** - * An interface for algorithms that proceed iteratively. + * Copyright (c) 2003, The JUNG Authors * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. */ -public interface IterativeContext -{ - /** - * Advances one step. - */ - void step(); +package edu.uci.ics.jung.algorithms.util; + +/** An interface for algorithms that proceed iteratively. */ +public interface IterativeContext { + /** Advances one step. */ + void step(); - /** - * @return {@code true} if this iterative process is finished, and {@code false} otherwise. 
- */ - boolean done(); + /** @return {@code true} if this iterative process is finished, and {@code false} otherwise. */ + boolean done(); } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeProcess.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeProcess.java index 67749e70..403ceca5 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeProcess.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/IterativeProcess.java @@ -1,26 +1,26 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.util; - - /** * Provides basic infrastructure for iterative algorithms. Services provided include: + * *
    - *
  • storage of current and max iteration count
  • - *
  • framework for initialization, iterative evaluation, and finalization
  • - *
  • test for convergence
  • - *
  • etc.
  • + *
  • storage of current and max iteration count + *
  • framework for initialization, iterative evaluation, and finalization + *
  • test for convergence + *
  • etc. *
- *

- * Algorithms that subclass this class are typically used in the following way:
+ * + *

Algorithms that subclass this class are typically used in the following way:
+ * *

  * FooAlgorithm foo = new FooAlgorithm(...)
  * foo.setMaximumIterations(100); //set up conditions
@@ -28,147 +28,108 @@
  * foo.evaluate(); //key method which initiates iterative process
  * foo.getSomeResult();
  * 
- * + * * @author Scott White (originally written by Didier Besset) */ public abstract class IterativeProcess implements IterativeContext { - /** - * Number of iterations performed. - */ - private int iterations; - /** - * Maximum allowed number of iterations. - */ - private int maximumIterations = 50; - /** - * Desired precision. - */ - private double desiredPrecision = Double.MIN_VALUE; - /** - * Achieved precision. - */ - private double precision; - - - /** - * Generic constructor. - */ - public IterativeProcess() { - } - - /** - * Performs the iterative process. - * Note: this method does not return anything because Java does not - * allow mixing double, int, or objects - */ - public void evaluate() { - iterations = 0; - initializeIterations(); - while (iterations++ < maximumIterations) { - step(); - precision = getPrecision(); - if (hasConverged()) - break; - } - finalizeIterations(); - } - - /** - * Evaluate the result of the current iteration. - */ - abstract public void step(); - - /** - * Perform eventual clean-up operations - * (must be implement by subclass when needed). - */ - protected void finalizeIterations() { - } - - /** - * @return the desired precision. - */ - public double getDesiredPrecision() { - return desiredPrecision; - } - - /** - * @return the number of iterations performed. - */ - public int getIterations() { - return iterations; - } - - /** - * @return the maximum allowed number of iterations. - */ - public int getMaximumIterations() { - return maximumIterations; - } - - /** - * @return the attained precision. - */ - public double getPrecision() { - return precision; - } - - /** - * @param precision the precision to set - */ - public void setPrecision(double precision) { - this.precision = precision; - } - - /** - * - * Check to see if the result has been attained. 
- * @return boolean - */ - public boolean hasConverged() { - return precision < desiredPrecision; - } - - public boolean done() { - return hasConverged(); - } - - /** - * Initializes internal parameters to start the iterative process. - */ - protected void initializeIterations() { - } - - /** - * - */ - public void reset() { - } - - /** - * @return double - * @param epsilon double - * @param x double - */ - public double relativePrecision(double epsilon, double x) { - return x > desiredPrecision ? epsilon / x: epsilon; - } - - /** - * @param prec the desired precision. - */ - public void setDesiredPrecision(double prec) throws IllegalArgumentException { - if (prec <= 0) - throw new IllegalArgumentException("Non-positive precision: " + prec); - desiredPrecision = prec; - } - - /** - * @param maxIter the maximum allowed number of iterations - */ - public void setMaximumIterations(int maxIter) throws IllegalArgumentException { - if (maxIter < 1) - throw new IllegalArgumentException("Non-positive maximum iteration: " + maxIter); - maximumIterations = maxIter; + /** Number of iterations performed. */ + private int iterations; + /** Maximum allowed number of iterations. */ + private int maximumIterations = 50; + /** Desired precision. */ + private double desiredPrecision = Double.MIN_VALUE; + /** Achieved precision. */ + private double precision; + + /** Generic constructor. */ + public IterativeProcess() {} + + /** + * Performs the iterative process. Note: this method does not return anything because Java does + * not allow mixing double, int, or objects + */ + public void evaluate() { + iterations = 0; + initializeIterations(); + while (iterations++ < maximumIterations) { + step(); + precision = getPrecision(); + if (hasConverged()) { + break; + } } -} \ No newline at end of file + finalizeIterations(); + } + + /** Evaluate the result of the current iteration. 
*/ + public abstract void step(); + + /** Perform eventual clean-up operations (must be implement by subclass when needed). */ + protected void finalizeIterations() {} + + /** @return the desired precision. */ + public double getDesiredPrecision() { + return desiredPrecision; + } + + /** @return the number of iterations performed. */ + public int getIterations() { + return iterations; + } + + /** @return the maximum allowed number of iterations. */ + public int getMaximumIterations() { + return maximumIterations; + } + + /** @return the attained precision. */ + public double getPrecision() { + return precision; + } + + /** @param precision the precision to set */ + public void setPrecision(double precision) { + this.precision = precision; + } + + /** + * Check to see if the result has been attained. + * + * @return boolean + */ + public boolean hasConverged() { + return precision < desiredPrecision; + } + + public boolean done() { + return hasConverged(); + } + + /** Initializes internal parameters to start the iterative process. */ + protected void initializeIterations() {} + + /** */ + public void reset() {} + + /** + * @return double + * @param epsilon double + * @param x double + */ + public double relativePrecision(double epsilon, double x) { + return x > desiredPrecision ? epsilon / x : epsilon; + } + + /** @param prec the desired precision. 
*/ + public void setDesiredPrecision(double prec) throws IllegalArgumentException { + if (prec <= 0) throw new IllegalArgumentException("Non-positive precision: " + prec); + desiredPrecision = prec; + } + + /** @param maxIter the maximum allowed number of iterations */ + public void setMaximumIterations(int maxIter) throws IllegalArgumentException { + if (maxIter < 1) + throw new IllegalArgumentException("Non-positive maximum iteration: " + maxIter); + maximumIterations = maxIter; + } +} diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java index bfc7e52b..d4679fc1 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/KMeansClusterer.java @@ -23,235 +23,199 @@ import java.util.Random; import java.util.Set; - - /** - * Groups items into a specified number of clusters, based on their proximity in - * d-dimensional space, using the k-means algorithm. Calls to - * cluster will terminate when either of the two following - * conditions is true: + * Groups items into a specified number of clusters, based on their proximity in d-dimensional + * space, using the k-means algorithm. Calls to cluster will terminate when either of + * the two following conditions is true: + * *
    - *
  • the number of iterations is > max_iterations - *
  • none of the centroids has moved as much as convergence_threshold - * since the previous iteration + *
  • the number of iterations is > max_iterations + *
  • none of the centroids has moved as much as convergence_threshold since the + * previous iteration *
- * + * * @author Joshua O'Madadhain */ -public class KMeansClusterer -{ - protected int max_iterations; - protected double convergence_threshold; - protected Random rand; - - /** - * Creates an instance which will terminate when either the maximum number of - * iterations has been reached, or all changes are smaller than the convergence threshold. - * @param max_iterations the maximum number of iterations to employ - * @param convergence_threshold the smallest change we want to track - */ - public KMeansClusterer(int max_iterations, double convergence_threshold) - { - this.max_iterations = max_iterations; - this.convergence_threshold = convergence_threshold; - this.rand = new Random(); +public class KMeansClusterer { + protected int max_iterations; + protected double convergence_threshold; + protected Random rand; + + /** + * Creates an instance which will terminate when either the maximum number of iterations has been + * reached, or all changes are smaller than the convergence threshold. + * + * @param max_iterations the maximum number of iterations to employ + * @param convergence_threshold the smallest change we want to track + */ + public KMeansClusterer(int max_iterations, double convergence_threshold) { + this.max_iterations = max_iterations; + this.convergence_threshold = convergence_threshold; + this.rand = new Random(); + } + + /** Creates an instance with max iterations of 100 and convergence threshold of 0.001. 
*/ + public KMeansClusterer() { + this(100, 0.001); + } + + /** @return the maximum number of iterations */ + public int getMaxIterations() { + return max_iterations; + } + + /** @param max_iterations the maximum number of iterations */ + public void setMaxIterations(int max_iterations) { + if (max_iterations < 0) throw new IllegalArgumentException("max iterations must be >= 0"); + + this.max_iterations = max_iterations; + } + + /** @return the convergence threshold */ + public double getConvergenceThreshold() { + return convergence_threshold; + } + + /** @param convergence_threshold the convergence threshold */ + public void setConvergenceThreshold(double convergence_threshold) { + if (convergence_threshold <= 0) + throw new IllegalArgumentException("convergence threshold " + "must be > 0"); + + this.convergence_threshold = convergence_threshold; + } + + /** + * Returns a Collection of clusters, where each cluster is represented as a Map + * of Objects to locations in d-dimensional space. + * + * @param object_locations a map of the items to cluster, to double arrays that + * specify their locations in d-dimensional space. 
+ * @param num_clusters the number of clusters to create + * @return a clustering of the input objects in d-dimensional space + * @throws NotEnoughClustersException if {@code num_clusters} is larger than the number of + * distinct points in object_locations + */ + @SuppressWarnings("unchecked") + public Collection> cluster(Map object_locations, int num_clusters) { + if (object_locations == null || object_locations.isEmpty()) + throw new IllegalArgumentException("'objects' must be non-empty"); + + if (num_clusters < 2 || num_clusters > object_locations.size()) + throw new IllegalArgumentException( + "number of clusters " + + "must be >= 2 and <= number of objects (" + + object_locations.size() + + ")"); + + Set centroids = new HashSet(); + + Object[] obj_array = object_locations.keySet().toArray(); + Set tried = new HashSet(); + + // create the specified number of clusters + while (centroids.size() < num_clusters && tried.size() < object_locations.size()) { + T o = (T) obj_array[(int) (rand.nextDouble() * obj_array.length)]; + tried.add(o); + double[] mean_value = object_locations.get(o); + boolean duplicate = false; + for (double[] cur : centroids) { + if (Arrays.equals(mean_value, cur)) duplicate = true; + } + if (!duplicate) centroids.add(mean_value); } - /** - * Creates an instance with max iterations of 100 and convergence threshold - * of 0.001. 
- */ - public KMeansClusterer() - { - this(100, 0.001); - } - - /** - * @return the maximum number of iterations - */ - public int getMaxIterations() - { - return max_iterations; - } - - /** - * @param max_iterations the maximum number of iterations - */ - public void setMaxIterations(int max_iterations) - { - if (max_iterations < 0) - throw new IllegalArgumentException("max iterations must be >= 0"); - - this.max_iterations = max_iterations; - } - - /** - * @return the convergence threshold - */ - public double getConvergenceThreshold() - { - return convergence_threshold; - } - - /** - * @param convergence_threshold the convergence threshold - */ - public void setConvergenceThreshold(double convergence_threshold) - { - if (convergence_threshold <= 0) - throw new IllegalArgumentException("convergence threshold " + - "must be > 0"); - - this.convergence_threshold = convergence_threshold; - } - - /** - * Returns a Collection of clusters, where each cluster is - * represented as a Map of Objects to locations - * in d-dimensional space. - * @param object_locations a map of the items to cluster, to - * double arrays that specify their locations in d-dimensional space. 
- * @param num_clusters the number of clusters to create - * @return a clustering of the input objects in d-dimensional space - * @throws NotEnoughClustersException if {@code num_clusters} is larger than the number of - * distinct points in object_locations - */ - @SuppressWarnings("unchecked") - public Collection> cluster(Map object_locations, int num_clusters) - { - if (object_locations == null || object_locations.isEmpty()) - throw new IllegalArgumentException("'objects' must be non-empty"); - - if (num_clusters < 2 || num_clusters > object_locations.size()) - throw new IllegalArgumentException("number of clusters " + - "must be >= 2 and <= number of objects (" + - object_locations.size() + ")"); - - - Set centroids = new HashSet(); - - Object[] obj_array = object_locations.keySet().toArray(); - Set tried = new HashSet(); - - // create the specified number of clusters - while (centroids.size() < num_clusters && tried.size() < object_locations.size()) - { - T o = (T)obj_array[(int)(rand.nextDouble() * obj_array.length)]; - tried.add(o); - double[] mean_value = object_locations.get(o); - boolean duplicate = false; - for (double[] cur : centroids) - { - if (Arrays.equals(mean_value, cur)) - duplicate = true; - } - if (!duplicate) - centroids.add(mean_value); - } - - if (tried.size() >= object_locations.size()) - throw new NotEnoughClustersException(); - - // put items in their initial clusters - Map> clusterMap = assignToClusters(object_locations, centroids); - - // keep reconstituting clusters until either - // (a) membership is stable, or - // (b) number of iterations passes max_iterations, or - // (c) max movement of any centroid is <= convergence_threshold - int iterations = 0; - double max_movement = Double.POSITIVE_INFINITY; - while (iterations++ < max_iterations && max_movement > convergence_threshold) - { - max_movement = 0; - Set new_centroids = new HashSet(); - // calculate new mean for each cluster - for (Map.Entry> entry : clusterMap.entrySet()) - { - 
double[] centroid = entry.getKey(); - Map elements = entry.getValue(); - ArrayList locations = new ArrayList(elements.values()); - - double[] mean = DiscreteDistribution.mean(locations); - max_movement = Math.max(max_movement, - Math.sqrt(DiscreteDistribution.squaredError(centroid, mean))); - new_centroids.add(mean); - } - - // TODO: check membership of clusters: have they changed? - - // regenerate cluster membership based on means - clusterMap = assignToClusters(object_locations, new_centroids); - } - return clusterMap.values(); + if (tried.size() >= object_locations.size()) throw new NotEnoughClustersException(); + + // put items in their initial clusters + Map> clusterMap = assignToClusters(object_locations, centroids); + + // keep reconstituting clusters until either + // (a) membership is stable, or + // (b) number of iterations passes max_iterations, or + // (c) max movement of any centroid is <= convergence_threshold + int iterations = 0; + double max_movement = Double.POSITIVE_INFINITY; + while (iterations++ < max_iterations && max_movement > convergence_threshold) { + max_movement = 0; + Set new_centroids = new HashSet(); + // calculate new mean for each cluster + for (Map.Entry> entry : clusterMap.entrySet()) { + double[] centroid = entry.getKey(); + Map elements = entry.getValue(); + ArrayList locations = new ArrayList(elements.values()); + + double[] mean = DiscreteDistribution.mean(locations); + max_movement = + Math.max(max_movement, Math.sqrt(DiscreteDistribution.squaredError(centroid, mean))); + new_centroids.add(mean); + } + + // TODO: check membership of clusters: have they changed? + + // regenerate cluster membership based on means + clusterMap = assignToClusters(object_locations, new_centroids); } - - /** - * Assigns each object to the cluster whose centroid is closest to the - * object. 
- * @param object_locations a map of objects to locations - * @param centroids the centroids of the clusters to be formed - * @return a map of objects to assigned clusters - */ - protected Map> assignToClusters(Map object_locations, Set centroids) - { - Map> clusterMap = new HashMap>(); - for (double[] centroid : centroids) - clusterMap.put(centroid, new HashMap()); - - for (Map.Entry object_location : object_locations.entrySet()) - { - T object = object_location.getKey(); - double[] location = object_location.getValue(); - - // find the cluster with the closest centroid - Iterator c_iter = centroids.iterator(); - double[] closest = c_iter.next(); - double distance = DiscreteDistribution.squaredError(location, closest); - - while (c_iter.hasNext()) - { - double[] centroid = c_iter.next(); - double dist_cur = DiscreteDistribution.squaredError(location, centroid); - if (dist_cur < distance) - { - distance = dist_cur; - closest = centroid; - } - } - clusterMap.get(closest).put(object, location); + return clusterMap.values(); + } + + /** + * Assigns each object to the cluster whose centroid is closest to the object. 
+ * + * @param object_locations a map of objects to locations + * @param centroids the centroids of the clusters to be formed + * @return a map of objects to assigned clusters + */ + protected Map> assignToClusters( + Map object_locations, Set centroids) { + Map> clusterMap = new HashMap>(); + for (double[] centroid : centroids) clusterMap.put(centroid, new HashMap()); + + for (Map.Entry object_location : object_locations.entrySet()) { + T object = object_location.getKey(); + double[] location = object_location.getValue(); + + // find the cluster with the closest centroid + Iterator c_iter = centroids.iterator(); + double[] closest = c_iter.next(); + double distance = DiscreteDistribution.squaredError(location, closest); + + while (c_iter.hasNext()) { + double[] centroid = c_iter.next(); + double dist_cur = DiscreteDistribution.squaredError(location, centroid); + if (dist_cur < distance) { + distance = dist_cur; + closest = centroid; } - - return clusterMap; - } - - /** - * Sets the seed used by the internal random number generator. - * Enables consistent outputs. - * @param random_seed the random seed to use - */ - public void setSeed(int random_seed) - { - this.rand = new Random(random_seed); + } + clusterMap.get(closest).put(object, location); } - /** - * An exception that indicates that the specified data points cannot be - * clustered into the number of clusters requested by the user. - * This will happen if and only if there are fewer distinct points than - * requested clusters. (If there are fewer total data points than - * requested clusters, IllegalArgumentException will be thrown.) 
- * - * @author Joshua O'Madadhain - */ - @SuppressWarnings("serial") - public static class NotEnoughClustersException extends RuntimeException - { - @Override - public String getMessage() - { - return "Not enough distinct points in the input data set to form " + - "the requested number of clusters"; - } + return clusterMap; + } + + /** + * Sets the seed used by the internal random number generator. Enables consistent outputs. + * + * @param random_seed the random seed to use + */ + public void setSeed(int random_seed) { + this.rand = new Random(random_seed); + } + + /** + * An exception that indicates that the specified data points cannot be clustered into the number + * of clusters requested by the user. This will happen if and only if there are fewer distinct + * points than requested clusters. (If there are fewer total data points than requested clusters, + * IllegalArgumentException will be thrown.) + * + * @author Joshua O'Madadhain + */ + @SuppressWarnings("serial") + public static class NotEnoughClustersException extends RuntimeException { + @Override + public String getMessage() { + return "Not enough distinct points in the input data set to form " + + "the requested number of clusters"; } + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java index cf152de4..7cad6701 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/MapBinaryHeap.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2003, The JUNG Authors + * Copyright (c) 2003, The JUNG Authors * * All rights reserved. * @@ -8,11 +8,12 @@ * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
*/ /* - * + * * Created on Oct 29, 2003 */ package edu.uci.ics.jung.algorithms.util; +import com.google.common.collect.Iterators; import java.util.AbstractCollection; import java.util.Collection; import java.util.Comparator; @@ -23,357 +24,291 @@ import java.util.Queue; import java.util.Vector; -import com.google.common.collect.Iterators; - /** - * An array-based binary heap implementation of a priority queue, - * which also provides - * efficient update() and contains operations. - * It contains extra infrastructure (a hash table) to keep track of the - * position of each element in the array; thus, if the key value of an element - * changes, it may be "resubmitted" to the heap via update - * so that the heap can reposition it efficiently, as necessary. - * + * An array-based binary heap implementation of a priority queue, which also provides efficient + * update() and contains operations. It contains extra infrastructure (a + * hash table) to keep track of the position of each element in the array; thus, if the key value of + * an element changes, it may be "resubmitted" to the heap via update so that the heap + * can reposition it efficiently, as necessary. + * * @author Joshua O'Madadhain */ -public class MapBinaryHeap - extends AbstractCollection - implements Queue -{ - private Vector heap = new Vector(); // holds the heap as an implicit binary tree - private Map object_indices = new HashMap(); // maps each object in the heap to its index in the heap - private Comparator comp; - private final static int TOP = 0; // the index of the top of the heap - - /** - * Creates a MapBinaryHeap whose heap ordering - * is based on the ordering of the elements specified by comp. - * @param comp the comparator to use to order elements in the heap - */ - public MapBinaryHeap(Comparator comp) - { - initialize(comp); - } - - /** - * Creates a MapBinaryHeap whose heap ordering - * will be based on the natural ordering of the elements, - * which must be Comparable. 
- */ - public MapBinaryHeap() - { - initialize(new ComparableComparator()); - } +public class MapBinaryHeap extends AbstractCollection implements Queue { + private Vector heap = new Vector(); // holds the heap as an implicit binary tree + private Map object_indices = + new HashMap(); // maps each object in the heap to its index in the heap + private Comparator comp; + private static final int TOP = 0; // the index of the top of the heap - /** - * Creates a MapBinaryHeap based on the specified - * collection whose heap ordering - * will be based on the natural ordering of the elements, - * which must be Comparable. - * @param c the collection of {@code Comparable} elements to add to the heap - */ - public MapBinaryHeap(Collection c) - { - this(); - addAll(c); - } - - /** - * Creates a MapBinaryHeap based on the specified collection - * whose heap ordering - * is based on the ordering of the elements specified by c. - * @param c the collection of elements to add to the heap - * @param comp the comparator to use for items in {@code c} - */ - public MapBinaryHeap(Collection c, Comparator comp) - { - this(comp); - addAll(c); - } - - private void initialize(Comparator comp) - { - this.comp = comp; - clear(); - } - - /** - * @see Collection#clear() - */ - @Override - public void clear() - { - object_indices.clear(); - heap.clear(); - } - - /** - * Inserts o into this collection. - */ - @Override - public boolean add(T o) - { - int i = heap.size(); // index 1 past the end of the heap - heap.setSize(i+1); - percolateUp(i, o); - return true; - } - - /** - * Returns true if this collection contains no elements, and - * false otherwise. - */ - @Override - public boolean isEmpty() - { - return heap.isEmpty(); - } - - /** - * Returns the element at the top of the heap; does not - * alter the heap. 
- */ - public T peek() - { - if (heap.size() > 0) - return heap.elementAt(TOP); - else - return null; - } - - /** - * @return the size of this heap - */ - @Override - public int size() - { - return heap.size(); - } - - /** - * Informs the heap that this object's internal key value has been - * updated, and that its place in the heap may need to be shifted - * (up or down). - * @param o the object whose key value has been updated - */ - public void update(T o) - { - // Since we don't know whether the key value increased or - // decreased, we just percolate up followed by percolating down; - // one of the two will have no effect. - - int cur = object_indices.get(o).intValue(); // current index - int new_idx = percolateUp(cur, o); - percolateDown(new_idx); - } + /** + * Creates a MapBinaryHeap whose heap ordering is based on the ordering of the + * elements specified by comp. + * + * @param comp the comparator to use to order elements in the heap + */ + public MapBinaryHeap(Comparator comp) { + initialize(comp); + } - @Override - public boolean contains(Object o) - { - return object_indices.containsKey(o); - } - - /** - * Moves the element at position cur closer to - * the bottom of the heap, or returns if no further motion is - * necessary. Calls itself recursively if further motion is - * possible. - */ - private void percolateDown(int cur) - { - int left = lChild(cur); - int right = rChild(cur); - int smallest; - - if ((left < heap.size()) && - (comp.compare(heap.elementAt(left), heap.elementAt(cur)) < 0)) { - smallest = left; - } else { - smallest = cur; - } - - if ((right < heap.size()) && - (comp.compare(heap.elementAt(right), heap.elementAt(smallest)) < 0)) { - smallest = right; - } - - if (cur != smallest) - { - swap(cur, smallest); - percolateDown(smallest); - } - } + /** + * Creates a MapBinaryHeap whose heap ordering will be based on the natural + * ordering of the elements, which must be Comparable. 
+ */ + public MapBinaryHeap() { + initialize(new ComparableComparator()); + } - /** - * Moves the element o at position cur - * as high as it can go in the heap. Returns the new position of the - * element in the heap. - */ - private int percolateUp(int cur, T o) - { - int i = cur; - - while ((i > TOP) && (comp.compare(heap.elementAt(parent(i)), o) > 0)) - { - T parentElt = heap.elementAt(parent(i)); - heap.setElementAt(parentElt, i); - object_indices.put(parentElt, new Integer(i)); // reset index to i (new location) - i = parent(i); - } - - // place object in heap at appropriate place - object_indices.put(o, new Integer(i)); - heap.setElementAt(o, i); - - return i; - } - - /** - * Returns the index of the left child of the element at - * index i of the heap. - * @param i - * @return the index of the left child of the element at - * index i of the heap - */ - private int lChild(int i) - { - return (i<<1) + 1; - } - - /** - * Returns the index of the right child of the element at - * index i of the heap. - * @param i - * @return the index of the right child of the element at - * index i of the heap - */ - private int rChild(int i) - { - return (i<<1) + 2; - } - - /** - * Returns the index of the parent of the element at - * index i of the heap. - * @param i - * @return the index of the parent of the element at index i of the heap - */ - private int parent(int i) - { - return (i-1)>>1; - } - - /** - * Swaps the positions of the elements at indices i - * and j of the heap. - * @param i - * @param j - */ - private void swap(int i, int j) - { - T iElt = heap.elementAt(i); - T jElt = heap.elementAt(j); - - heap.setElementAt(jElt, i); - object_indices.put(jElt, new Integer(i)); - - heap.setElementAt(iElt, j); - object_indices.put(iElt, new Integer(j)); + /** + * Creates a MapBinaryHeap based on the specified collection whose heap ordering will + * be based on the natural ordering of the elements, which must be Comparable. 
+ * + * @param c the collection of {@code Comparable} elements to add to the heap + */ + public MapBinaryHeap(Collection c) { + this(); + addAll(c); + } + + /** + * Creates a MapBinaryHeap based on the specified collection whose heap ordering is + * based on the ordering of the elements specified by c. + * + * @param c the collection of elements to add to the heap + * @param comp the comparator to use for items in {@code c} + */ + public MapBinaryHeap(Collection c, Comparator comp) { + this(comp); + addAll(c); + } + + private void initialize(Comparator comp) { + this.comp = comp; + clear(); + } + + /** @see Collection#clear() */ + @Override + public void clear() { + object_indices.clear(); + heap.clear(); + } + + /** Inserts o into this collection. */ + @Override + public boolean add(T o) { + int i = heap.size(); // index 1 past the end of the heap + heap.setSize(i + 1); + percolateUp(i, o); + return true; + } + + /** + * Returns true if this collection contains no elements, and false + * otherwise. + */ + @Override + public boolean isEmpty() { + return heap.isEmpty(); + } + + /** Returns the element at the top of the heap; does not alter the heap. */ + public T peek() { + if (heap.size() > 0) { + return heap.elementAt(TOP); + } else { + return null; } - - /** - * Comparator used if none is specified in the constructor. 
- * @author Joshua O'Madadhain - */ - private class ComparableComparator implements Comparator - { - /** - * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object) - */ - @SuppressWarnings("unchecked") - public int compare(T arg0, T arg1) - { - if (!(arg0 instanceof Comparable) || !(arg1 instanceof Comparable)) - throw new IllegalArgumentException("Arguments must be Comparable"); - - return ((Comparable)arg0).compareTo(arg1); - } + } + + /** @return the size of this heap */ + @Override + public int size() { + return heap.size(); + } + + /** + * Informs the heap that this object's internal key value has been updated, and that its place in + * the heap may need to be shifted (up or down). + * + * @param o the object whose key value has been updated + */ + public void update(T o) { + // Since we don't know whether the key value increased or + // decreased, we just percolate up followed by percolating down; + // one of the two will have no effect. + + int cur = object_indices.get(o).intValue(); // current index + int new_idx = percolateUp(cur, o); + percolateDown(new_idx); + } + + @Override + public boolean contains(Object o) { + return object_indices.containsKey(o); + } + + /** + * Moves the element at position cur closer to the bottom of the heap, or returns if + * no further motion is necessary. Calls itself recursively if further motion is possible. + */ + private void percolateDown(int cur) { + int left = lChild(cur); + int right = rChild(cur); + int smallest; + + if ((left < heap.size()) && (comp.compare(heap.elementAt(left), heap.elementAt(cur)) < 0)) { + smallest = left; + } else { + smallest = cur; } - /** - * Returns an Iterator that does not support modification - * of the heap. 
- */ - @Override - public Iterator iterator() - { - return Iterators.unmodifiableIterator(heap.iterator()); + if ((right < heap.size()) + && (comp.compare(heap.elementAt(right), heap.elementAt(smallest)) < 0)) { + smallest = right; } - /** - * This data structure does not support the removal of arbitrary elements. - */ - @Override - public boolean remove(Object o) - { - throw new UnsupportedOperationException(); + if (cur != smallest) { + swap(cur, smallest); + percolateDown(smallest); } + } + + /** + * Moves the element o at position cur as high as it can go in the heap. + * Returns the new position of the element in the heap. + */ + private int percolateUp(int cur, T o) { + int i = cur; - /** - * This data structure does not support the removal of arbitrary elements. - */ - @Override - public boolean removeAll(Collection c) - { - throw new UnsupportedOperationException(); + while ((i > TOP) && (comp.compare(heap.elementAt(parent(i)), o) > 0)) { + T parentElt = heap.elementAt(parent(i)); + heap.setElementAt(parentElt, i); + object_indices.put(parentElt, new Integer(i)); // reset index to i (new location) + i = parent(i); } - /** - * This data structure does not support the removal of arbitrary elements. - */ - @Override - public boolean retainAll(Collection c) - { - throw new UnsupportedOperationException(); + // place object in heap at appropriate place + object_indices.put(o, new Integer(i)); + heap.setElementAt(o, i); + + return i; + } + + /** + * Returns the index of the left child of the element at index i of the heap. + * + * @param i + * @return the index of the left child of the element at index i of the heap + */ + private int lChild(int i) { + return (i << 1) + 1; + } + + /** + * Returns the index of the right child of the element at index i of the heap. 
+ * + * @param i + * @return the index of the right child of the element at index i of the heap + */ + private int rChild(int i) { + return (i << 1) + 2; + } + + /** + * Returns the index of the parent of the element at index i of the heap. + * + * @param i + * @return the index of the parent of the element at index i of the heap + */ + private int parent(int i) { + return (i - 1) >> 1; + } + + /** + * Swaps the positions of the elements at indices i and j of the heap. + * + * @param i + * @param j + */ + private void swap(int i, int j) { + T iElt = heap.elementAt(i); + T jElt = heap.elementAt(j); + + heap.setElementAt(jElt, i); + object_indices.put(jElt, new Integer(i)); + + heap.setElementAt(iElt, j); + object_indices.put(iElt, new Integer(j)); + } + + /** + * Comparator used if none is specified in the constructor. + * + * @author Joshua O'Madadhain + */ + private class ComparableComparator implements Comparator { + /** @see java.util.Comparator#compare(java.lang.Object, java.lang.Object) */ + @SuppressWarnings("unchecked") + public int compare(T arg0, T arg1) { + if (!(arg0 instanceof Comparable) || !(arg1 instanceof Comparable)) + throw new IllegalArgumentException("Arguments must be Comparable"); + + return ((Comparable) arg0).compareTo(arg1); } + } + + /** Returns an Iterator that does not support modification of the heap. */ + @Override + public Iterator iterator() { + return Iterators.unmodifiableIterator(heap.iterator()); + } + + /** This data structure does not support the removal of arbitrary elements. */ + @Override + public boolean remove(Object o) { + throw new UnsupportedOperationException(); + } + + /** This data structure does not support the removal of arbitrary elements. 
*/ + @Override + public boolean removeAll(Collection c) { + throw new UnsupportedOperationException(); + } - public T element() throws NoSuchElementException - { - T top = this.peek(); - if (top == null) - throw new NoSuchElementException(); - return top; - } - - public boolean offer(T o) - { - return add(o); - } - - public T poll() - { - T top = this.peek(); - if (top != null) - { - T bottom_elt = heap.lastElement(); - heap.setElementAt(bottom_elt, TOP); - object_indices.put(bottom_elt, new Integer(TOP)); - - heap.setSize(heap.size() - 1); // remove the last element - if (heap.size() > 1) - percolateDown(TOP); - - object_indices.remove(top); - } - return top; - } - - public T remove() - { - T top = this.poll(); - if (top == null) - throw new NoSuchElementException(); - return top; - } + /** This data structure does not support the removal of arbitrary elements. */ + @Override + public boolean retainAll(Collection c) { + throw new UnsupportedOperationException(); + } + + public T element() throws NoSuchElementException { + T top = this.peek(); + if (top == null) throw new NoSuchElementException(); + return top; + } + + public boolean offer(T o) { + return add(o); + } + + public T poll() { + T top = this.peek(); + if (top != null) { + T bottom_elt = heap.lastElement(); + heap.setElementAt(bottom_elt, TOP); + object_indices.put(bottom_elt, new Integer(TOP)); + + heap.setSize(heap.size() - 1); // remove the last element + if (heap.size() > 1) percolateDown(TOP); + + object_indices.remove(top); + } + return top; + } + public T remove() { + T top = this.poll(); + if (top == null) throw new NoSuchElementException(); + return top; + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java index 267ea61b..1368d2f0 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java +++ 
b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/MapSettableTransformer.java @@ -1,7 +1,7 @@ /* * Created on Aug 5, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -13,33 +13,29 @@ import java.util.Map; - /** * A SettableTransformer that operates on an underlying Map instance. * Similar to MapTransformer. - * + * * @author Joshua O'Madadhain */ -public class MapSettableTransformer implements SettableTransformer -{ - protected Map map; - - /** - * Creates an instance based on m. - * @param m the map on which this instance is based - */ - public MapSettableTransformer(Map m) - { - this.map = m; - } +public class MapSettableTransformer implements SettableTransformer { + protected Map map; + + /** + * Creates an instance based on m. + * + * @param m the map on which this instance is based + */ + public MapSettableTransformer(Map m) { + this.map = m; + } - public O apply(I input) - { - return map.get(input); - } + public O apply(I input) { + return map.get(input); + } - public void set(I input, O output) - { - map.put(input, output); - } + public void set(I input, O output) { + map.put(input, output); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java index 13d90446..3e235bb9 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/SelfLoopEdgePredicate.java @@ -1,23 +1,21 @@ package edu.uci.ics.jung.algorithms.util; import com.google.common.base.Predicate; - import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.Context; import edu.uci.ics.jung.graph.util.Pair; /** - * A Predicate that returns true if the input edge's - * endpoints in the input graph are identical. 
(Thus, an edge which connects - * its sole incident vertex to itself). + * A Predicate that returns true if the input edge's endpoints in the + * input graph are identical. (Thus, an edge which connects its sole incident vertex to itself). * * @param the vertex type * @param the edge type */ -public class SelfLoopEdgePredicate implements Predicate,E>> { +public class SelfLoopEdgePredicate implements Predicate, E>> { - public boolean apply(Context,E> context) { - Pair endpoints = context.graph.getEndpoints(context.element); - return endpoints.getFirst().equals(endpoints.getSecond()); - } + public boolean apply(Context, E> context) { + Pair endpoints = context.graph.getEndpoints(context.element); + return endpoints.getFirst().equals(endpoints.getSecond()); + } } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/SettableTransformer.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/SettableTransformer.java index dc0797bb..4ff3c58a 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/SettableTransformer.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/SettableTransformer.java @@ -1,7 +1,7 @@ /* * Created on Aug 5, 2007 * - * Copyright (c) 2007, The JUNG Authors + * Copyright (c) 2007, The JUNG Authors * * All rights reserved. * @@ -16,16 +16,16 @@ /** * An interface for classes that can set the value to be returned (from transform()) * when invoked on a given input. - * + * * @author Joshua O'Madadhain */ -public interface SettableTransformer extends Function -{ - /** - * Sets the value (output) to be returned by a call to - * transform(input)). - * @param input the value whose output value is being specified - * @param output the output value for {@code input} - */ - public void set(I input, O output); +public interface SettableTransformer extends Function { + /** + * Sets the value (output) to be returned by a call to transform(input) + * ). 
+ * + * @param input the value whose output value is being specified + * @param output the output value for {@code input} + */ + public void set(I input, O output); } diff --git a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/WeightedChoice.java b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/WeightedChoice.java index 719caef6..ef522707 100644 --- a/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/WeightedChoice.java +++ b/jung-algorithms/src/main/java/edu/uci/ics/jung/algorithms/util/WeightedChoice.java @@ -1,13 +1,10 @@ /** - * Copyright (c) 2009, The JUNG Authors + * Copyright (c) 2009, The JUNG Authors * - * All rights reserved. + *

All rights reserved. * - * This software is open-source under the BSD license; see either - * "license.txt" or - * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * Created on Jan 8, 2009 - * + *

This software is open-source under the BSD license; see either "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. Created on Jan 8, 2009 */ package edu.uci.ics.jung.algorithms.util; @@ -19,185 +16,171 @@ import java.util.Random; /** - * Selects items according to their probability in an arbitrary probability - * distribution. The distribution is specified by a {@code Map} from - * items (of type {@code T}) to weights of type {@code Number}, supplied - * to the constructor; these weights are normalized internally to act as + * Selects items according to their probability in an arbitrary probability distribution. The + * distribution is specified by a {@code Map} from items (of type {@code T}) to weights of type + * {@code Number}, supplied to the constructor; these weights are normalized internally to act as * probabilities. - * + * *

This implementation selects items in O(1) time, and requires O(n) space. - * + * * @author Joshua O'Madadhain */ -public class WeightedChoice -{ - private List item_pairs; - private Random random; - - /** - * The default minimum value that is treated as a valid probability - * (as opposed to rounding error from floating-point operations). - */ - public static final double DEFAULT_THRESHOLD = 0.00000000001; - - /** - * Equivalent to {@code this(item_weights, new Random(), DEFAULT_THRESHOLD)}. - * @param item_weights a map from items to their weights - */ - public WeightedChoice(Map item_weights) - { - this(item_weights, new Random(), DEFAULT_THRESHOLD); - } - - /** - * Equivalent to {@code this(item_weights, new Random(), threshold)}. - * @param item_weights a map from items to their weights - * @param threshold the minimum value that is treated as a probability - * (anything smaller will be considered equivalent to a floating-point rounding error) - */ - public WeightedChoice(Map item_weights, double threshold) - { - this(item_weights, new Random(), threshold); - } - - /** - * Equivalent to {@code this(item_weights, random, DEFAULT_THRESHOLD)}. - * @param item_weights a map from items to their weights - * @param random the Random instance to use for selection - */ - public WeightedChoice(Map item_weights, Random random) - { - this(item_weights, random, DEFAULT_THRESHOLD); - } - - /** - * Creates an instance with the specified mapping from items to weights, - * random number generator, and threshold value. - * - *

The mapping defines the weight for each item to be selected; this - * will be proportional to the probability of its selection. - *

The random number generator specifies the mechanism which will be - * used to provide uniform integer and double values. - *

The threshold indicates default minimum value that is treated as a valid - * probability (as opposed to rounding error from floating-point operations). - * @param item_weights a map from items to their weights - * @param random the Random instance to use for selection - * @param threshold the minimum value that is treated as a probability - * (anything smaller will be considered equivalent to a floating-point rounding error) - */ - public WeightedChoice(Map item_weights, Random random, - double threshold) - { - if (item_weights.isEmpty()) - throw new IllegalArgumentException("Item weights must be non-empty"); - - int item_count = item_weights.size(); - item_pairs = new ArrayList(item_count); - - double sum = 0; - for (Map.Entry entry : item_weights.entrySet()) - { - double value = entry.getValue().doubleValue(); - if (value <= 0) - throw new IllegalArgumentException("Weights must be > 0"); - sum += value; - } - double bucket_weight = 1.0 / item_weights.size(); - - Queue light_weights = new LinkedList(); - Queue heavy_weights = new LinkedList(); - for (Map.Entry entry : item_weights.entrySet()) - { - double value = entry.getValue().doubleValue() / sum; - enqueueItem(entry.getKey(), value, bucket_weight, light_weights, heavy_weights); - } - - // repeat until both queues empty - while (!heavy_weights.isEmpty() || !light_weights.isEmpty()) - { - ItemPair heavy_item = heavy_weights.poll(); - ItemPair light_item = light_weights.poll(); - double light_weight = 0; - T light = null; - T heavy = null; - if (light_item != null) - { - light_weight = light_item.weight; - light = light_item.light; - } - if (heavy_item != null) - { - heavy = heavy_item.heavy; - // put the 'left over' weight from the heavy item--what wasn't - // needed to make up the difference between the light weight and - // 1/n--back in the appropriate queue - double new_weight = heavy_item.weight - (bucket_weight - light_weight); - if (new_weight > threshold) - enqueueItem(heavy, new_weight, bucket_weight, 
light_weights, heavy_weights); - } - light_weight *= item_count; - - item_pairs.add(new ItemPair(light, heavy, light_weight)); - } - - this.random = random; - } - - /** - * Adds key/value to the appropriate queue. Keys with values less than - * the threshold get added to {@code light_weights}, all others get added - * to {@code heavy_weights}. - */ - private void enqueueItem(T key, double value, double threshold, - Queue light_weights, Queue heavy_weights) - { - if (value < threshold) - light_weights.offer(new ItemPair(key, null, value)); - else - heavy_weights.offer(new ItemPair(null, key, value)); - } - - /** - * @param seed the seed to be used by the internal random number generator - */ - public void setRandomSeed(long seed) - { - this.random.setSeed(seed); - } - - /** - * Retrieves an item with probability proportional to its weight in the - * {@code Map} provided in the input. - * @return an item chosen randomly based on its specified weight - */ - public T nextItem() - { - ItemPair item_pair = item_pairs.get(random.nextInt(item_pairs.size())); - if (random.nextDouble() < item_pair.weight) - return item_pair.light; - return item_pair.heavy; - } - - /** - * Manages light object/heavy object/light conditional probability tuples. - */ - private class ItemPair - { - T light; - T heavy; - double weight; - - private ItemPair(T light, T heavy, double weight) - { - this.light = light; - this.heavy = heavy; - this.weight = weight; - } - - @Override - public String toString() - { - return String.format("[L:%s, H:%s, %.3f]", light, heavy, weight); - } - } +public class WeightedChoice { + private List item_pairs; + private Random random; + + /** + * The default minimum value that is treated as a valid probability (as opposed to rounding error + * from floating-point operations). + */ + public static final double DEFAULT_THRESHOLD = 0.00000000001; + + /** + * Equivalent to {@code this(item_weights, new Random(), DEFAULT_THRESHOLD)}. 
+ * + * @param item_weights a map from items to their weights + */ + public WeightedChoice(Map item_weights) { + this(item_weights, new Random(), DEFAULT_THRESHOLD); + } + + /** + * Equivalent to {@code this(item_weights, new Random(), threshold)}. + * + * @param item_weights a map from items to their weights + * @param threshold the minimum value that is treated as a probability (anything smaller will be + * considered equivalent to a floating-point rounding error) + */ + public WeightedChoice(Map item_weights, double threshold) { + this(item_weights, new Random(), threshold); + } + + /** + * Equivalent to {@code this(item_weights, random, DEFAULT_THRESHOLD)}. + * + * @param item_weights a map from items to their weights + * @param random the Random instance to use for selection + */ + public WeightedChoice(Map item_weights, Random random) { + this(item_weights, random, DEFAULT_THRESHOLD); + } + + /** + * Creates an instance with the specified mapping from items to weights, random number generator, + * and threshold value. + * + *

The mapping defines the weight for each item to be selected; this will be proportional to + * the probability of its selection. + * + *

The random number generator specifies the mechanism which will be used to provide uniform + * integer and double values. + * + *

The threshold indicates default minimum value that is treated as a valid probability (as + * opposed to rounding error from floating-point operations). + * + * @param item_weights a map from items to their weights + * @param random the Random instance to use for selection + * @param threshold the minimum value that is treated as a probability (anything smaller will be + * considered equivalent to a floating-point rounding error) + */ + public WeightedChoice(Map item_weights, Random random, double threshold) { + if (item_weights.isEmpty()) + throw new IllegalArgumentException("Item weights must be non-empty"); + + int item_count = item_weights.size(); + item_pairs = new ArrayList(item_count); + + double sum = 0; + for (Map.Entry entry : item_weights.entrySet()) { + double value = entry.getValue().doubleValue(); + if (value <= 0) throw new IllegalArgumentException("Weights must be > 0"); + sum += value; + } + double bucket_weight = 1.0 / item_weights.size(); + + Queue light_weights = new LinkedList(); + Queue heavy_weights = new LinkedList(); + for (Map.Entry entry : item_weights.entrySet()) { + double value = entry.getValue().doubleValue() / sum; + enqueueItem(entry.getKey(), value, bucket_weight, light_weights, heavy_weights); + } + + // repeat until both queues empty + while (!heavy_weights.isEmpty() || !light_weights.isEmpty()) { + ItemPair heavy_item = heavy_weights.poll(); + ItemPair light_item = light_weights.poll(); + double light_weight = 0; + T light = null; + T heavy = null; + if (light_item != null) { + light_weight = light_item.weight; + light = light_item.light; + } + if (heavy_item != null) { + heavy = heavy_item.heavy; + // put the 'left over' weight from the heavy item--what wasn't + // needed to make up the difference between the light weight and + // 1/n--back in the appropriate queue + double new_weight = heavy_item.weight - (bucket_weight - light_weight); + if (new_weight > threshold) + enqueueItem(heavy, new_weight, bucket_weight, light_weights, 
heavy_weights); + } + light_weight *= item_count; + + item_pairs.add(new ItemPair(light, heavy, light_weight)); + } + + this.random = random; + } + + /** + * Adds key/value to the appropriate queue. Keys with values less than the threshold get added to + * {@code light_weights}, all others get added to {@code heavy_weights}. + */ + private void enqueueItem( + T key, + double value, + double threshold, + Queue light_weights, + Queue heavy_weights) { + if (value < threshold) light_weights.offer(new ItemPair(key, null, value)); + else heavy_weights.offer(new ItemPair(null, key, value)); + } + + /** @param seed the seed to be used by the internal random number generator */ + public void setRandomSeed(long seed) { + this.random.setSeed(seed); + } + + /** + * Retrieves an item with probability proportional to its weight in the {@code Map} provided in + * the input. + * + * @return an item chosen randomly based on its specified weight + */ + public T nextItem() { + ItemPair item_pair = item_pairs.get(random.nextInt(item_pairs.size())); + if (random.nextDouble() < item_pair.weight) { + return item_pair.light; + } + return item_pair.heavy; + } + + /** Manages light object/heavy object/light conditional probability tuples. 
*/ + private class ItemPair { + T light; + T heavy; + double weight; + + private ItemPair(T light, T heavy, double weight) { + this.light = light; + this.heavy = heavy; + this.weight = weight; + } + + @Override + public String toString() { + return String.format("[L:%s, H:%s, %.3f]", light, heavy, weight); + } + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/TestBicomponentClusterer.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/TestBicomponentClusterer.java index dafc559d..6f2bb53d 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/TestBicomponentClusterer.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/TestBicomponentClusterer.java @@ -1,268 +1,250 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.cluster; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.UndirectedGraph; +import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; - import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedGraph; -import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; - -/** - * @author Scott White - */ +/** @author Scott White */ public class TestBicomponentClusterer extends TestCase { - public static Test suite() { - return new TestSuite(TestBicomponentClusterer.class); - } - - @Override - protected void setUp() { - - } - - public void testExtract0() throws Exception - { - UndirectedGraph graph = new UndirectedSparseMultigraph(); - String[] v = {"0"}; - graph.addVertex(v[0]); - - List> c = new ArrayList>(); - c.add(0, new HashSet()); - c.get(0).add(v[0]); - -// Set[] c = {new HashSet()}; - -// c[0].add(v[0]); - - testComponents(graph, v, c); + public static Test suite() { + return new TestSuite(TestBicomponentClusterer.class); + } + + @Override + protected void setUp() {} + + public void testExtract0() throws Exception { + UndirectedGraph graph = new UndirectedSparseMultigraph(); + String[] v = {"0"}; + graph.addVertex(v[0]); + + List> c = new ArrayList>(); + c.add(0, new HashSet()); + c.get(0).add(v[0]); + + // Set[] c = {new HashSet()}; + + // c[0].add(v[0]); + + testComponents(graph, v, c); + } + + public void testExtractEdge() throws Exception { + UndirectedGraph graph = new UndirectedSparseMultigraph(); + String[] v = {"0", "1"}; + graph.addVertex(v[0]); + graph.addVertex(v[1]); + graph.addEdge(0, v[0], v[1]); + + List> c = new ArrayList>(); + c.add(0, new HashSet()); + c.get(0).add(v[0]); + c.get(0).add(v[1]); + + // Set[] c = {new HashSet()}; + // + // c[0].add(v[0]); + // c[0].add(v[1]); + + 
testComponents(graph, v, c); + } + + public void testExtractV() throws Exception { + UndirectedGraph graph = new UndirectedSparseMultigraph(); + String[] v = new String[3]; + for (int i = 0; i < 3; i++) { + v[i] = "" + i; + graph.addVertex(v[i]); } - - public void testExtractEdge() throws Exception - { - UndirectedGraph graph = new UndirectedSparseMultigraph(); - String[] v = {"0","1"}; - graph.addVertex(v[0]); - graph.addVertex(v[1]); - graph.addEdge(0, v[0], v[1]); - - List> c = new ArrayList>(); - c.add(0, new HashSet()); - c.get(0).add(v[0]); - c.get(0).add(v[1]); - -// Set[] c = {new HashSet()}; -// -// c[0].add(v[0]); -// c[0].add(v[1]); - - testComponents(graph, v, c); + graph.addEdge(0, v[0], v[1]); + graph.addEdge(1, v[0], v[2]); + + List> c = new ArrayList>(); + c.add(0, new HashSet()); + c.add(1, new HashSet()); + + c.get(0).add(v[0]); + c.get(0).add(v[1]); + + c.get(1).add(v[0]); + c.get(1).add(v[2]); + + // Set[] c = {new HashSet(), new HashSet()}; + // + // c[0].add(v[0]); + // c[0].add(v[1]); + // + // c[1].add(v[0]); + // c[1].add(v[2]); + + testComponents(graph, v, c); + } + + public void createEdges(String[] v, int[][] edge_array, Graph g) { + for (int k = 0; k < edge_array.length; k++) { + int i = edge_array[k][0]; + int j = edge_array[k][1]; + String v1 = getVertex(v, i, g); + String v2 = getVertex(v, j, g); + + g.addEdge(k, v1, v2); } - - public void testExtractV() throws Exception - { - UndirectedGraph graph = new UndirectedSparseMultigraph(); - String[] v = new String[3]; - for (int i = 0; i < 3; i++) - { - v[i] = ""+i; - graph.addVertex(v[i]); - } - graph.addEdge(0, v[0], v[1]); - graph.addEdge(1, v[0], v[2]); - - List> c = new ArrayList>(); - c.add(0, new HashSet()); - c.add(1, new HashSet()); - - c.get(0).add(v[0]); - c.get(0).add(v[1]); - - c.get(1).add(v[0]); - c.get(1).add(v[2]); - -// Set[] c = {new HashSet(), new HashSet()}; -// -// c[0].add(v[0]); -// c[0].add(v[1]); -// -// c[1].add(v[0]); -// c[1].add(v[2]); - - 
testComponents(graph, v, c); + } + + public String getVertex(String[] v_array, int i, Graph g) { + String v = v_array[i]; + if (v == null) { + v_array[i] = Character.toString((char) ('0' + i)); + g.addVertex(v_array[i]); + v = v_array[i]; } - - public void createEdges(String[] v, int[][] edge_array, Graph g) - { - for (int k = 0; k < edge_array.length; k++) - { - int i = edge_array[k][0]; - int j = edge_array[k][1]; - String v1 = getVertex(v, i, g); - String v2 = getVertex(v, j, g); - - g.addEdge(k, v1, v2); + return v; + } + + public void testExtract1() { + String[] v = new String[6]; + int[][] edges1 = {{0, 1}, {0, 5}, {0, 3}, {0, 4}, {1, 5}, {3, 4}, {2, 3}}; + UndirectedGraph graph = new UndirectedSparseMultigraph(); + createEdges(v, edges1, graph); + + List> c = new ArrayList>(); + for (int i = 0; i < 3; i++) c.add(i, new HashSet()); + + c.get(0).add(v[0]); + c.get(0).add(v[1]); + c.get(0).add(v[5]); + + c.get(1).add(v[0]); + c.get(1).add(v[3]); + c.get(1).add(v[4]); + + c.get(2).add(v[2]); + c.get(2).add(v[3]); + + // Set[] c = new Set[3]; + // for (int i = 0; i < c.length; i++) + // c[i] = new HashSet(); + // + // c[0].add(v[0]); + // c[0].add(v[1]); + // c[0].add(v[5]); + // + // c[1].add(v[0]); + // c[1].add(v[3]); + // c[1].add(v[4]); + // + // c[2].add(v[2]); + // c[2].add(v[3]); + + testComponents(graph, v, c); + } + + public void testExtract2() { + String[] v = new String[9]; + int[][] edges1 = { + {0, 2}, {0, 4}, {1, 0}, {2, 1}, {3, 0}, {4, 3}, {5, 3}, {6, 7}, {6, 8}, {8, 7} + }; + UndirectedGraph graph = new UndirectedSparseMultigraph(); + createEdges(v, edges1, graph); + + List> c = new ArrayList>(); + for (int i = 0; i < 4; i++) c.add(i, new HashSet()); + + c.get(0).add(v[0]); + c.get(0).add(v[1]); + c.get(0).add(v[2]); + + c.get(1).add(v[0]); + c.get(1).add(v[3]); + c.get(1).add(v[4]); + + c.get(2).add(v[5]); + c.get(2).add(v[3]); + + c.get(3).add(v[6]); + c.get(3).add(v[7]); + c.get(3).add(v[8]); + + // Set[] c = new Set[4]; + // for (int i = 0; i 
< c.length; i++) + // c[i] = new HashSet(); + // + // c[0].add(v[0]); + // c[0].add(v[1]); + // c[0].add(v[2]); + // + // c[1].add(v[0]); + // c[1].add(v[3]); + // c[1].add(v[4]); + // + // c[2].add(v[5]); + // c[2].add(v[3]); + // + // c[3].add(v[6]); + // c[3].add(v[7]); + // c[3].add(v[8]); + + testComponents(graph, v, c); + } + + public void testComponents( + UndirectedGraph graph, String[] vertices, List> c) { + BicomponentClusterer finder = new BicomponentClusterer(); + Set> bicomponents = finder.apply(graph); + + // check number of components + assertEquals(bicomponents.size(), c.size()); + + // diagnostic; should be commented out for typical unit tests + // for (int i = 0; i < bicomponents.size(); i++) + // { + // System.out.print("Component " + i + ": "); + // Set bicomponent = bicomponents.getCluster(i); + // for (Iterator iter = bicomponent.iterator(); iter.hasNext(); ) + // { + // Vertex w = (Vertex)iter.next(); + // System.out.print(sl.getLabel(w) + " "); + // } + // System.out.println(); + // } + // System.out.println(); + + // make sure that each set in c[] is found in bicomponents + List> clusterList = new ArrayList>(bicomponents); + boolean found = false; + for (int i = 0; i < c.size(); i++) { + for (int j = 0; j < bicomponents.size(); j++) + if (clusterList.get(j).equals(c.get(i))) { + found = true; + break; } + assertTrue(found); } - - public String getVertex(String[] v_array, int i, Graph g) - { - String v = v_array[i]; - if (v == null) - { - v_array[i] = Character.toString((char)('0'+i)); - g.addVertex(v_array[i]); - v = v_array[i]; - } - return v; + + // make sure that each vertex is represented in >=1 element of bicomponents + Set collapsedSet = new HashSet(); + for (Set set : bicomponents) { + collapsedSet.addAll(set); } - - public void testExtract1() { - String[] v = new String[6]; - int[][] edges1 = {{0,1}, {0,5}, {0,3}, {0,4}, {1,5}, {3,4}, {2,3}}; - UndirectedGraph graph = new UndirectedSparseMultigraph(); - createEdges(v, edges1, 
graph); - - List> c = new ArrayList>(); - for (int i = 0; i < 3; i++) - c.add(i, new HashSet()); - - c.get(0).add(v[0]); - c.get(0).add(v[1]); - c.get(0).add(v[5]); - - c.get(1).add(v[0]); - c.get(1).add(v[3]); - c.get(1).add(v[4]); - - c.get(2).add(v[2]); - c.get(2).add(v[3]); - -// Set[] c = new Set[3]; -// for (int i = 0; i < c.length; i++) -// c[i] = new HashSet(); -// -// c[0].add(v[0]); -// c[0].add(v[1]); -// c[0].add(v[5]); -// -// c[1].add(v[0]); -// c[1].add(v[3]); -// c[1].add(v[4]); -// -// c[2].add(v[2]); -// c[2].add(v[3]); - - testComponents(graph, v, c); - } - - public void testExtract2() { - String[] v = new String[9]; - int[][] edges1 = {{0,2}, {0,4}, {1,0}, {2,1}, {3,0}, {4,3}, {5,3}, {6,7}, {6,8}, {8,7}}; - UndirectedGraph graph = new UndirectedSparseMultigraph(); - createEdges(v, edges1, graph); - - List> c = new ArrayList>(); - for (int i = 0; i < 4; i++) - c.add(i, new HashSet()); - - c.get(0).add(v[0]); - c.get(0).add(v[1]); - c.get(0).add(v[2]); - - c.get(1).add(v[0]); - c.get(1).add(v[3]); - c.get(1).add(v[4]); - - c.get(2).add(v[5]); - c.get(2).add(v[3]); - - c.get(3).add(v[6]); - c.get(3).add(v[7]); - c.get(3).add(v[8]); - -// Set[] c = new Set[4]; -// for (int i = 0; i < c.length; i++) -// c[i] = new HashSet(); -// -// c[0].add(v[0]); -// c[0].add(v[1]); -// c[0].add(v[2]); -// -// c[1].add(v[0]); -// c[1].add(v[3]); -// c[1].add(v[4]); -// -// c[2].add(v[5]); -// c[2].add(v[3]); -// -// c[3].add(v[6]); -// c[3].add(v[7]); -// c[3].add(v[8]); - - testComponents(graph, v, c); - } - - public void testComponents(UndirectedGraph graph, String[] vertices, List> c) - { - BicomponentClusterer finder = new BicomponentClusterer(); - Set> bicomponents = finder.apply(graph); - - // check number of components - assertEquals(bicomponents.size(), c.size()); - - // diagnostic; should be commented out for typical unit tests -// for (int i = 0; i < bicomponents.size(); i++) -// { -// System.out.print("Component " + i + ": "); -// Set bicomponent = 
bicomponents.getCluster(i); -// for (Iterator iter = bicomponent.iterator(); iter.hasNext(); ) -// { -// Vertex w = (Vertex)iter.next(); -// System.out.print(sl.getLabel(w) + " "); -// } -// System.out.println(); -// } -// System.out.println(); - - // make sure that each set in c[] is found in bicomponents - List> clusterList = new ArrayList>(bicomponents); - boolean found = false; - for (int i = 0; i < c.size(); i++) - { - for (int j = 0; j < bicomponents.size(); j++) - if (clusterList.get(j).equals(c.get(i))) - { - found = true; - break; - } - assertTrue(found); - } - - // make sure that each vertex is represented in >=1 element of bicomponents - Set collapsedSet = new HashSet(); - for(Set set : bicomponents) { - collapsedSet.addAll(set); - } - for (String v : graph.getVertices()) - { - assertTrue(collapsedSet.contains(v)); -// assertFalse(((LinkedHashSet)vset).get(v).isEmpty()); - } + for (String v : graph.getVertices()) { + assertTrue(collapsedSet.contains(v)); + // assertFalse(((LinkedHashSet)vset).get(v).isEmpty()); } - + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/TestEdgeBetweennessClusterer.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/TestEdgeBetweennessClusterer.java index bf740379..e5e8cbaa 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/TestEdgeBetweennessClusterer.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/TestEdgeBetweennessClusterer.java @@ -1,83 +1,87 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.cluster; +import com.google.common.base.Supplier; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.SparseMultigraph; import java.util.Collection; import java.util.Set; - import junit.framework.Assert; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import com.google.common.base.Supplier; +/** @author Scott White */ +public class TestEdgeBetweennessClusterer extends TestCase { + public static Test suite() { + return new TestSuite(TestEdgeBetweennessClusterer.class); + } -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.SparseMultigraph; + Supplier> graphFactory; + Supplier vertexFactory; + Supplier edgeFactory; + @Override + protected void setUp() { + graphFactory = + new Supplier>() { + public Graph get() { + return new SparseMultigraph(); + } + }; + vertexFactory = + new Supplier() { + int n = 0; -/** - * @author Scott White - */ -public class TestEdgeBetweennessClusterer extends TestCase { - public static Test suite() { - return new TestSuite(TestEdgeBetweennessClusterer.class); - } - Supplier> graphFactory; - Supplier vertexFactory; - Supplier edgeFactory; + public Integer get() { + return n++; + } + }; + edgeFactory = + new Supplier() { + int n = 0; + + public Number get() { + return n++; + } + }; + } - @Override - protected void setUp() { - graphFactory = new Supplier>() { - public Graph get() { - return new SparseMultigraph(); - } - }; - vertexFactory = new Supplier() { - int n = 0; - public Integer get() { return n++; } - }; - edgeFactory = new Supplier() { - int n = 0; - public Number get() { return n++; } - }; + public void testRanker() { + Graph graph = new SparseMultigraph(); + for (int i = 0; i < 10; i++) { + graph.addVertex(i + 1); } + int j = 0; + graph.addEdge(j++, 1, 2); + graph.addEdge(j++, 1, 3); + graph.addEdge(j++, 2, 3); + graph.addEdge(j++, 5, 6); + graph.addEdge(j++, 5, 7); + graph.addEdge(j++, 6, 7); + 
graph.addEdge(j++, 8, 10); + graph.addEdge(j++, 7, 8); + graph.addEdge(j++, 7, 10); + graph.addEdge(j++, 3, 4); + graph.addEdge(j++, 4, 6); + graph.addEdge(j++, 4, 8); - public void testRanker() { - - Graph graph = new SparseMultigraph(); - for(int i=0; i<10; i++) { - graph.addVertex(i+1); - } - int j=0; - graph.addEdge(j++,1,2); - graph.addEdge(j++,1,3); - graph.addEdge(j++,2,3); - graph.addEdge(j++,5,6); - graph.addEdge(j++,5,7); - graph.addEdge(j++,6,7); - graph.addEdge(j++,8,10); - graph.addEdge(j++,7,8); - graph.addEdge(j++,7,10); - graph.addEdge(j++,3,4); - graph.addEdge(j++,4,6); - graph.addEdge(j++,4,8); + Assert.assertEquals(graph.getVertexCount(), 10); + Assert.assertEquals(graph.getEdgeCount(), 12); - Assert.assertEquals(graph.getVertexCount(),10); - Assert.assertEquals(graph.getEdgeCount(),12); + EdgeBetweennessClusterer clusterer = + new EdgeBetweennessClusterer(3); + Collection> clusters = clusterer.apply(graph); - EdgeBetweennessClusterer clusterer = new EdgeBetweennessClusterer(3); - Collection> clusters = clusterer.apply(graph); - - Assert.assertEquals(clusters.size(),3); - } + Assert.assertEquals(clusters.size(), 3); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClustererTest.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClustererTest.java index 52ef2c01..53a4188a 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClustererTest.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/cluster/WeakComponentClustererTest.java @@ -1,19 +1,17 @@ package edu.uci.ics.jung.algorithms.cluster; -import junit.framework.TestCase; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.TestGraphs; +import junit.framework.TestCase; public class WeakComponentClustererTest extends TestCase { - - Graph graph = TestGraphs.getDemoGraph(); - - public void testWeakComponent() { - WeakComponentClusterer 
clusterer = - new WeakComponentClusterer(); -// Set> clusterSet = - clusterer.apply(graph); -// System.err.println("set is "+clusterSet); - } + Graph graph = TestGraphs.getDemoGraph(); + + public void testWeakComponent() { + WeakComponentClusterer clusterer = new WeakComponentClusterer(); + // Set> clusterSet = + clusterer.apply(graph); + // System.err.println("set is "+clusterSet); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/filters/impl/TestKNeighborhoodFilter.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/filters/impl/TestKNeighborhoodFilter.java index 445060a1..8da1b3ce 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/filters/impl/TestKNeighborhoodFilter.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/filters/impl/TestKNeighborhoodFilter.java @@ -1,62 +1,60 @@ package edu.uci.ics.jung.algorithms.filters.impl; -/** - * @author Tom Nelson - */ - -import junit.framework.Test; -import junit.framework.TestCase; -import junit.framework.TestSuite; +/** @author Tom Nelson */ import edu.uci.ics.jung.algorithms.filters.Filter; import edu.uci.ics.jung.algorithms.filters.KNeighborhoodFilter; import edu.uci.ics.jung.algorithms.filters.KNeighborhoodFilter.EdgeType; import edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import edu.uci.ics.jung.graph.Graph; - +import junit.framework.Test; +import junit.framework.TestCase; +import junit.framework.TestSuite; public class TestKNeighborhoodFilter extends TestCase { - - DirectedGraph graph; - - public static Test suite() { - return new TestSuite(TestKNeighborhoodFilter.class); - } - - @Override - protected void setUp() { - graph = new DirectedSparseMultigraph(); - for(int i=0; i<7; i++) { - graph.addVertex(i); - } - int j=0; - graph.addEdge(j++, 0, 1); - graph.addEdge(j++, 0, 2); - graph.addEdge(j++, 2, 3); - graph.addEdge(j++, 2, 4); - graph.addEdge(j++, 3, 5); - graph.addEdge(j++, 5, 6); - 
graph.addEdge(j++, 5, 0); - graph.addEdge(j++, 3, 0); - graph.addEdge(j++, 6, 7); - } - - public void testIn() { - Filter filter = new KNeighborhoodFilter(0, 2, EdgeType.IN); - Graph result = filter.apply(graph); - assertEquals(result.getVertexCount(), 4); - assertEquals(result.getEdgeCount(), 5); - } - public void testOut() { - Filter filter = new KNeighborhoodFilter(0, 2, EdgeType.OUT); - Graph result = filter.apply(graph); - assertEquals(result.getVertexCount(), 5); - assertEquals(result.getEdgeCount(), 5); - } - public void testInOut() { - Filter filter = new KNeighborhoodFilter(0, 2, EdgeType.IN_OUT); - Graph result = filter.apply(graph); - assertEquals(result.getVertexCount(), 7); - assertEquals(result.getEdgeCount(), 8); - } + + DirectedGraph graph; + + public static Test suite() { + return new TestSuite(TestKNeighborhoodFilter.class); + } + + @Override + protected void setUp() { + graph = new DirectedSparseMultigraph(); + for (int i = 0; i < 7; i++) { + graph.addVertex(i); + } + int j = 0; + graph.addEdge(j++, 0, 1); + graph.addEdge(j++, 0, 2); + graph.addEdge(j++, 2, 3); + graph.addEdge(j++, 2, 4); + graph.addEdge(j++, 3, 5); + graph.addEdge(j++, 5, 6); + graph.addEdge(j++, 5, 0); + graph.addEdge(j++, 3, 0); + graph.addEdge(j++, 6, 7); + } + + public void testIn() { + Filter filter = new KNeighborhoodFilter(0, 2, EdgeType.IN); + Graph result = filter.apply(graph); + assertEquals(result.getVertexCount(), 4); + assertEquals(result.getEdgeCount(), 5); + } + + public void testOut() { + Filter filter = new KNeighborhoodFilter(0, 2, EdgeType.OUT); + Graph result = filter.apply(graph); + assertEquals(result.getVertexCount(), 5); + assertEquals(result.getEdgeCount(), 5); + } + + public void testInOut() { + Filter filter = new KNeighborhoodFilter(0, 2, EdgeType.IN_OUT); + Graph result = filter.apply(graph); + assertEquals(result.getVertexCount(), 7); + assertEquals(result.getEdgeCount(), 8); + } } diff --git 
a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/flows/TestEdmondsKarpMaxFlow.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/flows/TestEdmondsKarpMaxFlow.java index 6993f1e0..c91525d7 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/flows/TestEdmondsKarpMaxFlow.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/flows/TestEdmondsKarpMaxFlow.java @@ -1,211 +1,204 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.flows; +import com.google.common.base.Functions; +import com.google.common.base.Supplier; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.DirectedSparseMultigraph; +import edu.uci.ics.jung.graph.util.EdgeType; import java.util.HashMap; import java.util.Map; import java.util.Set; - import junit.framework.Assert; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import com.google.common.base.Functions; -import com.google.common.base.Supplier; +/** @author Scott White, Joshua O'Madadhain, Tom Nelson */ +public class TestEdmondsKarpMaxFlow extends TestCase { -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; -import edu.uci.ics.jung.graph.util.EdgeType; + public static Test suite() { + return new TestSuite(TestEdmondsKarpMaxFlow.class); + } -/** - * @author Scott White, Joshua O'Madadhain, Tom Nelson - */ -public class TestEdmondsKarpMaxFlow extends TestCase { + @Override + protected void setUp() {} + + public void 
testSanityChecks() { + DirectedGraph g = new DirectedSparseMultigraph(); + Number source = new Integer(1); + Number sink = new Integer(2); + g.addVertex(source); + g.addVertex(sink); + + Number v = new Integer(3); + + DirectedGraph h = new DirectedSparseMultigraph(); + Number w = new Integer(4); + g.addVertex(w); + + try { + new EdmondsKarpMaxFlow(g, source, source, null, null, null); + fail("source and sink vertices not distinct"); + } catch (IllegalArgumentException iae) { + } + + try { + new EdmondsKarpMaxFlow(h, source, w, null, null, null); + fail("source and sink vertices not both part of specified graph"); + } catch (IllegalArgumentException iae) { + } - public static Test suite() { - return new TestSuite(TestEdmondsKarpMaxFlow.class); - } - - @Override - protected void setUp() { - - } - - public void testSanityChecks() - { - DirectedGraph g = new DirectedSparseMultigraph(); - Number source = new Integer(1); - Number sink = new Integer(2); - g.addVertex(source); - g.addVertex(sink); - - Number v = new Integer(3); - - DirectedGraph h = new DirectedSparseMultigraph(); - Number w = new Integer(4); - g.addVertex(w); - - try - { - new EdmondsKarpMaxFlow(g, source, source, null, null, null); - fail("source and sink vertices not distinct"); - } - catch (IllegalArgumentException iae) {} - - try - { - new EdmondsKarpMaxFlow(h, source, w, null, null, null); - fail("source and sink vertices not both part of specified graph"); - } - catch (IllegalArgumentException iae) {} - - try - { - new EdmondsKarpMaxFlow(g, source, v, null, null, null); - fail("source and sink vertices not both part of specified graph"); - } - catch (IllegalArgumentException iae) {} + try { + new EdmondsKarpMaxFlow(g, source, v, null, null, null); + fail("source and sink vertices not both part of specified graph"); + } catch (IllegalArgumentException iae) { + } + } + + public void testSimpleFlow() { + DirectedGraph graph = new DirectedSparseMultigraph(); + Supplier edgeFactory = + new Supplier() { + 
int count = 0; + + public Number get() { + return count++; + } + }; + + Map edgeCapacityMap = new HashMap(); + for (int i = 0; i < 6; i++) { + graph.addVertex(i); } - - public void testSimpleFlow() { - DirectedGraph graph = new DirectedSparseMultigraph(); - Supplier edgeFactory = new Supplier() { - int count = 0; - public Number get() { - return count++; - } - }; - - Map edgeCapacityMap = new HashMap(); - for(int i=0; i<6; i++) { - graph.addVertex(i); - } - - Map edgeFlowMap = new HashMap(); - - graph.addEdge(edgeFactory.get(),0,1,EdgeType.DIRECTED); - edgeCapacityMap.put(0, 16); - - graph.addEdge(edgeFactory.get(),0,2,EdgeType.DIRECTED); - edgeCapacityMap.put(1,13); - - graph.addEdge(edgeFactory.get(),1,2,EdgeType.DIRECTED); - edgeCapacityMap.put(2, 6); - - graph.addEdge(edgeFactory.get(),1,3,EdgeType.DIRECTED); - edgeCapacityMap.put(3, 12); - - graph.addEdge(edgeFactory.get(),2,4,EdgeType.DIRECTED); - edgeCapacityMap.put(4, 14); - - graph.addEdge(edgeFactory.get(),3,2,EdgeType.DIRECTED); - edgeCapacityMap.put(5, 9); - - graph.addEdge(edgeFactory.get(),3,5,EdgeType.DIRECTED); - edgeCapacityMap.put(6, 20); - - graph.addEdge(edgeFactory.get(),4,3,EdgeType.DIRECTED); - edgeCapacityMap.put(7, 7); - - graph.addEdge(edgeFactory.get(),4,5,EdgeType.DIRECTED); - edgeCapacityMap.put(8, 4); - - EdmondsKarpMaxFlow ek = - new EdmondsKarpMaxFlow( - graph, - 0, - 5, - Functions.forMap(edgeCapacityMap, null), - edgeFlowMap, - edgeFactory); - ek.evaluate(); - - assertTrue(ek.getMaxFlow() == 23); - Set nodesInS = ek.getNodesInSourcePartition(); - assertEquals(4,nodesInS.size()); - - for (Number v : nodesInS) { - Assert.assertTrue(v.intValue() != 3 && v.intValue() != 5); - } - - Set nodesInT = ek.getNodesInSinkPartition(); - assertEquals(2,nodesInT.size()); - - for (Number v : nodesInT) { - Assert.assertTrue(v.intValue() == 3 || v.intValue() == 5); - } - - Set minCutEdges = ek.getMinCutEdges(); - int maxFlow = 0; - for (Number e : minCutEdges) { - Number flow = edgeFlowMap.get(e); - 
maxFlow += flow.intValue(); - } - Assert.assertEquals(23,maxFlow); - Assert.assertEquals(3,minCutEdges.size()); - } - - public void testAnotherSimpleFlow() { - DirectedGraph graph = new DirectedSparseMultigraph(); - Supplier edgeFactory = new Supplier() { - int count=0; - public Number get() { - return count++; - } - }; - - Map edgeCapacityMap = new HashMap(); - for(int i=0; i<6; i++) { - graph.addVertex(i); - } - - Map edgeFlowMap = new HashMap(); - - graph.addEdge(edgeFactory.get(),0,1,EdgeType.DIRECTED); - edgeCapacityMap.put(0,5); - - graph.addEdge(edgeFactory.get(),0,2,EdgeType.DIRECTED); - edgeCapacityMap.put(1,3); - - graph.addEdge(edgeFactory.get(),1,5,EdgeType.DIRECTED); - edgeCapacityMap.put(2,2); - - graph.addEdge(edgeFactory.get(),1,2,EdgeType.DIRECTED); - edgeCapacityMap.put(3,8); - - graph.addEdge(edgeFactory.get(),2,3,EdgeType.DIRECTED); - edgeCapacityMap.put(4,4); - - graph.addEdge(edgeFactory.get(),2,4,EdgeType.DIRECTED); - edgeCapacityMap.put(5,2); - - graph.addEdge(edgeFactory.get(),3,4,EdgeType.DIRECTED); - edgeCapacityMap.put(6,3); - - graph.addEdge(edgeFactory.get(),3,5,EdgeType.DIRECTED); - edgeCapacityMap.put(7,6); - - graph.addEdge(edgeFactory.get(),4,5,EdgeType.DIRECTED); - edgeCapacityMap.put(8,1); - - EdmondsKarpMaxFlow ek = - new EdmondsKarpMaxFlow( - graph, - 0, - 5, - Functions.forMap(edgeCapacityMap, null), - edgeFlowMap, - edgeFactory); - ek.evaluate(); - - assertTrue(ek.getMaxFlow() == 7); - } + + Map edgeFlowMap = new HashMap(); + + graph.addEdge(edgeFactory.get(), 0, 1, EdgeType.DIRECTED); + edgeCapacityMap.put(0, 16); + + graph.addEdge(edgeFactory.get(), 0, 2, EdgeType.DIRECTED); + edgeCapacityMap.put(1, 13); + + graph.addEdge(edgeFactory.get(), 1, 2, EdgeType.DIRECTED); + edgeCapacityMap.put(2, 6); + + graph.addEdge(edgeFactory.get(), 1, 3, EdgeType.DIRECTED); + edgeCapacityMap.put(3, 12); + + graph.addEdge(edgeFactory.get(), 2, 4, EdgeType.DIRECTED); + edgeCapacityMap.put(4, 14); + + graph.addEdge(edgeFactory.get(), 3, 2, 
EdgeType.DIRECTED); + edgeCapacityMap.put(5, 9); + + graph.addEdge(edgeFactory.get(), 3, 5, EdgeType.DIRECTED); + edgeCapacityMap.put(6, 20); + + graph.addEdge(edgeFactory.get(), 4, 3, EdgeType.DIRECTED); + edgeCapacityMap.put(7, 7); + + graph.addEdge(edgeFactory.get(), 4, 5, EdgeType.DIRECTED); + edgeCapacityMap.put(8, 4); + + EdmondsKarpMaxFlow ek = + new EdmondsKarpMaxFlow( + graph, + 0, + 5, + Functions.forMap(edgeCapacityMap, null), + edgeFlowMap, + edgeFactory); + ek.evaluate(); + + assertTrue(ek.getMaxFlow() == 23); + Set nodesInS = ek.getNodesInSourcePartition(); + assertEquals(4, nodesInS.size()); + + for (Number v : nodesInS) { + Assert.assertTrue(v.intValue() != 3 && v.intValue() != 5); + } + + Set nodesInT = ek.getNodesInSinkPartition(); + assertEquals(2, nodesInT.size()); + + for (Number v : nodesInT) { + Assert.assertTrue(v.intValue() == 3 || v.intValue() == 5); + } + + Set minCutEdges = ek.getMinCutEdges(); + int maxFlow = 0; + for (Number e : minCutEdges) { + Number flow = edgeFlowMap.get(e); + maxFlow += flow.intValue(); + } + Assert.assertEquals(23, maxFlow); + Assert.assertEquals(3, minCutEdges.size()); + } + + public void testAnotherSimpleFlow() { + DirectedGraph graph = new DirectedSparseMultigraph(); + Supplier edgeFactory = + new Supplier() { + int count = 0; + + public Number get() { + return count++; + } + }; + + Map edgeCapacityMap = new HashMap(); + for (int i = 0; i < 6; i++) { + graph.addVertex(i); + } + + Map edgeFlowMap = new HashMap(); + + graph.addEdge(edgeFactory.get(), 0, 1, EdgeType.DIRECTED); + edgeCapacityMap.put(0, 5); + + graph.addEdge(edgeFactory.get(), 0, 2, EdgeType.DIRECTED); + edgeCapacityMap.put(1, 3); + + graph.addEdge(edgeFactory.get(), 1, 5, EdgeType.DIRECTED); + edgeCapacityMap.put(2, 2); + + graph.addEdge(edgeFactory.get(), 1, 2, EdgeType.DIRECTED); + edgeCapacityMap.put(3, 8); + + graph.addEdge(edgeFactory.get(), 2, 3, EdgeType.DIRECTED); + edgeCapacityMap.put(4, 4); + + graph.addEdge(edgeFactory.get(), 2, 4, 
EdgeType.DIRECTED); + edgeCapacityMap.put(5, 2); + + graph.addEdge(edgeFactory.get(), 3, 4, EdgeType.DIRECTED); + edgeCapacityMap.put(6, 3); + + graph.addEdge(edgeFactory.get(), 3, 5, EdgeType.DIRECTED); + edgeCapacityMap.put(7, 6); + + graph.addEdge(edgeFactory.get(), 4, 5, EdgeType.DIRECTED); + edgeCapacityMap.put(8, 1); + + EdmondsKarpMaxFlow ek = + new EdmondsKarpMaxFlow( + graph, + 0, + 5, + Functions.forMap(edgeCapacityMap, null), + edgeFlowMap, + edgeFactory); + ek.evaluate(); + + assertTrue(ek.getMaxFlow() == 7); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/TestLattice2D.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/TestLattice2D.java index fec9b9e1..9f2041d8 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/TestLattice2D.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/TestLattice2D.java @@ -1,87 +1,86 @@ package edu.uci.ics.jung.algorithms.generators; - -import junit.framework.Assert; -import junit.framework.TestCase; - import com.google.common.base.Supplier; - import edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.UndirectedGraph; import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; - +import junit.framework.Assert; +import junit.framework.TestCase; public class TestLattice2D extends TestCase { - - protected Supplier> undirectedGraphFactory; - protected Supplier> directedGraphFactory; - protected Supplier vertexFactory; - protected Supplier edgeFactory; - @Override - protected void setUp() { - undirectedGraphFactory = new Supplier>() { - public UndirectedGraph get() { - return new UndirectedSparseMultigraph(); - } - }; - directedGraphFactory = new Supplier>() { - public DirectedGraph get() { - return new DirectedSparseMultigraph(); - } + protected Supplier> undirectedGraphFactory; + protected Supplier> 
directedGraphFactory; + protected Supplier vertexFactory; + protected Supplier edgeFactory; + + @Override + protected void setUp() { + undirectedGraphFactory = + new Supplier>() { + public UndirectedGraph get() { + return new UndirectedSparseMultigraph(); + } }; + directedGraphFactory = + new Supplier>() { + public DirectedGraph get() { + return new DirectedSparseMultigraph(); + } + }; + + vertexFactory = + new Supplier() { + int count; + + public String get() { + return Character.toString((char) ('A' + count++)); + } + }; + edgeFactory = + new Supplier() { + int count; + + public Number get() { + return count++; + } + }; + } + + public void testCreateSingular() { + try { + generate(1, 0, 0); + fail("Did not reject lattice of size < 2"); + } catch (IllegalArgumentException iae) { + } + } + + public void testget() { + for (int i = 3; i <= 10; i++) { + for (int j = 0; j < 2; j++) { + for (int k = 0; k < 2; k++) { + Lattice2DGenerator generator = generate(i, j, k); + Graph graph = generator.get(); + Assert.assertEquals(i * i, graph.getVertexCount()); + checkEdgeCount(generator, graph); + } + } + } + } - vertexFactory = new Supplier() { - int count; - public String get() { - return Character.toString((char)('A'+count++)); - } - }; - edgeFactory = - new Supplier() { - int count; - public Number get() { - return count++; - } - }; - } + protected Lattice2DGenerator generate(int i, int j, int k) { + return new Lattice2DGenerator( + k == 0 ? undirectedGraphFactory : directedGraphFactory, + vertexFactory, + edgeFactory, + i, + j == 0 ? true : false); // toroidal? 
+ } - public void testCreateSingular() - { - try - { - generate(1, 0, 0); - fail("Did not reject lattice of size < 2"); - } - catch (IllegalArgumentException iae) {} - } - - public void testget() { - for (int i = 3; i <= 10; i++) { - for (int j = 0; j < 2; j++) { - for (int k = 0; k < 2; k++) { - Lattice2DGenerator generator = generate(i, j, k); - Graph graph = generator.get(); - Assert.assertEquals(i*i, graph.getVertexCount()); - checkEdgeCount(generator, graph); - } - } - } - } - - protected Lattice2DGenerator generate(int i, int j, int k) - { - return new Lattice2DGenerator( - k == 0 ? undirectedGraphFactory : directedGraphFactory, - vertexFactory, edgeFactory, - i, j == 0 ? true : false); // toroidal? - } - - protected void checkEdgeCount(Lattice2DGenerator generator, - Graph graph) - { - Assert.assertEquals(generator.getGridEdgeCount(), graph.getEdgeCount()); - } + protected void checkEdgeCount( + Lattice2DGenerator generator, Graph graph) { + Assert.assertEquals(generator.getGridEdgeCount(), graph.getEdgeCount()); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestBarabasiAlbert.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestBarabasiAlbert.java index ba50d3c2..652dadce 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestBarabasiAlbert.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestBarabasiAlbert.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2016, the JUNG Project and the Regents of the University + * Copyright (c) 2016, the JUNG Project and the Regents of the University * of California. All rights reserved. 
* * This software is open-source under the BSD license; see @@ -7,10 +7,7 @@ */ package edu.uci.ics.jung.algorithms.generators.random; -import java.util.HashSet; - import com.google.common.base.Supplier; - import edu.uci.ics.jung.graph.DirectedSparseGraph; import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import edu.uci.ics.jung.graph.Graph; @@ -20,6 +17,7 @@ import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; import edu.uci.ics.jung.graph.util.EdgeType; import edu.uci.ics.jung.graph.util.Pair; +import java.util.HashSet; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; @@ -30,225 +28,310 @@ * @author James Marchant */ public class TestBarabasiAlbert extends TestCase { - protected Supplier> graphFactory; - protected Supplier vertexFactory; - protected Supplier edgeFactory; - - protected int init_vertices = 1; - protected int edges_to_add_per_timestep = 1; - protected int random_seed = 0; - protected int num_timesteps = 10; - protected int num_tests = 10; - - public static Test suite() { - return new TestSuite(TestBarabasiAlbert.class); - } - - @Override - protected void setUp() { - graphFactory = new Supplier>() { - public Graph get() { - return new SparseMultigraph(); - } - }; - vertexFactory = new Supplier() { - int count; - - public Integer get() { - return count++; - } - }; - edgeFactory = new Supplier() { - int count; - - public Number get() { - return count++; - } - }; - } - - private Graph generateAndTestSizeOfBarabasiAlbertGraph( - Supplier> graphFactory, Supplier vertexFactory, - Supplier edgeFactory, int init_vertices, int edges_to_add_per_timestep, int random_seed, - int num_tests) { - BarabasiAlbertGenerator generator = new BarabasiAlbertGenerator(graphFactory, - vertexFactory, edgeFactory, init_vertices, edges_to_add_per_timestep, random_seed, - new HashSet()); - - Graph graph = null; - // test the graph size over {@code num_tests} intervals of {@code - // num_timesteps} timesteps - for (int i = 
1; i <= num_tests; i++) { - generator.evolveGraph(num_timesteps); - graph = generator.get(); - assertEquals(graph.getVertexCount(), (i * num_timesteps) + init_vertices); - assertEquals(graph.getEdgeCount(), edges_to_add_per_timestep * (i * num_timesteps)); - } - - return graph; - } - - public void testMultigraphCreation() { - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, init_vertices, - edges_to_add_per_timestep, random_seed, num_tests); - } - - public void testDirectedMultigraphCreation() { - graphFactory = new Supplier>() { - public Graph get() { - return new DirectedSparseMultigraph(); - } - }; - - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, init_vertices, - edges_to_add_per_timestep, random_seed, num_tests); - } - - public void testUndirectedMultigraphCreation() { - graphFactory = new Supplier>() { - public Graph get() { - return new UndirectedSparseMultigraph(); - } - }; - - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, init_vertices, - edges_to_add_per_timestep, random_seed, num_tests); - } - - public void testGraphCreation() { - graphFactory = new Supplier>() { - public Graph get() { - return new SparseGraph(); - } - }; - - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, init_vertices, - edges_to_add_per_timestep, random_seed, num_tests); - } - - public void testDirectedGraphCreation() { - graphFactory = new Supplier>() { - public Graph get() { - return new DirectedSparseGraph(); - } - }; - - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, init_vertices, - edges_to_add_per_timestep, random_seed, num_tests); - } - - public void testUndirectedGraphCreation() { - graphFactory = new Supplier>() { - public Graph get() { - return new UndirectedSparseGraph(); - } - }; - - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, init_vertices, - 
edges_to_add_per_timestep, random_seed, num_tests); - } - - /** - * Due to the way the Barabasi-Albert algorithm works there should be no - * opportunities for the generation of self-loops within the graph. - */ - public void testNoSelfLoops() { - graphFactory = new Supplier>() { - public Graph get() { - return new UndirectedSparseGraph() { - private static final long serialVersionUID = 1L; - - /** - * This anonymous class works as an UndirectedSparseGraph - * but will not accept edges that connect a vertex to - * itself. - */ - @Override - public boolean addEdge(Number edge, Pair endpoints, EdgeType edgeType) { - if (endpoints == null) - throw new IllegalArgumentException("endpoints may not be null"); - - Integer v1 = endpoints.getFirst(); - Integer v2 = endpoints.getSecond(); - - if (v1.equals(v2)) - throw new IllegalArgumentException("No self-loops"); - else - return super.addEdge(edge, endpoints, edgeType); - } - }; - } - }; - - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, init_vertices, - edges_to_add_per_timestep, random_seed, num_tests); - } - - public void testPreconditions() { - // test init_vertices = 0 - try { - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, 0, - edges_to_add_per_timestep, random_seed, num_tests); - fail(); - } catch (IllegalArgumentException e) { - } - - // test negative init_vertices - try { - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, -1, - edges_to_add_per_timestep, random_seed, num_tests); - fail(); - } catch (IllegalArgumentException e) { - } - - // test edges_to_add_per_timestep = 0 - try { - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, init_vertices, 0, - random_seed, num_tests); - fail(); - } catch (IllegalArgumentException e) { - } - - // test negative edges_to_add_per_timestep - try { - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, 
init_vertices, -1, - random_seed, num_tests); - fail(); - } catch (IllegalArgumentException e) { - } - - // test edges_to_add_per_timestep > init_vertices - try { - generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, edgeFactory, 2, 3, random_seed, - num_tests); - fail(); - } catch (IllegalArgumentException e) { - } - } - - /** - * Every node should have an out-degree AT LEAST equal to the number of - * edges added per timestep (dependent on if it is directed or undirected). - */ - public void testEveryNodeHasCorrectMinimumNumberOfEdges() { - Graph graph = generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, - edgeFactory, init_vertices, edges_to_add_per_timestep, random_seed, num_tests); - - for (Integer v : graph.getVertices()) { - assertTrue(graph.outDegree(v) >= edges_to_add_per_timestep); - } - } - - /** - * Check that not every edge goes to one node; the in-degree of any node - * should be strictly less than the number of edges. - */ - public void testNotEveryEdgeToOneNode() { - Graph graph = generateAndTestSizeOfBarabasiAlbertGraph(graphFactory, vertexFactory, - edgeFactory, init_vertices, edges_to_add_per_timestep, random_seed, num_tests); - - for (Integer v : graph.getVertices()) { - assertTrue(graph.inDegree(v) < graph.getEdgeCount()); - } - } + protected Supplier> graphFactory; + protected Supplier vertexFactory; + protected Supplier edgeFactory; + + protected int init_vertices = 1; + protected int edges_to_add_per_timestep = 1; + protected int random_seed = 0; + protected int num_timesteps = 10; + protected int num_tests = 10; + + public static Test suite() { + return new TestSuite(TestBarabasiAlbert.class); + } + + @Override + protected void setUp() { + graphFactory = + new Supplier>() { + public Graph get() { + return new SparseMultigraph(); + } + }; + vertexFactory = + new Supplier() { + int count; + + public Integer get() { + return count++; + } + }; + edgeFactory = + new Supplier() { + int count; + + public Number 
get() { + return count++; + } + }; + } + + private Graph generateAndTestSizeOfBarabasiAlbertGraph( + Supplier> graphFactory, + Supplier vertexFactory, + Supplier edgeFactory, + int init_vertices, + int edges_to_add_per_timestep, + int random_seed, + int num_tests) { + BarabasiAlbertGenerator generator = + new BarabasiAlbertGenerator( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + edges_to_add_per_timestep, + random_seed, + new HashSet()); + + Graph graph = null; + // test the graph size over {@code num_tests} intervals of {@code + // num_timesteps} timesteps + for (int i = 1; i <= num_tests; i++) { + generator.evolveGraph(num_timesteps); + graph = generator.get(); + assertEquals(graph.getVertexCount(), (i * num_timesteps) + init_vertices); + assertEquals(graph.getEdgeCount(), edges_to_add_per_timestep * (i * num_timesteps)); + } + + return graph; + } + + public void testMultigraphCreation() { + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + edges_to_add_per_timestep, + random_seed, + num_tests); + } + + public void testDirectedMultigraphCreation() { + graphFactory = + new Supplier>() { + public Graph get() { + return new DirectedSparseMultigraph(); + } + }; + + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + edges_to_add_per_timestep, + random_seed, + num_tests); + } + + public void testUndirectedMultigraphCreation() { + graphFactory = + new Supplier>() { + public Graph get() { + return new UndirectedSparseMultigraph(); + } + }; + + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + edges_to_add_per_timestep, + random_seed, + num_tests); + } + + public void testGraphCreation() { + graphFactory = + new Supplier>() { + public Graph get() { + return new SparseGraph(); + } + }; + + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + 
init_vertices, + edges_to_add_per_timestep, + random_seed, + num_tests); + } + + public void testDirectedGraphCreation() { + graphFactory = + new Supplier>() { + public Graph get() { + return new DirectedSparseGraph(); + } + }; + + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + edges_to_add_per_timestep, + random_seed, + num_tests); + } + + public void testUndirectedGraphCreation() { + graphFactory = + new Supplier>() { + public Graph get() { + return new UndirectedSparseGraph(); + } + }; + + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + edges_to_add_per_timestep, + random_seed, + num_tests); + } + + /** + * Due to the way the Barabasi-Albert algorithm works there should be no opportunities for the + * generation of self-loops within the graph. + */ + public void testNoSelfLoops() { + graphFactory = + new Supplier>() { + public Graph get() { + return new UndirectedSparseGraph() { + private static final long serialVersionUID = 1L; + + /** + * This anonymous class works as an UndirectedSparseGraph but will not accept edges + * that connect a vertex to itself. 
+ */ + @Override + public boolean addEdge( + Number edge, Pair endpoints, EdgeType edgeType) { + if (endpoints == null) + throw new IllegalArgumentException("endpoints may not be null"); + + Integer v1 = endpoints.getFirst(); + Integer v2 = endpoints.getSecond(); + + if (v1.equals(v2)) throw new IllegalArgumentException("No self-loops"); + else return super.addEdge(edge, endpoints, edgeType); + } + }; + } + }; + + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + edges_to_add_per_timestep, + random_seed, + num_tests); + } + + public void testPreconditions() { + // test init_vertices = 0 + try { + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + 0, + edges_to_add_per_timestep, + random_seed, + num_tests); + fail(); + } catch (IllegalArgumentException e) { + } + + // test negative init_vertices + try { + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + -1, + edges_to_add_per_timestep, + random_seed, + num_tests); + fail(); + } catch (IllegalArgumentException e) { + } + + // test edges_to_add_per_timestep = 0 + try { + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, vertexFactory, edgeFactory, init_vertices, 0, random_seed, num_tests); + fail(); + } catch (IllegalArgumentException e) { + } + + // test negative edges_to_add_per_timestep + try { + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, vertexFactory, edgeFactory, init_vertices, -1, random_seed, num_tests); + fail(); + } catch (IllegalArgumentException e) { + } + + // test edges_to_add_per_timestep > init_vertices + try { + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, vertexFactory, edgeFactory, 2, 3, random_seed, num_tests); + fail(); + } catch (IllegalArgumentException e) { + } + } + + /** + * Every node should have an out-degree AT LEAST equal to the number of edges added per timestep + * (dependent on if it is directed or 
undirected). + */ + public void testEveryNodeHasCorrectMinimumNumberOfEdges() { + Graph graph = + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + edges_to_add_per_timestep, + random_seed, + num_tests); + + for (Integer v : graph.getVertices()) { + assertTrue(graph.outDegree(v) >= edges_to_add_per_timestep); + } + } + + /** + * Check that not every edge goes to one node; the in-degree of any node should be strictly less + * than the number of edges. + */ + public void testNotEveryEdgeToOneNode() { + Graph graph = + generateAndTestSizeOfBarabasiAlbertGraph( + graphFactory, + vertexFactory, + edgeFactory, + init_vertices, + edges_to_add_per_timestep, + random_seed, + num_tests); + + for (Integer v : graph.getVertices()) { + assertTrue(graph.inDegree(v) < graph.getEdgeCount()); + } + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestEppsteinPowerLawGenerator.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestEppsteinPowerLawGenerator.java index ba72951c..07d3b329 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestEppsteinPowerLawGenerator.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestEppsteinPowerLawGenerator.java @@ -1,106 +1,106 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.generators.random; +import com.google.common.base.Supplier; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.SparseMultigraph; import junit.framework.Assert; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import com.google.common.base.Supplier; - -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.SparseMultigraph; - -/** - * @author Scott White - */ +/** @author Scott White */ public class TestEppsteinPowerLawGenerator extends TestCase { - - Supplier> graphFactory; - Supplier vertexFactory; - Supplier edgeFactory; - public static Test suite() { - return new TestSuite(TestEppsteinPowerLawGenerator.class); - } + Supplier> graphFactory; + Supplier vertexFactory; + Supplier edgeFactory; + + public static Test suite() { + return new TestSuite(TestEppsteinPowerLawGenerator.class); + } - @Override + @Override protected void setUp() { - graphFactory = new Supplier>() { - public Graph get() { - return new SparseMultigraph(); - } - }; - vertexFactory = new Supplier() { - int count; - public Integer get() { - return count++; - } - }; - edgeFactory = - new Supplier() { - int count; - public Number get() { - return count++; - } - }; - } + graphFactory = + new Supplier>() { + public Graph get() { + return new SparseMultigraph(); + } + }; + vertexFactory = + new Supplier() { + int count; + + public Integer get() { + return count++; + } + }; + edgeFactory = + new Supplier() { + int count; - public void testSimpleDirectedCase() { + public Number get() { + return count++; + } + }; + } - for (int r=0; r<10; r++) { - EppsteinPowerLawGenerator generator = - new EppsteinPowerLawGenerator(graphFactory, vertexFactory, edgeFactory, 10,40,r); - generator.setSeed(2); + public void testSimpleDirectedCase() { - Graph graph = generator.get(); - Assert.assertEquals(graph.getVertexCount(),10); - Assert.assertEquals(graph.getEdgeCount(),40); - } + for (int r = 0; r < 10; 
r++) { + EppsteinPowerLawGenerator generator = + new EppsteinPowerLawGenerator( + graphFactory, vertexFactory, edgeFactory, 10, 40, r); + generator.setSeed(2); + Graph graph = generator.get(); + Assert.assertEquals(graph.getVertexCount(), 10); + Assert.assertEquals(graph.getEdgeCount(), 40); } + } - // TODO: convert what is needed for this test -// public void testPowerLawProperties() { -// -// //long start = System.currentTimeMillis(); -// EppsteinPowerLawGenerator generator = new EppsteinPowerLawGenerator(vertexFactory, edgeFactory, -// 500,1500,100000); -// generator.setSeed(5); -// Graph graph = (Graph) generator.generateGraph(); -// //long stop = System.currentTimeMillis(); -// //System.out.println((stop-start)/1000l); -// -// DoubleArrayList degreeList = DegreeDistributions.getOutdegreeValues(graph.getVertices()); -// int maxDegree = (int) Descriptive.max(degreeList); -// Histogram degreeHistogram = GraphStatistics.createHistogram(degreeList,0,maxDegree,1); -// //for (int index=0;index degreeHistogram.binHeight(2) + degreeHistogram.binHeight(3)); -// -// generator = new EppsteinPowerLawGenerator(500,1500,0); -// graph = (Graph) generator.generateGraph(); -// degreeList = DegreeDistributions.getOutdegreeValues(graph.getVertices()); -// maxDegree = (int) Descriptive.max(degreeList); -// degreeHistogram = GraphStatistics.createHistogram(degreeList,0,maxDegree,1); -// //for (int index=0;index degreeHistogram.binHeight(2) + degreeHistogram.binHeight(3)); + // + // generator = new EppsteinPowerLawGenerator(500,1500,0); + // graph = (Graph) generator.generateGraph(); + // degreeList = DegreeDistributions.getOutdegreeValues(graph.getVertices()); + // maxDegree = (int) Descriptive.max(degreeList); + // degreeHistogram = GraphStatistics.createHistogram(degreeList,0,maxDegree,1); + // //for (int index=0;index> graphFactory; - Supplier vertexFactory; - Supplier edgeFactory; - public static Test suite() { - return new TestSuite(TestErdosRenyi.class); - } + Supplier> 
graphFactory; + Supplier vertexFactory; + Supplier edgeFactory; + + public static Test suite() { + return new TestSuite(TestErdosRenyi.class); + } - @Override + @Override protected void setUp() { - graphFactory = new Supplier>() { - public UndirectedGraph get() { - return new UndirectedSparseMultigraph(); - } - }; - vertexFactory = new Supplier() { - int count; - public String get() { - return Character.toString((char)('A'+count++)); - } - }; - edgeFactory = - new Supplier() { - int count; - public Number get() { - return count++; - } - }; - } + graphFactory = + new Supplier>() { + public UndirectedGraph get() { + return new UndirectedSparseMultigraph(); + } + }; + vertexFactory = + new Supplier() { + int count; + + public String get() { + return Character.toString((char) ('A' + count++)); + } + }; + edgeFactory = + new Supplier() { + int count; - public void test() { + public Number get() { + return count++; + } + }; + } - int numVertices = 100; - int total = 0; - for (int i = 1; i <= 10; i++) { - ErdosRenyiGenerator generator = - new ErdosRenyiGenerator(graphFactory, vertexFactory, edgeFactory, - numVertices,0.1); - generator.setSeed(0); + public void test() { - Graph graph = generator.get(); - Assert.assertTrue(graph.getVertexCount() == numVertices); - total += graph.getEdgeCount(); - } - total /= 10.0; - Assert.assertTrue(total > 495-50 && total < 495+50); + int numVertices = 100; + int total = 0; + for (int i = 1; i <= 10; i++) { + ErdosRenyiGenerator generator = + new ErdosRenyiGenerator( + graphFactory, vertexFactory, edgeFactory, numVertices, 0.1); + generator.setSeed(0); - } - - + Graph graph = generator.get(); + Assert.assertTrue(graph.getVertexCount() == numVertices); + total += graph.getEdgeCount(); + } + total /= 10.0; + Assert.assertTrue(total > 495 - 50 && total < 495 + 50); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestKleinberg.java 
b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestKleinberg.java index b1e0c554..f42ed743 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestKleinberg.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/generators/random/TestKleinberg.java @@ -1,39 +1,33 @@ package edu.uci.ics.jung.algorithms.generators.random; - -import junit.framework.Assert; import edu.uci.ics.jung.algorithms.generators.Lattice2DGenerator; import edu.uci.ics.jung.algorithms.generators.TestLattice2D; import edu.uci.ics.jung.graph.Graph; +import junit.framework.Assert; - -/** - * - * @author Joshua O'Madadhain - */ +/** @author Joshua O'Madadhain */ public class TestKleinberg extends TestLattice2D { - - @Override - protected Lattice2DGenerator generate(int i, int j, int k) - { - return new KleinbergSmallWorldGenerator( - k == 0 ? undirectedGraphFactory : directedGraphFactory, - vertexFactory, edgeFactory, - i, // rows - i, // columns - 0.1, // clustering exponent - j == 0 ? true : false); // toroidal? - } - - @Override - protected void checkEdgeCount(Lattice2DGenerator generator, - Graph graph) - - { - Assert.assertEquals( - generator.getGridEdgeCount() + - ((KleinbergSmallWorldGenerator)generator).getConnectionCount() - * graph.getVertexCount(), - graph.getEdgeCount()); - } + + @Override + protected Lattice2DGenerator generate(int i, int j, int k) { + return new KleinbergSmallWorldGenerator( + k == 0 ? undirectedGraphFactory : directedGraphFactory, + vertexFactory, + edgeFactory, + i, // rows + i, // columns + 0.1, // clustering exponent + j == 0 ? true : false); // toroidal? 
+ } + + @Override + protected void checkEdgeCount( + Lattice2DGenerator generator, Graph graph) { + + Assert.assertEquals( + generator.getGridEdgeCount() + + ((KleinbergSmallWorldGenerator) generator).getConnectionCount() + * graph.getVertexCount(), + graph.getEdgeCount()); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestBetweennessCentrality.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestBetweennessCentrality.java index 87a6a37b..ed174484 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestBetweennessCentrality.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestBetweennessCentrality.java @@ -1,119 +1,113 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.importance; -import junit.framework.Assert; -import junit.framework.Test; -import junit.framework.TestCase; -import junit.framework.TestSuite; import edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.DirectedSparseGraph; import edu.uci.ics.jung.graph.UndirectedGraph; import edu.uci.ics.jung.graph.UndirectedSparseGraph; +import junit.framework.Assert; +import junit.framework.Test; +import junit.framework.TestCase; +import junit.framework.TestSuite; -/** - * @author Scott White - */ +/** @author Scott White */ public class TestBetweennessCentrality extends TestCase { - public static Test suite() { - return new TestSuite(TestBetweennessCentrality.class); - } + public static Test suite() { + return new TestSuite(TestBetweennessCentrality.class); + } + + @Override + protected void setUp() {} - @Override - protected void setUp() {} - -// private static E getEdge(Graph g, int v1Index, int v2Index, BidiMap id) { -// V v1 = id.getKey(v1Index); -// V v2 = id.getKey(v2Index); -// return g.findEdge(v1, v2); -// } - - public void testRanker() { - UndirectedGraph graph = - new UndirectedSparseGraph(); - for(int i=0; i<9; i++) { - graph.addVertex(i); - } - - int edge = 0; - graph.addEdge(edge++, 0,1); - graph.addEdge(edge++, 0,6); - graph.addEdge(edge++, 1,2); - graph.addEdge(edge++, 1,3); - graph.addEdge(edge++, 2,4); - graph.addEdge(edge++, 3,4); - graph.addEdge(edge++, 4,5); - graph.addEdge(edge++, 5,8); - graph.addEdge(edge++, 7,8); - graph.addEdge(edge++, 6,7); - - BetweennessCentrality bc = - new BetweennessCentrality(graph); - bc.setRemoveRankScoresOnFinalize(false); - bc.evaluate(); - -// System.out.println("ranking"); -// for (int i = 0; i < 9; i++) -// System.out.println(String.format("%d: %f", i, bc.getVertexRankScore(i))); - - Assert.assertEquals(bc.getVertexRankScore(0)/28.0,0.2142,.001); - Assert.assertEquals(bc.getVertexRankScore(1)/28.0,0.2797,.001); - 
Assert.assertEquals(bc.getVertexRankScore(2)/28.0,0.0892,.001); - Assert.assertEquals(bc.getVertexRankScore(3)/28.0,0.0892,.001); - Assert.assertEquals(bc.getVertexRankScore(4)/28.0,0.2797,.001); - Assert.assertEquals(bc.getVertexRankScore(5)/28.0,0.2142,.001); - Assert.assertEquals(bc.getVertexRankScore(6)/28.0,0.1666,.001); - Assert.assertEquals(bc.getVertexRankScore(7)/28.0,0.1428,.001); - Assert.assertEquals(bc.getVertexRankScore(8)/28.0,0.1666,.001); - - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(0,1)), - 10.66666,.001); - - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(0,1)),10.66666,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(0,6)),9.33333,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(1,2)),6.5,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(1,3)),6.5,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(2,4)),6.5,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(3,4)),6.5,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(4,5)),10.66666,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(5,8)),9.33333,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(6,7)),8.0,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(7,8)),8.0,.001); + // private static E getEdge(Graph g, int v1Index, int v2Index, BidiMap id) { + // V v1 = id.getKey(v1Index); + // V v2 = id.getKey(v2Index); + // return g.findEdge(v1, v2); + // } + + public void testRanker() { + UndirectedGraph graph = new UndirectedSparseGraph(); + for (int i = 0; i < 9; i++) { + graph.addVertex(i); } - - public void testRankerDirected() { - DirectedGraph graph = new DirectedSparseGraph(); - for(int i=0; i<5; i++) { - graph.addVertex(i); - } - - int edge=0; - graph.addEdge(edge++, 0,1); - graph.addEdge(edge++, 1,2); - graph.addEdge(edge++, 3,1); - graph.addEdge(edge++, 4,2); - - BetweennessCentrality bc = - new BetweennessCentrality(graph); - 
bc.setRemoveRankScoresOnFinalize(false); - bc.evaluate(); - - Assert.assertEquals(bc.getVertexRankScore(0),0,.001); - Assert.assertEquals(bc.getVertexRankScore(1),2,.001); - Assert.assertEquals(bc.getVertexRankScore(2),0,.001); - Assert.assertEquals(bc.getVertexRankScore(3),0,.001); - Assert.assertEquals(bc.getVertexRankScore(4),0,.001); - - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(0,1)),2,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(1,2)),3,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(3,1)),2,.001); - Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(4,2)),1,.001); + + int edge = 0; + graph.addEdge(edge++, 0, 1); + graph.addEdge(edge++, 0, 6); + graph.addEdge(edge++, 1, 2); + graph.addEdge(edge++, 1, 3); + graph.addEdge(edge++, 2, 4); + graph.addEdge(edge++, 3, 4); + graph.addEdge(edge++, 4, 5); + graph.addEdge(edge++, 5, 8); + graph.addEdge(edge++, 7, 8); + graph.addEdge(edge++, 6, 7); + + BetweennessCentrality bc = new BetweennessCentrality(graph); + bc.setRemoveRankScoresOnFinalize(false); + bc.evaluate(); + + // System.out.println("ranking"); + // for (int i = 0; i < 9; i++) + // System.out.println(String.format("%d: %f", i, bc.getVertexRankScore(i))); + + Assert.assertEquals(bc.getVertexRankScore(0) / 28.0, 0.2142, .001); + Assert.assertEquals(bc.getVertexRankScore(1) / 28.0, 0.2797, .001); + Assert.assertEquals(bc.getVertexRankScore(2) / 28.0, 0.0892, .001); + Assert.assertEquals(bc.getVertexRankScore(3) / 28.0, 0.0892, .001); + Assert.assertEquals(bc.getVertexRankScore(4) / 28.0, 0.2797, .001); + Assert.assertEquals(bc.getVertexRankScore(5) / 28.0, 0.2142, .001); + Assert.assertEquals(bc.getVertexRankScore(6) / 28.0, 0.1666, .001); + Assert.assertEquals(bc.getVertexRankScore(7) / 28.0, 0.1428, .001); + Assert.assertEquals(bc.getVertexRankScore(8) / 28.0, 0.1666, .001); + + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(0, 1)), 10.66666, .001); + + 
Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(0, 1)), 10.66666, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(0, 6)), 9.33333, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(1, 2)), 6.5, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(1, 3)), 6.5, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(2, 4)), 6.5, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(3, 4)), 6.5, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(4, 5)), 10.66666, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(5, 8)), 9.33333, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(6, 7)), 8.0, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(7, 8)), 8.0, .001); + } + + public void testRankerDirected() { + DirectedGraph graph = new DirectedSparseGraph(); + for (int i = 0; i < 5; i++) { + graph.addVertex(i); } + + int edge = 0; + graph.addEdge(edge++, 0, 1); + graph.addEdge(edge++, 1, 2); + graph.addEdge(edge++, 3, 1); + graph.addEdge(edge++, 4, 2); + + BetweennessCentrality bc = new BetweennessCentrality(graph); + bc.setRemoveRankScoresOnFinalize(false); + bc.evaluate(); + + Assert.assertEquals(bc.getVertexRankScore(0), 0, .001); + Assert.assertEquals(bc.getVertexRankScore(1), 2, .001); + Assert.assertEquals(bc.getVertexRankScore(2), 0, .001); + Assert.assertEquals(bc.getVertexRankScore(3), 0, .001); + Assert.assertEquals(bc.getVertexRankScore(4), 0, .001); + + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(0, 1)), 2, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(1, 2)), 3, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(3, 1)), 2, .001); + Assert.assertEquals(bc.getEdgeRankScore(graph.findEdge(4, 2)), 1, .001); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestKStepMarkov.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestKStepMarkov.java 
index 9ff58be0..136c7697 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestKStepMarkov.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestKStepMarkov.java @@ -1,86 +1,77 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.importance; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; - import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; - /** * @author Scott White * @author Tom Nelson - adapted to jung2 */ public class TestKStepMarkov extends TestCase { - public final static String EDGE_WEIGHT = "edu.uci.ics.jung.edge_weight"; - DirectedGraph mGraph; - double[][] mTransitionMatrix; - Map edgeWeights = new HashMap(); + public static final String EDGE_WEIGHT = "edu.uci.ics.jung.edge_weight"; + DirectedGraph mGraph; + double[][] mTransitionMatrix; + Map edgeWeights = new HashMap(); - public static Test suite() { - return new TestSuite(TestKStepMarkov.class); - } + public static Test suite() { + return new TestSuite(TestKStepMarkov.class); + } - @Override - protected void setUp() - { - mGraph = new DirectedSparseMultigraph(); - mTransitionMatrix = new double[][] - {{0.0, 0.5, 0.5}, - {1.0/3.0, 0.0, 2.0/3.0}, - {1.0/3.0, 2.0/3.0, 0.0}}; 
+ @Override + protected void setUp() { + mGraph = new DirectedSparseMultigraph(); + mTransitionMatrix = + new double[][] {{0.0, 0.5, 0.5}, {1.0 / 3.0, 0.0, 2.0 / 3.0}, {1.0 / 3.0, 2.0 / 3.0, 0.0}}; - for (int i = 0; i < mTransitionMatrix.length; i++) - mGraph.addVertex(i); + for (int i = 0; i < mTransitionMatrix.length; i++) mGraph.addVertex(i); - for (int i = 0; i < mTransitionMatrix.length; i++) { - for (int j = 0; j < mTransitionMatrix[i].length; j++) - { - if (mTransitionMatrix[i][j] > 0) - { - int edge = i*mTransitionMatrix.length+j; - mGraph.addEdge(edge, i, j); - edgeWeights.put(edge, mTransitionMatrix[i][j]); - } - } + for (int i = 0; i < mTransitionMatrix.length; i++) { + for (int j = 0; j < mTransitionMatrix[i].length; j++) { + if (mTransitionMatrix[i][j] > 0) { + int edge = i * mTransitionMatrix.length + j; + mGraph.addEdge(edge, i, j); + edgeWeights.put(edge, mTransitionMatrix[i][j]); } + } } + } - public void testRanker() { + public void testRanker() { - Set priors = new HashSet(); - priors.add(1); - priors.add(2); - KStepMarkov ranker = new KStepMarkov(mGraph,priors,2,edgeWeights); -// ranker.evaluate(); -// System.out.println(ranker.getIterations()); + Set priors = new HashSet(); + priors.add(1); + priors.add(2); + KStepMarkov ranker = + new KStepMarkov(mGraph, priors, 2, edgeWeights); + // ranker.evaluate(); + // System.out.println(ranker.getIterations()); - for (int i = 0; i < 10; i++) - { -// System.out.println(ranker.getIterations()); -// for (Number n : mGraph.getVertices()) -// System.out.println(n + ": " + ranker.getVertexRankScore(n)); - - ranker.step(); - } - - - List> rankings = ranker.getRankings(); -// System.out.println(rankings); + for (int i = 0; i < 10; i++) { + // System.out.println(ranker.getIterations()); + // for (Number n : mGraph.getVertices()) + // System.out.println(n + ": " + ranker.getVertexRankScore(n)); + + ranker.step(); } + + List> rankings = ranker.getRankings(); + // System.out.println(rankings); + } } diff --git 
a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestWeightedNIPaths.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestWeightedNIPaths.java index 59c08054..19a625ac 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestWeightedNIPaths.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/importance/TestWeightedNIPaths.java @@ -1,85 +1,86 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.importance; +import com.google.common.base.Supplier; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import java.util.HashSet; import java.util.Set; - import junit.framework.Assert; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import com.google.common.base.Supplier; - -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; - -/** - * @author Scott White, adapted to jung2 by Tom Nelson - */ +/** @author Scott White, adapted to jung2 by Tom Nelson */ public class TestWeightedNIPaths extends TestCase { - - Supplier vertexFactory; - Supplier edgeFactory; - public static Test suite() { - return new TestSuite(TestWeightedNIPaths.class); - } + Supplier vertexFactory; + Supplier edgeFactory; - @Override - protected void setUp() { - vertexFactory = new Supplier() { - char a = 'A'; - public String get() { - return Character.toString(a++); - }}; - edgeFactory = new Supplier() { - int count; 
- public Number get() { - return count++; - }}; - } + public static Test suite() { + return new TestSuite(TestWeightedNIPaths.class); + } - public void testRanker() { + @Override + protected void setUp() { + vertexFactory = + new Supplier() { + char a = 'A'; - DirectedGraph graph = new DirectedSparseMultigraph(); - for(int i=0; i<5; i++) { - graph.addVertex(vertexFactory.get()); - } + public String get() { + return Character.toString(a++); + } + }; + edgeFactory = + new Supplier() { + int count; - graph.addEdge(edgeFactory.get(), "A", "B"); - graph.addEdge(edgeFactory.get(), "A", "C"); - graph.addEdge(edgeFactory.get(), "A", "D"); - graph.addEdge(edgeFactory.get(), "B", "A"); - graph.addEdge(edgeFactory.get(), "B", "E"); - graph.addEdge(edgeFactory.get(), "B", "D"); - graph.addEdge(edgeFactory.get(), "C", "A"); - graph.addEdge(edgeFactory.get(), "C", "E"); - graph.addEdge(edgeFactory.get(), "C", "D"); - graph.addEdge(edgeFactory.get(), "D", "A"); - graph.addEdge(edgeFactory.get(), "D", "B"); - graph.addEdge(edgeFactory.get(), "D", "C"); - graph.addEdge(edgeFactory.get(), "D", "E"); - - Set priors = new HashSet(); - priors.add("A"); + public Number get() { + return count++; + } + }; + } - WeightedNIPaths ranker = - new WeightedNIPaths(graph, vertexFactory, edgeFactory, 2.0,3,priors); - ranker.evaluate(); + public void testRanker() { - Assert.assertEquals(ranker.getRankings().get(0).rankScore,0.277787,.0001); - Assert.assertEquals(ranker.getRankings().get(1).rankScore,0.222222,.0001); - Assert.assertEquals(ranker.getRankings().get(2).rankScore,0.166676,.0001); - Assert.assertEquals(ranker.getRankings().get(3).rankScore,0.166676,.0001); - Assert.assertEquals(ranker.getRankings().get(4).rankScore,0.166676,.0001); + DirectedGraph graph = new DirectedSparseMultigraph(); + for (int i = 0; i < 5; i++) { + graph.addVertex(vertexFactory.get()); } -} \ No newline at end of file + + graph.addEdge(edgeFactory.get(), "A", "B"); + graph.addEdge(edgeFactory.get(), "A", "C"); + 
graph.addEdge(edgeFactory.get(), "A", "D"); + graph.addEdge(edgeFactory.get(), "B", "A"); + graph.addEdge(edgeFactory.get(), "B", "E"); + graph.addEdge(edgeFactory.get(), "B", "D"); + graph.addEdge(edgeFactory.get(), "C", "A"); + graph.addEdge(edgeFactory.get(), "C", "E"); + graph.addEdge(edgeFactory.get(), "C", "D"); + graph.addEdge(edgeFactory.get(), "D", "A"); + graph.addEdge(edgeFactory.get(), "D", "B"); + graph.addEdge(edgeFactory.get(), "D", "C"); + graph.addEdge(edgeFactory.get(), "D", "E"); + + Set priors = new HashSet(); + priors.add("A"); + + WeightedNIPaths ranker = + new WeightedNIPaths(graph, vertexFactory, edgeFactory, 2.0, 3, priors); + ranker.evaluate(); + + Assert.assertEquals(ranker.getRankings().get(0).rankScore, 0.277787, .0001); + Assert.assertEquals(ranker.getRankings().get(1).rankScore, 0.222222, .0001); + Assert.assertEquals(ranker.getRankings().get(2).rankScore, 0.166676, .0001); + Assert.assertEquals(ranker.getRankings().get(3).rankScore, 0.166676, .0001); + Assert.assertEquals(ranker.getRankings().get(4).rankScore, 0.166676, .0001); + } +} diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/layout/FRLayout2Test.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/layout/FRLayout2Test.java index 71822ac9..4b31b977 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/layout/FRLayout2Test.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/layout/FRLayout2Test.java @@ -1,31 +1,30 @@ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.util.HashSet; -import java.util.Set; - -import junit.framework.TestCase; import edu.uci.ics.jung.algorithms.layout.util.Relaxer; import edu.uci.ics.jung.algorithms.layout.util.VisRunner; import edu.uci.ics.jung.algorithms.util.IterativeContext; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.TestGraphs; +import java.awt.Dimension; +import java.util.HashSet; +import java.util.Set; +import 
junit.framework.TestCase; public class FRLayout2Test extends TestCase { - - protected Set seedVertices = new HashSet(); - public void testFRLayout() { - - Graph graph = TestGraphs.getOneComponentGraph(); + protected Set seedVertices = new HashSet(); + + public void testFRLayout() { + + Graph graph = TestGraphs.getOneComponentGraph(); - Layout layout = new FRLayout2(graph); - layout.setSize(new Dimension(600,600)); - if(layout instanceof IterativeContext) { - layout.initialize(); - Relaxer relaxer = new VisRunner((IterativeContext)layout); - relaxer.prerelax(); - relaxer.relax(); - } - } + Layout layout = new FRLayout2(graph); + layout.setSize(new Dimension(600, 600)); + if (layout instanceof IterativeContext) { + layout.initialize(); + Relaxer relaxer = new VisRunner((IterativeContext) layout); + relaxer.prerelax(); + relaxer.relax(); + } + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/layout/FRLayoutTest.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/layout/FRLayoutTest.java index 9af60c16..67791288 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/layout/FRLayoutTest.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/layout/FRLayoutTest.java @@ -1,31 +1,30 @@ package edu.uci.ics.jung.algorithms.layout; -import java.awt.Dimension; -import java.util.HashSet; -import java.util.Set; - -import junit.framework.TestCase; import edu.uci.ics.jung.algorithms.layout.util.Relaxer; import edu.uci.ics.jung.algorithms.layout.util.VisRunner; import edu.uci.ics.jung.algorithms.util.IterativeContext; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.util.TestGraphs; +import java.awt.Dimension; +import java.util.HashSet; +import java.util.Set; +import junit.framework.TestCase; public class FRLayoutTest extends TestCase { - - protected Set seedVertices = new HashSet(); - public void testFRLayout() { - - Graph graph = TestGraphs.getOneComponentGraph(); + protected Set seedVertices = 
new HashSet(); + + public void testFRLayout() { + + Graph graph = TestGraphs.getOneComponentGraph(); - Layout layout = new FRLayout(graph); - layout.setSize(new Dimension(600,600)); - if(layout instanceof IterativeContext) { - layout.initialize(); - Relaxer relaxer = new VisRunner((IterativeContext)layout); - relaxer.prerelax(); - relaxer.relax(); - } - } + Layout layout = new FRLayout(graph); + layout.setSize(new Dimension(600, 600)); + if (layout instanceof IterativeContext) { + layout.initialize(); + Relaxer relaxer = new VisRunner((IterativeContext) layout); + relaxer.prerelax(); + relaxer.relax(); + } + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/metrics/TestTriad.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/metrics/TestTriad.java index f221e8c8..11899ec9 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/metrics/TestTriad.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/metrics/TestTriad.java @@ -1,166 +1,165 @@ package edu.uci.ics.jung.algorithms.metrics; -import junit.framework.TestCase; -import edu.uci.ics.jung.algorithms.metrics.TriadicCensus; import edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.DirectedSparseMultigraph; +import junit.framework.TestCase; public class TestTriad extends TestCase { - public void testConfigurationFromPaper() { - DirectedGraph g = new DirectedSparseMultigraph(); - char u = 'u'; - g.addVertex(u); - char v = 'v'; - g.addVertex(v); - char w = 'w'; - g.addVertex(w); - g.addEdge(0, w, u); - g.addEdge(1, u, v); - g.addEdge(2, v, u); - - assertEquals(35, TriadicCensus.triCode(g, u, v, w)); - assertEquals(7, TriadicCensus.triType(35)); - assertEquals("111D", TriadicCensus.TRIAD_NAMES[7]); - - assertEquals(7, TriadicCensus.triType(TriadicCensus.triCode(g, u, w, v))); - assertEquals(7, TriadicCensus.triType(TriadicCensus.triCode(g, v, u, w))); - - long[] counts = TriadicCensus.getCounts(g); - - for (int i = 1; i <= 16; i++) { 
- if (i == 7) { - assertEquals(1, counts[i]); - } else { - assertEquals(0, counts[i]); - } - } - } - - public void testFourVertexGraph() { - // we'll set up a graph of - // t->u - // u->v - // and that's it. - // total count: - // 2: 1(t, u, w)(u, v, w) - // 6: 1(t, u, v) - // 1: 1(u, v, w) - DirectedGraph g = new DirectedSparseMultigraph(); - char u = 'u'; - g.addVertex(u); - char v = 'v'; - g.addVertex(v); - char w = 'w'; - g.addVertex(w); - char t = 't'; - g.addVertex(t); - - g.addEdge(0, t, u ); - g.addEdge(1, u, v ); - - long[] counts = TriadicCensus.getCounts(g); - for (int i = 1; i <= 16; i++) { - if( i == 2 ) { - assertEquals("On " + i, 2, counts[i]); - } else if (i == 6 || i == 1 ) { - assertEquals("On " + i, 1, counts[i]); - } else { - assertEquals(0, counts[i]); - } - } - - // now let's tweak to - // t->u, u->v, v->t - // w->u, v->w - g.addEdge(2, v, t ); - g.addEdge(3, w, u ); - g.addEdge(4, v, w ); - - // that's two 030Cs. it's a 021D (v-t, v-w) and an 021U (t-u, w-u) - counts = TriadicCensus.getCounts(g); - - for (int i = 1; i <= 16; i++) { - if( i == 10 /* 030C */ ) { - assertEquals("On " + i, 2, counts[i]); - } else if (i == 4 || i == 5 ) { - assertEquals("On " + i, 1, counts[i]); - } else { - assertEquals("On " + i , 0, counts[i]); - } - } - } - - public void testThreeDotsThreeDashes() { - DirectedGraph g = new DirectedSparseMultigraph(); - char u = 'u'; - g.addVertex(u); - char v = 'v'; - g.addVertex(v); - char w = 'w'; - g.addVertex(w); - - long[] counts = TriadicCensus.getCounts(g); - - for (int i = 1; i <= 16; i++) { - if (i == 1) { - assertEquals(1, counts[i]); - } else { - assertEquals(0, counts[i]); - } - } - - g.addEdge(0, v, u); - g.addEdge(1, u, v); - g.addEdge(2, v, w); - g.addEdge(3, w, v); - g.addEdge(4, u, w); - g.addEdge(5, w, u); - - counts = TriadicCensus.getCounts(g); - - for (int i = 1; i <= 16; i++) { - if (i == 16) { - assertEquals(1, counts[i]); - } else { - assertEquals("Count on " + i + " failed", 0, counts[i]); - } - } - } 
- - /** **************Boring accounting for zero graphs*********** */ - public void testNull() { - DirectedGraph g = new DirectedSparseMultigraph(); - long[] counts = TriadicCensus.getCounts(g); - - // t looks like a hashtable for the twelve keys - for (int i = 1; i < TriadicCensus.MAX_TRIADS; i++) { - assertEquals("Empty Graph doesn't have count 0", 0, counts[i]); - } - } - - public void testOneVertex() { - DirectedGraph g = new DirectedSparseMultigraph(); - g.addVertex('u'); - long[] counts = TriadicCensus.getCounts(g); - - // t looks like a hashtable for the twelve keys - for (int i = 1; i < TriadicCensus.MAX_TRIADS; i++) { - assertEquals("One vertex Graph doesn't have count 0", 0, counts[i]); - } - } - - public void testTwoVertices() { - DirectedGraph g = new DirectedSparseMultigraph(); - char v1, v2; - g.addVertex(v1 = 'u'); - g.addVertex(v2 = 'v'); - g.addEdge(0, v1, v2); - long[] counts = TriadicCensus.getCounts(g); - - // t looks like a hashtable for the twelve keys - for (int i = 1; i < TriadicCensus.MAX_TRIADS; i++) { - assertEquals("Two vertex Graph doesn't have count 0", 0, counts[i]); - } - } + public void testConfigurationFromPaper() { + DirectedGraph g = new DirectedSparseMultigraph(); + char u = 'u'; + g.addVertex(u); + char v = 'v'; + g.addVertex(v); + char w = 'w'; + g.addVertex(w); + g.addEdge(0, w, u); + g.addEdge(1, u, v); + g.addEdge(2, v, u); + + assertEquals(35, TriadicCensus.triCode(g, u, v, w)); + assertEquals(7, TriadicCensus.triType(35)); + assertEquals("111D", TriadicCensus.TRIAD_NAMES[7]); + + assertEquals(7, TriadicCensus.triType(TriadicCensus.triCode(g, u, w, v))); + assertEquals(7, TriadicCensus.triType(TriadicCensus.triCode(g, v, u, w))); + + long[] counts = TriadicCensus.getCounts(g); + + for (int i = 1; i <= 16; i++) { + if (i == 7) { + assertEquals(1, counts[i]); + } else { + assertEquals(0, counts[i]); + } + } + } + + public void testFourVertexGraph() { + // we'll set up a graph of + // t->u + // u->v + // and that's it. 
+ // total count: + // 2: 1(t, u, w)(u, v, w) + // 6: 1(t, u, v) + // 1: 1(u, v, w) + DirectedGraph g = new DirectedSparseMultigraph(); + char u = 'u'; + g.addVertex(u); + char v = 'v'; + g.addVertex(v); + char w = 'w'; + g.addVertex(w); + char t = 't'; + g.addVertex(t); + + g.addEdge(0, t, u); + g.addEdge(1, u, v); + + long[] counts = TriadicCensus.getCounts(g); + for (int i = 1; i <= 16; i++) { + if (i == 2) { + assertEquals("On " + i, 2, counts[i]); + } else if (i == 6 || i == 1) { + assertEquals("On " + i, 1, counts[i]); + } else { + assertEquals(0, counts[i]); + } + } + + // now let's tweak to + // t->u, u->v, v->t + // w->u, v->w + g.addEdge(2, v, t); + g.addEdge(3, w, u); + g.addEdge(4, v, w); + + // that's two 030Cs. it's a 021D (v-t, v-w) and an 021U (t-u, w-u) + counts = TriadicCensus.getCounts(g); + + for (int i = 1; i <= 16; i++) { + if (i == 10 /* 030C */) { + assertEquals("On " + i, 2, counts[i]); + } else if (i == 4 || i == 5) { + assertEquals("On " + i, 1, counts[i]); + } else { + assertEquals("On " + i, 0, counts[i]); + } + } + } + + public void testThreeDotsThreeDashes() { + DirectedGraph g = new DirectedSparseMultigraph(); + char u = 'u'; + g.addVertex(u); + char v = 'v'; + g.addVertex(v); + char w = 'w'; + g.addVertex(w); + + long[] counts = TriadicCensus.getCounts(g); + + for (int i = 1; i <= 16; i++) { + if (i == 1) { + assertEquals(1, counts[i]); + } else { + assertEquals(0, counts[i]); + } + } + + g.addEdge(0, v, u); + g.addEdge(1, u, v); + g.addEdge(2, v, w); + g.addEdge(3, w, v); + g.addEdge(4, u, w); + g.addEdge(5, w, u); + + counts = TriadicCensus.getCounts(g); + + for (int i = 1; i <= 16; i++) { + if (i == 16) { + assertEquals(1, counts[i]); + } else { + assertEquals("Count on " + i + " failed", 0, counts[i]); + } + } + } + + /** **************Boring accounting for zero graphs*********** */ + public void testNull() { + DirectedGraph g = new DirectedSparseMultigraph(); + long[] counts = TriadicCensus.getCounts(g); + + // t looks like a 
hashtable for the twelve keys + for (int i = 1; i < TriadicCensus.MAX_TRIADS; i++) { + assertEquals("Empty Graph doesn't have count 0", 0, counts[i]); + } + } + + public void testOneVertex() { + DirectedGraph g = new DirectedSparseMultigraph(); + g.addVertex('u'); + long[] counts = TriadicCensus.getCounts(g); + + // t looks like a hashtable for the twelve keys + for (int i = 1; i < TriadicCensus.MAX_TRIADS; i++) { + assertEquals("One vertex Graph doesn't have count 0", 0, counts[i]); + } + } + + public void testTwoVertices() { + DirectedGraph g = new DirectedSparseMultigraph(); + char v1, v2; + g.addVertex(v1 = 'u'); + g.addVertex(v2 = 'v'); + g.addEdge(0, v1, v2); + long[] counts = TriadicCensus.getCounts(g); + + // t looks like a hashtable for the twelve keys + for (int i = 1; i < TriadicCensus.MAX_TRIADS; i++) { + assertEquals("Two vertex Graph doesn't have count 0", 0, counts[i]); + } + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestBetweennessCentrality.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestBetweennessCentrality.java index eda3f08d..e8ae4828 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestBetweennessCentrality.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestBetweennessCentrality.java @@ -1,138 +1,130 @@ /** - * Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * - * All rights reserved. + *

All rights reserved. * - * This software is open-source under the BSD license; see either - * "license.txt" or - * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * Created on Sep 17, 2008 - * + *

This software is open-source under the BSD license; see either "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. Created on Sep 17, 2008 */ package edu.uci.ics.jung.algorithms.scoring; -import junit.framework.TestCase; - import com.google.common.base.Function; - import edu.uci.ics.jung.graph.DirectedSparseGraph; import edu.uci.ics.jung.graph.Graph; +import junit.framework.TestCase; -/** - * - */ -public class TestBetweennessCentrality extends TestCase -{ -// public void testUndirected() { -// UndirectedGraph graph = -// new UndirectedSparseGraph(); -// for(int i=0; i<9; i++) { -// graph.addVertex(i); -// } -// -// int edge = 0; -// graph.addEdge(edge++, 0,1); -// graph.addEdge(edge++, 0,6); -// graph.addEdge(edge++, 1,2); -// graph.addEdge(edge++, 1,3); -// graph.addEdge(edge++, 2,4); -// graph.addEdge(edge++, 3,4); -// graph.addEdge(edge++, 4,5); -// graph.addEdge(edge++, 5,8); -// graph.addEdge(edge++, 7,8); -// graph.addEdge(edge++, 6,7); -// -// BetweennessCentrality bc = -// new BetweennessCentrality(graph); -// -//// System.out.println("scoring"); -//// for (int i = 0; i < graph.getVertexCount(); i++) -//// System.out.println(String.format("%d: %f", i, bc.getVertexScore(i))); -// -// Assert.assertEquals(bc.getVertexScore(0),6.000,.001); -// Assert.assertEquals(bc.getVertexScore(1),7.833,.001); -// Assert.assertEquals(bc.getVertexScore(2),2.500,.001); -// Assert.assertEquals(bc.getVertexScore(3),2.500,.001); -// Assert.assertEquals(bc.getVertexScore(4),7.833,.001); -// Assert.assertEquals(bc.getVertexScore(5),6.000,.001); -// Assert.assertEquals(bc.getVertexScore(6),4.666,.001); -// Assert.assertEquals(bc.getVertexScore(7),4.000,.001); -// Assert.assertEquals(bc.getVertexScore(8),4.666,.001); -// -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(0,1)),10.666,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(0,6)),9.333,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(1,2)),6.500,.001); -// 
Assert.assertEquals(bc.getEdgeScore(graph.findEdge(1,3)),6.500,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(2,4)),6.500,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(3,4)),6.500,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(4,5)),10.666,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(5,8)),9.333,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(6,7)),8.000,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(7,8)),8.000,.001); -// } -// -// public void testDirected() -// { -// DirectedGraph graph = new DirectedSparseGraph(); -// for(int i=0; i<5; i++) -// graph.addVertex(i); -// -// int edge=0; -// graph.addEdge(edge++, 0,1); -// graph.addEdge(edge++, 1,2); -// graph.addEdge(edge++, 3,1); -// graph.addEdge(edge++, 4,2); -// -// BetweennessCentrality bc = -// new BetweennessCentrality(graph); -// -// Assert.assertEquals(bc.getVertexScore(0),0,.001); -// Assert.assertEquals(bc.getVertexScore(1),2,.001); -// Assert.assertEquals(bc.getVertexScore(2),0,.001); -// Assert.assertEquals(bc.getVertexScore(3),0,.001); -// Assert.assertEquals(bc.getVertexScore(4),0,.001); -// -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(0,1)),2,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(1,2)),3,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(3,1)),2,.001); -// Assert.assertEquals(bc.getEdgeScore(graph.findEdge(4,2)),1,.001); -// } - - public void testWeighted() - { - Graph graph = new DirectedSparseGraph(); - - for(int i=0; i<5; i++) - graph.addVertex(i); +/** */ +public class TestBetweennessCentrality extends TestCase { + // public void testUndirected() { + // UndirectedGraph graph = + // new UndirectedSparseGraph(); + // for(int i=0; i<9; i++) { + // graph.addVertex(i); + // } + // + // int edge = 0; + // graph.addEdge(edge++, 0,1); + // graph.addEdge(edge++, 0,6); + // graph.addEdge(edge++, 1,2); + // graph.addEdge(edge++, 1,3); + // graph.addEdge(edge++, 2,4); + // 
graph.addEdge(edge++, 3,4); + // graph.addEdge(edge++, 4,5); + // graph.addEdge(edge++, 5,8); + // graph.addEdge(edge++, 7,8); + // graph.addEdge(edge++, 6,7); + // + // BetweennessCentrality bc = + // new BetweennessCentrality(graph); + // + //// System.out.println("scoring"); + //// for (int i = 0; i < graph.getVertexCount(); i++) + //// System.out.println(String.format("%d: %f", i, bc.getVertexScore(i))); + // + // Assert.assertEquals(bc.getVertexScore(0),6.000,.001); + // Assert.assertEquals(bc.getVertexScore(1),7.833,.001); + // Assert.assertEquals(bc.getVertexScore(2),2.500,.001); + // Assert.assertEquals(bc.getVertexScore(3),2.500,.001); + // Assert.assertEquals(bc.getVertexScore(4),7.833,.001); + // Assert.assertEquals(bc.getVertexScore(5),6.000,.001); + // Assert.assertEquals(bc.getVertexScore(6),4.666,.001); + // Assert.assertEquals(bc.getVertexScore(7),4.000,.001); + // Assert.assertEquals(bc.getVertexScore(8),4.666,.001); + // + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(0,1)),10.666,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(0,6)),9.333,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(1,2)),6.500,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(1,3)),6.500,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(2,4)),6.500,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(3,4)),6.500,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(4,5)),10.666,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(5,8)),9.333,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(6,7)),8.000,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(7,8)),8.000,.001); + // } + // + // public void testDirected() + // { + // DirectedGraph graph = new DirectedSparseGraph(); + // for(int i=0; i<5; i++) + // graph.addVertex(i); + // + // int edge=0; + // graph.addEdge(edge++, 0,1); + // graph.addEdge(edge++, 1,2); + // graph.addEdge(edge++, 3,1); + // 
graph.addEdge(edge++, 4,2); + // + // BetweennessCentrality bc = + // new BetweennessCentrality(graph); + // + // Assert.assertEquals(bc.getVertexScore(0),0,.001); + // Assert.assertEquals(bc.getVertexScore(1),2,.001); + // Assert.assertEquals(bc.getVertexScore(2),0,.001); + // Assert.assertEquals(bc.getVertexScore(3),0,.001); + // Assert.assertEquals(bc.getVertexScore(4),0,.001); + // + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(0,1)),2,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(1,2)),3,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(3,1)),2,.001); + // Assert.assertEquals(bc.getEdgeScore(graph.findEdge(4,2)),1,.001); + // } + + public void testWeighted() { + Graph graph = new DirectedSparseGraph(); + + for (int i = 0; i < 5; i++) graph.addVertex(i); + + char edge = 'a'; + graph.addEdge(edge++, 0, 1); + graph.addEdge(edge++, 0, 2); + graph.addEdge(edge++, 2, 3); + graph.addEdge(edge++, 3, 1); + graph.addEdge(edge++, 1, 4); + + final int weights[] = {1, 1, 1, 1, 1}; - char edge='a'; - graph.addEdge(edge++, 0,1); - graph.addEdge(edge++, 0,2); - graph.addEdge(edge++, 2,3); - graph.addEdge(edge++, 3,1); - graph.addEdge(edge++, 1,4); + Function edge_weights = + new Function() { + public Integer apply(Character arg0) { + return weights[arg0 - 'a']; + } + }; - final int weights[] = {1, 1, 1, 1, 1}; - - Function edge_weights = new Function() - { - public Integer apply(Character arg0) { return weights[arg0 - 'a']; } - }; - - BetweennessCentrality bc = - new BetweennessCentrality(graph, edge_weights); + BetweennessCentrality bc = + new BetweennessCentrality(graph, edge_weights); -// System.out.println("scoring"); -// System.out.println("(weighted)"); -// System.out.println("vertices:"); -// for (int i = 0; i < graph.getVertexCount(); i++) -// System.out.println(String.format("%d: %f", i, bc.getVertexScore(i))); -// System.out.println("edges:"); -// for (int i = 0; i < graph.getEdgeCount(); i++) -// { -// char e = (char)(i + 
'a'); -// System.out.println(String.format("%c: (weight: %d), %f", e, -// edge_weights.apply(e), bc.getEdgeScore(e))); -// } - } + // System.out.println("scoring"); + // System.out.println("(weighted)"); + // System.out.println("vertices:"); + // for (int i = 0; i < graph.getVertexCount(); i++) + // System.out.println(String.format("%d: %f", i, bc.getVertexScore(i))); + // System.out.println("edges:"); + // for (int i = 0; i < graph.getEdgeCount(); i++) + // { + // char e = (char)(i + 'a'); + // System.out.println(String.format("%c: (weight: %d), %f", e, + // edge_weights.apply(e), bc.getEdgeScore(e))); + // } + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestHITS.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestHITS.java index 76cccd2f..c0cc4ddc 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestHITS.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestHITS.java @@ -1,21 +1,20 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.scoring; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import junit.framework.Assert; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; - /** * @author Scott White @@ -23,97 +22,94 @@ */ public class TestHITS extends TestCase { - DirectedGraph graph; - - public static Test suite() { - return new TestSuite(TestHITS.class); - } + DirectedGraph graph; - @Override - protected void setUp() { - graph = new DirectedSparseMultigraph(); - for(int i=0; i<5; i++) { - graph.addVertex(i); - } + public static Test suite() { + return new TestSuite(TestHITS.class); + } - int j=0; - graph.addEdge(j++, 0, 1); - graph.addEdge(j++, 1, 2); - graph.addEdge(j++, 2, 3); - graph.addEdge(j++, 3, 0); - graph.addEdge(j++, 2, 1); + @Override + protected void setUp() { + graph = new DirectedSparseMultigraph(); + for (int i = 0; i < 5; i++) { + graph.addVertex(i); } - public void testRanker() { + int j = 0; + graph.addEdge(j++, 0, 1); + graph.addEdge(j++, 1, 2); + graph.addEdge(j++, 2, 3); + graph.addEdge(j++, 3, 0); + graph.addEdge(j++, 2, 1); + } - HITS ranker = new HITS(graph); - for (int i = 0; i < 10; i++) - { - ranker.step(); -// // check hub scores in terms of previous authority scores -// Assert.assertEquals(t.transform(0).hub, -// 0.5*ranker.getAuthScore(1) + 0.2*ranker.getAuthScore(4)); -// Assert.assertEquals(t.transform(1).hub, -// ranker.getAuthScore(2) + 0.2*ranker.getAuthScore(4)); -// Assert.assertEquals(t.transform(2).hub, -// 0.5*ranker.getAuthScore(1) + ranker.getAuthScore(3) + 0.2*ranker.getAuthScore(4)); -// Assert.assertEquals(t.transform(3).hub, -// ranker.getAuthScore(0) + 0.2*ranker.getAuthScore(4)); -// Assert.assertEquals(t.transform(4).hub, -// 0.2*ranker.getAuthScore(4)); -// -// // check authority scores in terms of 
previous hub scores -// Assert.assertEquals(t.transform(0).authority, -// ranker.getVertexScore(3) + 0.2*ranker.getVertexScore(4)); -// Assert.assertEquals(t.transform(1).authority, -// ranker.getVertexScore(0) + 0.5 * ranker.getVertexScore(2) + 0.2*ranker.getVertexScore(4)); -// Assert.assertEquals(t.transform(2).authority, -// ranker.getVertexScore(1) + 0.2*ranker.getVertexScore(4)); -// Assert.assertEquals(t.transform(3).authority, -// 0.5*ranker.getVertexScore(2) + 0.2*ranker.getVertexScore(4)); -// Assert.assertEquals(t.transform(4).authority, -// 0.2*ranker.getVertexScore(4)); -// - // verify that sums of each scores are 1.0 - double auth_sum = 0; - double hub_sum = 0; - for (int j = 0; j < 5; j++) - { -// auth_sum += ranker.getAuthScore(j); -// hub_sum += ranker.getVertexScore(j); -// auth_sum += (ranker.getAuthScore(j) * ranker.getAuthScore(j)); -// hub_sum += (ranker.getVertexScore(j) * ranker.getVertexScore(j)); - HITS.Scores score = ranker.getVertexScore(j); - auth_sum += score.authority * score.authority; - hub_sum += score.hub * score.hub; - } - Assert.assertEquals(auth_sum, 1.0, .0001); - Assert.assertEquals(hub_sum, 1.0, 0.0001); - } - - ranker.evaluate(); + public void testRanker() { - Assert.assertEquals(ranker.getVertexScore(0).authority, 0, .0001); - Assert.assertEquals(ranker.getVertexScore(1).authority, 0.8507, .001); - Assert.assertEquals(ranker.getVertexScore(2).authority, 0.0, .0001); - Assert.assertEquals(ranker.getVertexScore(3).authority, 0.5257, .001); + HITS ranker = new HITS(graph); + for (int i = 0; i < 10; i++) { + ranker.step(); + // // check hub scores in terms of previous authority scores + // Assert.assertEquals(t.transform(0).hub, + // 0.5*ranker.getAuthScore(1) + 0.2*ranker.getAuthScore(4)); + // Assert.assertEquals(t.transform(1).hub, + // ranker.getAuthScore(2) + 0.2*ranker.getAuthScore(4)); + // Assert.assertEquals(t.transform(2).hub, + // 0.5*ranker.getAuthScore(1) + ranker.getAuthScore(3) + 0.2*ranker.getAuthScore(4)); + 
// Assert.assertEquals(t.transform(3).hub, + // ranker.getAuthScore(0) + 0.2*ranker.getAuthScore(4)); + // Assert.assertEquals(t.transform(4).hub, + // 0.2*ranker.getAuthScore(4)); + // + // // check authority scores in terms of previous hub scores + // Assert.assertEquals(t.transform(0).authority, + // ranker.getVertexScore(3) + 0.2*ranker.getVertexScore(4)); + // Assert.assertEquals(t.transform(1).authority, + // ranker.getVertexScore(0) + 0.5 * ranker.getVertexScore(2) + 0.2*ranker.getVertexScore(4)); + // Assert.assertEquals(t.transform(2).authority, + // ranker.getVertexScore(1) + 0.2*ranker.getVertexScore(4)); + // Assert.assertEquals(t.transform(3).authority, + // 0.5*ranker.getVertexScore(2) + 0.2*ranker.getVertexScore(4)); + // Assert.assertEquals(t.transform(4).authority, + // 0.2*ranker.getVertexScore(4)); + // + // verify that sums of each scores are 1.0 + double auth_sum = 0; + double hub_sum = 0; + for (int j = 0; j < 5; j++) { + // auth_sum += ranker.getAuthScore(j); + // hub_sum += ranker.getVertexScore(j); + // auth_sum += (ranker.getAuthScore(j) * ranker.getAuthScore(j)); + // hub_sum += (ranker.getVertexScore(j) * ranker.getVertexScore(j)); + HITS.Scores score = ranker.getVertexScore(j); + auth_sum += score.authority * score.authority; + hub_sum += score.hub * score.hub; + } + Assert.assertEquals(auth_sum, 1.0, .0001); + Assert.assertEquals(hub_sum, 1.0, 0.0001); + } - Assert.assertEquals(ranker.getVertexScore(0).hub, 0.5257, .001); - Assert.assertEquals(ranker.getVertexScore(1).hub, 0.0, .0001); - Assert.assertEquals(ranker.getVertexScore(2).hub, 0.8507, .0001); - Assert.assertEquals(ranker.getVertexScore(3).hub, 0.0, .0001); + ranker.evaluate(); - // the values below assume scores sum to 1 - // (rather than that sum of squares of scores sum to 1) -// Assert.assertEquals(ranker.getVertexScore(0).authority, 0, .0001); -// Assert.assertEquals(ranker.getVertexScore(1).authority, 0.618, .001); -// 
Assert.assertEquals(ranker.getVertexScore(2).authority, 0.0, .0001); -// Assert.assertEquals(ranker.getVertexScore(3).authority, 0.3819, .001); -// -// Assert.assertEquals(ranker.getVertexScore(0).hub, 0.38196, .001); -// Assert.assertEquals(ranker.getVertexScore(1).hub, 0.0, .0001); -// Assert.assertEquals(ranker.getVertexScore(2).hub, 0.618, .0001); -// Assert.assertEquals(ranker.getVertexScore(3).hub, 0.0, .0001); - } + Assert.assertEquals(ranker.getVertexScore(0).authority, 0, .0001); + Assert.assertEquals(ranker.getVertexScore(1).authority, 0.8507, .001); + Assert.assertEquals(ranker.getVertexScore(2).authority, 0.0, .0001); + Assert.assertEquals(ranker.getVertexScore(3).authority, 0.5257, .001); + + Assert.assertEquals(ranker.getVertexScore(0).hub, 0.5257, .001); + Assert.assertEquals(ranker.getVertexScore(1).hub, 0.0, .0001); + Assert.assertEquals(ranker.getVertexScore(2).hub, 0.8507, .0001); + Assert.assertEquals(ranker.getVertexScore(3).hub, 0.0, .0001); + // the values below assume scores sum to 1 + // (rather than that sum of squares of scores sum to 1) + // Assert.assertEquals(ranker.getVertexScore(0).authority, 0, .0001); + // Assert.assertEquals(ranker.getVertexScore(1).authority, 0.618, .001); + // Assert.assertEquals(ranker.getVertexScore(2).authority, 0.0, .0001); + // Assert.assertEquals(ranker.getVertexScore(3).authority, 0.3819, .001); + // + // Assert.assertEquals(ranker.getVertexScore(0).hub, 0.38196, .001); + // Assert.assertEquals(ranker.getVertexScore(1).hub, 0.0, .0001); + // Assert.assertEquals(ranker.getVertexScore(2).hub, 0.618, .0001); + // Assert.assertEquals(ranker.getVertexScore(3).hub, 0.0, .0001); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestHITSWithPriors.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestHITSWithPriors.java index af640434..dc356082 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestHITSWithPriors.java +++ 
b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestHITSWithPriors.java @@ -1,77 +1,72 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.scoring; +import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import java.util.HashSet; import java.util.Set; - import junit.framework.Assert; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; - -/** - * Tests HITSWithPriors. - */ +/** Tests HITSWithPriors. 
*/ public class TestHITSWithPriors extends TestCase { - DirectedGraph graph; - Set roots; - - public static Test suite() { - return new TestSuite(TestHITSWithPriors.class); - } + DirectedGraph graph; + Set roots; - @Override - protected void setUp() { - graph = new DirectedSparseMultigraph(); - for(int i=0; i<4; i++) { - graph.addVertex(i); - } - int j=0; - graph.addEdge(j++, 0, 1); - graph.addEdge(j++, 1, 2); - graph.addEdge(j++, 2, 3); - graph.addEdge(j++, 3, 0); - graph.addEdge(j++, 2, 1); + public static Test suite() { + return new TestSuite(TestHITSWithPriors.class); + } - roots = new HashSet(); - roots.add(2); + @Override + protected void setUp() { + graph = new DirectedSparseMultigraph(); + for (int i = 0; i < 4; i++) { + graph.addVertex(i); } + int j = 0; + graph.addEdge(j++, 0, 1); + graph.addEdge(j++, 1, 2); + graph.addEdge(j++, 2, 3); + graph.addEdge(j++, 3, 0); + graph.addEdge(j++, 2, 1); + + roots = new HashSet(); + roots.add(2); + } + + public void testRankings() { - public void testRankings() { + HITSWithPriors ranker = + new HITSWithPriors(graph, ScoringUtils.getHITSUniformRootPrior(roots), 0.3); + ranker.evaluate(); - HITSWithPriors ranker = - new HITSWithPriors(graph, ScoringUtils.getHITSUniformRootPrior(roots), 0.3); - ranker.evaluate(); - - double[] expected_auth = {0.0, 0.765, 0.365, 0.530}; - double[] expected_hub = {0.398, 0.190, 0.897, 0.0}; + double[] expected_auth = {0.0, 0.765, 0.365, 0.530}; + double[] expected_hub = {0.398, 0.190, 0.897, 0.0}; - double hub_sum = 0; - double auth_sum = 0; - for (Number n : graph.getVertices()) - { - int i = n.intValue(); - double auth = ranker.getVertexScore(i).authority; - double hub = ranker.getVertexScore(i).hub; - Assert.assertEquals(auth, expected_auth[i], 0.001); - Assert.assertEquals(hub, expected_hub[i], 0.001); - hub_sum += hub * hub; - auth_sum += auth * auth; - } - Assert.assertEquals(1.0, hub_sum, 0.001); - Assert.assertEquals(1.0, auth_sum, 0.001); + double hub_sum = 0; + double auth_sum = 
0; + for (Number n : graph.getVertices()) { + int i = n.intValue(); + double auth = ranker.getVertexScore(i).authority; + double hub = ranker.getVertexScore(i).hub; + Assert.assertEquals(auth, expected_auth[i], 0.001); + Assert.assertEquals(hub, expected_hub[i], 0.001); + hub_sum += hub * hub; + auth_sum += auth * auth; } + Assert.assertEquals(1.0, hub_sum, 0.001); + Assert.assertEquals(1.0, auth_sum, 0.001); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestKStepMarkov.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestKStepMarkov.java index 3d5414d3..138b835e 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestKStepMarkov.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestKStepMarkov.java @@ -1,70 +1,58 @@ package edu.uci.ics.jung.algorithms.scoring; +import com.google.common.base.Functions; +import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; - import junit.framework.TestCase; -import com.google.common.base.Functions; - -import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; - -public class TestKStepMarkov extends TestCase -{ - DirectedGraph mGraph; - double[][] mTransitionMatrix; - Map edgeWeights = new HashMap(); - - @Override - protected void setUp() - { - mGraph = new DirectedSparseMultigraph(); - mTransitionMatrix = new double[][] - {{0.0, 0.5, 0.5}, - {1.0/3.0, 0.0, 2.0/3.0}, - {1.0/3.0, 2.0/3.0, 0.0}}; - - for (int i = 0; i < mTransitionMatrix.length; i++) - mGraph.addVertex(i); - - for (int i = 0; i < mTransitionMatrix.length; i++) { - for (int j = 0; j < mTransitionMatrix[i].length; j++) - { - if 
(mTransitionMatrix[i][j] > 0) - { - int edge = i*mTransitionMatrix.length+j; - mGraph.addEdge(edge, i, j); - edgeWeights.put(edge, mTransitionMatrix[i][j]); - } - } +public class TestKStepMarkov extends TestCase { + DirectedGraph mGraph; + double[][] mTransitionMatrix; + Map edgeWeights = new HashMap(); + + @Override + protected void setUp() { + mGraph = new DirectedSparseMultigraph(); + mTransitionMatrix = + new double[][] {{0.0, 0.5, 0.5}, {1.0 / 3.0, 0.0, 2.0 / 3.0}, {1.0 / 3.0, 2.0 / 3.0, 0.0}}; + + for (int i = 0; i < mTransitionMatrix.length; i++) mGraph.addVertex(i); + + for (int i = 0; i < mTransitionMatrix.length; i++) { + for (int j = 0; j < mTransitionMatrix[i].length; j++) { + if (mTransitionMatrix[i][j] > 0) { + int edge = i * mTransitionMatrix.length + j; + mGraph.addEdge(edge, i, j); + edgeWeights.put(edge, mTransitionMatrix[i][j]); } + } } - - public void testRanker() { - - Set priors = new HashSet(); - priors.add(1); - priors.add(2); - KStepMarkov ranker = - new KStepMarkov(mGraph, Functions.forMap(edgeWeights), - ScoringUtils.getUniformRootPrior(priors),2); -// ranker.evaluate(); -// System.out.println(ranker.getIterations()); - - for (int i = 0; i < 10; i++) - { -// System.out.println(ranker.getIterations()); -// for (Number n : mGraph.getVertices()) -// System.out.println(n + ": " + ranker.getVertexScore(n)); - ranker.step(); - } -// List> rankings = ranker.getRankings(); -// System.out.println("New version:"); -// System.out.println(rankings); + } + + public void testRanker() { + + Set priors = new HashSet(); + priors.add(1); + priors.add(2); + KStepMarkov ranker = + new KStepMarkov( + mGraph, Functions.forMap(edgeWeights), ScoringUtils.getUniformRootPrior(priors), 2); + // ranker.evaluate(); + // System.out.println(ranker.getIterations()); + + for (int i = 0; i < 10; i++) { + // System.out.println(ranker.getIterations()); + // for (Number n : mGraph.getVertices()) + // System.out.println(n + ": " + ranker.getVertexScore(n)); + ranker.step(); } 
- + // List> rankings = ranker.getRankings(); + // System.out.println("New version:"); + // System.out.println(rankings); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestPageRank.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestPageRank.java index 4a9d3629..3c395e5e 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestPageRank.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestPageRank.java @@ -1,80 +1,79 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.scoring; +import com.google.common.base.Functions; +import com.google.common.base.Supplier; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.DirectedSparseMultigraph; +import edu.uci.ics.jung.graph.Graph; import java.util.HashMap; import java.util.Map; - import junit.framework.Assert; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import com.google.common.base.Functions; -import com.google.common.base.Supplier; +/** @author Joshua O'Madadhain */ +public class TestPageRank extends TestCase { -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; -import edu.uci.ics.jung.graph.Graph; + private Map edgeWeights; + private DirectedGraph graph; + private Supplier edgeFactory; -/** - * @author Joshua O'Madadhain - */ -public class TestPageRank extends TestCase { - - private Map edgeWeights; - private DirectedGraph graph; - private Supplier edgeFactory; - - public static Test suite() { - return new TestSuite(TestPageRank.class); - } + public static Test suite() { + return new TestSuite(TestPageRank.class); + } - @Override - protected void setUp() { - edgeWeights = new HashMap(); - edgeFactory = new Supplier() { - int i=0; - public Integer get() { - return i++; - }}; - } + @Override + protected void setUp() { + edgeWeights = new HashMap(); + edgeFactory = + new Supplier() { + int i = 0; + + public Integer get() { + return i++; + } + }; + } - private void addEdge(Graph G, Integer v1, Integer v2, double weight) { - Integer edge = edgeFactory.get(); - graph.addEdge(edge, v1, v2); - edgeWeights.put(edge, weight); + private void addEdge(Graph G, Integer v1, Integer v2, double weight) { + Integer edge = edgeFactory.get(); + graph.addEdge(edge, v1, v2); + edgeWeights.put(edge, weight); + } + + public void testRanker() { + graph = new DirectedSparseMultigraph(); + for (int i = 0; i < 4; i++) { + graph.addVertex(i); } 
+ addEdge(graph, 0, 1, 1.0); + addEdge(graph, 1, 2, 1.0); + addEdge(graph, 2, 3, 0.5); + addEdge(graph, 3, 1, 1.0); + addEdge(graph, 2, 1, 0.5); - public void testRanker() { - graph = new DirectedSparseMultigraph(); - for(int i=0; i<4; i++) { - graph.addVertex(i); - } - addEdge(graph,0,1,1.0); - addEdge(graph,1,2,1.0); - addEdge(graph,2,3,0.5); - addEdge(graph,3,1,1.0); - addEdge(graph,2,1,0.5); + PageRankWithPriors pr = + new PageRank(graph, Functions.forMap(edgeWeights), 0); + pr.evaluate(); - PageRankWithPriors pr = new PageRank(graph, Functions.forMap(edgeWeights), 0); - pr.evaluate(); - - Assert.assertEquals(pr.getVertexScore(0), 0.0, pr.getTolerance()); - Assert.assertEquals(pr.getVertexScore(1), 0.4, pr.getTolerance()); - Assert.assertEquals(pr.getVertexScore(2), 0.4, pr.getTolerance()); - Assert.assertEquals(pr.getVertexScore(3), 0.2, pr.getTolerance()); + Assert.assertEquals(pr.getVertexScore(0), 0.0, pr.getTolerance()); + Assert.assertEquals(pr.getVertexScore(1), 0.4, pr.getTolerance()); + Assert.assertEquals(pr.getVertexScore(2), 0.4, pr.getTolerance()); + Assert.assertEquals(pr.getVertexScore(3), 0.2, pr.getTolerance()); -// Assert.assertTrue(NumericalPrecision.equal(((Ranking)ranker.getRankings().get(0)).rankScore,0.4,.001)); -// Assert.assertTrue(NumericalPrecision.equal(((Ranking)ranker.getRankings().get(1)).rankScore,0.4,.001)); -// Assert.assertTrue(NumericalPrecision.equal(((Ranking)ranker.getRankings().get(2)).rankScore,0.2,.001)); -// Assert.assertTrue(NumericalPrecision.equal(((Ranking)ranker.getRankings().get(3)).rankScore,0,.001)); - } + // Assert.assertTrue(NumericalPrecision.equal(((Ranking)ranker.getRankings().get(0)).rankScore,0.4,.001)); + // Assert.assertTrue(NumericalPrecision.equal(((Ranking)ranker.getRankings().get(1)).rankScore,0.4,.001)); + // Assert.assertTrue(NumericalPrecision.equal(((Ranking)ranker.getRankings().get(2)).rankScore,0.2,.001)); + // 
Assert.assertTrue(NumericalPrecision.equal(((Ranking)ranker.getRankings().get(3)).rankScore,0,.001)); + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestPageRankWithPriors.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestPageRankWithPriors.java index cb6756ce..0357e5c6 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestPageRankWithPriors.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestPageRankWithPriors.java @@ -1,91 +1,86 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. + * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. 
+ */ package edu.uci.ics.jung.algorithms.scoring; +import com.google.common.base.Supplier; +import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; +import edu.uci.ics.jung.graph.DirectedGraph; +import edu.uci.ics.jung.graph.DirectedSparseMultigraph; +import edu.uci.ics.jung.graph.Graph; import java.util.HashSet; import java.util.Set; - import junit.framework.Assert; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import com.google.common.base.Supplier; - -import edu.uci.ics.jung.algorithms.scoring.util.ScoringUtils; -import edu.uci.ics.jung.graph.DirectedGraph; -import edu.uci.ics.jung.graph.DirectedSparseMultigraph; -import edu.uci.ics.jung.graph.Graph; - - -/** - * @author Scott White - */ +/** @author Scott White */ public class TestPageRankWithPriors extends TestCase { -// private Map edgeWeights; - private DirectedGraph graph; - private Supplier edgeFactory; + // private Map edgeWeights; + private DirectedGraph graph; + private Supplier edgeFactory; - public static Test suite() { - return new TestSuite(TestPageRankWithPriors.class); - } + public static Test suite() { + return new TestSuite(TestPageRankWithPriors.class); + } - @Override - protected void setUp() { -// edgeWeights = new HashMap(); - edgeFactory = new Supplier() { - int i=0; - public Integer get() { - return i++; - }}; - } - - private void addEdge(Graph G, Integer v1, Integer v2) - { - Integer edge = edgeFactory.get(); - graph.addEdge(edge, v1, v2); -// edgeWeights.put(edge, weight); - } + @Override + protected void setUp() { + // edgeWeights = new HashMap(); + edgeFactory = + new Supplier() { + int i = 0; - public void testGraphScoring() { - graph = new DirectedSparseMultigraph(); + public Integer get() { + return i++; + } + }; + } - double[] expected_score = new double[]{0.1157, 0.2463, 0.4724, 0.1653}; - - for(int i=0; i<4; i++) { - graph.addVertex(i); - } - addEdge(graph,0,1); - addEdge(graph,1,2); - addEdge(graph,2,3); - 
addEdge(graph,3,0); - addEdge(graph,2,1); + private void addEdge(Graph G, Integer v1, Integer v2) { + Integer edge = edgeFactory.get(); + graph.addEdge(edge, v1, v2); + // edgeWeights.put(edge, weight); + } - Set priors = new HashSet(); - priors.add(2); + public void testGraphScoring() { + graph = new DirectedSparseMultigraph(); - PageRankWithPriors pr = - new PageRankWithPriors(graph, ScoringUtils.getUniformRootPrior(priors), 0.3); - pr.evaluate(); + double[] expected_score = new double[] {0.1157, 0.2463, 0.4724, 0.1653}; - double score_sum = 0; - for (int i = 0; i < graph.getVertexCount(); i++) - { - double score = pr.getVertexScore(i); - Assert.assertEquals(expected_score[i], score, pr.getTolerance()); - score_sum += score; - } - Assert.assertEquals(1.0, score_sum, pr.getTolerance() * graph.getVertexCount()); + for (int i = 0; i < 4; i++) { + graph.addVertex(i); } - - public void testHypergraphScoring() { + addEdge(graph, 0, 1); + addEdge(graph, 1, 2); + addEdge(graph, 2, 3); + addEdge(graph, 3, 0); + addEdge(graph, 2, 1); + + Set priors = new HashSet(); + priors.add(2); + + PageRankWithPriors pr = + new PageRankWithPriors( + graph, ScoringUtils.getUniformRootPrior(priors), 0.3); + pr.evaluate(); + + double score_sum = 0; + for (int i = 0; i < graph.getVertexCount(); i++) { + double score = pr.getVertexScore(i); + Assert.assertEquals(expected_score[i], score, pr.getTolerance()); + score_sum += score; } + Assert.assertEquals(1.0, score_sum, pr.getTolerance() * graph.getVertexCount()); + } + + public void testHypergraphScoring() {} } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestVoltageScore.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestVoltageScore.java index 907bbb6f..325c4469 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestVoltageScore.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/scoring/TestVoltageScore.java @@ -1,79 +1,69 @@ /** - * 
Copyright (c) 2008, The JUNG Authors + * Copyright (c) 2008, The JUNG Authors * - * All rights reserved. + *

All rights reserved. * - * This software is open-source under the BSD license; see either - * "license.txt" or - * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * Created on Jul 14, 2008 - * + *

This software is open-source under the BSD license; see either "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. Created on Jul 14, 2008 */ package edu.uci.ics.jung.algorithms.scoring; +import com.google.common.base.Functions; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; - import junit.framework.TestCase; -import com.google.common.base.Functions; +/** @author jrtom */ +public class TestVoltageScore extends TestCase { + protected Graph g; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; + @Override + public void setUp() { + g = new UndirectedSparseMultigraph(); + for (int i = 0; i < 7; i++) { + g.addVertex(i); + } -/** - * @author jrtom - * - */ -public class TestVoltageScore extends TestCase -{ - protected Graph g; - - @Override - public void setUp() { - g = new UndirectedSparseMultigraph(); - for (int i = 0; i < 7; i++) { - g.addVertex(i); - } + int j = 0; + g.addEdge(j++, 0, 1); + g.addEdge(j++, 0, 2); + g.addEdge(j++, 1, 3); + g.addEdge(j++, 2, 3); + g.addEdge(j++, 3, 4); + g.addEdge(j++, 3, 5); + g.addEdge(j++, 4, 6); + g.addEdge(j++, 5, 6); + } - int j = 0; - g.addEdge(j++,0,1); - g.addEdge(j++,0,2); - g.addEdge(j++,1,3); - g.addEdge(j++,2,3); - g.addEdge(j++,3,4); - g.addEdge(j++,3,5); - g.addEdge(j++,4,6); - g.addEdge(j++,5,6); - } - - public final void testCalculateVoltagesSourceTarget() { - VoltageScorer vr = new VoltageScorer(g, Functions.constant(1), 0, 6); - double[] voltages = {1.0, 0.75, 0.75, 0.5, 0.25, 0.25, 0}; - - vr.evaluate(); - for (int i = 0; i < 7; i++) { - assertEquals(vr.getVertexScore(i), voltages[i], 0.01); - } + public final void testCalculateVoltagesSourceTarget() { + VoltageScorer vr = + new VoltageScorer(g, Functions.constant(1), 0, 6); + double[] voltages = {1.0, 0.75, 0.75, 0.5, 0.25, 0.25, 0}; + + 
vr.evaluate(); + for (int i = 0; i < 7; i++) { + assertEquals(vr.getVertexScore(i), voltages[i], 0.01); } - - public final void testCalculateVoltagesSourcesTargets() - { - Map sources = new HashMap(); - sources.put(0, new Double(1.0)); - sources.put(1, new Double(0.5)); - Set sinks = new HashSet(); - sinks.add(6); - sinks.add(5); - VoltageScorer vr = - new VoltageScorer(g, Functions.constant(1), sources, sinks); - double[] voltages = {1.0, 0.5, 0.66, 0.33, 0.16, 0, 0}; - - vr.evaluate(); - for (int i = 0; i < 7; i++) { - assertEquals(vr.getVertexScore(i), voltages[i], 0.01); - } + } + + public final void testCalculateVoltagesSourcesTargets() { + Map sources = new HashMap(); + sources.put(0, new Double(1.0)); + sources.put(1, new Double(0.5)); + Set sinks = new HashSet(); + sinks.add(6); + sinks.add(5); + VoltageScorer vr = + new VoltageScorer(g, Functions.constant(1), sources, sinks); + double[] voltages = {1.0, 0.5, 0.66, 0.33, 0.16, 0, 0}; + + vr.evaluate(); + for (int i = 0; i < 7; i++) { + assertEquals(vr.getVertexScore(i), voltages[i], 0.01); } + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestBFSDistanceLabeler.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestBFSDistanceLabeler.java index 93375a83..dc846627 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestBFSDistanceLabeler.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestBFSDistanceLabeler.java @@ -1,67 +1,61 @@ /* -* Copyright (c) 2003, The JUNG Authors -* -* All rights reserved. -* -* This software is open-source under the BSD license; see either -* "license.txt" or -* https://github.com/jrtom/jung/blob/master/LICENSE for a description. -*/ + * Copyright (c) 2003, The JUNG Authors + * + * All rights reserved. 
+ * + * This software is open-source under the BSD license; see either + * "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. + */ package edu.uci.ics.jung.algorithms.shortestpath; +import edu.uci.ics.jung.graph.Graph; +import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; import junit.framework.Assert; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; -import edu.uci.ics.jung.algorithms.shortestpath.BFSDistanceLabeler; -import edu.uci.ics.jung.graph.Graph; -import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; -/** - * @author Scott White, adapted to jung2 by Tom Nelson - */ +/** @author Scott White, adapted to jung2 by Tom Nelson */ public class TestBFSDistanceLabeler extends TestCase { - public static Test suite() { - return new TestSuite(TestBFSDistanceLabeler.class); - } - - @Override - protected void setUp() { - - } - - public void test() { - Graph graph = new UndirectedSparseMultigraph(); - for(int i=0; i<6; i++) { - graph.addVertex(i); - } - int j = 0; - graph.addEdge(j++,0,1); - graph.addEdge(j++,0,5); - graph.addEdge(j++,0,3); - graph.addEdge(j++,0,4); - graph.addEdge(j++,1,5); - graph.addEdge(j++,3,4); - graph.addEdge(j++,3,2); - graph.addEdge(j++,5,2); - Number root = 0; - - BFSDistanceLabeler labeler = new BFSDistanceLabeler(); - labeler.labelDistances(graph,root); - - Assert.assertEquals(labeler.getPredecessors(root).size(),0); - Assert.assertEquals(labeler.getPredecessors(1).size(),1); - Assert.assertEquals(labeler.getPredecessors(2).size(),2); - Assert.assertEquals(labeler.getPredecessors(3).size(),1); - Assert.assertEquals(labeler.getPredecessors(4).size(),1); - Assert.assertEquals(labeler.getPredecessors(5).size(),1); - - Assert.assertEquals(labeler.getDistance(graph,0),0); - Assert.assertEquals(labeler.getDistance(graph,1),1); - Assert.assertEquals(labeler.getDistance(graph,2),2); - Assert.assertEquals(labeler.getDistance(graph,3),1); - 
Assert.assertEquals(labeler.getDistance(graph,4),1); - Assert.assertEquals(labeler.getDistance(graph,5),1); - - } -} \ No newline at end of file + public static Test suite() { + return new TestSuite(TestBFSDistanceLabeler.class); + } + + @Override + protected void setUp() {} + + public void test() { + Graph graph = new UndirectedSparseMultigraph(); + for (int i = 0; i < 6; i++) { + graph.addVertex(i); + } + int j = 0; + graph.addEdge(j++, 0, 1); + graph.addEdge(j++, 0, 5); + graph.addEdge(j++, 0, 3); + graph.addEdge(j++, 0, 4); + graph.addEdge(j++, 1, 5); + graph.addEdge(j++, 3, 4); + graph.addEdge(j++, 3, 2); + graph.addEdge(j++, 5, 2); + Number root = 0; + + BFSDistanceLabeler labeler = new BFSDistanceLabeler(); + labeler.labelDistances(graph, root); + + Assert.assertEquals(labeler.getPredecessors(root).size(), 0); + Assert.assertEquals(labeler.getPredecessors(1).size(), 1); + Assert.assertEquals(labeler.getPredecessors(2).size(), 2); + Assert.assertEquals(labeler.getPredecessors(3).size(), 1); + Assert.assertEquals(labeler.getPredecessors(4).size(), 1); + Assert.assertEquals(labeler.getPredecessors(5).size(), 1); + + Assert.assertEquals(labeler.getDistance(graph, 0), 0); + Assert.assertEquals(labeler.getDistance(graph, 1), 1); + Assert.assertEquals(labeler.getDistance(graph, 2), 2); + Assert.assertEquals(labeler.getDistance(graph, 3), 1); + Assert.assertEquals(labeler.getDistance(graph, 4), 1); + Assert.assertEquals(labeler.getDistance(graph, 5), 1); + } +} diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestPrimMinimumSpanningTree.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestPrimMinimumSpanningTree.java index 184a844e..a03afebb 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestPrimMinimumSpanningTree.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestPrimMinimumSpanningTree.java @@ -1,62 +1,61 @@ package 
edu.uci.ics.jung.algorithms.shortestpath; -import junit.framework.TestCase; +import edu.uci.ics.jung.graph.DelegateTree; import edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.DirectedSparseMultigraph; -import edu.uci.ics.jung.graph.DelegateTree; import edu.uci.ics.jung.graph.Tree; import edu.uci.ics.jung.graph.UndirectedGraph; import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; +import junit.framework.TestCase; public class TestPrimMinimumSpanningTree extends TestCase { - - public void testSimpleTree() { - Tree tree = new DelegateTree(); - tree.addVertex("A"); - tree.addEdge(0,"A","B0"); - tree.addEdge(1,"A","B1"); - -// System.err.println("tree = "+tree); - PrimMinimumSpanningTree pmst = - new PrimMinimumSpanningTree(DelegateTree.getFactory()); - -// Graph mst = - pmst.apply(tree); -// System.err.println("mst = "+mst); - -// assertEquals(tree.getVertices(), mst.getVertices()); -// assertEquals(tree.getEdges(), mst.getEdges()); - - } - - public void testDAG() { - DirectedGraph graph = new DirectedSparseMultigraph(); - graph.addVertex("B0"); - graph.addEdge(0, "A", "B0"); - graph.addEdge(1, "A", "B1"); -// System.err.println("graph = "+graph); - PrimMinimumSpanningTree pmst = - new PrimMinimumSpanningTree(DelegateTree.getFactory()); - -// Graph mst = - pmst.apply(graph); -// System.err.println("mst = "+mst); - - } - - public void testUAG() { - UndirectedGraph graph = new UndirectedSparseMultigraph(); - graph.addVertex("B0"); - graph.addEdge(0, "A", "B0"); - graph.addEdge(1, "A", "B1"); -// System.err.println("graph = "+graph); - PrimMinimumSpanningTree pmst = - new PrimMinimumSpanningTree(DelegateTree.getFactory()); - -// Graph mst = - pmst.apply(graph); -// System.err.println("mst = "+mst); - - } + public void testSimpleTree() { + Tree tree = new DelegateTree(); + tree.addVertex("A"); + tree.addEdge(0, "A", "B0"); + tree.addEdge(1, "A", "B1"); + + // System.err.println("tree = "+tree); + PrimMinimumSpanningTree pmst = + new 
PrimMinimumSpanningTree(DelegateTree.getFactory()); + + // Graph mst = + pmst.apply(tree); + // System.err.println("mst = "+mst); + + // assertEquals(tree.getVertices(), mst.getVertices()); + // assertEquals(tree.getEdges(), mst.getEdges()); + + } + + public void testDAG() { + DirectedGraph graph = new DirectedSparseMultigraph(); + graph.addVertex("B0"); + graph.addEdge(0, "A", "B0"); + graph.addEdge(1, "A", "B1"); + // System.err.println("graph = "+graph); + PrimMinimumSpanningTree pmst = + new PrimMinimumSpanningTree(DelegateTree.getFactory()); + + // Graph mst = + pmst.apply(graph); + // System.err.println("mst = "+mst); + + } + + public void testUAG() { + UndirectedGraph graph = new UndirectedSparseMultigraph(); + graph.addVertex("B0"); + graph.addEdge(0, "A", "B0"); + graph.addEdge(1, "A", "B1"); + // System.err.println("graph = "+graph); + PrimMinimumSpanningTree pmst = + new PrimMinimumSpanningTree(DelegateTree.getFactory()); + + // Graph mst = + pmst.apply(graph); + // System.err.println("mst = "+mst); + + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestShortestPath.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestShortestPath.java index 309f2eb6..15a0c01b 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestShortestPath.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestShortestPath.java @@ -4,525 +4,816 @@ */ package edu.uci.ics.jung.algorithms.shortestpath; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.ListIterator; -import java.util.Map; -import java.util.Set; - -import junit.framework.TestCase; - import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Supplier; import com.google.common.collect.BiMap; - import edu.uci.ics.jung.algorithms.util.Indexer; import 
edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import edu.uci.ics.jung.graph.Graph; import edu.uci.ics.jung.graph.UndirectedGraph; import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import java.util.Set; +import junit.framework.TestCase; +/** @author Joshua O'Madadhain */ +public class TestShortestPath extends TestCase { + private DirectedGraph dg; + private UndirectedGraph ug; + // graph based on Weiss, _Data Structures and Algorithm Analysis_, + // 1992, p. 292 + private static int[][] edges = { + {1, 2, 2}, // 0 + {1, 4, 1}, // 1 + {2, 4, 3}, // 2 + {2, 5, 10}, // 3 + {3, 1, 4}, // 4 + {3, 6, 5}, // 5 + {4, 3, 2}, // 6 + {4, 5, 2}, // 7 + {4, 6, 8}, // 8 + {4, 7, 4}, // 9 + {5, 7, 6}, // 10 + {7, 6, 1}, // 11 + {8, 9, 4}, // (12) these three edges define a second connected component + {9, 10, 1}, // 13 + {10, 8, 2} // 14 + }; -/** - * @author Joshua O'Madadhain - */ -public class TestShortestPath extends TestCase -{ - private DirectedGraph dg; - private UndirectedGraph ug; - // graph based on Weiss, _Data Structures and Algorithm Analysis_, - // 1992, p. 
292 - private static int[][] edges = - {{1,2,2}, {1,4,1}, // 0, 1 - {2,4,3}, {2,5,10}, // 2, 3 - {3,1,4}, {3,6,5}, // 4, 5 - {4,3,2}, {4,5,2}, {4,6,8}, {4,7,4}, // 6,7,8,9 - {5,7,6}, // 10 - {7,6,1}, // 11 - {8,9,4}, // (12) these three edges define a second connected component - {9,10,1}, // 13 - {10,8,2}}; // 14 - - private static Integer[][] ug_incomingEdges = + private static Integer[][] ug_incomingEdges = { { - {null, new Integer(0), new Integer(6), new Integer(1), new Integer(7), new Integer(11), new Integer(9), null, null, null}, - {new Integer(0), null, new Integer(6), new Integer(2), new Integer(7), new Integer(11), new Integer(9), null, null, null}, - {new Integer(1), new Integer(2), null, new Integer(6), new Integer(7), new Integer(5), new Integer(9), null, null, null}, - {new Integer(1), new Integer(2), new Integer(6), null, new Integer(7), new Integer(11), new Integer(9), null, null, null}, - {new Integer(1), new Integer(2), new Integer(6), new Integer(7), null, new Integer(11), new Integer(10), null, null, null}, - {new Integer(1), new Integer(2), new Integer(5), new Integer(9), new Integer(10), null, new Integer(11), null, null, null}, - {new Integer(1), new Integer(2), new Integer(5), new Integer(9), new Integer(10), new Integer(11), null, null, null, null}, - {null, null, null, null, null, null, null, null, new Integer(13), new Integer(14)}, - {null, null, null, null, null, null, null, new Integer(14), null, new Integer(13)}, - {null, null, null, null, null, null, null, new Integer(14), new Integer(13), null}, - }; - - private static Integer[][] dg_incomingEdges = - { - {null, new Integer(0), new Integer(6), new Integer(1), new Integer(7), new Integer(11), new Integer(9), null, null, null}, - {new Integer(4), null, new Integer(6), new Integer(2), new Integer(7), new Integer(11), new Integer(9), null, null, null}, - {new Integer(4), new Integer(0), null, new Integer(1), new Integer(7), new Integer(5), new Integer(9), null, null, null}, - {new 
Integer(4), new Integer(0), new Integer(6), null, new Integer(7), new Integer(11), new Integer(9), null, null, null}, - {null, null, null, null, null, new Integer(11), new Integer(10), null, null, null}, - {null, null, null, null, null, null, null, null, null, null}, - {null, null, null, null, null, new Integer(11), null, null, null, null}, - {null, null, null, null, null, null, null, null, new Integer(12), new Integer(13)}, - {null, null, null, null, null, null, null, new Integer(14), null, new Integer(13)}, - {null, null, null, null, null, null, null, new Integer(14), new Integer(12), null} - }; - - private static double[][] dg_distances = + null, + new Integer(0), + new Integer(6), + new Integer(1), + new Integer(7), + new Integer(11), + new Integer(9), + null, + null, + null + }, { - {0, 2, 3, 1, 3, 6, 5, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {9, 0, 5, 3, 5, 8, 7, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {4, 6, 0, 5, 7, 5, 9, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {6, 8, 2, 0, 2, 5, 4, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 0, 7, 6, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 0, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 1, 0, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 
Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 0, 4, 5}, - {Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 3, 0, 1}, - {Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 2, 6, 0} - }; - - private static double[][] ug_distances = + new Integer(0), + null, + new Integer(6), + new Integer(2), + new Integer(7), + new Integer(11), + new Integer(9), + null, + null, + null + }, { - {0, 2, 3, 1, 3, 6, 5, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {2, 0, 5, 3, 5, 8, 7, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {3, 5, 0, 2, 4, 5, 6, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {1, 3, 2, 0, 2, 5, 4, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {3, 5, 4, 2, 0, 7, 6, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {6, 8, 5, 5, 7, 0, 1, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {5, 7, 6, 4, 6, 1, 0, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY}, - {Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 0, 3, 2}, - {Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 3, 0, 1}, - {Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, 2, 1, 0} - }; - - - private static Integer[][] 
shortestPaths1 = + new Integer(1), + new Integer(2), + null, + new Integer(6), + new Integer(7), + new Integer(5), + new Integer(9), + null, + null, + null + }, { - null, - {new Integer(0)}, - {new Integer(1), new Integer(6)}, - {new Integer(1)}, - {new Integer(1), new Integer(7)}, - {new Integer(1), new Integer(9), new Integer(11)}, - {new Integer(1), new Integer(9)}, - null, - null, - null - }; - - private Map,Integer[]> edgeArrays; - - private Map edgeWeights; - - private Function nev; - - private Supplier vertexFactoryDG = - new Supplier() { - int count = 0; - public String get() { - return "V"+count++; - }}; - private Supplier vertexFactoryUG = - new Supplier() { - int count = 0; - public String get() { - return "U"+count++; - }}; - - BiMap did; - BiMap uid; - - @Override - protected void setUp() { - edgeWeights = new HashMap(); - nev = Functions.forMap(edgeWeights); - dg = new DirectedSparseMultigraph(); - for(int i=0; icreate(dg.getVertices(), 1); - Integer[] dg_array = new Integer[edges.length]; - addEdges(dg, did, dg_array); - - ug = new UndirectedSparseMultigraph(); - for(int i=0; icreate(ug.getVertices(),1); -// GraphUtils.addVertices(ug, ug_distances.length); -// Indexer.newIndexer(ug, 1); - Integer[] ug_array = new Integer[edges.length]; - addEdges(ug, uid, ug_array); - - edgeArrays = new HashMap,Integer[]>(); - edgeArrays.put(dg, dg_array); - edgeArrays.put(ug, ug_array); - } - - @Override - protected void tearDown() throws Exception { - } + new Integer(1), + new Integer(2), + new Integer(6), + null, + new Integer(7), + new Integer(11), + new Integer(9), + null, + null, + null + }, + { + new Integer(1), + new Integer(2), + new Integer(6), + new Integer(7), + null, + new Integer(11), + new Integer(10), + null, + null, + null + }, + { + new Integer(1), + new Integer(2), + new Integer(5), + new Integer(9), + new Integer(10), + null, + new Integer(11), + null, + null, + null + }, + { + new Integer(1), + new Integer(2), + new Integer(5), + new Integer(9), 
+ new Integer(10), + new Integer(11), + null, + null, + null, + null + }, + {null, null, null, null, null, null, null, null, new Integer(13), new Integer(14)}, + {null, null, null, null, null, null, null, new Integer(14), null, new Integer(13)}, + {null, null, null, null, null, null, null, new Integer(14), new Integer(13), null}, + }; - public void exceptionTest(Graph g, BiMap indexer, int index) + private static Integer[][] dg_incomingEdges = { { - DijkstraShortestPath dsp = - new DijkstraShortestPath(g, nev); -// Indexer id = Indexer.getIndexer(g); - String start = indexer.inverse().get(index); - Integer e = null; - - String v = "NOT IN GRAPH"; - - try - { - dsp.getDistance(start, v); - fail("getDistance(): illegal destination vertex"); - } - catch (IllegalArgumentException iae) {} - try - { - dsp.getDistance(v, start); - fail("getDistance(): illegal source vertex"); - } - catch (IllegalArgumentException iae) {} - try - { - dsp.getDistanceMap(v, 1); - fail("getDistanceMap(): illegal source vertex"); - } - catch (IllegalArgumentException iae) {} - try - { - dsp.getDistanceMap(start, 0); - fail("getDistanceMap(): too few vertices requested"); - } - catch (IllegalArgumentException iae) {} - try - { - dsp.getDistanceMap(start, g.getVertexCount()+1); - fail("getDistanceMap(): too many vertices requested"); - } - catch (IllegalArgumentException iae) {} + null, + new Integer(0), + new Integer(6), + new Integer(1), + new Integer(7), + new Integer(11), + new Integer(9), + null, + null, + null + }, + { + new Integer(4), + null, + new Integer(6), + new Integer(2), + new Integer(7), + new Integer(11), + new Integer(9), + null, + null, + null + }, + { + new Integer(4), + new Integer(0), + null, + new Integer(1), + new Integer(7), + new Integer(5), + new Integer(9), + null, + null, + null + }, + { + new Integer(4), + new Integer(0), + new Integer(6), + null, + new Integer(7), + new Integer(11), + new Integer(9), + null, + null, + null + }, + {null, null, null, null, null, new 
Integer(11), new Integer(10), null, null, null}, + {null, null, null, null, null, null, null, null, null, null}, + {null, null, null, null, null, new Integer(11), null, null, null, null}, + {null, null, null, null, null, null, null, null, new Integer(12), new Integer(13)}, + {null, null, null, null, null, null, null, new Integer(14), null, new Integer(13)}, + {null, null, null, null, null, null, null, new Integer(14), new Integer(12), null} + }; - try - { - dsp.getIncomingEdge(start, v); - fail("getIncomingEdge(): illegal destination vertex"); - } - catch (IllegalArgumentException iae) {} - try - { - dsp.getIncomingEdge(v, start); - fail("getIncomingEdge(): illegal source vertex"); - } - catch (IllegalArgumentException iae) {} - try - { - dsp.getIncomingEdgeMap(v, 1); - fail("getIncomingEdgeMap(): illegal source vertex"); - } - catch (IllegalArgumentException iae) {} - try - { - dsp.getIncomingEdgeMap(start, 0); - fail("getIncomingEdgeMap(): too few vertices requested"); - } - catch (IllegalArgumentException iae) {} - try - { - dsp.getDistanceMap(start, g.getVertexCount()+1); - fail("getIncomingEdgeMap(): too many vertices requested"); - } - catch (IllegalArgumentException iae) {} - - try - { - // test negative edge weight exception - String v1 = indexer.inverse().get(1); - String v2 = indexer.inverse().get(7); - e = g.getEdgeCount()+1; - g.addEdge(e, v1, v2); - edgeWeights.put(e, -2); -// e.addUserDatum("weight", new Double(-2), UserData.REMOVE); - dsp.reset(); - dsp.getDistanceMap(start); -// for (Iterator it = g.getEdges().iterator(); it.hasNext(); ) -// { -// Edge edge = (Edge)it.next(); -// double weight = ((Number)edge.getUserDatum("weight")).doubleValue(); -// Pair p = edge.getEndpoints(); -// int i = id.getIndex((Vertex)p.getFirst()); -// int j = id.getIndex((Vertex)p.getSecond()); -// System.out.print("(" + i + "," + j + "): " + weight); -// if (weight < 0) -// System.out.print(" *******"); -// System.out.println(); -// } - fail("DijkstraShortestPath 
should not accept negative edge weights"); - } - catch (IllegalArgumentException iae) - { - g.removeEdge(e); - } - } - - public void testDijkstra() + private static double[][] dg_distances = { { - setUp(); - exceptionTest(dg, did, 1); - - setUp(); - exceptionTest(ug, uid, 1); - - setUp(); - getPathTest(dg, did, 1); - - setUp(); - getPathTest(ug, uid, 1); - - for (int i = 1; i <= dg_distances.length; i++) - { - setUp(); - weightedTest(dg, did, i, true); - - setUp(); - weightedTest(dg, did, i, false); - } - - for (int i = 1; i <= ug_distances.length; i++) - { - setUp(); - weightedTest(ug, uid, i, true); - - setUp(); - weightedTest(ug, uid, i, false); - } - + 0, + 2, + 3, + 1, + 3, + 6, + 5, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + 9, + 0, + 5, + 3, + 5, + 8, + 7, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + 4, + 6, + 0, + 5, + 7, + 5, + 9, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + 6, + 8, + 2, + 0, + 2, + 5, + 4, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 0, + 7, + 6, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 0, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 1, + 0, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 
Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 0, + 4, + 5 + }, + { + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 3, + 0, + 1 + }, + { + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 2, + 6, + 0 } + }; - private void getPathTest(Graph g, BiMap indexer, int index) + private static double[][] ug_distances = { { - DijkstraShortestPath dsp = - new DijkstraShortestPath(g, nev); -// Indexer id = Indexer.getIndexer(g); - String start = indexer.inverse().get(index); - Integer[] edge_array = edgeArrays.get(g); - Integer[] incomingEdges1 = null; - if (g instanceof DirectedGraph) - incomingEdges1 = dg_incomingEdges[index-1]; - if (g instanceof UndirectedGraph) - incomingEdges1 = ug_incomingEdges[index-1]; - assertEquals(incomingEdges1.length, g.getVertexCount()); - - // test getShortestPath(start, v) - dsp.reset(); - for (int i = 1; i <= incomingEdges1.length; i++) - { - List shortestPath = dsp.getPath(start, indexer.inverse().get(i)); - Integer[] indices = shortestPaths1[i-1]; - for (ListIterator iter = shortestPath.listIterator(); iter.hasNext(); ) - { - int j = iter.nextIndex(); - Integer e = iter.next(); - if (e != null) - assertEquals(edge_array[indices[j].intValue()], e); - else - assertNull(indices[j]); - } - } + 0, + 2, + 3, + 1, + 3, + 6, + 5, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + 2, + 0, + 5, + 3, + 5, + 8, + 7, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + 3, + 5, + 0, + 2, + 4, + 5, + 6, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + 1, + 3, + 2, + 0, + 2, 
+ 5, + 4, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + 3, + 5, + 4, + 2, + 0, + 7, + 6, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + 6, + 8, + 5, + 5, + 7, + 0, + 1, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + 5, + 7, + 6, + 4, + 6, + 1, + 0, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY + }, + { + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 0, + 3, + 2 + }, + { + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 3, + 0, + 1 + }, + { + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + Double.POSITIVE_INFINITY, + 2, + 1, + 0 } - - private void weightedTest(Graph g, BiMap indexer, int index, boolean cached) { -// Indexer id = Indexer.getIndexer(g); - String start = indexer.inverse().get(index); - double[] distances1 = null; - Integer[] incomingEdges1 = null; - if (g instanceof DirectedGraph) - { - distances1 = dg_distances[index-1]; - incomingEdges1 = dg_incomingEdges[index-1]; - } - if (g instanceof UndirectedGraph) - { - distances1 = ug_distances[index-1]; - incomingEdges1 = ug_incomingEdges[index-1]; - } - assertEquals(distances1.length, g.getVertexCount()); - assertEquals(incomingEdges1.length, g.getVertexCount()); - DijkstraShortestPath dsp = - new DijkstraShortestPath(g, nev, cached); - Integer[] edge_array = edgeArrays.get(g); - - // test getDistance(start, v) - for (int i = 1; i <= distances1.length; i++) { - String v = indexer.inverse().get(i); - Number n = 
dsp.getDistance(start, v); - double d = distances1[i-1]; - double dist; - if (n == null) - dist = Double.POSITIVE_INFINITY; - else - dist = n.doubleValue(); - - assertEquals(d, dist, .001); - } + }; - // test getIncomingEdge(start, v) - dsp.reset(); - for (int i = 1; i <= incomingEdges1.length; i++) - { - String v = indexer.inverse().get(i); - Integer e = dsp.getIncomingEdge(start, v); - if (e != null) - assertEquals(edge_array[incomingEdges1[i-1].intValue()], e); - else - assertNull(incomingEdges1[i-1]); - } - - // test getDistanceMap(v) - dsp.reset(); - Map distances = dsp.getDistanceMap(start); - assertTrue(distances.size() <= g.getVertexCount()); - double d_prev = 0; // smallest possible distance - Set reachable = new HashSet(); - for (Iterator d_iter = distances.keySet().iterator(); d_iter.hasNext(); ) - { - String cur = d_iter.next(); - double d_cur = ((Double)distances.get(cur)).doubleValue(); - assertTrue(d_cur >= d_prev); - - d_prev = d_cur; - int i = indexer.get(cur); - assertEquals(distances1[i-1], d_cur, .001); - reachable.add(cur); - } - // make sure that non-reachable vertices have no entries - for (Iterator v_iter = g.getVertices().iterator(); v_iter.hasNext(); ) - { - String v = v_iter.next(); - assertEquals(reachable.contains(v), distances.keySet().contains(v)); - } - - // test getIncomingEdgeMap(v) - dsp.reset(); - Map incomingEdgeMap = dsp.getIncomingEdgeMap(start); - assertTrue(incomingEdgeMap.size() <= g.getVertexCount()); - for (Iterator e_iter = incomingEdgeMap.keySet().iterator(); e_iter.hasNext(); ) - { - String v = e_iter.next(); - Integer e = incomingEdgeMap.get(v); - int i = indexer.get(v); -// if (e != null) -// { -// Pair endpoints = e.getEndpoints(); -// int j = id.getIndex((Vertex)endpoints.getFirst()); -// int k = id.getIndex((Vertex)endpoints.getSecond()); -// System.out.print(i + ": (" + j + "," + k + "); "); -// } -// else -// System.out.print(i + ": null; "); - if (e != null) - 
assertEquals(edge_array[incomingEdges1[i-1].intValue()], e); - else - assertNull(incomingEdges1[i-1]); - } - - // test getDistanceMap(v, k) - dsp.reset(); - for (int i = 1; i <= distances1.length; i++) - { - distances = dsp.getDistanceMap(start, i); - assertTrue(distances.size() <= i); - d_prev = 0; // smallest possible distance - - reachable.clear(); - for (Iterator d_iter = distances.keySet().iterator(); d_iter.hasNext(); ) - { - String cur = d_iter.next(); - double d_cur = ((Double)distances.get(cur)).doubleValue(); - assertTrue(d_cur >= d_prev); - - d_prev = d_cur; - int j = indexer.get(cur); - - assertEquals(distances1[j-1], d_cur, .001); - reachable.add(cur); - } - for (Iterator v_iter = g.getVertices().iterator(); v_iter.hasNext(); ) - { - String v = v_iter.next(); - assertEquals(reachable.contains(v), distances.keySet().contains(v)); - } + private static Integer[][] shortestPaths1 = { + null, + {new Integer(0)}, + {new Integer(1), new Integer(6)}, + {new Integer(1)}, + {new Integer(1), new Integer(7)}, + {new Integer(1), new Integer(9), new Integer(11)}, + {new Integer(1), new Integer(9)}, + null, + null, + null + }; + + private Map, Integer[]> edgeArrays; + + private Map edgeWeights; + + private Function nev; + + private Supplier vertexFactoryDG = + new Supplier() { + int count = 0; + + public String get() { + return "V" + count++; } - - // test getIncomingEdgeMap(v, k) - dsp.reset(); - for (int i = 1; i <= incomingEdges1.length; i++) - { - incomingEdgeMap = dsp.getIncomingEdgeMap(start, i); - assertTrue(incomingEdgeMap.size() <= i); - for (Iterator e_iter = incomingEdgeMap.keySet().iterator(); e_iter.hasNext(); ) - { - String v = e_iter.next(); - Integer e = incomingEdgeMap.get(v); - int j = indexer.get(v); - if (e != null) - assertEquals(edge_array[incomingEdges1[j-1].intValue()], e); - else - assertNull(incomingEdges1[j-1]); - } + }; + private Supplier vertexFactoryUG = + new Supplier() { + int count = 0; + + public String get() { + return "U" + 
count++; } + }; + + BiMap did; + BiMap uid; + + @Override + protected void setUp() { + edgeWeights = new HashMap(); + nev = Functions.forMap(edgeWeights); + dg = new DirectedSparseMultigraph(); + for (int i = 0; i < dg_distances.length; i++) { + dg.addVertex(vertexFactoryDG.get()); } - - public void addEdges(Graph g, BiMap indexer, Integer[] edge_array) - { - -// Indexer id = Indexer.getIndexer(g); - for (int i = 0; i < edges.length; i++) - { - int[] edge = edges[i]; - Integer e = i; - g.addEdge(i, indexer.inverse().get(edge[0]), indexer.inverse().get(edge[1])); - edge_array[i] = e; - if (edge.length > 2) { - edgeWeights.put(e, edge[2]); -// e.addUserDatum("weight", edge[2]); - } - } + did = Indexer.create(dg.getVertices(), 1); + Integer[] dg_array = new Integer[edges.length]; + addEdges(dg, did, dg_array); + + ug = new UndirectedSparseMultigraph(); + for (int i = 0; i < ug_distances.length; i++) { + ug.addVertex(vertexFactoryUG.get()); + } + uid = Indexer.create(ug.getVertices(), 1); + // GraphUtils.addVertices(ug, ug_distances.length); + // Indexer.newIndexer(ug, 1); + Integer[] ug_array = new Integer[edges.length]; + addEdges(ug, uid, ug_array); + + edgeArrays = new HashMap, Integer[]>(); + edgeArrays.put(dg, dg_array); + edgeArrays.put(ug, ug_array); + } + + @Override + protected void tearDown() throws Exception {} + + public void exceptionTest(Graph g, BiMap indexer, int index) { + DijkstraShortestPath dsp = new DijkstraShortestPath(g, nev); + // Indexer id = Indexer.getIndexer(g); + String start = indexer.inverse().get(index); + Integer e = null; + + String v = "NOT IN GRAPH"; + + try { + dsp.getDistance(start, v); + fail("getDistance(): illegal destination vertex"); + } catch (IllegalArgumentException iae) { + } + try { + dsp.getDistance(v, start); + fail("getDistance(): illegal source vertex"); + } catch (IllegalArgumentException iae) { + } + try { + dsp.getDistanceMap(v, 1); + fail("getDistanceMap(): illegal source vertex"); + } catch 
(IllegalArgumentException iae) { + } + try { + dsp.getDistanceMap(start, 0); + fail("getDistanceMap(): too few vertices requested"); + } catch (IllegalArgumentException iae) { + } + try { + dsp.getDistanceMap(start, g.getVertexCount() + 1); + fail("getDistanceMap(): too many vertices requested"); + } catch (IllegalArgumentException iae) { + } + + try { + dsp.getIncomingEdge(start, v); + fail("getIncomingEdge(): illegal destination vertex"); + } catch (IllegalArgumentException iae) { + } + try { + dsp.getIncomingEdge(v, start); + fail("getIncomingEdge(): illegal source vertex"); + } catch (IllegalArgumentException iae) { + } + try { + dsp.getIncomingEdgeMap(v, 1); + fail("getIncomingEdgeMap(): illegal source vertex"); + } catch (IllegalArgumentException iae) { + } + try { + dsp.getIncomingEdgeMap(start, 0); + fail("getIncomingEdgeMap(): too few vertices requested"); + } catch (IllegalArgumentException iae) { + } + try { + dsp.getDistanceMap(start, g.getVertexCount() + 1); + fail("getIncomingEdgeMap(): too many vertices requested"); + } catch (IllegalArgumentException iae) { + } + + try { + // test negative edge weight exception + String v1 = indexer.inverse().get(1); + String v2 = indexer.inverse().get(7); + e = g.getEdgeCount() + 1; + g.addEdge(e, v1, v2); + edgeWeights.put(e, -2); + // e.addUserDatum("weight", new Double(-2), UserData.REMOVE); + dsp.reset(); + dsp.getDistanceMap(start); + // for (Iterator it = g.getEdges().iterator(); it.hasNext(); ) + // { + // Edge edge = (Edge)it.next(); + // double weight = ((Number)edge.getUserDatum("weight")).doubleValue(); + // Pair p = edge.getEndpoints(); + // int i = id.getIndex((Vertex)p.getFirst()); + // int j = id.getIndex((Vertex)p.getSecond()); + // System.out.print("(" + i + "," + j + "): " + weight); + // if (weight < 0) + // System.out.print(" *******"); + // System.out.println(); + // } + fail("DijkstraShortestPath should not accept negative edge weights"); + } catch (IllegalArgumentException iae) { + 
g.removeEdge(e); + } + } + + public void testDijkstra() { + setUp(); + exceptionTest(dg, did, 1); + + setUp(); + exceptionTest(ug, uid, 1); + + setUp(); + getPathTest(dg, did, 1); + + setUp(); + getPathTest(ug, uid, 1); + + for (int i = 1; i <= dg_distances.length; i++) { + setUp(); + weightedTest(dg, did, i, true); + + setUp(); + weightedTest(dg, did, i, false); + } + + for (int i = 1; i <= ug_distances.length; i++) { + setUp(); + weightedTest(ug, uid, i, true); + + setUp(); + weightedTest(ug, uid, i, false); + } + } + + private void getPathTest(Graph g, BiMap indexer, int index) { + DijkstraShortestPath dsp = new DijkstraShortestPath(g, nev); + // Indexer id = Indexer.getIndexer(g); + String start = indexer.inverse().get(index); + Integer[] edge_array = edgeArrays.get(g); + Integer[] incomingEdges1 = null; + if (g instanceof DirectedGraph) incomingEdges1 = dg_incomingEdges[index - 1]; + if (g instanceof UndirectedGraph) incomingEdges1 = ug_incomingEdges[index - 1]; + assertEquals(incomingEdges1.length, g.getVertexCount()); + + // test getShortestPath(start, v) + dsp.reset(); + for (int i = 1; i <= incomingEdges1.length; i++) { + List shortestPath = dsp.getPath(start, indexer.inverse().get(i)); + Integer[] indices = shortestPaths1[i - 1]; + for (ListIterator iter = shortestPath.listIterator(); iter.hasNext(); ) { + int j = iter.nextIndex(); + Integer e = iter.next(); + if (e != null) assertEquals(edge_array[indices[j].intValue()], e); + else assertNull(indices[j]); + } + } + } + + private void weightedTest( + Graph g, BiMap indexer, int index, boolean cached) { + // Indexer id = Indexer.getIndexer(g); + String start = indexer.inverse().get(index); + double[] distances1 = null; + Integer[] incomingEdges1 = null; + if (g instanceof DirectedGraph) { + distances1 = dg_distances[index - 1]; + incomingEdges1 = dg_incomingEdges[index - 1]; + } + if (g instanceof UndirectedGraph) { + distances1 = ug_distances[index - 1]; + incomingEdges1 = ug_incomingEdges[index - 1]; + } 
+ assertEquals(distances1.length, g.getVertexCount()); + assertEquals(incomingEdges1.length, g.getVertexCount()); + DijkstraShortestPath dsp = + new DijkstraShortestPath(g, nev, cached); + Integer[] edge_array = edgeArrays.get(g); + + // test getDistance(start, v) + for (int i = 1; i <= distances1.length; i++) { + String v = indexer.inverse().get(i); + Number n = dsp.getDistance(start, v); + double d = distances1[i - 1]; + double dist; + if (n == null) dist = Double.POSITIVE_INFINITY; + else dist = n.doubleValue(); + + assertEquals(d, dist, .001); + } + + // test getIncomingEdge(start, v) + dsp.reset(); + for (int i = 1; i <= incomingEdges1.length; i++) { + String v = indexer.inverse().get(i); + Integer e = dsp.getIncomingEdge(start, v); + if (e != null) assertEquals(edge_array[incomingEdges1[i - 1].intValue()], e); + else assertNull(incomingEdges1[i - 1]); + } + + // test getDistanceMap(v) + dsp.reset(); + Map distances = dsp.getDistanceMap(start); + assertTrue(distances.size() <= g.getVertexCount()); + double d_prev = 0; // smallest possible distance + Set reachable = new HashSet(); + for (Iterator d_iter = distances.keySet().iterator(); d_iter.hasNext(); ) { + String cur = d_iter.next(); + double d_cur = ((Double) distances.get(cur)).doubleValue(); + assertTrue(d_cur >= d_prev); + + d_prev = d_cur; + int i = indexer.get(cur); + assertEquals(distances1[i - 1], d_cur, .001); + reachable.add(cur); + } + // make sure that non-reachable vertices have no entries + for (Iterator v_iter = g.getVertices().iterator(); v_iter.hasNext(); ) { + String v = v_iter.next(); + assertEquals(reachable.contains(v), distances.keySet().contains(v)); + } + + // test getIncomingEdgeMap(v) + dsp.reset(); + Map incomingEdgeMap = dsp.getIncomingEdgeMap(start); + assertTrue(incomingEdgeMap.size() <= g.getVertexCount()); + for (Iterator e_iter = incomingEdgeMap.keySet().iterator(); e_iter.hasNext(); ) { + String v = e_iter.next(); + Integer e = incomingEdgeMap.get(v); + int i = 
indexer.get(v); + // if (e != null) + // { + // Pair endpoints = e.getEndpoints(); + // int j = id.getIndex((Vertex)endpoints.getFirst()); + // int k = id.getIndex((Vertex)endpoints.getSecond()); + // System.out.print(i + ": (" + j + "," + k + "); "); + // } + // else + // System.out.print(i + ": null; "); + if (e != null) assertEquals(edge_array[incomingEdges1[i - 1].intValue()], e); + else assertNull(incomingEdges1[i - 1]); + } + + // test getDistanceMap(v, k) + dsp.reset(); + for (int i = 1; i <= distances1.length; i++) { + distances = dsp.getDistanceMap(start, i); + assertTrue(distances.size() <= i); + d_prev = 0; // smallest possible distance + + reachable.clear(); + for (Iterator d_iter = distances.keySet().iterator(); d_iter.hasNext(); ) { + String cur = d_iter.next(); + double d_cur = ((Double) distances.get(cur)).doubleValue(); + assertTrue(d_cur >= d_prev); + + d_prev = d_cur; + int j = indexer.get(cur); + + assertEquals(distances1[j - 1], d_cur, .001); + reachable.add(cur); + } + for (Iterator v_iter = g.getVertices().iterator(); v_iter.hasNext(); ) { + String v = v_iter.next(); + assertEquals(reachable.contains(v), distances.keySet().contains(v)); + } + } + + // test getIncomingEdgeMap(v, k) + dsp.reset(); + for (int i = 1; i <= incomingEdges1.length; i++) { + incomingEdgeMap = dsp.getIncomingEdgeMap(start, i); + assertTrue(incomingEdgeMap.size() <= i); + for (Iterator e_iter = incomingEdgeMap.keySet().iterator(); e_iter.hasNext(); ) { + String v = e_iter.next(); + Integer e = incomingEdgeMap.get(v); + int j = indexer.get(v); + if (e != null) assertEquals(edge_array[incomingEdges1[j - 1].intValue()], e); + else assertNull(incomingEdges1[j - 1]); + } + } + } + + public void addEdges( + Graph g, BiMap indexer, Integer[] edge_array) { + + // Indexer id = Indexer.getIndexer(g); + for (int i = 0; i < edges.length; i++) { + int[] edge = edges[i]; + Integer e = i; + g.addEdge(i, indexer.inverse().get(edge[0]), indexer.inverse().get(edge[1])); + edge_array[i] = 
e; + if (edge.length > 2) { + edgeWeights.put(e, edge[2]); + // e.addUserDatum("weight", edge[2]); + } } + } - -// private class UserDataEdgeWeight implements NumberEdgeValue -// { -// private Object ud_key; -// -// public UserDataEdgeWeight(Object key) -// { -// ud_key = key; -// } -// -// /** -// * @see edu.uci.ics.jung.utils.NumberEdgeValue#getNumber(edu.uci.ics.jung.graph.ArchetypeEdge) -// */ -// public Number getNumber(ArchetypeEdge e) -// { -// return (Number)e.getUserDatum(ud_key); -// } -// -// /** -// * @see edu.uci.ics.jung.utils.NumberEdgeValue#setNumber(edu.uci.ics.jung.graph.ArchetypeEdge, java.lang.Number) -// */ -// public void setNumber(ArchetypeEdge e, Number n) -// { -// throw new UnsupportedOperationException(); -// } -// } + // private class UserDataEdgeWeight implements NumberEdgeValue + // { + // private Object ud_key; + // + // public UserDataEdgeWeight(Object key) + // { + // ud_key = key; + // } + // + // /** + // * @see edu.uci.ics.jung.utils.NumberEdgeValue#getNumber(edu.uci.ics.jung.graph.ArchetypeEdge) + // */ + // public Number getNumber(ArchetypeEdge e) + // { + // return (Number)e.getUserDatum(ud_key); + // } + // + // /** + // * @see edu.uci.ics.jung.utils.NumberEdgeValue#setNumber(edu.uci.ics.jung.graph.ArchetypeEdge, java.lang.Number) + // */ + // public void setNumber(ArchetypeEdge e, Number n) + // { + // throw new UnsupportedOperationException(); + // } + // } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestUnweightedShortestPath.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestUnweightedShortestPath.java index 89533ff9..2a975a29 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestUnweightedShortestPath.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/shortestpath/TestUnweightedShortestPath.java @@ -4,92 +4,87 @@ */ package edu.uci.ics.jung.algorithms.shortestpath; -import junit.framework.Assert; 
-import junit.framework.Test; -import junit.framework.TestCase; -import junit.framework.TestSuite; - import com.google.common.base.Supplier; import com.google.common.collect.BiMap; - import edu.uci.ics.jung.algorithms.util.Indexer; import edu.uci.ics.jung.graph.DirectedGraph; import edu.uci.ics.jung.graph.DirectedSparseMultigraph; import edu.uci.ics.jung.graph.UndirectedGraph; import edu.uci.ics.jung.graph.UndirectedSparseMultigraph; +import junit.framework.Assert; +import junit.framework.Test; +import junit.framework.TestCase; +import junit.framework.TestSuite; -/** - * @author Scott White - */ -public class TestUnweightedShortestPath extends TestCase -{ - private Supplier vertexFactory = - new Supplier() { - int count = 0; - public String get() { - return "V"+count++; - }}; - - private Supplier edgeFactory = - new Supplier() { - int count = 0; - public Integer get() { - return count++; - }}; - BiMap id; +/** @author Scott White */ +public class TestUnweightedShortestPath extends TestCase { + private Supplier vertexFactory = + new Supplier() { + int count = 0; + + public String get() { + return "V" + count++; + } + }; + + private Supplier edgeFactory = + new Supplier() { + int count = 0; + + public Integer get() { + return count++; + } + }; + BiMap id; + + @Override + protected void setUp() {} - @Override - protected void setUp() { + public static Test suite() { + return new TestSuite(TestUnweightedShortestPath.class); + } + + public void testUndirected() { + UndirectedGraph ug = new UndirectedSparseMultigraph(); + for (int i = 0; i < 5; i++) { + ug.addVertex(vertexFactory.get()); } - public static Test suite() - { - return new TestSuite(TestUnweightedShortestPath.class); - } - - public void testUndirected() { - UndirectedGraph ug = - new UndirectedSparseMultigraph(); - for(int i=0; i<5; i++) { - ug.addVertex(vertexFactory.get()); - } - id = Indexer.create(ug.getVertices()); + id = Indexer.create(ug.getVertices()); + + // GraphUtils.addVertices(ug,5); + // Indexer 
id = Indexer.getIndexer(ug); + ug.addEdge(edgeFactory.get(), id.inverse().get(0), id.inverse().get(1)); + ug.addEdge(edgeFactory.get(), id.inverse().get(1), id.inverse().get(2)); + ug.addEdge(edgeFactory.get(), id.inverse().get(2), id.inverse().get(3)); + ug.addEdge(edgeFactory.get(), id.inverse().get(0), id.inverse().get(4)); + ug.addEdge(edgeFactory.get(), id.inverse().get(4), id.inverse().get(3)); -// GraphUtils.addVertices(ug,5); -// Indexer id = Indexer.getIndexer(ug); - ug.addEdge(edgeFactory.get(), id.inverse().get(0), id.inverse().get(1)); - ug.addEdge(edgeFactory.get(), id.inverse().get(1), id.inverse().get(2)); - ug.addEdge(edgeFactory.get(), id.inverse().get(2), id.inverse().get(3)); - ug.addEdge(edgeFactory.get(), id.inverse().get(0), id.inverse().get(4)); - ug.addEdge(edgeFactory.get(), id.inverse().get(4), id.inverse().get(3)); - - UnweightedShortestPath usp = - new UnweightedShortestPath(ug); - Assert.assertEquals(usp.getDistance(id.inverse().get(0),id.inverse().get(3)).intValue(),2); - Assert.assertEquals((usp.getDistanceMap(id.inverse().get(0)).get(id.inverse().get(3))).intValue(),2); - Assert.assertNull(usp.getIncomingEdgeMap(id.inverse().get(0)).get(id.inverse().get(0))); - Assert.assertNotNull(usp.getIncomingEdgeMap(id.inverse().get(0)).get(id.inverse().get(3))); - } - - public void testDirected() { - DirectedGraph dg = - new DirectedSparseMultigraph(); - for(int i=0; i<5; i++) { - dg.addVertex(vertexFactory.get()); - } - id = Indexer.create(dg.getVertices()); - dg.addEdge(edgeFactory.get(), id.inverse().get(0), id.inverse().get(1)); - dg.addEdge(edgeFactory.get(), id.inverse().get(1), id.inverse().get(2)); - dg.addEdge(edgeFactory.get(), id.inverse().get(2), id.inverse().get(3)); - dg.addEdge(edgeFactory.get(), id.inverse().get(0), id.inverse().get(4)); - dg.addEdge(edgeFactory.get(), id.inverse().get(4), id.inverse().get(3)); - dg.addEdge(edgeFactory.get(), id.inverse().get(3), id.inverse().get(0)); - - UnweightedShortestPath usp = - new 
UnweightedShortestPath(dg); - Assert.assertEquals(usp.getDistance(id.inverse().get(0),id.inverse().get(3)).intValue(),2); - Assert.assertEquals((usp.getDistanceMap(id.inverse().get(0)).get(id.inverse().get(3))).intValue(),2); - Assert.assertNull(usp.getIncomingEdgeMap(id.inverse().get(0)).get(id.inverse().get(0))); - Assert.assertNotNull(usp.getIncomingEdgeMap(id.inverse().get(0)).get(id.inverse().get(3))); + UnweightedShortestPath usp = new UnweightedShortestPath(ug); + Assert.assertEquals(usp.getDistance(id.inverse().get(0), id.inverse().get(3)).intValue(), 2); + Assert.assertEquals( + (usp.getDistanceMap(id.inverse().get(0)).get(id.inverse().get(3))).intValue(), 2); + Assert.assertNull(usp.getIncomingEdgeMap(id.inverse().get(0)).get(id.inverse().get(0))); + Assert.assertNotNull(usp.getIncomingEdgeMap(id.inverse().get(0)).get(id.inverse().get(3))); + } + + public void testDirected() { + DirectedGraph dg = new DirectedSparseMultigraph(); + for (int i = 0; i < 5; i++) { + dg.addVertex(vertexFactory.get()); + } + id = Indexer.create(dg.getVertices()); + dg.addEdge(edgeFactory.get(), id.inverse().get(0), id.inverse().get(1)); + dg.addEdge(edgeFactory.get(), id.inverse().get(1), id.inverse().get(2)); + dg.addEdge(edgeFactory.get(), id.inverse().get(2), id.inverse().get(3)); + dg.addEdge(edgeFactory.get(), id.inverse().get(0), id.inverse().get(4)); + dg.addEdge(edgeFactory.get(), id.inverse().get(4), id.inverse().get(3)); + dg.addEdge(edgeFactory.get(), id.inverse().get(3), id.inverse().get(0)); - } + UnweightedShortestPath usp = new UnweightedShortestPath(dg); + Assert.assertEquals(usp.getDistance(id.inverse().get(0), id.inverse().get(3)).intValue(), 2); + Assert.assertEquals( + (usp.getDistanceMap(id.inverse().get(0)).get(id.inverse().get(3))).intValue(), 2); + Assert.assertNull(usp.getIncomingEdgeMap(id.inverse().get(0)).get(id.inverse().get(0))); + Assert.assertNotNull(usp.getIncomingEdgeMap(id.inverse().get(0)).get(id.inverse().get(3))); + } } diff --git 
a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/util/NotRandom.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/util/NotRandom.java index 9d311e6e..01779fd1 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/util/NotRandom.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/util/NotRandom.java @@ -3,58 +3,50 @@ import java.util.Random; /** - * A decidedly non-random extension of {@code Random} that may be useful - * for testing random algorithms that accept an instance of {@code Random} - * as a parameter. This algorithm maintains internal counters which are - * incremented after each call, and returns values which are functions of - * those counter values. Thus the output is not only deterministic (as is - * necessarily true of all software with no externalities) but precisely - * predictable in distribution. - * + * A decidedly non-random extension of {@code Random} that may be useful for testing random + * algorithms that accept an instance of {@code Random} as a parameter. This algorithm maintains + * internal counters which are incremented after each call, and returns values which are functions + * of those counter values. Thus the output is not only deterministic (as is necessarily true of all + * software with no externalities) but precisely predictable in distribution. + * * @author Joshua O'Madadhain */ @SuppressWarnings("serial") -public class NotRandom extends Random -{ - private int i = 0; - private int d = 0; - private int size = 100; - - /** - * Creates an instance with the specified sample size. - * @param size the sample size - */ - public NotRandom(int size) - { - this.size = size; - } - - /** - * Returns the post-incremented value of the internal counter modulo n. - */ - @Override - public int nextInt(int n) - { - return i++ % n; - } - - /** - * Returns the post-incremented value of the internal counter modulo - * {@code size}, divided by {@code size}. 
- */ - @Override - public double nextDouble() - { - return (d++ % size) / (double)size; - } - - /** - * Returns the post-incremented value of the internal counter modulo - * {@code size}, divided by {@code size}. - */ - @Override - public float nextFloat() - { - return (d++ % size) / (float)size; - } +public class NotRandom extends Random { + private int i = 0; + private int d = 0; + private int size = 100; + + /** + * Creates an instance with the specified sample size. + * + * @param size the sample size + */ + public NotRandom(int size) { + this.size = size; + } + + /** Returns the post-incremented value of the internal counter modulo n. */ + @Override + public int nextInt(int n) { + return i++ % n; + } + + /** + * Returns the post-incremented value of the internal counter modulo {@code size}, divided by + * {@code size}. + */ + @Override + public double nextDouble() { + return (d++ % size) / (double) size; + } + + /** + * Returns the post-incremented value of the internal counter modulo {@code size}, divided by + * {@code size}. + */ + @Override + public float nextFloat() { + return (d++ % size) / (float) size; + } } diff --git a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/util/TestWeightedChoice.java b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/util/TestWeightedChoice.java index 3e89b3d9..a6b7ef2a 100644 --- a/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/util/TestWeightedChoice.java +++ b/jung-algorithms/src/test/java/edu/uci/ics/jung/algorithms/util/TestWeightedChoice.java @@ -1,78 +1,61 @@ /** - * Copyright (c) 2009, The JUNG Authors + * Copyright (c) 2009, The JUNG Authors * - * All rights reserved. + *

All rights reserved. * - * This software is open-source under the BSD license; see either - * "license.txt" or - * https://github.com/jrtom/jung/blob/master/LICENSE for a description. - * Created on Jan 13, 2009 - * + *

This software is open-source under the BSD license; see either "license.txt" or + * https://github.com/jrtom/jung/blob/master/LICENSE for a description. Created on Jan 13, 2009 */ package edu.uci.ics.jung.algorithms.util; import java.util.HashMap; import java.util.Map; - import junit.framework.TestCase; -/** - * @author jrtom - * - */ -public class TestWeightedChoice extends TestCase -{ - private WeightedChoice weighted_choice; - private Map item_weights = new HashMap(); - private Map item_counts = new HashMap(); - - @Override - public void tearDown() - { - item_weights.clear(); - item_counts.clear(); - } +/** @author jrtom */ +public class TestWeightedChoice extends TestCase { + private WeightedChoice weighted_choice; + private Map item_weights = new HashMap(); + private Map item_counts = new HashMap(); + + @Override + public void tearDown() { + item_weights.clear(); + item_counts.clear(); + } + + private void initializeWeights(double[] weights) { + item_weights.put("a", weights[0]); + item_weights.put("b", weights[1]); + item_weights.put("c", weights[2]); + item_weights.put("d", weights[3]); + + for (String key : item_weights.keySet()) item_counts.put(key, 0); + } + + private void runWeightedChoice() { + weighted_choice = new WeightedChoice(item_weights, new NotRandom(100)); + + int max_iterations = 10000; + for (int i = 0; i < max_iterations; i++) { + String item = weighted_choice.nextItem(); + int count = item_counts.get(item); + item_counts.put(item, count + 1); + } + + for (String key : item_weights.keySet()) + assertEquals((int) (item_weights.get(key) * max_iterations), item_counts.get(key).intValue()); + } + + public void testUniform() { + initializeWeights(new double[] {0.25, 0.25, 0.25, 0.25}); - private void initializeWeights(double[] weights) - { - item_weights.put("a", weights[0]); - item_weights.put("b", weights[1]); - item_weights.put("c", weights[2]); - item_weights.put("d", weights[3]); - - for (String key : item_weights.keySet()) - 
item_counts.put(key, 0); + runWeightedChoice(); + } - } + public void testNonUniform() { + initializeWeights(new double[] {0.45, 0.10, 0.13, 0.32}); - private void runWeightedChoice() - { - weighted_choice = new WeightedChoice(item_weights, new NotRandom(100)); - - int max_iterations = 10000; - for (int i = 0; i < max_iterations; i++) - { - String item = weighted_choice.nextItem(); - int count = item_counts.get(item); - item_counts.put(item, count+1); - } - - for (String key : item_weights.keySet()) - assertEquals((int)(item_weights.get(key) * max_iterations), - item_counts.get(key).intValue()); - } - - public void testUniform() - { - initializeWeights(new double[]{0.25, 0.25, 0.25, 0.25}); - - runWeightedChoice(); - } - - public void testNonUniform() - { - initializeWeights(new double[]{0.45, 0.10, 0.13, 0.32}); - - runWeightedChoice(); - } + runWeightedChoice(); + } }