Skip to content

Commit

Permalink
Benchmark tweaks (#1654)
Browse files Browse the repository at this point in the history
* clean up the loading code

* expose num_shards in graph constructor and use it in the benchmark to test the different options

* clean up imports
  • Loading branch information
ljeub-pometry authored Jun 12, 2024
1 parent fdb75da commit be0c85b
Show file tree
Hide file tree
Showing 2 changed files with 52 additions and 33 deletions.
65 changes: 33 additions & 32 deletions raphtory-benchmark/src/main.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
use chrono::DateTime;
use clap::{ArgAction, Parser};
use csv::StringRecord;
use flate2::read::GzDecoder;
Expand Down Expand Up @@ -50,6 +49,10 @@ struct Args {
/// Debug to print more info to the screen
#[arg(long, action=ArgAction::SetTrue)]
debug: bool,

/// Set the number of locks for the node and edge storage
#[arg(long)]
num_shards: Option<usize>,
}

fn main() {
Expand Down Expand Up @@ -85,6 +88,7 @@ fn main() {
let to_column = args.to_column;
let time_column = args.time_column;
let download = args.download;
let num_shards = args.num_shards;

if download {
let url = "https://osf.io/download/nbq6h/";
Expand Down Expand Up @@ -139,38 +143,35 @@ fn main() {
println!("Running setup...");
let mut now = Instant::now();
// Iterate over the CSV records
// (deleted side of the diff — the pre-change loading code)
// Build the graph by streaming CSV records and adding one edge per record.
let g = {
let g = Graph::new();
CsvLoader::new(file_path)
.set_header(header)
.set_delimiter(&delimiter)
.load_rec_into_graph(&g, |generic_loader: StringRecord, g: &Graph| {
// Copy the source/destination node ids out of the record.
// NOTE(review): unwrap panics on a short/malformed row — assumes well-formed CSV.
let src_id = generic_loader
.get(from_column)
.map(|s| s.to_owned())
.unwrap();
let dst_id = generic_loader.get(to_column).map(|s| s.to_owned()).unwrap();
// Default edge time: 1 second after the Unix epoch (used when no time column).
let mut edge_time = DateTime::from_timestamp(1, 0).unwrap().naive_utc();
if time_column != -1 {
// -1 is the "no time column" sentinel; otherwise the cell is parsed
// as epoch milliseconds and converted to a naive UTC datetime.
edge_time = DateTime::from_timestamp_millis(
generic_loader
.get(time_column as usize)
.unwrap()
.parse()
.unwrap(),
)
.unwrap()
.naive_utc();
}
if debug {
println!("Adding edge {} -> {} at time {}", src_id, dst_id, edge_time);
}
g.add_edge(edge_time, src_id, dst_id, NO_PROPS, None)
.expect("Failed to add edge");
})
.expect("Failed to load graph from CSV data files");
g
// Construct the graph, optionally overriding the number of storage shards
// (locks) used for node and edge storage via the --num-shards CLI flag.
let g = match num_shards {
Some(num_shards) => {
println!("Constructing graph with {num_shards} shards.");
Graph::new_with_shards(num_shards)
}
// No override: use the default shard count chosen by Graph::new().
None => Graph::new(),
};
// Stream the CSV and add one edge per record.
CsvLoader::new(file_path)
.set_header(header)
.set_delimiter(&delimiter)
.load_rec_into_graph(&g, |generic_loader: StringRecord, g: &Graph| {
// NOTE(review): unwrap panics on rows missing these columns — assumes well-formed input.
let src_id = generic_loader.get(from_column).unwrap();
let dst_id = generic_loader.get(to_column).unwrap();
// -1 is the "no time column" sentinel; otherwise the cell is parsed as an
// integer timestamp (presumably epoch millis, as in the pre-change code —
// TODO confirm against the dataset).
let edge_time = if time_column != -1 {
generic_loader
.get(time_column as usize)
.unwrap()
.parse()
.unwrap()
} else {
// Fixed placeholder time when the data carries no timestamps.
1i64
};
if debug {
println!("Adding edge {} -> {} at time {}", src_id, dst_id, edge_time);
}
g.add_edge(edge_time, src_id, dst_id, NO_PROPS, None)
.expect("Failed to add edge");
})
.expect("Failed to load graph from CSV data files");
println!("Setup took {} seconds", now.elapsed().as_secs_f64());

if debug {
Expand Down
20 changes: 19 additions & 1 deletion raphtory/src/db/graph/graph.rs
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ impl InheritMutationOps for Graph {}
impl InheritViewOps for Graph {}

impl Graph {
/// Create a new graph with the specified number of shards
/// Create a new graph
///
/// Returns:
///
Expand All @@ -156,6 +156,14 @@ impl Graph {
Self(Arc::new(InternalGraph::default()))
}

/// Create a new graph with the specified number of storage shards (locks)
/// for the node and edge storage.
///
/// `num_shards` controls how many locks the underlying storage is split
/// across; see the benchmark's `--num-shards` flag for how this is tuned.
///
/// Returns:
///
/// A raphtory graph
pub fn new_with_shards(num_shards: usize) -> Self {
Self(Arc::new(InternalGraph::new(num_shards)))
}
/// Wrap an already-shared internal graph in the public `Graph` handle
/// (crate-internal constructor; does not copy the underlying storage).
pub(crate) fn from_internal_graph(internal_graph: Arc<InternalGraph>) -> Self {
Self(internal_graph)
}
Expand Down Expand Up @@ -2903,4 +2911,14 @@ mod db_tests {
]
);
}

#[test]
fn num_locks_same_as_threads() {
    // The default constructor should size the node storage's shard (lock)
    // count from the rayon thread pool it runs under: a 5-thread pool must
    // yield exactly 5 node-storage shards.
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(5)
        .build()
        .unwrap();
    // `install` runs the constructor inside the 5-thread pool. Pass
    // `Graph::new` directly rather than wrapping it in a redundant closure
    // (clippy::redundant_closure).
    let graph = pool.install(Graph::new);
    assert_eq!(graph.0.inner().storage.nodes.data.len(), 5);
}
}

0 comments on commit be0c85b

Please sign in to comment.