author	Andrew Guschin <guschin@altlinux.org>	2024-10-10 19:33:13 +0400
committer	Andrew Guschin <guschin@altlinux.org>	2024-10-10 19:33:13 +0400
commit	a26981a2669555a61b8d986a888708f72d986100 (patch)
tree	349307a95f4d3538ab0570291ec7c141cfa0bdbf /graph-checker/src
parent	5d2b2b40b0cb0baef70934a9e6fa46fd6ab674bb (diff)
add batched insert
Diffstat (limited to 'graph-checker/src')
-rw-r--r--	graph-checker/src/graph.rs	37
-rw-r--r--	graph-checker/src/main.rs	45
2 files changed, 47 insertions(+), 35 deletions(-)
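For context on the pattern this commit switches to, the sketch below (not part of the commit; names and signatures are illustrative) shows a minimal standalone batched INSERT with sqlx's QueryBuilder, assuming an SqlitePool handle `db`, the `graphs` table created in main.rs, and row tuples of (g6, ind_dom, forced_geod):

// Minimal sketch of the batched-insert pattern adopted in main.rs below
// (assumption: sqlx with the "sqlite" runtime feature; not the author's code).
use sqlx::{QueryBuilder, Sqlite, SqlitePool};

async fn insert_batch(
    db: &SqlitePool,
    rows: Vec<(String, i64, i64)>, // (g6, ind_dom, forced_geod)
) -> Result<(), sqlx::Error> {
    // push_values on an empty iterator would leave the statement without a
    // VALUES list, so skip the round trip entirely.
    if rows.is_empty() {
        return Ok(());
    }
    // Note the trailing space: push_values appends "VALUES (?, ?, ?), ..." right after it.
    let mut query = QueryBuilder::<Sqlite>::new(
        "INSERT INTO graphs (g6, ind_dom, forced_geod) ",
    );
    query.push_values(rows, |mut b, (g6, ind_dom, fg)| {
        b.push_bind(g6).push_bind(ind_dom).push_bind(fg);
    });
    // One multi-row INSERT instead of one statement per graph.
    query.build().execute(db).await?;
    Ok(())
}

One constraint worth keeping in mind (and a plausible reason the batch size in the diff is modest): SQLite caps the number of bind parameters per statement, so with three binds per row the batch size cannot grow arbitrarily.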
diff --git a/graph-checker/src/graph.rs b/graph-checker/src/graph.rs
index bb0fb03..cf3dbc0 100644
--- a/graph-checker/src/graph.rs
+++ b/graph-checker/src/graph.rs
@@ -1,4 +1,5 @@
-use std::collections::{HashMap, HashSet};
+// use std::collections::{HashMap, HashSet};
+use std::collections::HashSet;
use std::fmt;
#[derive(Clone, PartialEq, Eq)]
@@ -7,23 +8,23 @@ pub struct Graph {
pub matrix: Vec<Vec<u32>>,
}
-pub struct GraphProfile {
- g6: String,
- stats: HashMap<String, bool>,
-}
-
-impl GraphProfile {
- pub fn new(g6: &String) -> Self {
- Self {
- g6: g6.clone(),
- stats: HashMap::new(),
- }
- }
-
- pub fn insert(&mut self, key: String, value: bool) {
- self.stats.insert(key, value);
- }
-}
+// pub struct GraphProfile {
+// g6: String,
+// stats: HashMap<String, bool>,
+// }
+//
+// impl GraphProfile {
+// pub fn new(g6: &String) -> Self {
+// Self {
+// g6: g6.clone(),
+// stats: HashMap::new(),
+// }
+// }
+//
+// pub fn insert(&mut self, key: String, value: bool) {
+// self.stats.insert(key, value);
+// }
+// }
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Cutset {
diff --git a/graph-checker/src/main.rs b/graph-checker/src/main.rs
index 2f3de7a..f38724c 100644
--- a/graph-checker/src/main.rs
+++ b/graph-checker/src/main.rs
@@ -1,4 +1,4 @@
-use sqlx::migrate::MigrateDatabase;
+use sqlx::{migrate::MigrateDatabase, QueryBuilder, Sqlite};
use std::time::Instant;
use tokio;
@@ -18,38 +18,49 @@ async fn main() -> Result<(), sqlx::Error> {
sqlx::Sqlite::create_database(&database_url).await?;
}
let db = sqlx::SqlitePool::connect(&database_url).await?;
- let _ = sqlx::query!(
+ let err = sqlx::query(
"CREATE TABLE IF NOT EXISTS graphs (g6 VARCHAR NOT NULL, ind_dom INT NOT NULL, forced_geod INT NOT NULL);"
)
.execute(&db)
.await;
+ if let Err(e) = err {
+ println!("Error while creating table: {e}");
+ }
let gi = GengIterator::new(9);
- let start = Instant::now();
-
println!("Started");
- const BATCH_SIZE: usize = 10000;
+ let start = Instant::now();
+ const BATCH_SIZE: usize = 1000;
let mut count = 0;
loop {
let graphs = gi.take(BATCH_SIZE);
let batch = Instant::now();
let tasks: Vec<_> = graphs
- .map(|g| {
- let db = db.clone();
- tokio::spawn(async move {
- let (g6, ind_dom, fg) = compute::dominating_numbers(g).await;
- let ind_dom = ind_dom.unwrap_or(0);
- let fg = fg.unwrap_or(0);
- let _ = sqlx::query!(
- "INSERT INTO graphs (g6, ind_dom, forced_geod) VALUES (?, ?, ?);",
- g6, ind_dom, fg
- ).execute(&db).await;
- })
- })
+ .map(|g| tokio::spawn(compute::dominating_numbers(g)))
.collect();
let part = futures::future::join_all(tasks).await;
let sz = part.len();
+
+ let mut query = QueryBuilder::<Sqlite>::new(
+ "INSERT INTO graphs (g6, ind_dom, forced_geod) ",
+ );
+ query.push_values(part, |mut b, res| {
+ match res {
+ Ok((g6, ind_dom, fg)) => {
+ b.push_bind(g6)
+ .push_bind(ind_dom.unwrap_or(0))
+ .push_bind(fg.unwrap_or(0));
+ }
+ Err(_) => println!("Unable to push into query"),
+ };
+ });
+
+ let err = query.build().execute(&db).await;
+ if let Err(e) = err {
+ println!("Error while inserting: {e}");
+ }
+
count += sz;
println!(
"Counted {count} in {}s",