From a26981a2669555a61b8d986a888708f72d986100 Mon Sep 17 00:00:00 2001
From: Andrew Guschin
Date: Thu, 10 Oct 2024 19:33:13 +0400
Subject: add batched insert

---
 graph-checker/.cargo/config.toml |  5 -----
 graph-checker/src/graph.rs       | 37 +++++++++++++++++----------------
 graph-checker/src/main.rs        | 45 +++++++++++++++++++++++++---------------
 3 files changed, 47 insertions(+), 40 deletions(-)
 delete mode 100644 graph-checker/.cargo/config.toml

diff --git a/graph-checker/.cargo/config.toml b/graph-checker/.cargo/config.toml
deleted file mode 100644
index 0236928..0000000
--- a/graph-checker/.cargo/config.toml
+++ /dev/null
@@ -1,5 +0,0 @@
-[source.crates-io]
-replace-with = "vendored-sources"
-
-[source.vendored-sources]
-directory = "vendor"
diff --git a/graph-checker/src/graph.rs b/graph-checker/src/graph.rs
index bb0fb03..cf3dbc0 100644
--- a/graph-checker/src/graph.rs
+++ b/graph-checker/src/graph.rs
@@ -1,4 +1,5 @@
-use std::collections::{HashMap, HashSet};
+// use std::collections::{HashMap, HashSet};
+use std::collections::HashSet;
 use std::fmt;
 
 #[derive(Clone, PartialEq, Eq)]
@@ -7,23 +8,23 @@ pub struct Graph {
     pub matrix: Vec<Vec<bool>>,
 }
 
-pub struct GraphProfile {
-    g6: String,
-    stats: HashMap<String, bool>,
-}
-
-impl GraphProfile {
-    pub fn new(g6: &String) -> Self {
-        Self {
-            g6: g6.clone(),
-            stats: HashMap::new(),
-        }
-    }
-
-    pub fn insert(&mut self, key: String, value: bool) {
-        self.stats.insert(key, value);
-    }
-}
+// pub struct GraphProfile {
+//     g6: String,
+//     stats: HashMap<String, bool>,
+// }
+//
+// impl GraphProfile {
+//     pub fn new(g6: &String) -> Self {
+//         Self {
+//             g6: g6.clone(),
+//             stats: HashMap::new(),
+//         }
+//     }
+//
+//     pub fn insert(&mut self, key: String, value: bool) {
+//         self.stats.insert(key, value);
+//     }
+// }
 
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct Cutset {
diff --git a/graph-checker/src/main.rs b/graph-checker/src/main.rs
index 2f3de7a..f38724c 100644
--- a/graph-checker/src/main.rs
+++ b/graph-checker/src/main.rs
@@ -1,4 +1,4 @@
-use sqlx::migrate::MigrateDatabase;
+use sqlx::{migrate::MigrateDatabase, QueryBuilder, Sqlite};
 use std::time::Instant;
 use tokio;
 
@@ -18,38 +18,49 @@ async fn main() -> Result<(), sqlx::Error> {
         sqlx::Sqlite::create_database(&database_url).await?;
     }
     let db = sqlx::SqlitePool::connect(&database_url).await?;
-    let _ = sqlx::query!(
+    let err = sqlx::query(
         "CREATE TABLE IF NOT EXISTS graphs (g6 VARCHAR NOT NULL, ind_dom INT NOT NULL, forced_geod INT NOT NULL);"
     )
     .execute(&db)
     .await;
+    if let Err(e) = err {
+        println!("Error while creating table: {e}");
+    }
 
     let gi = GengIterator::new(9);
 
-    let start = Instant::now();
-    println!("Started");
-    const BATCH_SIZE: usize = 10000;
+    let start = Instant::now();
+    const BATCH_SIZE: usize = 1000;
 
     let mut count = 0;
     loop {
         let graphs = gi.take(BATCH_SIZE);
         let batch = Instant::now();
         let tasks: Vec<_> = graphs
-            .map(|g| {
-                let db = db.clone();
-                tokio::spawn(async move {
-                    let (g6, ind_dom, fg) = compute::dominating_numbers(g).await;
-                    let ind_dom = ind_dom.unwrap_or(0);
-                    let fg = fg.unwrap_or(0);
-                    let _ = sqlx::query!(
-                        "INSERT INTO graphs (g6, ind_dom, forced_geod) VALUES (?, ?, ?);",
-                        g6, ind_dom, fg
-                    ).execute(&db).await;
-                })
-            })
+            .map(|g| tokio::spawn(compute::dominating_numbers(g)))
             .collect();
         let part = futures::future::join_all(tasks).await;
         let sz = part.len();
+
+        let mut query = QueryBuilder::<Sqlite>::new(
+            "INSERT INTO graphs (g6, ind_dom, forced_geod) ",
+        );
+        query.push_values(part, |mut b, res| {
+            match res {
+                Ok((g6, ind_dom, fg)) => {
+                    b.push_bind(g6)
+                        .push_bind(ind_dom.unwrap_or(0))
+                        .push_bind(fg.unwrap_or(0));
+                }
+                Err(_) => println!("Unable to push into query"),
+            };
+        });
+
+        let err = query.build().execute(&db).await;
+        if let Err(e) = err {
+            println!("Error while inserting: {e}");
+        }
+
        count += sz;
        println!(
            "Counted {count} in {}s",
-- 
cgit v1.2.3