I have a problem with concurrent inserts into a different SQLite database for each thread, and I don't understand how to solve it or whether it is a limitation.
If I use only one thread, the process takes 22 seconds (559512 for 1324 databases, ~36 MB).
If I use 10 threads, the process takes 1 minute 20 seconds...
If I use 32 threads, the process takes 3 minutes 43 seconds...
The script runs on a server with 128 GB of RAM and 32 cores, and there are no CPU or memory peaks.
Thanks for your help. Here is the relevant code:

    use threads;
    use Thread::Queue;

    # $dbfile, $uniq_values, $data_ref, $data_dpt, $data_g_c, $data_c and the
    # sqlite_connect / create_* / insert_* helpers are defined elsewhere in the script.
    our $THREADS = 32;

    sub worker {
        my ( $Qwork, $Qresults ) = @_;
        my $tid = threads->tid;

        while ( my $item = $Qwork->dequeue ) {
            my $result;
            my ( $M, $m ) = @{$item};
            my $dbfile_connect = qq{${dbfile}_${M}_${m}};
            my $sqlite_dbh = sqlite_connect( $dbfile_connect, '' );

            create_table_dpt( \$sqlite_dbh, \$table_dpt_sqlite, \@{$data_dpt} );
            create_tmp_table( \$sqlite_dbh, \$table_a_sqlite, \$table_d_sqlite, \%{$uniq_values} );
            insert_a_tmp_table( \$sqlite_dbh, \$table_a_sqlite, \$table_d_sqlite,
                                \@{ $data_ref->{$M}->{$m} }, \%{$data_g_c}, \%{$data_c} );

            $sqlite_dbh->disconnect();
            $Qresults->enqueue( \%{$result} );
            print "$M $m finished\n";
        }
        $Qresults->enqueue(undef);    ## Signal this thread is finished
    }

    my $Qwork    = Thread::Queue->new;
    my $Qresults = Thread::Queue->new;

    my @pool = map { threads->create( \&worker, $Qwork, $Qresults ) } 1 .. $THREADS;

    foreach my $M_m ( sort { $uniq_values->{M_id_top}{$b} <=> $uniq_values->{M_id_top}{$a} }
                      keys %{ $uniq_values->{M_id_top} } )
    {
        my ( $M, $m ) = split( '_', $M_m );
        $Qwork->enqueue( [ $M, $m ] );
    }

    $Qwork->enqueue( (undef) x $THREADS );
    $_->join for @pool;
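For anyone who wants to try the setup without the rest of my script, here is a minimal, self-contained sketch of the same pool pattern using DBI/DBD::SQLite directly. The database file names, table, and row data are placeholders, and wrapping each batch in one transaction is an assumption about what sqlite_connect and the insert helpers are supposed to do, not a copy of them:

    use strict;
    use warnings;
    use threads;
    use Thread::Queue;
    use DBI;

    my $THREADS = 4;                       # hypothetical pool size for the sketch
    my $Qwork   = Thread::Queue->new;

    sub worker {
        my ($Qwork) = @_;
        while ( my $item = $Qwork->dequeue ) {
            my ( $M, $m ) = @{$item};

            # One private database file per (M, m) work item, opened only in this thread.
            my $dbh = DBI->connect( "dbi:SQLite:dbname=db_${M}_${m}.sqlite",
                                    '', '', { RaiseError => 1, AutoCommit => 1 } );
            $dbh->do('CREATE TABLE IF NOT EXISTS t (id INTEGER, val TEXT)');

            # All inserts for this database inside a single transaction; with
            # per-statement autocommit, every INSERT forces its own sync to disk.
            $dbh->begin_work;
            my $sth = $dbh->prepare('INSERT INTO t (id, val) VALUES (?, ?)');
            $sth->execute( $_, "row $_" ) for 1 .. 1000;
            $dbh->commit;

            $dbh->disconnect;
        }
    }

    my @pool = map { threads->create( \&worker, $Qwork ) } 1 .. $THREADS;
    $Qwork->enqueue( [ $_, 0 ] ) for 1 .. 20;      # 20 placeholder (M, m) work items
    $Qwork->enqueue( (undef) x $THREADS );         # one undef per worker to end it
    $_->join for @pool;

In this sketch each worker only ever touches its own database file and its own handle, so there should be no cross-thread locking on the SQLite side.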