$ ls -lh hominae.txt hominae_renumbered.*
-rw-rw-r--. 1 aardvark aardvark 1.5G Mar 4 13:48 hominae_renumbered.idx
-rw-r--r--. 1 aardvark aardvark 25G Mar 4 13:17 hominae_renumbered.txt
-rw-rw-r--. 1 aardvark aardvark 25G Feb 28 01:41 hominae.txt
####
$ perl browseruk2_searcher.pl \
hominae_renumbered.txt \
hominae_renumbered.idx > bukrun;
tail -n1 bukrun
Lookup averaged 0.012486 seconds/record
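# For reference, a minimal sketch of the kind of indexed lookup being timed
# above -- it assumes the .idx file is simply packed 64-bit byte offsets, one
# per record, which may not match what browseruk2_searcher.pl actually writes:
use strict;
use warnings;
use Time::HiRes qw(time);

my ($data_file, $idx_file) = @ARGV;
my $lookups = 1000;

open my $dat, '<',     $data_file or die "$data_file: $!";
open my $idx, '<:raw', $idx_file  or die "$idx_file: $!";
my $nrec = (-s $idx_file) / 8;        # one 8-byte offset per record (assumption)

my $start = time;
for (1 .. $lookups) {
    my $recno  = int rand $nrec;
    seek $idx, $recno * 8, 0;         # jump to this record's slot in the index
    read $idx, my $buf, 8;
    my $offset = unpack 'Q', $buf;    # native 64-bit unsigned offset (assumption)
    seek $dat, $offset, 0;
    my $line = <$dat>;                # fetch the record itself from the data file
}
printf "Lookup averaged %f seconds/record\n", (time - $start) / $lookups;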
####
# I treated a join against 1000 random ids as equivalent to 1000 searches;
# hm is the table the 25 GB of data was loaded into (load shown below)
$ echo "select *
from (select (random()*131899400)::int from generate_series(1,1000)) as r(n)
join hm on r.n = hm.id;" | psql -q | tail -n 1
Time: 19555.717 ms
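# That works out to roughly 19555.717 ms / 1000 ≈ 19.6 ms per lookup, versus
# about 12.5 ms per lookup for the flat-file index run above.
#
# To see how the planner handles the 1000-key join (not run here, shown only
# as a sketch), the same query can be wrapped in EXPLAIN:
$ echo "explain (analyze, buffers)
select *
from (select (random()*131899400)::int from generate_series(1,1000)) as r(n)
join hm on r.n = hm.id;" | psql -q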
####
$ time < hominae.txt perl -ne '
chomp;
my @arr = split(/;/, $_, 2);
print $arr[1], "\n";
' \
| psql -c "
drop table if exists hm;
create table if not exists hm (line text, id serial primary key);
copy hm (line) from stdin with (format csv, delimiter E'\t', header FALSE);
";
testdb=# \dti+ hm*
                   List of relations
 Schema |  Name   | Type  |  Owner   | Table |  Size
--------+---------+-------+----------+-------+---------
 public | hm      | table | aardvark |       | 29 GB
 public | hm_pkey | index | aardvark | hm    | 2825 MB
(2 rows)
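####
# A quick sanity check on the load (hypothetical, not run here): the row
# count should come back around the 131,899,400 used as the random() upper
# bound above, and max(id) should equal the row count since id is a serial
# assigned at load time.
$ echo "select count(*) as n_rows, max(id) as max_id from hm;" | psql -q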