#!/usr/bin/perl
# Stream pseudo-random hex strings to stdout at high throughput by deriving
# 1023 extra 64-char hex chunks from every 32 bytes of real entropy.
#
# Originally piped through pv to measure throughput:
# open my $pipe, '|-', 'pv -perlt | external-command' or die $!;
#
# Background: https://stackoverflow.com/questions/63172427/why-does-perl-qx-hang-in-mojoliciouslite-but-not-in-an-ordinary-program/63178400#63178400

use strict;
use warnings;
use feature 'say';    # required: the original used say without enabling it

main() unless caller;    # modulino: run the stream loop only when executed directly

# derive_strings($buf)
#
# Given a chunk of random bytes, return every derived hex-string variant:
#   2 (negate bits) x 2 (reverse bit order) x 2 (swap nibble order) x
#   2 (reverse hex string) x L (all rotations of the L-char hex string).
# For a 32-byte chunk that is 1024 strings of 64 hex chars each. Deriving
# many chunks from one read reduces entropy consumption and increases
# throughput.
sub derive_strings {
    my ($buf) = @_;
    my @variants;

    # Negate bits
    for my $bytes ($buf, ~$buf) {
        # Reverse bit order within the chunk
        for my $bits ($bytes, pack('b*', unpack 'B*', $bytes)) {
            # Swap nibble order (H* = high nibble first, h* = low nibble first)
            for my $hex (unpack('H*', $bits), unpack('h*', $bits)) {
                # Reverse hex string
                for my $str ($hex, scalar reverse $hex) {
                    push @variants, $str;
                    # All non-trivial rotations of the hex string.
                    # Generalized from the original's hard-coded 1 .. 63 so
                    # chunk sizes other than 32 bytes also work.
                    push @variants, substr($str, $_) . substr($str, 0, $_)
                        for 1 .. length($str) - 1;
                }
            }
        }
    }
    return @variants;
}

# main()
#
# Read 32-byte chunks from /dev/urandom forever, printing every derived
# variant to stdout (or a pipe, if re-enabled above), one per line.
sub main {
    my $pipe = *STDOUT{IO};

    my $qfn = '/dev/urandom';
    open my $fh, '<:raw', $qfn or die "$qfn: $!";

    while (1) {
        # Read exactly 32 bytes; sysread may return short reads, so loop
        # until the buffer is full.
        my ($num, $buf) = (32, '');
        while ($num) {
            my $rv = sysread($fh, $buf, $num, length $buf) // die "$qfn: $!";
            die "$qfn: Premature EOF\n" unless $rv;
            $num -= $rv;
        }

        say {$pipe} $_ for derive_strings($buf);
    }
    close $pipe;    # unreachable (infinite loop above); kept from the original
}
In reply to Increasing throughput of random data by Anonymous Monk
For: | Use:
&    | &amp;
<    | &lt;
>    | &gt;
[    | &#91;
]    | &#93;