Maybe something as simple as this is all you need? It loads a dictionary into a hash as a dataset, and can service 100,000 requests from each of 4 concurrent clients at a rate of ~40,000 requests per second.
Note: This is a cut-down version of a server that expects each client to make many requests via a persistent connection. If your clients would only make a single request per connection, I'd use a thread-pool architecture instead, but I'd expect the throughput to be at least as good.
#! perl -slw
# Minimal threaded TCP key/value server.
#
# Loads a dictionary from the files named on the command line (one word
# per line) into %DB, mapping word => line number, then accepts client
# connections on SERVERIP:SERVERPORT.  Each persistent connection is
# serviced by its own detached thread; clients send "FETCH <word>" and
# receive the word's line number back.
use strict;
use threads ( stack_size => 4096 );
use threads::shared;    # required for the :shared attribute on %DB below
use IO::Socket;

use constant {
    SERVERIP   => '127.0.0.1',
    SERVERPORT => 3000,
    MAXBUF     => 4096,
};

# Render a packed sockaddr_in as "ip:port" for log messages.
sub s2S {
    my( $p, $h ) = sockaddr_in( $_[ 0 ] );
    $h = inet_ntoa( $h );
    return "$h:$p";
}

# Shared across threads; written only during the load below, read-only
# thereafter, so no locking is needed when serving requests.
my %DB :shared;
chomp, $DB{ $_ } = $. while <>;
close *ARGV;

my $lsn = IO::Socket::INET->new(
    LocalHost => SERVERIP,
    LocalPort => SERVERPORT,
    Reuse     => 1,
    Listen    => SOMAXCONN,
) or die $!;

print "Listening...";

while( my $client = $lsn->accept ) {
    # One detached worker thread per persistent client connection.
    async {
        while( 1 ) {
            $client->recv( my $in, MAXBUF );

            # A zero-length read means the peer closed the connection.
            unless( length $in ) {
                print "Disconnected from ", s2S $client->peername;
                shutdown $client, 2;
                close $client;
                last;
            }

            print "Received $in from ", s2S $client->peername;

            my( $cmd, @args ) = split ' ', $in;
            if( $cmd eq 'FETCH' ) {
                # Guard against unknown keys: sending undef would emit a
                # warning under -w and transmit an empty datagram.
                my $val = $DB{ $args[ 0 ] };
                $client->send( defined $val ? $val : 'Not found' );
            }
            else {
                $client->send( 'Bad command' );
            }
        }
    }->detach;
}

# Keep the main thread alive so the detached workers keep running.
sleep 1e9;
In reply to Re: Bid data but need fast response time
by BrowserUk
in thread Bid data but need fast response time
by Anonymous Monk
| For: | Use:    |
| &    | `&amp;` |
| <    | `&lt;`  |
| >    | `&gt;`  |
| [    | `&#91;` |
| ]    | `&#93;` |