#!/usr/bin/perl
use strict;
use warnings;
use Data::Dumper;    # core module; the original used CPAN's Data::Dump qw(pp)

# The data is not strictly time sequential.  As a first step, gather
# the rows into records keyed on team name and location, using a HoA
# (Hash of Array).  An extra hash dimension could be used for the
# location instead, but a combined "name-location" key is simplest.
my %team_city_data;
while (<DATA>) {
    next if /^\s*$/;    # skip blank lines
    chomp;

    # Field 1 is the team name, field 4 the location.
    my ($team_name, $location) = (split /,/, $_)[1, 4];
    push @{ $team_city_data{"$team_name-$location"} }, $_;
}

print "BEFORE the sort\n";
print Dumper(\%team_city_data), "\n";

# The timestamp in field [3] can be sorted with a simple string
# compare because of its fixed-width, zero-padded format.  This is an
# important property when designing easily parsable and sortable
# date/time formats.  After sorting, the earliest and latest times for
# each team/location combination are simply the first and last
# elements of each array.
foreach my $name_location (keys %team_city_data) {
    @{ $team_city_data{$name_location} } = sort {
        my ($time_a) = (split /,/, $a)[3];
        my ($time_b) = (split /,/, $b)[3];
        $time_a cmp $time_b;
    } @{ $team_city_data{$name_location} };
}

print "AFTER the sort\n";
print Dumper(\%team_city_data), "\n";

# Instead of just printing the first and last dates, a date/time
# module (or one of the built-in time functions) could be used to
# compute the time difference.  The "tricky part" is getting this
# far; the rest is left to the reader.
foreach my $name_location (keys %team_city_data) {
    print "first: $team_city_data{$name_location}[0]\n";
    print "last : $team_city_data{$name_location}[-1]\n";
    print "\n";
}

=pod

Final output looks like this (group order varies with hash ordering;
run yourself to see the BEFORE and AFTER dumps):

first: 4,HAWK,2011-11-21 10:36:26.000,2011-11-21 10:45:11.823,Los Angeles,,,
last : 5,HAWK,2011-11-21 10:36:26.000,2011-11-21 12:41:17.763,Los Angeles,,,

first: 6,HAWK,2011-11-21 10:41:12.000,2011-11-21 10:55:08.393,San Francisco,,,
last : 8,HAWK,2011-11-21 10:41:12.000,2011-11-22 11:09:13.907,San Francisco,,,

first: 3,BEAR,2011-11-21 08:49:16.000,2011-11-21 08:49:19.987,San Francisco,,,
last : 2,BEAR,2011-11-21 08:49:16.000,2011-11-21 12:50:31.550,San Francisco,,,

=cut

__DATA__
1,BEAR,2011-11-21 08:49:16.000,2011-11-21 08:53:13.910,San Francisco,,,
2,BEAR,2011-11-21 08:49:16.000,2011-11-21 12:50:31.550,San Francisco,,,
3,BEAR,2011-11-21 08:49:16.000,2011-11-21 08:49:19.987,San Francisco,,,
4,HAWK,2011-11-21 10:36:26.000,2011-11-21 10:45:11.823,Los Angeles,,,
5,HAWK,2011-11-21 10:36:26.000,2011-11-21 12:41:17.763,Los Angeles,,,
6,HAWK,2011-11-21 10:41:12.000,2011-11-21 10:55:08.393,San Francisco,,,
7,HAWK,2011-11-21 10:41:12.000,2011-11-21 15:46:24.707,San Francisco,,,
8,HAWK,2011-11-21 10:41:12.000,2011-11-22 11:09:13.907,San Francisco,,,

In reply to Re: Need help get longest timediff by Marshall
in thread Need help get longest timediff by britney

Title:
Use:  <p> text here (a paragraph) </p>
and:  <code> code here </code>
to format your post, it's "PerlMonks-approved HTML":



  • Posts are HTML formatted. Put <p> </p> tags around your paragraphs. Put <code> </code> tags around your code and data!
  • Titles consisting of a single word are discouraged, and in most cases are disallowed outright.
  • Read Where should I post X? if you're not absolutely sure you're posting in the right place.
  • Please read these before you post!
  • Posts may use any of the Perl Monks Approved HTML tags:
    a, abbr, b, big, blockquote, br, caption, center, col, colgroup, dd, del, details, div, dl, dt, em, font, h1, h2, h3, h4, h5, h6, hr, i, ins, li, ol, p, pre, readmore, small, span, spoiler, strike, strong, sub, summary, sup, table, tbody, td, tfoot, th, thead, tr, tt, u, ul, wbr
  • You may need to use entities for some characters, as follows. (Exception: Within code tags, you can put the characters literally.)
            For:     Use:
    & &amp;
    < &lt;
    > &gt;
    [ &#91;
    ] &#93;
  • Link using PerlMonks shortcuts! What shortcuts can I use for linking?
  • See Writeup Formatting Tips and other pages linked from there for more info.