#!/usr/bin/perl
use strict; # https://perlmonks.org/?node_id=11132653
use warnings;

#use File::Basename;

my ($nameA, $nameB) = qw( data.A data.B ); # FIXME just for testing

## filename
#my ($nameA,$pathA,$suffixA) = fileparse($ARGV[0],'\.[^\.]*');
#my ($nameB,$pathB,$suffixB) = fileparse($ARGV[1],'\.[^\.]*');

my $nameC = $nameA =~ s/\.\K.*/dif/r;

# read data
open(dataA, $nameA) or die " cannot open/read file $nameA:$!\n";
my @headerA;
push @headerA, $_ = <dataA> for 1 .. 2;
my @multi_arrayA;
push @multi_arrayA, [split(' ', $_)] for <dataA>;
#pop @multi_arrayA;
pop @multi_arrayA; # trash header
close dataA;

open(dataB, $nameB) or die " cannot open/read file $nameB:$!\n";
my @headerB;
push @headerB, $_ = <dataB> for 1 .. 2;
my @multi_arrayB;
push @multi_arrayB, [split(' ', $_)] for <dataB>;
#pop @multi_arrayB;
pop @multi_arrayB; # trash header
close dataB;

# modify data
my @multi_arrayC;
my ($avgX,$avgY,$avgZ,$difF); #u_cor_T pi_T_ts TFP eta_is_T_ts
for my $i (0..$#multi_arrayA) {
    $avgX = ( $multi_arrayA[$i]->[0] + $multi_arrayB[$i]->[0] ) / 2.;
    $avgY = ( $multi_arrayA[$i]->[1] + $multi_arrayB[$i]->[1] ) / 2.;
    $avgZ = ( $multi_arrayA[$i]->[2] + $multi_arrayB[$i]->[2] ) / 2.;
    $difF = $multi_arrayA[$i]->[3] - $multi_arrayB[$i]->[3];
#   push( $multi_arrayC[$i]->[0], $avgX );
#   push( $multi_arrayC[$i]->[1], $avgY );
#   push( $multi_arrayC[$i]->[2], $avgZ );
#   push( $multi_arrayC[$i]->[3], $difF );
    $multi_arrayC[$i]->[0] = $avgX;
    $multi_arrayC[$i]->[1] = $avgY;
    $multi_arrayC[$i]->[2] = $avgZ;
    $multi_arrayC[$i]->[3] = $difF;
}

use Data::Dump 'dd'; dd \@multi_arrayA, \@multi_arrayB, $nameC, \@multi_arrayC;

# write data
open(dataC, ">". $nameC) or die " cannot open/read file $nameC:$!\n";
print dataC join(" ", map { sprintf "%E", $_ } @$_),"\n" for @multi_arrayC;
close dataC;

system "echo following is output file $nameC; cat $nameC"; # FIXME testing

## input
#__dataA__
#variables
#units
#1.0 1.0 1.0 4.0 9.99999
#2.0 2.0 2.0 4.0 9.99999
#3.0 3.0 3.0 4.0 9.99999
#
#__dataB__
#variables
#units
#3.0 3.0 3.0 5.0 9.99999
#2.0 2.0 2.0 5.0 9.99999
#1.0 1.0 1.0 5.0 9.99999
#
## output (expected)
#__dataC__
#variables
#units
#2.00000E0 2.00000E0 2.00000E0 1.00000E0
#2.00000E0 2.00000E0 2.00000E0 1.00000E0
#2.00000E0 2.00000E0 2.00000E0 1.00000E0
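A side note before the output: the filename derivation $nameA =~ s/\.\K.*/dif/r keeps everything up to and including the dot (\K excludes the already-matched part from the replacement) and, via the /r flag (perl 5.14+), returns the modified copy without touching $nameA. A minimal demo, with the file name hard-coded as an assumption:

# demo only: \K keeps the dot, /r returns a copy
my $nameA = 'data.A';
my $nameC = $nameA =~ s/\.\K.*/dif/r;   # 'data.dif'; $nameA unchanged
print "$nameC\n";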

Outputs:

( [ ["1.0", "1.0", "1.0", "4.0", 9.99999], ["2.0", "2.0", "2.0", "4.0", 9.99999], ["3.0", "3.0", "3.0", "4.0", 9.99999], ], [ ["3.0", "3.0", "3.0", "5.0", 9.99999], ["2.0", "2.0", "2.0", "5.0", 9.99999], ["1.0", "1.0", "1.0", "5.0", 9.99999], ], "data.dif", [[2, 2, 2, -1], [2, 2, 2, -1], [2, 2, 2, -1]], ) following is output file data.dif 2.000000E+00 2.000000E+00 2.000000E+00 -1.000000E+00 2.000000E+00 2.000000E+00 2.000000E+00 -1.000000E+00 2.000000E+00 2.000000E+00 2.000000E+00 -1.000000E+00

In reply to Re: combining two csv files by using math operations by tybalt89
in thread combining two csv files by using math operations by ng0177
