
cat file.txt

ob_No  ob_kind  ob_value  ob_lat   ob_lon   ob_depth  ob_time   
1      S        36.045    44.1432  12.5701  1.6       201303210000  
2      S        37.148    44.1432  12.5701  8.5       201303210000  
3      S        36.069    44.1432  12.5701  1.6       201303210029  
4      S        37.117    44.1432  12.5701  8.5       201303210029  
5      S        36.105    44.1432  12.5701  1.6       201303210100  
6      S        37.136    44.1432  12.5701  8.5       201303210100  
7      S        36.143    44.1432  12.5701  1.6       201303210130  
8      S        37.081    44.1432  12.5701  8.5       201303210130  
9      S        36.162    44.1432  12.5701  1.6       201303210159  

mapfile -s 1 -t ArrayObs < file.txt

This gives me an array of lines 1 to 9 (the -s 1 skips the header line).
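
To check that each element really holds one whole line, something like this can be used:

printf '<%s>\n' "${ArrayObs[@]}"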

for  oline in ${ArrayObs[@]};do
  varDP=`echo ${oline} | awk '{print $6}'`
     if [ "${ArrayObs[0]}" == "$oline" ]; then
      valuesMO=`echo ${oline}`
      echo -e " first time"
      awk '{printf ("%.3f\n", $6)}' valuesMO > MkDepth
    else
      echo -e " not the first time"
      valuesMO=`echo ${oline}`
      awk '{printf ("%.3f\n", $6)}' valuesMO >> MkDepth
    fi
  done

I want to create the file on the first iteration and append to it for the rest. The if block gives a fatal error from awk because there are spaces in the strings. Thanks,

2 Answers

2

Here is a script that reads the file line by line, turns each line into an array, and then iterates over that array value by value. Hope it helps.


file.txt

ob_No  ob_kind  ob_value  ob_lat   ob_lon   ob_depth  ob_time   
1      S        36.045    44.1432  12.5701  1.6       201303210000  

Script

#!/usr/bin/env bash
# author    : Daniel Leybovich
# SO        : http://stackoverflow.com/questions/27981928/bash-for-loop-strings-with-spaces

FILE=${1}

while read -r LINE
do
    declare -a ROW

    ROW=(${LINE})    # unquoted on purpose: split the line into words
    COLS=${#ROW[@]}

    echo "LINE    : .... ${LINE}"
    echo "COLS NUM: .... ${COLS}"
    echo "----------------------"
    echo "value by value:       "

    for INDEX in $(seq 0 $((COLS - 1)))
    do
        echo -e "\tCOL[${INDEX}] = ${ROW[${INDEX}]}"
    done

    echo '========================================='

done < "${FILE}"

exit 0

Output

daniel@synapse:/tmp$ ./so27981928.sh file.txt
LINE    : .... ob_No  ob_kind  ob_value  ob_lat   ob_lon   ob_depth  ob_time
COLS NUM: .... 7
----------------------
value by value:   
    COL[0] = ob_No
    COL[1] = ob_kind
    COL[2] = ob_value
    COL[3] = ob_lat
    COL[4] = ob_lon
    COL[5] = ob_depth
    COL[6] = ob_time
=========================================
LINE    : .... 1      S        36.045    44.1432  12.5701  1.6       201303210000
COLS NUM: .... 7
----------------------
value by value:   
    COL[0] = 1
    COL[1] = S
    COL[2] = 36.045
    COL[3] = 44.1432
    COL[4] = 12.5701
    COL[5] = 1.6
    COL[6] = 201303210000
=========================================
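
A small variation on the same idea, in case it is useful: read can do the splitting itself with -a, so the line never goes through an unquoted expansion. A sketch, assuming FILE is set as in the script above:

while read -r -a ROW
do
    echo "COLS NUM: .... ${#ROW[@]}"
    for VALUE in "${ROW[@]}"
    do
        echo -e "\tVALUE = ${VALUE}"
    done
done < "${FILE}"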

EDIT [minor script improvements]

#!/usr/bin/env bash
# author        : Daniel Leybovich

# create a temporary file
TABLE=$(mktemp)

# populate file with tabular data
cat << EOF >> ${TABLE}
col1    col2    col3    col4
[0,0]   [0,1]   [0,2]   [0,3]
[1,0]   [1,1]   [1,2]   [1,3]
EOF

# count line numbers
declare -i LN=0

# iterate over matrix rows 
while read -r LINE; do
    echo "-------------------------------------------------------------------"

    declare -a ROW

    ROW=(${LINE})
    COLS=${#ROW[@]}

    echo "LINE NO# ....... ${LN}"
    echo "COLUMNS  ....... ${LINE}"
    echo "COLUMNS NUM .... ${COLS}"

    # iterate over values
    for INDEX in $(seq 0 $((COLS - 1)));do
        echo -e "\tCOL[${INDEX}] = ${ROW[${INDEX}]}"
    done

    # increment line counter
    LN=$((LN + 1))

done < "${TABLE}"

# clean up
rm -f "${TABLE}"

# we are done
exit 0

Output

daniel@synapse:/tmp$ ./arrays_2d.sh
-------------------------------------------------------------------
LINE NO# ....... 0
COLUMNS  ....... col1    col2    col3    col4
COLUMNS NUM .... 4
    COL[0] = col1
    COL[1] = col2
    COL[2] = col3
    COL[3] = col4
-------------------------------------------------------------------
LINE NO# ....... 1
COLUMNS  ....... [0,0]   [0,1]   [0,2]   [0,3]
COLUMNS NUM .... 4
    COL[0] = [0,0]
    COL[1] = [0,1]
    COL[2] = [0,2]
    COL[3] = [0,3]
-------------------------------------------------------------------
LINE NO# ....... 2
COLUMNS  ....... [1,0]   [1,1]   [1,2]   [1,3]
COLUMNS NUM .... 4
    COL[0] = [1,0]
    COL[1] = [1,1]
    COL[2] = [1,2]
    COL[3] = [1,3]
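
Applied to file.txt from the question, the same row-array idea picks out the ob_depth column directly (field 6, so index 5). A rough sketch:

while read -r -a ROW; do
    [[ ${ROW[0]} == ob_No ]] && continue    # skip the header row
    printf '%.3f\n' "${ROW[5]}"
done < file.txt > MkDepth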

0

I don't see a question in there at all.

Some code review:

for  oline in ${ArrayObs[@]};do

quotes missing: need "${ArrayObs[@]}"
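
For example (sketch): without the quotes each line is split into words, with them each line stays one item:

for oline in ${ArrayObs[@]}; do echo "got: $oline"; done     # one word per iteration
for oline in "${ArrayObs[@]}"; do echo "got: $oline"; done   # one whole line per iteration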

  valuesMO=`echo ${oline}`

Same as valuesMO=$oline except the whitespace is squeezed. Is that your intention?
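
For example (sketch):

oline='1      S        36.045'
valuesMO=`echo ${oline}`
echo "$valuesMO"    # prints: 1 S 36.045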

  varDP=`echo ${oline} | awk '{print $6}'`
     if [ "${ArrayObs[0]}" == "$oline" ]; then
      echo -e " first time"
      awk '{printf ("%.3f\n", $6)}' valuesMO > MkDepth

Do you have a file named valuesMO? You're missing a $
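
(Even with the $, awk would take the words of the string as file names to open. One way to hand awk the string itself is on stdin, e.g. this sketch:)

echo "$valuesMO" | awk '{printf ("%.3f\n", $6)}'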

    else
      echo -e " not the first time"
      awk '{printf ("%.3f\n", $6)}' valuesMO >> MkDepth
    fi
  done

You don't need the if statement. Put the redirect after the for loop to capture all its output. Like this:

for x in ...; do
    echo "$x"
done > MkDepth
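
Putting that together with the quoting and stdin points above, a sketch for your case could look like:

for oline in "${ArrayObs[@]}"; do
    echo "$oline" | awk '{printf ("%.3f\n", $6)}'
done > MkDepth

The file MkDepth is created (or truncated) once, and every iteration's output lands in it.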

3 Comments

Thanks Glenn. valuesMO is an env var obtained from "${ArrayObs[@]}". I want to create a file called MkDepth which contains $6 of valuesMO. When it's the first element of the array ${ArrayObs}, it should create the file MkDepth, and from the second element onwards just append to the existing MkDepth. Is there any workaround instead of the if block? Perhaps while?
Why don't you just do awk '{print $6}' file.txt > MkDepth ?
I understand the create versus append. What I suggest is you collect ALL the output, and create the file with the whole contents. That's essentially what happens with the redirection after "done".
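
Putting those last two comments together, a single awk pass over the original file would be something like this sketch (NR > 1 skips the header row):

awk 'NR > 1 {printf ("%.3f\n", $6)}' file.txt > MkDepth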
