view · edit · history · print

This is a small collection of script snippets, some are more interesting than others. An old but similar page I created is located here.


"Real" Scripting

(not shell scripting)

Good scripting reference:


ex

Using ex editor (basis of vi commands) to change sh-bang

  
# Rewrite the first line (the sh-bang) of a script in place.
# '%' is used as the s/// delimiter so the slashes in the paths need no escaping;
# ex applies the edit non-interactively and -c"wq" writes the file back.
NEWBIN="/home/root/testperl"
ex -c"1,1s%/usr/bin/perl%${NEWBIN}%" -c"wq" myscript.pl


sed

Strip comments, strip leading spaces, strip trailing whitespace and remove empty lines

  
# note: the trailing-whitespace substitution must be anchored on '$' --
# the original "s/[[:blank:]]\+//g" deleted EVERY run of blanks, including
# the ones inside the line, which contradicts the stated purpose
sed '
/#.*$/s///
s/^[[:blank:]]\+//
s/[[:blank:]]\+$//
/^$/d
'

# or use a similar short hand
# (NB: inside [ ] a '|' is a literal pipe character, not alternation --
#  the old [0-9|A-Z|a-z] also printed lines starting with '|')
sed -n '/^[0-9A-Za-z]/p'

# simple delete
sed '/^$/d' ## same as grep -v ^$ -- script quoted so the shell never word-splits or globs it

# escape spaces and hyphens
# (the old class [\ |\-] also matched literal '\' and '|' characters;
#  inside brackets a trailing '-' is literal, so [ -] is all that is needed)
echo "da da - lala" | sed 's/[ -]/\\&/g'
da\ da\ \-\ lala

# remove most HTML tags (accommodates multiple-line tags)
# (label :a; strip complete tags; while an unclosed '<' remains, append the
#  next line with N and branch back to :a)
sed -e :a -e 's/<[^>]*>//g;/</N;//ba'

# remove last line
# (fragment: prepend a producer before the pipe)
| sed '$d'

# remove first section of file until keyword
# ('!d' deletes every line NOT in the range first-match..end-of-file)
sed '/keyword/,$!d' filename

# run sed unbuffered (option -u) in case you pipe data from tail -f
# (NOTE(review): -u is a GNU sed extension -- confirm on BSD/macOS)
tail -f <somefile> | sed -u -e 's/...//'


Shell

 
# How do I log script stdout to a file and reset it later back to stdout?
# (save the original stdout on fd 3 FIRST; the old 'exec >&0' trick pointed
#  stdout at STDIN -- fd 0 -- which only works by accident on a terminal)
exec 3>&1 >/tmp/file.out
cat <<- +++
  some text for the file
+++
exec 1>&3 3>&-


# How do I log stdout + stderr to a file in a script?
# (>> appends; 2>&1 must come after the stdout redirection)
exec >>/tmp/logfile 2>&1


# How to use cat as an editor?
# (appends the here-doc body to the file; use > instead of >> to overwrite)
cat << EOF >> /tmp/file.out
somme crappy text
EOF


# save stdout and stderr to file descriptors 3 and 4, then redirect them to file "foo" 
exec 3>&1 4>&2 >foo 2>&1  
# ...  
# restore stdout and stderr 
exec 1>&3 2>&4 


# how do I print to stderr
echo "coucou to stderr" >&2
print -u2 "coucou to stderr"   # ksh only: 'print' is not a bash builtin


# difference between column selections
file is 221932KiB and has 17481202 lines
$ time cat file > /dev/null
  real    0m0.36s
$ time cat file | awk '{print $2}' > /dev/null
  real    0m23.03s
$ time cat file | tr -s ' ' ';' | cut -d";" -f 2 > /dev/null
  real    0m18.04s    # tr-command takes 2x longer than cut-command
$ time perl -p -e 's/ +/ /' file | cut -d" " -f 2 > /dev/null
  real    0m47.67s
$ time perl -lane 'print $F[1]' file > /dev/null
  real    2m34.27s
$ time perl -lape '$_ = $F[1]' file > /dev/null
  real    2m46.11s


# change capitals to lowercase
# (single brackets: tr takes POSIX classes directly; '[[:upper:]]' only
#  worked by accident because the extra [ and ] mapped onto themselves)
$ tr '[:upper:]' '[:lower:]'


# Adding numbers in column layouts using awk:
# (sed drops the three header lines; column 7 is presumably the segment
#  size -- ipcs output is platform-dependent, verify locally)
ipcs -m -b | sed '1,3d' | awk '{SIZE+=$7} END {print SIZE}'
ipcs -m -b | sed '1,3d' | perl -lane '$sum += $F[6]; END {print $sum}'


# Grep for a field, not the full line:
# (a bare pattern with no action prints the whole line by default)
ls -l | awk '$6 == "Dec"'


# Get last argument
echo "number of arguments:                  $# "
# NB: the old 'eval LAST=$"$#"' expanded $"..." as a locale-quoted string and
# assigned the COUNT, not the last argument. \${$#} builds e.g. ${3} so the
# eval really fetches the last positional parameter. (bash-only: ${!#})
eval "LAST=\${$#}"
echo "last argument:                        $LAST "
MAXSHIFT=$(( $# - 1 ))
echo "maximum shifts:                       $MAXSHIFT "
shift "$MAXSHIFT"
echo "last argument after maximum shifts:   $1 "


# Array loop
# (ksh transcript: 'set -A' and 'print' are ksh-isms; in bash use
#  arr=(a b c d) and echo. NB 'arrray' is a typo, kept as typed on both lines.)
$ colors[0]=RED ; colors[1]=GREEN ; colors[2]=BLUE
$ set -A arrray a b c d
$ i=0
$ while [ $i -lt ${#colors[*]} ]
> do
> print ${colors[$i]}
> (( i=i+1 ))
> done
$ echo list all elements ${arrray[*]} (use * or @)

# ISO time (use -u option for UTC)
ISOTIME=$(date +%Y%m%dT%H%M%S)

# Ping with timestamp (on OSX)
# (-i 5 pings every 5 seconds; each reply line is stamped as it arrives)
ping -i 5 google.com | while read pong
do 
  echo "$(date +%Y%m%dT%H%M%S): $pong"
done

# remove first line
# (tail -n +2 prints from line 2 onward)
tail -n +2 <file>

# If you need to copy a lot of files from one machine to another in a hurry
# across a busy network, you can run multiple cpio's in parallel.
# !untested!
find . -depth -print >/tmp/files
# give split an explicit output prefix: a bare "split /tmp/files" writes
# xaa, xab, ... into the CURRENT directory, so the glob below never matched
split /tmp/files /tmp/files.
for F in /tmp/files.?? ; do
  # cpio -o reads the file list from stdin -- no cat needed
  cpio -o <"$F" | ssh destination "cd /target && cpio -idum" &
done
wait  # don't return until every parallel transfer has finished

# duplicate a directory
# option1, using cp -a
test -d d2 || cp -a d1 d2 # WARN, make sure the target directory does NOT exist!
# option2, using cpio (PS: first cd into the target directory)
cd d1 && find . -depth -print | cpio -pdm ../d2 ; cd -
# option3, using tar (portable; preserves permissions and symlinks)
(cd d1 && tar cf - .) | (mkdir -p d2 && cd d2 && tar xf -)

# uid => ID => gecos
# (getent resolves the numeric uid via NSS; the next line is sample output)
getent passwd 18568
testaccount:*:18568:58500:test user:/home/testaccount:/bin/bash

# hex => bin
# (xxd -r -p turns a plain hex string back into bytes; here it decodes to
#  the sed expression shown on the following line)
echo "732F206578653D2E2A2F2F" | xxd -r -p
s/ exe=.*//

# split string into arrays
animals="dog|cat|fish|squirrel|bird|shark";
OIFS=$IFS;
IFS="|";
set -f;                 # suppress globbing: an element like '*' must stay literal
animalArray=($animals); # <=== this is all, tell those people using 'read -r -a <<<$*' to stop that nonsense!
set +f;
IFS=$OIFS;


awk

I don't use awk for scripting and thus always try to find alternatives for my one-liners. That may be a shame, since awk is quite performant.

  
# Adding numbers in column layouts
ipcs -m -b | sed '1,3d' | awk '{SIZE+=$7} END {print SIZE}'
# Adding numbers in column layouts (avoiding output in scientific notation)
ipcs -m -b | sed '1,3d' | awk '{SIZE+=$7} END {printf ("%8d\n", SIZE)}'
# print the whole line when field 6 matches (bare pattern = default print action)
ls -l | awk '$6 == "Dec"'
# (NB: the cat is unnecessary -- awk '{print $2}' file does the same)
cat file | awk '{print $2}'

# Converting scientific notation 
# ATTENTION: when using awk, please use {printf ("%8d\n", SIZE)}
# (next line is the printed result)
echo "5.083E+5" | awk -F"E" 'BEGIN{OFMT="%10.10f"} {print $1 * (10 ^ $2)}'
508300

# Find the position of a string
# (awk's index() is 1-based; prints 4 here, 0 when not found)
echo "123%%67890" | awk -v find=%% '{ printf ("%s\n",index($0,find) ) }'
# ps: there is also a perl one-liner for this


EOL

replacing End of line

 
blablablah | perl -pe 's/\n/,/g'   # -i (in-place) dropped: it is meaningless when reading a pipe
blablablah | tr '\n' ','


CGI basics

To use CGI outside Apache's /cgi-bin/ directory, add this to Apache.conf:

 AddHandler cgi-script .cgi

playfullsimple.cgi

  
#!/bin/sh
set -x                     # trace goes to the server's error log while debugging
echo "Content-type: text/html"
echo ""                    # blank line terminates the CGI header block
cat << ++++
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head><title>title</title></head>
<body>
++++
# WARNING: QUERY_STRING is echoed unescaped -- fine for a toy page, but an
# XSS hole if this ever faces untrusted users
echo "$( date ) ---- ${QUERY_STRING} <br>"
echo '<hr>'
set                        # dump all shell variables (may expose sensitive environment)
echo '</body>'
echo '</html>'
exit 0

Note: Do not forget to chown the script to the web-server user and chmod it executable (chmod +x).


Other

  
# insert $value in file according to line and column coordinates (column separator = ":")
# (pass shell variables with -v so they are quoted and set before any input is read)
awk -v l="$LIGNE" -v f="$COLUMN" -v v="$VALUE" 'BEGIN { FS=OFS=":" } NR==l { $f=v } 1' input_file

ASCII compare (kind of dirty but it works on Linux)

  
$ cat asciicompare
f1=$1
f2=$2
# Drop od's offset column, glue all lines together, then split again on the
# "nl" byte markers so the two dumps compare line-for-line.
# Fixes vs. original: 'perl -p -i -e' in a pipe dropped (-i is meaningless on
# stdin); variables quoted; ${f1##*/} (basename) so path arguments don't
# produce bogus /tmp/dir/file.ascii names.
od -av "$f1" | awk '{$1=""; print $0}' | perl -pe 's/\n//g' | perl -pe 's/nl/\n/g' > "/tmp/${f1##*/}.ascii"
od -av "$f2" | awk '{$1=""; print $0}' | perl -pe 's/\n//g' | perl -pe 's/nl/\n/g' > "/tmp/${f2##*/}.ascii"
diff "/tmp/${f1##*/}.ascii" "/tmp/${f2##*/}.ascii"
rm -f "/tmp/${f1##*/}.ascii" "/tmp/${f2##*/}.ascii"

admin · attr · attach · edit · history · print
Page last modified on January 15, 2016, at 02:48 AM