shell基础知识

# Back up /etc as a timestamped tarball under the upgrade conf_backups directory.
# mkdir -p creates the whole path and is a no-op if it already exists, so the
# separate existence checks from the original notes are unnecessary.
mkdir -p /home/upgrade/iaas_v331/conf_backups/all
now=$(date +"%Y%m%d%H%M%S")
tar -zcvf "/home/upgrade/iaas_v331/conf_backups/all/etc_${now}.tar.gz" /etc
 
if结构
if [ condition ]; then ... fi
if [ condition ]; then ... else ... fi
if [ condition ]; then ... elif condition; then ... fi
简单的逻辑可以使用&& || 去替换
条件可以用命令返回值代替
if [ -e test ]; then echo exist; else echo not exist; fi
exist
 
[ -e test ] && echo exist || echo not exist
 
echo "1" && echo "2" || echo "3" && echo "4" || echo "5" ||echo "6" && echo "7" && echo "8" || echo "9"
1
2
4
7
8
 
for (( c1; c2; c3 ));
do
...;
done
for (( i=0; i<10; i++)); do echo $i; done
for x in ${array[@]}; do echo $x; done
for x in `ls`; do echo $x; done
 
while
i=0; while [ $i -lt 3 ];do echo $i; (( i=i+1 )); done
一个有用的小技巧,一行行的读取文件内容 while read line; do echo $line; done < /tmp/tmp
[root@zhouzz ~]# while read line; do echo $line;done < x
eeeee
fffff
ggggg
iiiii
ppppp
 
for f in *; do echo $f; if [ -f $f ]; then echo "this is file $f";else continue;fi;done;
 
bash 是一个进程 bash下还可以再重新启动一个shell,这个shell是sub shell,原shell复制自身给他
在sub shell中定义的变量,会随着sub shell的消失而消失
()子shell中执行
{}当前shell中执行, $$当前脚本执行的pid
&后台执行
$! 运行在后台的最后一个作业的PID(进程ID)
shell基础知识
 
shell基础知识
a=100
[root@zhouzz ~]# { a=1; echo $a; }; echo $a;
1
1
 
echo $PATH |awk 'BEGIN{RS=":"}END{print NR}'
RS行记录分隔符
FS字段分隔符
NR记录数
NF字段数
BEGIN END
 
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}{print $0}' |awk -F '/' '{print $1,$2,$3,$4}'
usr local sbin
usr local bin
usr sbin
usr bin
usr local java
usr local java
root bin
 
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}{print $0}' |awk 'BEGIN{FS="/"}{print $1,$2,$3,$4}'
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}{print $0}' |awk 'BEGIN{FS="/|-"}{print $1,$2,$3,$4}'
 
RS两行之间的分割符
FS 类同 -F ‘分割符’
sed
-e 多次操作
-i 替换原文
awk 'BEGIN{RS=":"}{print $0}' |sed 's#/#-----#g'
echo $PATH |awk 'BEGIN{RS=":"}{print $0}' |sed -n '/\/bin$\|\/sbin$/p'
sed -i -e 's/old1/new1/' -e 's/old2/new2/' file  # 多个 -e 依次执行并写回原文件
echo "2>1" |bc
类比SQL
grep 数据查找定位 =select * from table
awk数据切片 (名字来源于三个作者的名称) select field from table
sed数据修改
update table set field=new where field=old
grep -o pattern file 把每个匹配的内容用独立的行显示
grep -i pattern file忽略大小写
grep pattern -r dir / 递归搜索
grep -A -B -C pattern file 打印命中数据的上下文
echo "ABC" |grep -i abc
ps -ef |grep bash
ps -ef |grep bash |grep -v grep
[root@zhouzz ~]# echo "1234 7654" |grep "[0-9]4"
1234 7654
[root@zhouzz ~]# echo "1234 7654" |grep -o "[0-9]4"
34
54
pattern (BRE):
^,$
[0-9]
[a-z]
基本正则(BRE),扩展正则的区别ERE
*0个或多个
? 0次或1次(可选匹配,注意不是非贪婪)
+一个或多个
()分组
{}范围匹配
|匹配多个表达式的任何一个
[root@zhouzz ~]# echo "1234 7654" |grep -o "[0-9]4|76"
[root@zhouzz ~]# echo "1234 7654" |grep -oE "[0-9]4|76"
34
76
54
[root@zhouzz tmp]# ls /tmp |grep jenkins83 -C2
hsperfdata_jenkins
hsperfdata_root
jenkins8376545221077029752.sh
systemd-private-40795f9ed6f04acdb904bee0520b03e8-mariadb.service-E8OrMz
systemd-private-40795f9ed6f04acdb904bee0520b03e8-ntpd.service-nzrdhO
[root@zhouzz tmp]# ls /tmp |grep jenkins83 -A2 -B2
hsperfdata_jenkins
hsperfdata_root
jenkins8376545221077029752.sh
systemd-private-40795f9ed6f04acdb904bee0520b03e8-mariadb.service-E8OrMz
systemd-private-40795f9ed6f04acdb904bee0520b03e8-ntpd.service-nzrdhO
grep 00:00:01 -r .
grep -E '404 | 503' /tmp/nginx.log |head -3
 
awk理论上可以代替grep
awk 'pattern{action}'
 
awk 'BEGIN{}END{}'开始与结束
awk '/Running/' 正则匹配
awk '/aa/,/bb/'区间匹配
awk 'NR==2' 取第二行
awk 'NR >1' 去掉第一行
ps | awk 'BEGIN{print "start"}{print $0}END{print "end"}'
awk '/ 404 | 500 /' /tmp/nginx.log
[root@zhouzz ~]# echo '1
> 2
> 3
> 4
> 5' |awk '/2/,/4/'
2
3
4
[root@zhouzz ~]# echo '1
> 2
> 3
> 4
> 5' |awk '$0>3'
4
5
 
echo '1
2
3
4
5' | awk '$0>3'
ps | awk 'NR>1'
 
ps | awk '{print $NF}'
echo $PATH | awk 'BEGIN{RS=":"}{print $0}' | grep -v "^$" | awk 'BEGIN{FS=" ";ORS=":"}{print $0}END{printf " " }'
echo '1,10
2,20
3,30' | awk 'BEGIN{a=0;FS=","}{a+=$2}END{print a,a/NR}'
awk 'BEGIN{print 33*20*76/200/3}'
echo "123|456_789" | awk 'BEGIN{FS="\||_"}{print $2}'
因为双引号存在,最外层单引号确保整个内容是原样传递给awk的。然后awk收到了双引号,双引号在awk内部也有转义,然后两个反斜杠变成了一个反斜杠。这样FS就会能正常的知道竖杠不是正则了。
echo "123|456_789" | awk "BEGIN{FS="\\||_"}{print $2}" #尽量使用单引号
[root@zhouzz ~]# echo '1,10
> 2,20
> 3,30' | awk 'BEGIN{a=0;FS=","}{a+=$2}END{print a,a/NR}'
60 20
grep -E " 404 | 500 " /tmp/nginx.log | awk '{print $9}' | sort | uniq -c
将单行分拆为多行
[root@zhouzz ~]# echo $PATH
/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/usr/local/java/jdk1.8/bin:/usr/local/java/jdk1.8/jre/bin:/root/bin
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}{print $0}'
/usr/local/sbin
/usr/local/bin
/usr/sbin
/usr/bin
/usr/local/java/jdk1.8/bin
/usr/local/java/jdk1.8/jre/bin
/root/bin
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}{print NR,$0}'
1 /usr/local/sbin
2 /usr/local/bin
3 /usr/sbin
4 /usr/bin
5 /usr/local/java/jdk1.8/bin
6 /usr/local/java/jdk1.8/jre/bin
7 /root/bin
 
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}END{print NR}'
7
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}END{print NR,$0}'
7 /root/bin
将多行合成一行
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}{print $0}' |awk 'BEGIN{FS=" ";ORS=":"}{print $0}'
/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/usr/local/java/jdk1.8/bin:/usr/local/java/jdk1.8/jre/bin:/root/bin::
 
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}{print $0}' |awk 'BEGIN{FS=" ";ORS=":"}{print $0}END{printf " "}'
/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/usr/local/java/jdk1.8/bin:/usr/local/java/jdk1.8/jre/bin:/root/bin::
 
[root@zhouzz ~]# echo $PATH |awk 'BEGIN{RS=":"}{print $0}' |grep -v "^$"|awk 'BEGIN{FS=" ";ORS=":"}{print $0}END{printf " "}'
/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/usr/local/java/jdk1.8/bin:/usr/local/java/jdk1.8/jre/bin:/root/bin:
 
sed -n '2p' 打印第二行
sed 's#hello#world#'
-E 扩展表达式
--debug调试
ps | sed -n 1,3p
ps | sed 's/CMD/command/'
ps | sed -n '/ps/p'
echo '1
2
3
4
5' | sed -n '/3/,/4/p'
echo '1
2
3
4
5' | sed '/3/,/4/d'
ps | sed -e 's/CMD/command/' -e 's#00#20#g'
pattern表达式
  • 20 30,35 行数与行数范围
  • /pattern/ 正则匹配
  • //,// 正则匹配的区间
action
  • d 删除
  • p 打印,通常结合-n参数
  • s/REGEXP/REPLACEMENT/[FLAGS]
  • 替换时用 \1 \2 引用匹配的分组
[root@zhouzz ~]# echo '1
> 1
> 2
> 3
> 4
> 5
> 6' |sed -n '/3/,/4/p'
3
4
 
对所有404 500 的数据,统计出现这种状态码的url,需要对url汇总(汇总相似的url,把相同的资源但是变化的id去掉)去重,打印前5个出问题的路径,把命令贴到回复里