# --- Basic output redirection ---
ls > files.txt     # > truncates the target file, then writes stdout into it
ls >> files.txt    # >> appends to the file (creates it if missing)
echo "hello" > output.txt
cat /etc/hosts > hosts.bak
# discard output entirely
command > /dev/null
# --- Input redirection: feed a file to a command's stdin ---
mail -s "Report" user@host < report.txt
mysql -u root -p < backup.sql
sort < names.txt
wc -l < file.txt   # stdin has no filename, so wc prints only the count
# --- Pipes: connect one command's stdout to the next command's stdin ---
cat access.log | grep "404"    # NOTE(review): useless cat — 'grep "404" access.log' is one process fewer
ps aux | grep nginx            # NOTE(review): also matches the grep itself; prefer 'pgrep nginx'
cat file.txt | sort | uniq     # NOTE(review): 'sort -u file.txt' is equivalent
dmesg | tail -20
history | grep "git"
# --- stderr redirection (file descriptor 2) ---
command 2> errors.log     # redirect stderr to a file (truncate)
command 2>> errors.log    # append stderr
command 2>/dev/null       # discard stderr
# separate stdout and stderr into different files
command > output.log 2> error.log
command &> all.log        # stdout + stderr to one file (bash-only shorthand)
command &>> all.log       # append both (bash-only)
command > all.log 2>&1    # POSIX compatible; order matters: redirect first, then dup fd 2 onto fd 1
command >> all.log 2>&1   # append, POSIX
# merge stderr into stdout so the pipe sees both streams
command 2>&1 | grep "error"
# --- Here-documents: inline multi-line stdin, ends at the delimiter line ---
cat << EOF
Line one
Line two
EOF
cat << 'EOF' # quoting the delimiter disables $-expansion in the body
$HOME stays literal
EOF
cat <<- EOF # <<- strips leading TAB characters (not spaces) from each line
indented content
EOF
# --- Here-strings: feed one string as stdin (a trailing newline is appended) ---
grep "root" <<< "/etc/passwd content here"
read -r a b c <<< "1 2 3"
bc <<< "2^10"
awk '{print $1}' <<< "hello world"
# --- tee: duplicate a stream to file(s) while passing it downstream ---
ls | tee files.txt        # output to screen and file (truncates the file)
ls | tee -a files.txt     # append mode
echo "log entry" | tee -a log.txt
# tee to multiple files at once
ls | tee a.txt b.txt c.txt
# tap a pipeline: save ERROR lines to a file while the count continues downstream
cat bigfile.log | tee >(grep ERROR > errors.txt) | wc -l
# --- xargs: build command lines from stdin ---
find . -name "*.log" | xargs rm     # NOTE(review): breaks on names with spaces/newlines — prefer the -print0/-0 form below
find . -type f | xargs grep "TODO"
cat urls.txt | xargs -n1 curl -s    # -n1: one argument per curl invocation
# handle filenames with spaces: NUL-delimited pairing
find . -name "*.txt" -print0 | xargs -0 rm
ls | xargs -I {} cp {} /backup/{}   # -I {}: substitute each input line at {} (NOTE(review): parsing ls is fragile)
ls | xargs -P 4 -n1 process.sh      # -P 4: run up to 4 processes in parallel
# --- Process substitution: treat command output as a (virtual) file ---
diff <(sort file1.txt) <(sort file2.txt)
comm <(sort a.txt) <(sort b.txt)
# Read command output line by line in the CURRENT shell (no subshell, so
# variables set inside the loop survive). IFS= preserves leading/trailing
# whitespace and -r keeps backslashes literal (fixes SC2162).
while IFS= read -r line; do
  printf '%s\n' "$line"
done < <(grep "error" log.txt)
# feed output as input file
cat <(echo "generated content")
# --- Custom file descriptors via exec ---
exec 3> custom.log # open fd 3 for writing
echo "info" >&3 # write through fd 3
exec 3>&- # close fd 3
exec 4< input.txt # open fd 4 for reading
read -r line <&4 # read one line from fd 4
exec 4<&- # close fd 4
# temporarily redirect the script's own stdout, then restore it
exec 5>&1 # save current stdout on fd 5
exec > log.txt # all subsequent stdout goes to log.txt
exec 1>&5 5>&- # restore stdout from fd 5, then close fd 5
# count unique IPs from access log (most frequent first)
awk '{print $1}' access.log | sort | uniq -c | sort -rn | head
# backup with progress (pv shows throughput)
tar cf - /data | pv > backup.tar
# search and replace across files; -Z/-0 pass NUL-delimited filenames so
# paths containing spaces or newlines survive the pipe (GNU grep/xargs)
grep -rlZ "oldtext" . | xargs -0 sed -i 's/oldtext/newtext/g'
# merge already-sorted streams without a full re-sort
sort -m <(sort file1) <(sort file2) > merged.txt
# monitor log in real time; --line-buffered flushes each match immediately
tail -f /var/log/syslog | grep --line-buffered "error"
# --- Basic output redirection ---
ls > files.txt     # > truncates the target file, then writes stdout into it
ls >> files.txt    # >> appends to the file (creates it if missing)
echo "hello" > output.txt
cat /etc/hosts > hosts.bak
# discard output entirely
command > /dev/null
# --- Input redirection: feed a file to a command's stdin ---
mail -s "报告" user@host < report.txt
mysql -u root -p < backup.sql
sort < names.txt
wc -l < file.txt   # stdin has no filename, so only the count is printed
# --- Pipes: connect one command's stdout to the next command's stdin ---
cat access.log | grep "404"    # NOTE(review): useless cat — 'grep "404" access.log' is one process fewer
ps aux | grep nginx            # NOTE(review): also matches the grep itself; prefer 'pgrep nginx'
cat file.txt | sort | uniq     # NOTE(review): 'sort -u file.txt' is equivalent
dmesg | tail -20
history | grep "git"
# --- stderr redirection (file descriptor 2) ---
command 2> errors.log     # redirect stderr to a file (truncate)
command 2>> errors.log    # append stderr
command 2>/dev/null       # discard stderr
# separate stdout and stderr into different files
command > output.log 2> error.log
command &> all.log        # stdout + stderr to one file (bash-only shorthand)
command &>> all.log       # append both (bash-only)
command > all.log 2>&1    # POSIX compatible; order matters: redirect first, then dup fd 2 onto fd 1
command >> all.log 2>&1   # append, POSIX
# merge stderr into stdout so the pipe sees both streams
command 2>&1 | grep "error"
# --- Here-documents: inline multi-line stdin, ends at the delimiter line ---
cat << EOF
第一行内容
第二行内容
EOF
cat << 'EOF' # quoting the delimiter disables $-expansion in the body
$HOME 保持原样
EOF
cat <<- EOF # <<- strips leading TAB characters (not spaces) from each line
缩进内容
EOF
# --- Here-strings: feed one string as stdin (a trailing newline is appended) ---
grep "root" <<< "内容在这里"
read -r a b c <<< "1 2 3"
bc <<< "2^10"
awk '{print $1}' <<< "hello world"
# --- tee: duplicate a stream to file(s) while passing it downstream ---
ls | tee files.txt        # output to screen and file (truncates the file)
ls | tee -a files.txt     # append mode
echo "日志记录" | tee -a log.txt
# tee to multiple files at once
ls | tee a.txt b.txt c.txt
# tap a pipeline: save ERROR lines to a file while the count continues downstream
cat bigfile.log | tee >(grep ERROR > errors.txt) | wc -l
# --- xargs: build command lines from stdin ---
find . -name "*.log" | xargs rm     # NOTE(review): breaks on names with spaces/newlines — prefer the -print0/-0 form below
find . -type f | xargs grep "TODO"
cat urls.txt | xargs -n1 curl -s    # -n1: one argument per curl invocation
# handle filenames with spaces: NUL-delimited pairing
find . -name "*.txt" -print0 | xargs -0 rm
ls | xargs -I {} cp {} /backup/{}   # -I {}: substitute each input line at {} (NOTE(review): parsing ls is fragile)
ls | xargs -P 4 -n1 process.sh      # -P 4: run up to 4 processes in parallel
# --- Process substitution: treat command output as a (virtual) file ---
diff <(sort file1.txt) <(sort file2.txt)
comm <(sort a.txt) <(sort b.txt)
# Read command output line by line in the CURRENT shell (no subshell, so
# variables set inside the loop survive). IFS= preserves leading/trailing
# whitespace and -r keeps backslashes literal (fixes SC2162).
while IFS= read -r line; do
  printf '%s\n' "$line"
done < <(grep "error" log.txt)
# feed generated output as an input file
cat <(echo "生成的内容")
exec 3> custom.log # open fd 3 for writing
echo "info" >&3 # write through fd 3
exec 3>&- # close fd 3
exec 4< input.txt # open fd 4 for reading
read -r line <&4 # read one line from fd 4
exec 4<&- # close fd 4
exec 5>&1 # save current stdout on fd 5
exec > log.txt # redirect all subsequent stdout to log.txt
exec 1>&5 5>&- # restore stdout from fd 5, then close fd 5
# count unique IPs from access log (most frequent first)
awk '{print $1}' access.log | sort | uniq -c | sort -rn | head
# backup with progress (pv shows throughput)
tar cf - /data | pv > backup.tar
# bulk search and replace; -Z/-0 pass NUL-delimited filenames so
# paths containing spaces or newlines survive the pipe (GNU grep/xargs)
grep -rlZ "旧文本" . | xargs -0 sed -i 's/旧文本/新文本/g'
# merge already-sorted streams without a full re-sort
sort -m <(sort file1) <(sort file2) > merged.txt
# monitor log in real time; --line-buffered flushes each match immediately
tail -f /var/log/syslog | grep --line-buffered "error"