写一个清理本地大文件分段缓存的脚本
#!/bin/bash
# Purge a large file's segment cache on the local server, 10 MB at a time.
#
# Usage: $0 <url> [port]
#   url  - full URL of the cached object (e.g. http://example.com/path/file.bin)
#   port - local server port (defaults to 80)
#
# The script asks the local server (http://0, i.e. 0.0.0.0 = this host) for the
# object's Content-Length, then walks every 10 MB byte range and issues one
# purge request per segment, tallying successes, not-founds and failures.

url=$1     # URL from the command line
port=$2    # optional port from the command line

if [[ -z "$url" ]]; then
  echo "Usage: $0 <url> [port]" >&2
  exit 1
fi

# Extract the URL path + query string: everything from the 4th '/'-separated
# field onward (i.e. drop "http:", "", and the host).
url_path=$(echo "$url" | cut -d '/' -f 4-)

# Build the local-server base URL; "0" resolves to 0.0.0.0 (this host).
if [[ -z "$port" ]]; then
  host="http://0"          # default port 80
else
  host="http://0:$port"    # use the specified port
fi

# Fetch the total object size from the response headers.
content_length=$(curl -sI "$host/$url_path" | grep -i Content-Length | awk -F ": " '{print $2}' | tr -d '\r')
if [[ -z "$content_length" ]]; then
  echo "Failed to retrieve Content-Length for the URL." >&2
  exit 1
fi

# Segment size: 10 MB (10485760 bytes).
segment_size=10485760

# Ceiling division: total number of segments covering the object.
total_segments=$(( (content_length + segment_size - 1) / segment_size ))

success_count=0
not_found_count=0
fail_count=0

echo "Starting cache purge for $host/$url_path"

for (( i = 0; i < total_segments; i++ )); do
  start=$(( i * segment_size ))
  end=$(( start + segment_size - 1 ))

  # NOTE(review): the original source was truncated inside this loop; the
  # per-segment PURGE request and status tallying below reconstruct the
  # evident intent (the counters above) — confirm against the original
  # before relying on the exact request method/headers.
  status=$(curl -s -o /dev/null -w '%{http_code}' -X PURGE \
    -H "Range: bytes=${start}-${end}" "$host/$url_path")

  if [[ "$status" == "200" ]]; then
    (( success_count++ )) || true
  elif [[ "$status" == "404" ]]; then
    (( not_found_count++ )) || true
  else
    (( fail_count++ )) || true
  fi
done

echo "Purge complete: $success_count succeeded, $not_found_count not found, $fail_count failed."