File buffer chunked processing
(Johan via Bernard)
Reading a file buffer in chunks so the whole file never has to be held in memory, preventing the process from hitting its memory limit.
File.open("", "wb") do |f|            # "" left as in the original; substitute the destination path
  while buff = file_field.read(4096)  # copy 4 KiB at a time instead of reading the whole file
    f.write(buff)
  end
end
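In practice, file_field is usually the Tempfile or StringIO object a web framework hands you for an upload. Below is a minimal sketch of the same chunked-copy loop in a plain CGI upload handler; the "upload" parameter name and the /tmp destination path are assumptions for illustration, not part of the original snippet.

#!/usr/local/bin/ruby
require 'cgi'

cgi = CGI.new
file_field = cgi.params['upload'].first     # Tempfile or StringIO, depending on upload size (hypothetical field name)

File.open("/tmp/upload.bin", "wb") do |f|   # hypothetical destination path
  while buff = file_field.read(4096)        # never holds more than 4 KiB of the upload in memory
    f.write(buff)
  end
end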
rsrcmeter, a disk and bandwidth usage script by AJ (http://ajz.textdriven.com):
#!/usr/local/bin/ruby

Dir.chdir(ENV['HOME'])

# Disk usage
quotaline = `quota -g | tail -n 1`
usage = `echo -n "#{quotaline}" | awk '{print $2}'`.to_f
quota = `echo -n "#{quotaline}" | awk '{print $3}'`.to_f
percent_used = (usage / quota) * 100
puts "Disk usage: " + sprintf("%.4f", usage/1024) + " MiB (Quota: " +
     sprintf("%.4f", quota/1024/1024) + " GiB; " + sprintf("%.1f", percent_used) + "% used)"

# HTTP Bandwidth
print "Calculating Bandwidth Usage..."
month = `date +"%B %Y"`.chomp
access_logs = "access_log." + `date +%Y%m`.chomp + "??"
system("cat logs/access_log 2>/dev/null > temp-bandwidthcount")                 # Today's log
system("cat domains/*/logs/access_log 2>/dev/null >> temp-bandwidthcount")
system("cat logs/#{access_logs} 2>/dev/null >> temp-bandwidthcount")            # Any logs not (yet) gzipped
system("cat domains/*/logs/#{access_logs} 2>/dev/null >> temp-bandwidthcount")
system("zcat logs/#{access_logs}.gz 2>/dev/null >> temp-bandwidthcount")        # Gzipped logs from previous days
system("zcat domains/*/logs/#{access_logs}.gz 2>/dev/null >> temp-bandwidthcount")
usage = `cat temp-bandwidthcount | awk '{sum += $10} END {print sum}'`.chomp.to_f / 1024 / 1024
File.delete("temp-bandwidthcount")
30.times {print "\b"}
puts "Bandwidth used for #{month}: " + sprintf("%.4f", usage) + " MiB"
curl -o rsrcmeter http://ajz.textdriven.com/rsrcmeter.txt
chmod u+x rsrcmeter
./rsrcmeter
Disk usage: 2.2910 MiB (Quota: 1.9073 GiB; 0.1% used)
Bandwidth used for August 2006: 0.5773 MiB