Built specifically for these sites I used to frequent, because I couldn't be bothered to click on each and every picture in a 40-pic post.
Tested and verified working with Superiorpics, Otvali, Friends.kz, Hollywoodtuna and Tunaflix. Methinks it doesn't work so well with imagevenue anymore.
NOTE: uses wget to download the pics, so Unix only, I guess. I run OS X, so...
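#!/usr/bin/env ruby
# Scrapes the direct image links out of a gallery/forum page, wgets them in
# parallel into a hidden staging dir, then moves them into a folder under ~/tmp.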
require 'open-uri'
$home_dir = ENV['HOME']
$tmp_dir = $home_dir + "/.tmp"
`mkdir -p #{$home_dir}/tmp`   # final resting place for the pics
`mkdir -p #{$tmp_dir}`        # hidden staging dir for downloads in flight
Dir.chdir($tmp_dir)
puts "Files will be stored under -> #{$home_dir}/tmp"
# Direct full-size links on otvali.ru pages
$otvali_re = /http:\/\/img.otvali.ru\/[0-9]{4}\/[0-9]{2}\/[a-z0-9_]+_[0-9]{1,2}\.jpg/
# Thumbnail paths on friends.kz; the capture is just the filename
$friendz_re = /thumbs\/([a-z0-9_]+.jpg)/
# imageshack/imagevenue "viewer" links (they point at a page, not the pic)
$fake_link = /(http:\/\/img[0-9]+.image(shack|venue)+\.(us|com)+\/(my|img).php\?image=(\w+_?)+\.jpg)/
# Direct imageshack-style links found on the viewer page
$shack_link = /(http:\/\/img[0-9]+.image(shack|venue)+\.(us|com)+\/(\w_?-?)+\/(\w_?-?)+\/(\w+_?)+\.jpg)/
# Relative src of the picture on an imagevenue viewer page
$av_link = /src="((\w_?-?)+\/(\w_?-?)+\/(\w+_?)+\.jpg)/i
# Thumbnail image paths on hollywoodtuna/tunaflix
$tuna_small = /(\/images\/(\w+_?)+\.jpg)/
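# How the superiorpics chain fits together (URLs here are made up for illustration):
#   $fake_link  matches a viewer link like http://img55.imagevenue.com/img.php?image=foo_bar.jpg
#   $av_link    then digs the relative src (e.g. loc55/12345/foo_bar.jpg) out of that page,
#   and the viewer's host gets glued back on to form the direct .jpg URL for wget.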
def makeNewFolder
  puts "OK, where do you want to hide these? Or ENTER and I'll do it for you."
  answer = gets
  if answer != "\n"
    $new_folder_name = answer.chomp.gsub(/[\s\W]/, '_')
  else
    # No name given: derive one from the page URL
    $new_folder_name = File.basename($address, ".*").gsub(/[\s\W]/, '_')
  end
  $path_to_new_dir = $home_dir + "/tmp/" + $new_folder_name + "/"
  `mkdir -p #{$path_to_new_dir}`
  puts "Created: " + $new_folder_name
  puts "Path --> " + $path_to_new_dir
end
def cleanUp
  puts "Moving files to #{$path_to_new_dir}..."
  # Everything we fetch ends in .jpg, so this grabs the whole batch
  `mv #{$tmp_dir}/*.jpg #{$path_to_new_dir}`
end
$pages = Array.new
puts "Enter URL:"
$address = gets.chomp
if $address =~ /otvali.ru/
  puts "Scanning page..."
  # Drop the "_s__" variants and keep the full-size links
  $pages = open($address).read.scan($otvali_re).delete_if { |x| x =~ /_s__/ }
elsif $address =~ /friends.kz/
  puts "Scanning page..."
  # scan with a capture group returns nested arrays, hence flatten;
  # flatten! would return nil when the page yields no matches
  $pages = open($address).read.scan($friendz_re).flatten.uniq
  $pages.collect! { |path| "http://friends.kz/uploads/posts/" + path }
elsif $address =~ /superiorpics/
  puts "Scanning page..."
  # Keep only the full viewer-page URLs, not the regex sub-captures
  $pages = open($address).read.scan($fake_link).flatten.delete_if { |i| i !~ /http/ }
  $pages.collect! do |page|
    if page =~ /imageshack/
      puts "Resolving #{page}"
      open(page).read.scan($shack_link)[0][0]
    elsif page =~ /imagevenue/
      puts "Resolving #{page}"
      # The viewer page only holds a relative src; glue the host back on
      path_to_pic = open(page).read.scan($av_link)[0][0]
      "http://" + URI.parse(page).host + "/" + path_to_pic
    end
  end
elsif $address =~ /(hollywoodtuna|tunaflix)/
  puts "Who do you want? ENTER to take 'em all! Start with OMIT for negative search."
  # "foo bar" becomes /foo|bar/; a leading OMIT inverts the filter below
  search_string = gets.chomp.gsub(/ /, "|")
  tuna_want = Regexp.new(search_string)
  puts tuna_want
  puts "Scanning page..."
  $pages = open($address).read.scan($tuna_small).flatten.delete_if { |i| i !~ /(th|tn|small)\.jpg$/ }
  $pages.collect! do |page|
    # Thumbs live under /images/...th.jpg, the originals under /images/bigimages/...big.jpg
    page.gsub!("images", "images/bigimages")
    page.gsub!(/(th|tn|small)\.jpg$/, "big.jpg")
    "http://" + URI.parse($address).host + page
  end
  if search_string =~ /^OMIT/
    # the literal OMIT| left in the regex is harmless: no URL contains it
    $pages.delete_if { |i| i =~ tuna_want }
  else
    $pages.delete_if { |i| i !~ tuna_want }
  end
end
$pages.compact!   # the superiorpics branch can leave nils behind
$pages.uniq!
puts $pages
if $pages.length == 0
  puts "Yeah right. Nice try. Off you go!"
  exit
elsif $pages.length <= 20
  puts "What?! #{$pages.length}, is that all?"
else
  puts "What're y'fuckin nuts??! #{$pages.length} !!!"
end
makeNewFolder
$threads = []
$down_count = 0
for pic_to_fetch in $pages
  $threads << Thread.new(pic_to_fetch) do |link|
    puts "Fetching #{link}"
    %x{wget --quiet --random-wait "#{link}"}
    # A bare += on a shared counter is racy in theory; MRI's global lock
    # makes it a non-issue for a progress count
    $down_count += 1
    puts "Downloaded #{$down_count}/#{$pages.length}"
  end
end
$threads.each { |thr| thr.join }
dwn_actual = Dir.entries($tmp_dir).length - 2   # -2 for "." and ".."
cleanUp
dwn_moved = Dir.entries($path_to_new_dir).length - 2
if $pages.length == dwn_actual and dwn_actual == dwn_moved
  puts "All completed @ #{Time.now.hour}:#{Time.now.min}. See y'around, dirty boy."
elsif dwn_moved < dwn_actual
  diff = dwn_actual - dwn_moved
  puts "#{diff} got lost in the moving, baby. Goodbye."
elsif dwn_actual < $pages.length
  diff = $pages.length - dwn_actual
  puts "#{diff} got lost coming down. Goodbye."
end
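To run it: save as, say, picgrab.rb, then ruby picgrab.rb, paste the gallery URL at the prompt, name the folder (or just hit ENTER) and wget does the rest. Just make sure wget is actually on your PATH.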