So I decided to make a script that downloads them all so I can check them later; plus, I have a friend interested in tattoos who finds the pics very useful.
Anyway, this is the code, written in Python:
Code: Select all
import urllib2
import re
report = open("failed_DL.txt","w")
imgnb = 1
baseurl = "http://www.girlsinink.tumblr.com/page/" #this part of url does not change
for n in range(1,101): #loop through the pages
print "Page number: " + str(n)
page = baseurl + str(n) #we add a number at the and of base url to get page url
srcode = urllib2.urlopen(page) #get the page source code
lines = srcode.readlines() #split them into a line list
for i in lines: #loop throu the page lines
if i.find('photo2') > -1: #find the line that contain the image url, it have "photo2" in it
picline = i #get the line
picline.strip()
picurl = re.findall(r'http://[\'"]?([^\'" >]+)', picline)
imgurl = "http://" + picurl[0]
print imgurl
try:
opener1 = urllib2.build_opener()
page1 = opener1.open(imgurl) #open the picture page
my_picture = page1.read() #read it
imgname = str(imgnb) + ".jpg"
print imgname
fout = open(imgname, "wb") #open the file for writing
fout.write(my_picture) #write the picture to it
fout.close() #close the file
imgnb = imgnb+1
except Exception:
print Exception
report.write(imgurl+"\n")
report.close()