
#name=homestead
#version=0.1
#author=Wolfy
#date=2000/11/22
#comment=Converts homestead URLs and downloads the file
#func=OnRequest
#category=download
#param_comment=

import sys, httplib, re, string, os.path, urlparse
from gaogaolib import *
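# Note: IGNORE, SUCCESS, ERROR and the URL helpers used below
# (restore_homestead, parse_url, extract_url) are assumed to come from gaogaolib.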

# homestead host name pattern
hosts = r'^http://.+\.homestead\.com'
# file extensions to ignore
ignore_ext = r'.*\.(jpg|htm|css|gif|wav|au|mid|class|swf|jar)$'

def OnRequest(url,headers,proxy,redirect,item,param):
    r'''Called before each HTTP request. Returns a tuple of (flag, new URL).'''
    # If this is a redirect, we are done
    if redirect:
      return (IGNORE,url)

    # Compile the host regular expression
    re_hosts = re.compile(hosts,re.IGNORECASE)
    # Examine the URL
    if not re_hosts.search(url):
        # Not a homestead URL, so we are done
        return (IGNORE,url)
    else: # Found one, so fetch it with GET
        # Check the file extension
        re_ignore_ext = re.compile(ignore_ext,re.IGNORECASE)
        if re_ignore_ext.search(url):
          return (IGNORE,url)

        print r'''Looking for the new URL'''
        # If the url has already been converted, restore the original
        url = restore_homestead(url)
        url_info = parse_url(url)

        # Get host, port and path
        (host,port,path) = extract_url(url,proxy)
        # Connect
        http = httplib.HTTP(host,port)
        http.set_debuglevel(1)
        try:
            # Send the request line
            http.putrequest('GET',path)
            # Send the headers
            keys = headers.keys()
            for field in keys:
                # Ignore the Range: header
                if string.lower(field) == 'range':
                  pass
                elif string.lower(field) == 'host':
                  http.putheader('Host',url_info['host'])
                else:
                  http.putheader(field,headers[field])

            # End of request
            http.endheaders()
            # Read the response
            status_num,status,ret_headers = http.getreply()
            # On success
            if status_num == 200:
                # Read the html
                f = http.getfile()
                html = f.read()
                f.close()
                # Look for the real download path
                m = re.search(r'http://freeload.+' + url_info['filename'],html,re.IGNORECASE)
                # If the path was found
                if m:
                    # Success: return the new URL
                    ret_url = html[m.start(0):m.end(0)]
                    print r'''Successfully obtained the new URL'''
                    print ret_url
                    # Adjust the Host header
                    headers['Host'] = urlparse.urlparse(ret_url)[1]
                    # Success
                    return (SUCCESS,ret_url)
                else:
                    # Not found, so report an error
                    return (ERROR,url)
            else:
                # Error
                return (ERROR,url)
        finally: # Don't forget to clean up
            http.close()
#end OnRequest
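
# A rough sketch of how the host application is assumed to call this hook;
# the surrounding logic below is illustrative, not part of Iria itself:
#
#   flag, new_url = OnRequest(url, headers, proxy, redirect, item, param)
#   if flag == SUCCESS:   # download from new_url with the adjusted headers
#       ...
#   elif flag == ERROR:   # give up on this item
#       ...
#   else:                 # IGNORE: proceed with the original url unchanged
#       ...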