/ Netrunner 2004 October / NETRUNNER0410.ISO / regular / iria107a.lzh / script / homestead.pyc (.txt)
Encoding: Python Compiled Bytecode  |  2000-11-20  |  2.3 KB  |  68 lines

# Source Generated with Decompyle++
# File: in.pyc (Python 2.0)

import sys
import httplib
import re
import string
import os.path
from gaogaolib import *

# Handle only homestead.com URLs; static assets are skipped by extension.
hosts = '^http://.+\\.homestead\\.com'
re_hosts = re.compile(hosts, re.IGNORECASE)
ignore_ext = '.*\\.(jpg|htm|css|gif|wav|au|mid|class|swf|jar)$'

def OnRequest(url, headers, proxy = '', redirect = FALSE, item = None, misc = None):
    '''Called before the HTTP request. Returns a tuple of a flag and the new URL.'''
    if redirect:
        return (IGNORE, url)
    
    if not re_hosts.search(url):
        return (IGNORE, url)
    else:
        re_ignore_ext = re.compile(ignore_ext, re.IGNORECASE)
        if re_ignore_ext.search(url):
            return (IGNORE, url)
        
        url_info = parse_url(url)
        print 'Looking for the new URL'
        new_url = restore_homestead(url)
        if new_url != url:
            url = new_url
            url_info = parse_url(url)
        
        (host, port, path) = extract_url(url, proxy)
        http = httplib.HTTP(host, port)
        http.set_debuglevel(1)
        
        try:
            # Replay the original request headers, dropping any Range header
            # and rewriting Host to match the (possibly restored) URL.
            http.putrequest('GET', path)
            keys = headers.keys()
            for field in keys:
                if string.lower(field) == 'range':
                    pass
                elif string.lower(field) == 'host':
                    http.putheader('Host', url_info['host'])
                else:
                    http.putheader(field, headers[field])
            
            http.endheaders()
            (status_num, status, ret_headers) = http.getreply()
            if status_num == 200:
                # Scan the returned page for a freeload* URL ending in the
                # requested filename; that is the real download location.
                f = http.getfile()
                html = f.read()
                f.close()
                m = re.search('http://freeload.+' + url_info['filename'], html, re.IGNORECASE)
                if m:
                    ret_url = html[m.start(0):m.end(0)]
                    print 'Successfully obtained the new URL'
                    print ret_url
                    return (SUCCESS, ret_url)
                else:
                    return (ERROR, url)
            else:
                return (ERROR, url)
        finally:
            http.close()
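
For reference, the host and extension filters at the top of the script can be exercised on their
own with nothing but the standard re module. This is a small stand-alone sketch; the sample URLs
are hypothetical and are not taken from the script.

import re

re_hosts = re.compile('^http://.+\\.homestead\\.com', re.IGNORECASE)
re_ignore_ext = re.compile('.*\\.(jpg|htm|css|gif|wav|au|mid|class|swf|jar)$', re.IGNORECASE)

# Hypothetical URLs for illustration only.
print re_hosts.search('http://example.homestead.com/files/tool.zip') is not None    # matched: handled by the script
print re_ignore_ext.search('http://example.homestead.com/index.htm') is not None    # matched: static page, skipped
print re_hosts.search('http://www.example.com/files/tool.zip') is not None          # no match: not a homestead.com URL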