home *** CD-ROM | disk | FTP | other *** search
/ Netrunner 2004 October / NETRUNNER0410.ISO / regular / iria107a.lzh / script / webjump.pyc (.txt) < prev    next >
Encoding:
Python Compiled Bytecode  |  2000-11-20  |  2.4 KB  |  61 lines

  1. # Source Generated with Decompyle++
  2. # File: in.pyc (Python 2.0)
  3.  
  4. import sys
  5. import httplib
  6. import re
  7. import string
  8. import os.path as os
  9. import urlparse
  10. from gaogaolib import *
# Pattern matching any URL hosted on webjump.com or one of its themed
# sister domains (jumpmovies.com, jumptunes.com, ...).  Compiled once at
# import time; matching is case-insensitive.
hosts = '^http://.+\\.' + '(webjump|jumpmovies|jumptunes|jumptravel|jumpsports|' + 'jumpshopping|jumpreligion|jumprealestate|jumppolitics|' + 'jumpoutdoors|jumphobbies|jumphealth|jumpgardens|' + 'jumpgames|jumpfood|jumpfinance|jumpfamily|jumpeducation|' + 'jumpfun|jumpcomputers|jumpcareers|jumpbusiness|jumpbooks|' + 'jumpautos|jumpart)\\.com'
re_hosts = re.compile(hosts, re.IGNORECASE)
  13.  
  14. def OnRequest(url, headers, proxy = '', redirect = FALSE, item = None, misc = None):
  15.     '''HTTP\x83\x8a\x83N\x83G\x83X\x83g\x82\xcc\x91O\x82\xc9\x8c\xc4\x82\xce\x82\xea\x82\xdc\x82\xb7\x81B\x95\xd4\x92l\x82\xcd \x83t\x83\x89\x83O\x82\xc6\x90V\x82\xb5\x82\xa2URL\x82\xcc\x83^\x83v\x83\x8b'''
  16.     if redirect:
  17.         return (INGNORE, url)
  18.     
  19.     if not re_hosts.search(url):
  20.         return (IGNORE, url)
  21.     else:
  22.         print '\x90V\x82\xb5\x82\xa2URL\x82\xf0\x92T\x82\xb5\x82\xdc\x82\xb7'
  23.         url = restore_webjump(url)
  24.         url_info = parse_url(url)
  25.         (host, port, path) = extract_url(url, proxy)
  26.         http = httplib.HTTP(host, port)
  27.         http.set_debuglevel(1)
  28.         
  29.         try:
  30.             http.putrequest('GET', path)
  31.             keys = headers.keys()
  32.             for field in keys:
  33.                 if string.lower(field) == 'range':
  34.                     pass
  35.                 elif string.lower(field) == 'host':
  36.                     http.putheader('Host', url_info['host'])
  37.                 else:
  38.                     http.putheader(field, headers[field])
  39.             
  40.             http.endheaders()
  41.             (status_num, status, ret_headers) = http.getreply()
  42.             if status_num == 200:
  43.                 f = http.getfile()
  44.                 html = f.read()
  45.                 f.close()
  46.                 m = re.search('http://.+' + url_info['path'], html, re.IGNORECASE)
  47.                 if m:
  48.                     ret_url = html[m.start(0):m.end(0)]
  49.                     print '\x90V\x82\xb5\x82\xa2URL\x82\xcc\x8e\xe6\x93\xbe\x82\xc9\x90\xac\x8c\xf7\x82\xb5\x82\xdc\x82\xb5\x82\xbd'
  50.                     print ret_url
  51.                     headers['Host'] = urlparse.urlparse(ret_url)[1]
  52.                     return (SUCCESS, ret_url)
  53.                 else:
  54.                     return (ERROR, url)
  55.             else:
  56.                 return (ERROR, url)
  57.         finally:
  58.             http.close()
  59.  
  60.  
  61.