""" robotparser.py

    Copyright (C) 2000  Bastian Kleineidam

    You can choose between two licenses when using this package:
    1) GNU GPLv2
    2) PYTHON 2.0 OPEN SOURCE LICENSE

    The robots.txt Exclusion Protocol is implemented as specified in
    http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
"""

import re
import urlparse
import urllib

__all__ = ["RobotFileParser"]

debug = 0

def _debug(msg):
    if debug:
        print msg


class RobotFileParser:
    def __init__(self, url=''):
        self.entries = []
        self.disallow_all = 0
        self.allow_all = 0
        self.set_url(url)
        self.last_checked = 0

    def mtime(self):
        return self.last_checked

    def modified(self):
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        self.url = url
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        opener = URLopener()
        f = opener.open(self.url)
        lines = f.readlines()
        self.errcode = opener.errcode
        if self.errcode == 401 or self.errcode == 403:
            self.disallow_all = 1
            _debug("disallow all")
        elif self.errcode >= 400:
            self.allow_all = 1
            _debug("allow all")
        elif self.errcode == 200 and lines:
            _debug("parse lines")
            self.parse(lines)
    def parse(self, lines):
        """parse the input lines from a robot.txt file.
           We allow that a user-agent: line is not preceded by
           one or more blank lines."""
        state = 0
        linenumber = 0
        entry = Entry()

        for line in lines:
            line = line.strip()
            linenumber = linenumber + 1
            if not line:
                if state == 1:
                    _debug("line %d: warning: you should insert"
                           " allow: or disallow: directives below any"
                           " user-agent: line" % linenumber)
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self.entries.append(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = line[1].strip()
                if line[0] == "user-agent":
                    if state == 2:
                        _debug("line %d: warning: you should insert a blank"
                               " line before any user-agent"
                               " directive" % linenumber)
                        self.entries.append(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state == 0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], 0))
                        state = 2
                elif line[0] == "allow":
                    if state == 0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], 1))
                else:
                    _debug("line %d: warning: unknown key %s" % (linenumber,
                                                                 line[0]))
            else:
                _debug("line %d: error: malformed line %s" % (linenumber, line))
        if state == 2:
            self.entries.append(entry)
        _debug("Parsed rules:\n%s" % str(self))
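    # For reference, parse() above consumes robots.txt records shaped like the
    # following sketch (illustrative values only, not taken from any real site):
    #
    #   User-agent: CherryPickerSE
    #   Disallow: /cgi-bin/event-search
    #
    #   User-agent: *
    #   Allow: /public
    #   Disallow: /private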
    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        _debug("Checking robot.txt allowance for:\n  user agent: %s\n  url: %s" %
               (useragent, url))
        if self.disallow_all:
            return 0
        if self.allow_all:
            return 1
        # search for the given user agent; the first matching entry counts
        url = urllib.quote(urlparse.urlparse(url)[2]) or "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # agent not found ==> access granted
        return 1
    def __str__(self):
        ret = ""
        for entry in self.entries:
            ret = ret + str(entry) + "\n"
        return ret

class RuleLine:
    """A rule line is a single "Allow:" (allowance==1) or "Disallow:"
       (allowance==0) followed by a path."""
    def __init__(self, path, allowance):
        self.path = urllib.quote(path)
        self.allowance = allowance

    def applies_to(self, filename):
        return self.path == "*" or re.match(self.path, filename)

    def __str__(self):
        return (self.allowance and "Allow" or "Disallow") + ": " + self.path

class Entry:
    """An entry has one or more user-agents and zero or more rulelines"""
    def __init__(self):
        self.useragents = []
        self.rulelines = []

    def __str__(self):
        ret = ""
        for agent in self.useragents:
            ret = ret + "User-agent: " + agent + "\n"
        for line in self.rulelines:
            ret = ret + str(line) + "\n"
        return ret

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # split off the name token and make it lower case
        useragent = useragent.split("/")[0].lower()
        for agent in self.useragents:
            if agent == '*':
                # we have the catch-all agent
                return 1
            agent = agent.lower()
            if re.search(re.escape(useragent), agent):
                return 1
        return 0

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        for line in self.rulelines:
            _debug((filename, str(line), line.allowance))
            if line.applies_to(filename):
                return line.allowance
        return 1
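# Note on Entry.applies_to above: the client's user agent is reduced to the
# token before '/' and lower-cased, then searched for inside each recorded
# agent name; '*' matches every agent.  Illustrative example: an entry listing
# 'CherryPickerSE' applies to the user agent 'CherryPickerSE/1.0'.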
class URLopener(urllib.FancyURLopener):
    def __init__(self, *args):
        apply(urllib.FancyURLopener.__init__, (self,) + args)
        self.errcode = 200
        self.tries = 0
        self.maxtries = 10

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        self.errcode = errcode
        return urllib.FancyURLopener.http_error_default(self, url, fp, errcode,
                                                        errmsg, headers)

    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
        self.tries += 1
        if self.tries >= self.maxtries:
            return self.http_error_default(url, fp, 500,
                                           "Internal Server Error: Redirect Recursion",
                                           headers)
        result = urllib.FancyURLopener.http_error_302(self, url, fp, errcode,
                                                      errmsg, headers, data)
        self.tries = 0
        return result

def _check(a, b):
    if not b:
        ac = "access denied"
    else:
        ac = "access allowed"
    if a != b:
        print "failed"
    else:
        print "ok (%s)" % ac
    print
def _test():
    global debug
    rp = RobotFileParser()
    debug = 1

    # robots.txt that exists, gotten to by redirection
    rp.set_url('http://www.musi-cal.com/robots.txt')
    rp.read()

    # test for re.escape
    _check(rp.can_fetch('*', 'http://www.musi-cal.com/'), 1)
    # this should match the first rule, which is a disallow
    _check(rp.can_fetch('', 'http://www.musi-cal.com/'), 0)
    # various cherry pickers
    _check(rp.can_fetch('CherryPickerSE',
                        'http://www.musi-cal.com/cgi-bin/event-search'
                        '?city=San+Francisco'), 0)
    _check(rp.can_fetch('CherryPickerSE/1.0',
                        'http://www.musi-cal.com/cgi-bin/event-search'
                        '?city=San+Francisco'), 0)
    _check(rp.can_fetch('CherryPickerSE/1.5',
                        'http://www.musi-cal.com/cgi-bin/event-search'
                        '?city=San+Francisco'), 0)
    # case sensitivity
    _check(rp.can_fetch('ExtractorPro', 'http://www.musi-cal.com/blubba'), 0)
    _check(rp.can_fetch('extractorpro', 'http://www.musi-cal.com/blubba'), 0)
    # substring test
    _check(rp.can_fetch('toolpak/1.1', 'http://www.musi-cal.com/blubba'), 0)
    # tests for catch-all * agent
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/search'), 0)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/Musician/me'), 1)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1)

    # robots.txt that does not exist
    rp.set_url('http://www.lycos.com/robots.txt')
    rp.read()
    _check(rp.can_fetch('Mozilla', 'http://www.lycos.com/search'), 1)
if __name__ == '__main__':
    _test()
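# Minimal usage sketch (the URL below is only an illustration; it assumes the
# host actually serves a robots.txt file):
#
#   rp = RobotFileParser()
#   rp.set_url('http://www.example.com/robots.txt')
#   rp.read()
#   if rp.can_fetch('MyCrawler/1.0', 'http://www.example.com/private.html'):
#       print "fetch allowed"
#   else:
#       print "fetch disallowed"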