| bugged (string, lengths 4 to 228k) | fixed (string, lengths 0 to 96.3M) | __index_level_0__ (int64, 0 to 481k) |
|---|---|---|
| def testFilter (self): self.filt("""<script src="#ivwbox.de"></script>""", "") | def testFilter (self): self.filt("""<script src="#ivwbox.de"></script>""", "") | 16,200 |
| def rawrequest3 (url, port): """raw request with proxy CONNECT protocol""" from wc.proxy.Dispatcher import create_socket from urllib import splitnport parts = urlparse.urlsplit(url) host, sslport = splitnport(parts[1], 443) path = urlparse.urlunsplit(('', '', parts[2], parts[3], parts[4])) _sock = create_socket(socket.AF_INET, socket.SOCK_STREAM) addr = (socket.gethostbyname('localhost'), port) _sock.connect(addr) _sock.send('CONNECT %s:%d HTTP/1.1\r\n' % (host, sslport)) _sock.send('User-Agent: getssl\r\n') _sock.send('\r\n') buf = "" while True: buf += _sock.recv(1024) if "\r\n\r\n" in buf: break print repr(buf) print "initiating SSL handshake" sock = socket.ssl(_sock) print "write SSL request...", sock.write("GET %s HTTP/1.1\r\n" % url) sock.write("Host: %s\r\n" % host) sock.write("\r\n") print " ok." while True: try: print "XXX read" print repr(sock.read(80)) except socket.sslerror, msg: print "Oops" break _sock.close() | def rawrequest3 (url, port): """raw request with proxy CONNECT protocol""" from wc.proxy.Dispatcher import create_socket from urllib import splitnport parts = urlparse.urlsplit(url) host, sslport = splitnport(parts[1], 443) path = urlparse.urlunsplit(('', '', parts[2], parts[3], parts[4])) _sock = create_socket(socket.AF_INET, socket.SOCK_STREAM) addr = (socket.gethostbyname('localhost'), port) _sock.connect(addr) _sock.send('CONNECT %s:%d HTTP/1.1\r\n' % (host, sslport)) _sock.send('User-Agent: getssl\r\n') _sock.send('\r\n') buf = "" while True: buf += _sock.recv(1024) if "\r\n\r\n" in buf: break print repr(buf) print "initiating SSL handshake" sock = socket.ssl(_sock) print "write SSL request...", sock.write("GET %s HTTP/1.1\r\n" % url) sock.write("Host: %s\r\n" % host) sock.write("\r\n") print " ok." while True: try: print repr(sock.read(80)) except socket.sslerror, msg: print "Oops" break _sock.close() | 16,201 |
| def rawrequest3 (url, port): """raw request with proxy CONNECT protocol""" from wc.proxy.Dispatcher import create_socket from urllib import splitnport parts = urlparse.urlsplit(url) host, sslport = splitnport(parts[1], 443) path = urlparse.urlunsplit(('', '', parts[2], parts[3], parts[4])) _sock = create_socket(socket.AF_INET, socket.SOCK_STREAM) addr = (socket.gethostbyname('localhost'), port) _sock.connect(addr) _sock.send('CONNECT %s:%d HTTP/1.1\r\n' % (host, sslport)) _sock.send('User-Agent: getssl\r\n') _sock.send('\r\n') buf = "" while True: buf += _sock.recv(1024) if "\r\n\r\n" in buf: break print repr(buf) print "initiating SSL handshake" sock = socket.ssl(_sock) print "write SSL request...", sock.write("GET %s HTTP/1.1\r\n" % url) sock.write("Host: %s\r\n" % host) sock.write("\r\n") print " ok." while True: try: print "XXX read" print repr(sock.read(80)) except socket.sslerror, msg: print "Oops" break _sock.close() | def rawrequest3 (url, port): """raw request with proxy CONNECT protocol""" from wc.proxy.Dispatcher import create_socket from urllib import splitnport parts = urlparse.urlsplit(url) host, sslport = splitnport(parts[1], 443) path = urlparse.urlunsplit(('', '', parts[2], parts[3], parts[4])) _sock = create_socket(socket.AF_INET, socket.SOCK_STREAM) addr = (socket.gethostbyname('localhost'), port) _sock.connect(addr) _sock.send('CONNECT %s:%d HTTP/1.1\r\n' % (host, sslport)) _sock.send('User-Agent: getssl\r\n') _sock.send('\r\n') buf = "" while True: buf += _sock.recv(1024) if "\r\n\r\n" in buf: break print repr(buf) print "initiating SSL handshake" sock = socket.ssl(_sock) print "write SSL request...", sock.write("GET %s HTTP/1.1\r\n" % url) sock.write("Host: %s\r\n" % host) sock.write("\r\n") print " ok." while True: try: print "XXX read" print repr(sock.read(80)) except socket.sslerror, msg: if msg[0] != 6: print "Oops", msg break _sock.close() | 16,202 |
| def _reset_ratings (): ratings.clear() for category, catdata in service['categories'].items(): if catdata['rvalues']: ratings[category] = catdata['rvalues'][0] else: ratings[category] = "" values.clear() for category, value in ratings.items(): if category not in ["generic", "modified"]: values[category] = {value: True} rating_modified.clear() | def _reset_ratings (): for category, catdata in service['categories'].items(): if catdata['rvalues']: ratings[category] = catdata['rvalues'][0] else: ratings[category] = "" values.clear() for category, value in ratings.items(): if category not in ["generic", "modified"]: values[category] = {value: True} rating_modified.clear() | 16,203 |
| def _reset_ratings (): ratings.clear() for category, catdata in service['categories'].items(): if catdata['rvalues']: ratings[category] = catdata['rvalues'][0] else: ratings[category] = "" values.clear() for category, value in ratings.items(): if category not in ["generic", "modified"]: values[category] = {value: True} rating_modified.clear() | def _reset_ratings (): ratings.clear() for category, catdata in service['categories'].items(): if catdata['rvalues']: ratings[category] = catdata['rvalues'][0] else: ratings[category] = "" values[category] = {} for value in catdata['rvalues']: values[category][value] = False rating_modified.clear() | 16,204 |
| def _calc_ratings_display (): global ratings_display urls = rating_cache.keys() urls.sort() ratings_display = urls[curindex:curindex+_entries_per_page] for _url in ratings_display: t = _strtime(float(rating_cache[_url]['modified'])) rating_modified[_url] = t.replace(u" ", u" ") | def _calc_ratings_display (): global ratings_display urls = rating_cache.keys() urls.sort() ratings_display = urls[curindex:curindex+_entries_per_page] for _url in ratings_display: t = _strtime(float(rating_cache[_url]['modified'])) rating_modified[_url] = t.replace(u" ", u" ") | 16,205 |
| def _form_reset (): info.clear() error.clear() _reset_ratings() global url, generic, curindex url = u"" generic = False curindex = 0 | def _form_reset (): for key in info.keys(): info[key] = False for key in error.keys(): error[key] = False _reset_ratings() global url, generic, curindex url = u"" generic = False curindex = 0 | 16,206 |
| def clean (script, jscomments=True): """ Clean script from comments and HTML. """ script = escape_js(remove_html_comments(script)) if not jscomments: script = remove_js_comments(script) return u"\n<!--\n%s\n//-->\n" % script | def clean (script, jscomments=True): """ Clean script from comments and HTML. """ script = escape_js(remove_html_comments(script, jscomments=jscomments)) return u"\n<!--\n%s\n//-->\n" % script | 16,207 |
| def clean (script, jscomments=True): """ Clean script from comments and HTML. """ script = escape_js(remove_html_comments(script)) if not jscomments: script = remove_js_comments(script) return u"\n<!--\n%s\n//-->\n" % script | def clean (script, jscomments=True): """ Clean script from comments and HTML. """ script = escape_js(remove_html_comments(script)) if not jscomments: script = remove_js_comments(script) return u"\n<!--\n%s\n//-->\n" % script | 16,208 |
| def escape_js_line (script): # if we encounter "</script>" in the script, we assume that is # in a quoted string. The solution is to split it into # "</scr"+"ipt>" (with the proper quotes of course) quote = False escape = False i = 0 while i < len(script): c = script[i] if c == '"' or c == "'": if not escape: if quote == c: quote = False elif not quote: quote = c escape = False elif c == '\\': escape = not escape elif c == '<': if script[i:i+9].lower() == '</script>' and quote: script = script[:i]+"</scr"+quote+"+"+quote+"ipt>"+\ script[(i+9):] escape = False else: escape = False i += 1 #script = script.replace('-->', '-->') #script = script_sub("</script>", script) return script | def escape_js_line (script): # if we encounter "</script>" in the script, we assume that is # in a quoted string. The solution is to split it into # "</scr"+"ipt>" (with the proper quotes of course) quote = False escape = False i = 0 while i < len(script): c = script[i] if c == '"' or c == "'": if not escape: if quote == c: quote = False elif not quote: quote = c escape = False elif c == '\\': escape = not escape elif c == '<': if script[i:i+9].lower() == '</script>': if quote: script = script[:i] + "</scr" + quote + "+" + \ quote + "ipt>" + script[(i+9):] else: script = script[:i] + script[(i+9):] escape = False else: escape = False i += 1 #script = script.replace('-->', '-->') #script = script_sub("</script>", script) return script | 16,209 |
| def remove_html_comments (script): """ Remove leading and trailing HTML comments from the script text. """ script = remove_whitespace(script) mo = _start_js_comment(script) if mo: script = script[mo.end():] mo = _end_js_comment(script) if mo: if mo.group("comment") is not None: i = script.rindex("//") else: i = script.rindex("-->") script = script[:i] return script.strip() | def remove_html_comments (script, jscomments=True): """ Remove leading and trailing HTML comments from the script text. """ script = remove_whitespace(script) mo = _start_js_comment(script) if mo: script = script[mo.end():] mo = _end_js_comment(script) if mo: if mo.group("comment") is not None: i = script.rindex("//") else: i = script.rindex("-->") script = script[:i] return script.strip() | 16,210 |
| def remove_html_comments (script): """ Remove leading and trailing HTML comments from the script text. """ script = remove_whitespace(script) mo = _start_js_comment(script) if mo: script = script[mo.end():] mo = _end_js_comment(script) if mo: if mo.group("comment") is not None: i = script.rindex("//") else: i = script.rindex("-->") script = script[:i] return script.strip() | def remove_html_comments (script): """ Remove leading and trailing HTML comments from the script text. """ lines = [] for line in script.splitlines(): line = line.rstrip() if line and (jscomments or not line.lstrip().startswith('//')): lines.append(line) script = "\n".join(lines) mo = _start_js_comment(script) if mo: script = script[mo.end():] mo = _end_js_comment(script) if mo: if mo.group("comment") is not None: i = script.rindex("//") else: i = script.rindex("-->") script = script[:i] return script.strip() | 16,211 |
| def decode (page): "gunzip or deflate a compressed page" encoding = page.info().get("Content-Encoding") if encoding in ('gzip', 'x-gzip', 'deflate'): # cannot seek in socket descriptors, so must get content now content = page.read() if encoding == 'deflate': fp = StringIO.StringIO(zlib.decompress(content)) else: fp = gzip.GzipFile('', 'rb', 9, StringIO(content)) # remove content-encoding header headers = {} ceheader = re.compile(r"(?i)content-encoding:") for h in page.info().keys(): if not ceheader.match(h): headers[h] = page.info()[h] page = urllib.addinfourl(fp, headers, page.geturl()) return page | def decode (page): "gunzip or deflate a compressed page" encoding = page.info().get("Content-Encoding") if encoding in ('gzip', 'x-gzip', 'deflate'): # cannot seek in socket descriptors, so must get content now content = page.read() if encoding == 'deflate': fp = StringIO.StringIO(zlib.decompress(content)) else: fp = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(content)) # remove content-encoding header headers = {} ceheader = re.compile(r"(?i)content-encoding:") for h in page.info().keys(): if not ceheader.match(h): headers[h] = page.info()[h] page = urllib.addinfourl(fp, headers, page.geturl()) return page | 16,212 |
| def jsProcessData (self, data): """process data produced by document.write() JavaScript""" #self._debug("JS document.write %s", `data`) self.js_output += 1 # parse recursively self.js_html.feed(data) | def jsProcessData (self, data): """process data produced by document.write() JavaScript""" self._debug("JS document.write %s", `data`) self.js_output += 1 # parse recursively self.js_html.feed(data) | 16,213 |
| def jsProcessPopup (self): """process javascript popup""" #self._debug("JS: popup") self.js_popup += 1 | def jsProcessPopup (self): """process javascript popup""" self._debug("JS: popup") self.js_popup += 1 | 16,214 |
| def buf_append_data (self, data): """we have to make sure that we have no two following DATA things in the tag buffer. Why? To be 100% sure that an ENCLOSED match really matches enclosed data. """ #self._debug("buf_append_data") if data[0]==DATA and self.buf and self.buf[-1][0]==DATA: self.buf[-1][1] += data[1] else: self.buf.append(data) | def buf_append_data (self, data): """we have to make sure that we have no two following DATA things in the tag buffer. Why? To be 100% sure that an ENCLOSED match really matches enclosed data. """ self._debug("buf_append_data") if data[0]==DATA and self.buf and self.buf[-1][0]==DATA: self.buf[-1][1] += data[1] else: self.buf.append(data) | 16,215 |
| def flushbuf (self): """clear and return the output buffer""" #self._debug("flushbuf") data = self.outbuf.getvalue() self.outbuf.close() self.outbuf = StringIO() return data | def flushbuf (self): """clear and return the output buffer""" self._debug("flushbuf") data = self.outbuf.getvalue() self.outbuf.close() self.outbuf = StringIO() return data | 16,216 |
| def _debugbuf (self): """print debugging information about data buffer status""" #self._debug("self.outbuf %s", `self.outbuf.getvalue()`) #self._debug("self.buf %s", `self.buf`) #self._debug("self.waitbuf %s", `self.waitbuf`) #self._debug("self.inbuf %s", `self.inbuf.getvalue()`) | def _debugbuf (self): """print debugging information about data buffer status""" #self._debug("self.outbuf %s", `self.outbuf.getvalue()`) #self._debug("self.buf %s", `self.buf`) #self._debug("self.waitbuf %s", `self.waitbuf`) #self._debug("self.inbuf %s", `self.inbuf.getvalue()`) | 16,217 |
| def feed (self, data): """feed some data to the parser""" if self.state=='parse': # look if we must replay something if self.waited: self.waited = 0 waitbuf, self.waitbuf = self.waitbuf, [] self.replay(waitbuf) if self.state!='parse': self.inbuf.write(data) return data = self.inbuf.getvalue() + data self.inbuf.close() self.inbuf = StringIO() if data: # only feed non-empty data #self._debug("feed %s", `data`) self.parser.feed(data) else: #self._debug("feed") pass else: # wait state ==> put in input buffer #self._debug("wait") self.inbuf.write(data) | def feed (self, data): """feed some data to the parser""" if self.state=='parse': # look if we must replay something if self.waited: self.waited = 0 waitbuf, self.waitbuf = self.waitbuf, [] self.replay(waitbuf) if self.state!='parse': self.inbuf.write(data) return data = self.inbuf.getvalue() + data self.inbuf.close() self.inbuf = StringIO() if data: # only feed non-empty data self._debug("feed %s", `data`) self.parser.feed(data) else: #self._debug("feed") pass else: # wait state ==> put in input buffer #self._debug("wait") self.inbuf.write(data) | 16,218 |
| def feed (self, data): """feed some data to the parser""" if self.state=='parse': # look if we must replay something if self.waited: self.waited = 0 waitbuf, self.waitbuf = self.waitbuf, [] self.replay(waitbuf) if self.state!='parse': self.inbuf.write(data) return data = self.inbuf.getvalue() + data self.inbuf.close() self.inbuf = StringIO() if data: # only feed non-empty data #self._debug("feed %s", `data`) self.parser.feed(data) else: #self._debug("feed") pass else: # wait state ==> put in input buffer #self._debug("wait") self.inbuf.write(data) | def feed (self, data): """feed some data to the parser""" if self.state=='parse': # look if we must replay something if self.waited: self.waited = 0 waitbuf, self.waitbuf = self.waitbuf, [] self.replay(waitbuf) if self.state!='parse': self.inbuf.write(data) return data = self.inbuf.getvalue() + data self.inbuf.close() self.inbuf = StringIO() if data: # only feed non-empty data #self._debug("feed %s", `data`) self.parser.feed(data) else: self._debug("feed") pass else: # wait state ==> put in input buffer #self._debug("wait") self.inbuf.write(data) | 16,219 |
| def feed (self, data): """feed some data to the parser""" if self.state=='parse': # look if we must replay something if self.waited: self.waited = 0 waitbuf, self.waitbuf = self.waitbuf, [] self.replay(waitbuf) if self.state!='parse': self.inbuf.write(data) return data = self.inbuf.getvalue() + data self.inbuf.close() self.inbuf = StringIO() if data: # only feed non-empty data #self._debug("feed %s", `data`) self.parser.feed(data) else: #self._debug("feed") pass else: # wait state ==> put in input buffer #self._debug("wait") self.inbuf.write(data) | def feed (self, data): """feed some data to the parser""" if self.state=='parse': # look if we must replay something if self.waited: self.waited = 0 waitbuf, self.waitbuf = self.waitbuf, [] self.replay(waitbuf) if self.state!='parse': self.inbuf.write(data) return data = self.inbuf.getvalue() + data self.inbuf.close() self.inbuf = StringIO() if data: # only feed non-empty data #self._debug("feed %s", `data`) self.parser.feed(data) else: #self._debug("feed") pass else: # wait state ==> put in input buffer self._debug("wait") self.inbuf.write(data) | 16,220 |
| def flush (self): #self._debug("flush") # flushing in wait state raises a filter exception if self.state=='wait': raise FilterWait("HtmlParser[%d]: waiting for data"%self.level) self.parser.flush() | def flush (self): self._debug("flush") # flushing in wait state raises a filter exception if self.state=='wait': raise FilterWait("HtmlParser[%d]: waiting for data"%self.level) self.parser.flush() | 16,221 |
| def replay (self, waitbuf): """call the handler functions again with buffer data""" #self._debug("replay %s", `waitbuf`) for item in waitbuf: if item[0]==DATA: self._data(item[1]) elif item[0]==STARTTAG: self.startElement(item[1], item[2]) elif item[0]==ENDTAG: self.endElement(item[1]) elif item[0]==COMMENT: self.comment(item[1]) | def replay (self, waitbuf): """call the handler functions again with buffer data""" self._debug("replay %s", `waitbuf`) for item in waitbuf: if item[0]==DATA: self._data(item[1]) elif item[0]==STARTTAG: self.startElement(item[1], item[2]) elif item[0]==ENDTAG: self.endElement(item[1]) elif item[0]==COMMENT: self.comment(item[1]) | 16,222 |
| def cdata (self, data): """character data""" #self._debug("cdata %s", `data`) return self._data(data) | def cdata (self, data): """character data""" self._debug("cdata %s", `data`) return self._data(data) | 16,223 |
| def characters (self, data): """characters""" #self._debug("characters %s", `data`) return self._data(data) | def characters (self, data): """characters""" self._debug("characters %s", `data`) return self._data(data) | 16,224 |
| def comment (self, data): """a comment; accept only non-empty comments""" #self._debug("comment %s", `data`) item = [COMMENT, data] if self.state=='wait': return self.waitbuf.append(item) if self.comments and data: self.buf.append(item) | def comment (self, data): """a comment; accept only non-empty comments""" self._debug("comment %s", `data`) item = [COMMENT, data] if self.state=='wait': return self.waitbuf.append(item) if self.comments and data: self.buf.append(item) | 16,225 |
| def doctype (self, data): #self._debug("doctype %s", `data`) return self._data("<!DOCTYPE%s>"%data) | def doctype (self, data): self._debug("doctype %s", `data`) return self._data("<!DOCTYPE%s>"%data) | 16,226 |
| def pi (self, data): #self._debug("pi %s", `data`) return self._data("<?%s?>"%data) | def pi (self, data): self._debug("pi %s", `data`) return self._data("<?%s?>"%data) | 16,227 |
| def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data #self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state=='wait': return self.waitbuf.append(item) rulelist = [] filtered = 0 if tag=="meta" and \ attrs.get('http-equiv', '').lower() =='pics-label': labels = resolve_html_entities(attrs.get('content', '')) # note: if there are no pics rules, this loop is empty for rule in self.pics: msg = check_pics(rule, labels) if msg: raise FilterPics(msg) # first labels match counts self.pics = [] elif tag=="body": # headers finished if self.pics: # no pics data found self.pics = [] elif tag=="base" and attrs.has_key('href'): self.base_url = strip_quotes(attrs['href']) debug(FILTER, "using base url %s", `self.base_url`) # look for filter rules which apply for rule in self.rules: if rule.match_tag(tag) and rule.match_attrs(attrs): #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) if rule.start_sufficient: item = rule.filter_tag(tag, attrs) filtered = "True" if item[0]==STARTTAG and item[1]==tag: foo,tag,attrs = item # give'em a chance to replace more than one attribute continue else: break else: #self._debug("put on buffer") rulelist.append(rule) if rulelist: # remember buffer position for end tag matching pos = len(self.buf) self.rulestack.append((pos, rulelist)) if filtered: self.buf_append_data(item) elif self.js_filter: # if its not yet filtered, try filter javascript self.jsStartElement(tag, attrs) else: self.buf.append(item) # if rule stack is empty, write out the buffered data if not self.rulestack and not self.js_filter: self.buf2data() | def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state=='wait': return self.waitbuf.append(item) rulelist = [] filtered = 0 if tag=="meta" and \ attrs.get('http-equiv', '').lower() =='pics-label': labels = resolve_html_entities(attrs.get('content', '')) # note: if there are no pics rules, this loop is empty for rule in self.pics: msg = check_pics(rule, labels) if msg: raise FilterPics(msg) # first labels match counts self.pics = [] elif tag=="body": # headers finished if self.pics: # no pics data found self.pics = [] elif tag=="base" and attrs.has_key('href'): self.base_url = strip_quotes(attrs['href']) debug(FILTER, "using base url %s", `self.base_url`) # look for filter rules which apply for rule in self.rules: if rule.match_tag(tag) and rule.match_attrs(attrs): #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) if rule.start_sufficient: item = rule.filter_tag(tag, attrs) filtered = "True" if item[0]==STARTTAG and item[1]==tag: foo,tag,attrs = item # give'em a chance to replace more than one attribute continue else: break else: #self._debug("put on buffer") rulelist.append(rule) if rulelist: # remember buffer position for end tag matching pos = len(self.buf) self.rulestack.append((pos, rulelist)) if filtered: self.buf_append_data(item) elif self.js_filter: # if its not yet filtered, try filter javascript self.jsStartElement(tag, attrs) else: self.buf.append(item) # if rule stack is empty, write out the buffered data if not self.rulestack and not self.js_filter: self.buf2data() | 16,228 |
def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data #self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state=='wait': return self.waitbuf.append(item) rulelist = [] filtered = 0 if tag=="meta" and \ attrs.get('http-equiv', '').lower() =='pics-label': labels = resolve_html_entities(attrs.get('content', '')) # note: if there are no pics rules, this loop is empty for rule in self.pics: msg = check_pics(rule, labels) if msg: raise FilterPics(msg) # first labels match counts self.pics = [] elif tag=="body": # headers finished if self.pics: # no pics data found self.pics = [] elif tag=="base" and attrs.has_key('href'): self.base_url = strip_quotes(attrs['href']) debug(FILTER, "using base url %s", `self.base_url`) # look for filter rules which apply for rule in self.rules: if rule.match_tag(tag) and rule.match_attrs(attrs): #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) if rule.start_sufficient: item = rule.filter_tag(tag, attrs) filtered = "True" if item[0]==STARTTAG and item[1]==tag: foo,tag,attrs = item # give'em a chance to replace more than one attribute continue else: break else: #self._debug("put on buffer") rulelist.append(rule) if rulelist: # remember buffer position for end tag matching pos = len(self.buf) self.rulestack.append((pos, rulelist)) if filtered: self.buf_append_data(item) elif self.js_filter: # if its not yet filtered, try filter javascript self.jsStartElement(tag, attrs) else: self.buf.append(item) # if rule stack is empty, write out the buffered data if not self.rulestack and not self.js_filter: self.buf2data()
|
defself._debug("matched rule %s on tag %s", `rule.title`, `tag`) startElementself._debug("matched rule %s on tag %s", `rule.title`, `tag`) (self,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) tag,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) attrs):self._debug("matched rule %s on tag %s", `rule.title`, `tag`) """Weself._debug("matched rule %s on tag %s", `rule.title`, `tag`) getself._debug("matched rule %s on tag %s", `rule.title`, `tag`) aself._debug("matched rule %s on tag %s", `rule.title`, `tag`) newself._debug("matched rule %s on tag %s", `rule.title`, `tag`) startself._debug("matched rule %s on tag %s", `rule.title`, `tag`) tag.self._debug("matched rule %s on tag %s", `rule.title`, `tag`) Newself._debug("matched rule %s on tag %s", `rule.title`, `tag`) rulesself._debug("matched rule %s on tag %s", `rule.title`, `tag`) couldself._debug("matched rule %s on tag %s", `rule.title`, `tag`) beself._debug("matched rule %s on tag %s", `rule.title`, `tag`) appendedself._debug("matched rule %s on tag %s", `rule.title`, `tag`) toself._debug("matched rule %s on tag %s", `rule.title`, `tag`) theself._debug("matched rule %s on tag %s", `rule.title`, `tag`) pendingself._debug("matched rule %s on tag %s", `rule.title`, `tag`) rules.self._debug("matched rule %s on tag %s", `rule.title`, `tag`) Noself._debug("matched rule %s on tag %s", `rule.title`, `tag`) rulesself._debug("matched rule %s on tag %s", `rule.title`, `tag`) canself._debug("matched rule %s on tag %s", `rule.title`, `tag`) beself._debug("matched rule %s on tag %s", `rule.title`, `tag`) removedself._debug("matched rule %s on tag %s", `rule.title`, `tag`) fromself._debug("matched rule %s on tag %s", `rule.title`, `tag`) theself._debug("matched rule %s on tag %s", `rule.title`, `tag`) list."""self._debug("matched rule %s on tag %s", `rule.title`, `tag`) #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) defaultself._debug("matched rule %s on tag %s", `rule.title`, `tag`) dataself._debug("matched rule %s on tag %s", `rule.title`, `tag`) #self._debug("startElementself._debug("matched rule %s on tag %s", `rule.title`, `tag`) %s",self._debug("matched rule %s on tag %s", `rule.title`, `tag`) `tag`)self._debug("matched rule %s on tag %s", `rule.title`, `tag`) tagself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) check_spelling(tag,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) self.url)self._debug("matched rule %s on tag %s", `rule.title`, `tag`) itemself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) [STARTTAG,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) tag,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) attrs]self._debug("matched rule %s on tag %s", `rule.title`, `tag`) ifself._debug("matched rule %s on tag %s", `rule.title`, `tag`) self.state=='wait':self._debug("matched rule %s on tag %s", `rule.title`, `tag`) returnself._debug("matched rule %s on tag %s", `rule.title`, `tag`) self.waitbuf.append(item)self._debug("matched rule %s on tag %s", `rule.title`, `tag`) rulelistself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) []self._debug("matched rule %s on tag %s", `rule.title`, `tag`) filteredself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) 0self._debug("matched 
rule %s on tag %s", `rule.title`, `tag`) ifself._debug("matched rule %s on tag %s", `rule.title`, `tag`) tag=="meta"self._debug("matched rule %s on tag %s", `rule.title`, `tag`) andself._debug("matched rule %s on tag %s", `rule.title`, `tag`) \self._debug("matched rule %s on tag %s", `rule.title`, `tag`) attrs.get('http-equiv',self._debug("matched rule %s on tag %s", `rule.title`, `tag`) '').lower()self._debug("matched rule %s on tag %s", `rule.title`, `tag`) =='pics-label':self._debug("matched rule %s on tag %s", `rule.title`, `tag`) labelsself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) resolve_html_entities(attrs.get('content',self._debug("matched rule %s on tag %s", `rule.title`, `tag`) ''))self._debug("matched rule %s on tag %s", `rule.title`, `tag`) #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) note:self._debug("matched rule %s on tag %s", `rule.title`, `tag`) ifself._debug("matched rule %s on tag %s", `rule.title`, `tag`) thereself._debug("matched rule %s on tag %s", `rule.title`, `tag`) areself._debug("matched rule %s on tag %s", `rule.title`, `tag`) noself._debug("matched rule %s on tag %s", `rule.title`, `tag`) picsself._debug("matched rule %s on tag %s", `rule.title`, `tag`) rules,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) thisself._debug("matched rule %s on tag %s", `rule.title`, `tag`) loopself._debug("matched rule %s on tag %s", `rule.title`, `tag`) isself._debug("matched rule %s on tag %s", `rule.title`, `tag`) emptyself._debug("matched rule %s on tag %s", `rule.title`, `tag`) forself._debug("matched rule %s on tag %s", `rule.title`, `tag`) ruleself._debug("matched rule %s on tag %s", `rule.title`, `tag`) inself._debug("matched rule %s on tag %s", `rule.title`, `tag`) self.pics:self._debug("matched rule %s on tag %s", `rule.title`, `tag`) msgself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) check_pics(rule,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) labels)self._debug("matched rule %s on tag %s", `rule.title`, `tag`) ifself._debug("matched rule %s on tag %s", `rule.title`, `tag`) msg:self._debug("matched rule %s on tag %s", `rule.title`, `tag`) raiseself._debug("matched rule %s on tag %s", `rule.title`, `tag`) FilterPics(msg)self._debug("matched rule %s on tag %s", `rule.title`, `tag`) #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) firstself._debug("matched rule %s on tag %s", `rule.title`, `tag`) labelsself._debug("matched rule %s on tag %s", `rule.title`, `tag`) matchself._debug("matched rule %s on tag %s", `rule.title`, `tag`) countsself._debug("matched rule %s on tag %s", `rule.title`, `tag`) self.picsself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) []self._debug("matched rule %s on tag %s", `rule.title`, `tag`) elifself._debug("matched rule %s on tag %s", `rule.title`, `tag`) tag=="body":self._debug("matched rule %s on tag %s", `rule.title`, `tag`) #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) headersself._debug("matched rule %s on tag %s", `rule.title`, `tag`) finishedself._debug("matched rule %s on tag %s", `rule.title`, `tag`) ifself._debug("matched rule %s on tag %s", `rule.title`, `tag`) self.pics:self._debug("matched rule %s on tag %s", `rule.title`, `tag`) #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) noself._debug("matched rule %s on tag 
%s", `rule.title`, `tag`) picsself._debug("matched rule %s on tag %s", `rule.title`, `tag`) dataself._debug("matched rule %s on tag %s", `rule.title`, `tag`) foundself._debug("matched rule %s on tag %s", `rule.title`, `tag`) self.picsself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) []self._debug("matched rule %s on tag %s", `rule.title`, `tag`) elifself._debug("matched rule %s on tag %s", `rule.title`, `tag`) tag=="base"self._debug("matched rule %s on tag %s", `rule.title`, `tag`) andself._debug("matched rule %s on tag %s", `rule.title`, `tag`) attrs.has_key('href'):self._debug("matched rule %s on tag %s", `rule.title`, `tag`) self.base_urlself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) strip_quotes(attrs['href'])self._debug("matched rule %s on tag %s", `rule.title`, `tag`) debug(FILTER,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) "usingself._debug("matched rule %s on tag %s", `rule.title`, `tag`) baseself._debug("matched rule %s on tag %s", `rule.title`, `tag`) urlself._debug("matched rule %s on tag %s", `rule.title`, `tag`) %s",self._debug("matched rule %s on tag %s", `rule.title`, `tag`) `self.base_url`)self._debug("matched rule %s on tag %s", `rule.title`, `tag`) #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) lookself._debug("matched rule %s on tag %s", `rule.title`, `tag`) forself._debug("matched rule %s on tag %s", `rule.title`, `tag`) filterself._debug("matched rule %s on tag %s", `rule.title`, `tag`) rulesself._debug("matched rule %s on tag %s", `rule.title`, `tag`) whichself._debug("matched rule %s on tag %s", `rule.title`, `tag`) applyself._debug("matched rule %s on tag %s", `rule.title`, `tag`) forself._debug("matched rule %s on tag %s", `rule.title`, `tag`) ruleself._debug("matched rule %s on tag %s", `rule.title`, `tag`) inself._debug("matched rule %s on tag %s", `rule.title`, `tag`) self.rules:self._debug("matched rule %s on tag %s", `rule.title`, `tag`) ifself._debug("matched rule %s on tag %s", `rule.title`, `tag`) rule.match_tag(tag)self._debug("matched rule %s on tag %s", `rule.title`, `tag`) andself._debug("matched rule %s on tag %s", `rule.title`, `tag`) rule.match_attrs(attrs):self._debug("matched rule %s on tag %s", `rule.title`, `tag`) #self._debug("matchedself._debug("matched rule %s on tag %s", `rule.title`, `tag`) ruleself._debug("matched rule %s on tag %s", `rule.title`, `tag`) %sself._debug("matched rule %s on tag %s", `rule.title`, `tag`) onself._debug("matched rule %s on tag %s", `rule.title`, `tag`) tagself._debug("matched rule %s on tag %s", `rule.title`, `tag`) %s",self._debug("matched rule %s on tag %s", `rule.title`, `tag`) `rule.title`,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) `tag`)self._debug("matched rule %s on tag %s", `rule.title`, `tag`) ifself._debug("matched rule %s on tag %s", `rule.title`, `tag`) rule.start_sufficient:self._debug("matched rule %s on tag %s", `rule.title`, `tag`) itemself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) rule.filter_tag(tag,self._debug("matched rule %s on tag %s", `rule.title`, `tag`) attrs)self._debug("matched rule %s on tag %s", `rule.title`, `tag`) filteredself._debug("matched rule %s on tag %s", `rule.title`, `tag`) =self._debug("matched rule %s on tag %s", `rule.title`, `tag`) "True"self._debug("matched rule %s on tag %s", `rule.title`, 
`tag`) if item[0]==STARTTAG and item[1]==tag: foo,tag,attrs = item # give'em a chance to replace more than one attribute continue else: break else: #self._debug("put on buffer") rulelist.append(rule) if rulelist: # remember buffer position for end tag matching pos = len(self.buf) self.rulestack.append((pos, rulelist)) if filtered: self.buf_append_data(item) elif self.js_filter: # if its not yet filtered, try filter javascript self.jsStartElement(tag, attrs) else: self.buf.append(item) # if rule stack is empty, write out the buffered data if not self.rulestack and not self.js_filter: self.buf2data()
| 16,229
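The tail above ends with the rule-stack bookkeeping that holds output back until every pending rule has seen its end tag. A minimal standalone sketch of that buffering idea follows; the class and method names here are illustrative stand-ins, not the project's actual API.

class Buffered(object):
    def __init__(self):
        self.buf = []        # pending output items
        self.rulestack = []  # (buffer position, rule list) pairs
        self.out = []        # data already written out

    def start(self, item, rules):
        if rules:
            # remember where this open tag sits so a matching end tag
            # can rewrite everything from that position onwards
            self.rulestack.append((len(self.buf), rules))
        self.buf.append(item)
        # only flush when no rule is still waiting for its end tag
        if not self.rulestack:
            self.buf2data()

    def buf2data(self):
        self.out.extend(self.buf)
        del self.buf[:]

b = Buffered()
b.start("<div>", rules=[])
print b.out    # ['<div>'] -- written immediately
b.start("<script>", rules=["block-js"])
print b.out    # still ['<div>']; output is held back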
|
def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data #self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state=='wait': return self.waitbuf.append(item) rulelist = [] filtered = 0 if tag=="meta" and \ attrs.get('http-equiv', '').lower() =='pics-label': labels = resolve_html_entities(attrs.get('content', '')) # note: if there are no pics rules, this loop is empty for rule in self.pics: msg = check_pics(rule, labels) if msg: raise FilterPics(msg) # first labels match counts self.pics = [] elif tag=="body": # headers finished if self.pics: # no pics data found self.pics = [] elif tag=="base" and attrs.has_key('href'): self.base_url = strip_quotes(attrs['href']) debug(FILTER, "using base url %s", `self.base_url`) # look for filter rules which apply for rule in self.rules: if rule.match_tag(tag) and rule.match_attrs(attrs): #self._debug("matched rule %s on tag %s", `rule.title`, `tag`) if rule.start_sufficient: item = rule.filter_tag(tag, attrs) filtered = "True" if item[0]==STARTTAG and item[1]==tag: foo,tag,attrs = item # give'em a chance to replace more than one attribute continue else: break else: #self._debug("put on buffer") rulelist.append(rule) if rulelist: # remember buffer position for end tag matching pos = len(self.buf) self.rulestack.append((pos, rulelist)) if filtered: self.buf_append_data(item) elif self.js_filter: # if its not yet filtered, try filter javascript self.jsStartElement(tag, attrs) else: self.buf.append(item) # if rule stack is empty, write out the buffered data if not self.rulestack and not self.js_filter: self.buf2data()
|
defself._debug("put on buffer") startElementself._debug("put on buffer") (self,self._debug("put on buffer") tag,self._debug("put on buffer") attrs):self._debug("put on buffer") """Weself._debug("put on buffer") getself._debug("put on buffer") aself._debug("put on buffer") newself._debug("put on buffer") startself._debug("put on buffer") tag.self._debug("put on buffer") Newself._debug("put on buffer") rulesself._debug("put on buffer") couldself._debug("put on buffer") beself._debug("put on buffer") appendedself._debug("put on buffer") toself._debug("put on buffer") theself._debug("put on buffer") pendingself._debug("put on buffer") rules.self._debug("put on buffer") Noself._debug("put on buffer") rulesself._debug("put on buffer") canself._debug("put on buffer") beself._debug("put on buffer") removedself._debug("put on buffer") fromself._debug("put on buffer") theself._debug("put on buffer") list."""self._debug("put on buffer") #self._debug("put on buffer") defaultself._debug("put on buffer") dataself._debug("put on buffer") #self._debug("startElementself._debug("put on buffer") %s",self._debug("put on buffer") `tag`)self._debug("put on buffer") tagself._debug("put on buffer") =self._debug("put on buffer") check_spelling(tag,self._debug("put on buffer") self.url)self._debug("put on buffer") itemself._debug("put on buffer") =self._debug("put on buffer") [STARTTAG,self._debug("put on buffer") tag,self._debug("put on buffer") attrs]self._debug("put on buffer") ifself._debug("put on buffer") self.state=='wait':self._debug("put on buffer") returnself._debug("put on buffer") self.waitbuf.append(item)self._debug("put on buffer") rulelistself._debug("put on buffer") =self._debug("put on buffer") []self._debug("put on buffer") filteredself._debug("put on buffer") =self._debug("put on buffer") 0self._debug("put on buffer") ifself._debug("put on buffer") tag=="meta"self._debug("put on buffer") andself._debug("put on buffer") \self._debug("put on buffer") attrs.get('http-equiv',self._debug("put on buffer") '').lower()self._debug("put on buffer") =='pics-label':self._debug("put on buffer") labelsself._debug("put on buffer") =self._debug("put on buffer") resolve_html_entities(attrs.get('content',self._debug("put on buffer") ''))self._debug("put on buffer") #self._debug("put on buffer") note:self._debug("put on buffer") ifself._debug("put on buffer") thereself._debug("put on buffer") areself._debug("put on buffer") noself._debug("put on buffer") picsself._debug("put on buffer") rules,self._debug("put on buffer") thisself._debug("put on buffer") loopself._debug("put on buffer") isself._debug("put on buffer") emptyself._debug("put on buffer") forself._debug("put on buffer") ruleself._debug("put on buffer") inself._debug("put on buffer") self.pics:self._debug("put on buffer") msgself._debug("put on buffer") =self._debug("put on buffer") check_pics(rule,self._debug("put on buffer") labels)self._debug("put on buffer") ifself._debug("put on buffer") msg:self._debug("put on buffer") raiseself._debug("put on buffer") FilterPics(msg)self._debug("put on buffer") #self._debug("put on buffer") firstself._debug("put on buffer") labelsself._debug("put on buffer") matchself._debug("put on buffer") countsself._debug("put on buffer") self.picsself._debug("put on buffer") =self._debug("put on buffer") []self._debug("put on buffer") elifself._debug("put on buffer") tag=="body":self._debug("put on buffer") #self._debug("put on buffer") headersself._debug("put on buffer") finishedself._debug("put on buffer") ifself._debug("put 
on buffer") self.pics:self._debug("put on buffer") #self._debug("put on buffer") noself._debug("put on buffer") picsself._debug("put on buffer") dataself._debug("put on buffer") foundself._debug("put on buffer") self.picsself._debug("put on buffer") =self._debug("put on buffer") []self._debug("put on buffer") elifself._debug("put on buffer") tag=="base"self._debug("put on buffer") andself._debug("put on buffer") attrs.has_key('href'):self._debug("put on buffer") self.base_urlself._debug("put on buffer") =self._debug("put on buffer") strip_quotes(attrs['href'])self._debug("put on buffer") debug(FILTER,self._debug("put on buffer") "usingself._debug("put on buffer") baseself._debug("put on buffer") urlself._debug("put on buffer") %s",self._debug("put on buffer") `self.base_url`)self._debug("put on buffer") #self._debug("put on buffer") lookself._debug("put on buffer") forself._debug("put on buffer") filterself._debug("put on buffer") rulesself._debug("put on buffer") whichself._debug("put on buffer") applyself._debug("put on buffer") forself._debug("put on buffer") ruleself._debug("put on buffer") inself._debug("put on buffer") self.rules:self._debug("put on buffer") ifself._debug("put on buffer") rule.match_tag(tag)self._debug("put on buffer") andself._debug("put on buffer") rule.match_attrs(attrs):self._debug("put on buffer") #self._debug("matchedself._debug("put on buffer") ruleself._debug("put on buffer") %sself._debug("put on buffer") onself._debug("put on buffer") tagself._debug("put on buffer") %s",self._debug("put on buffer") `rule.title`,self._debug("put on buffer") `tag`)self._debug("put on buffer") ifself._debug("put on buffer") rule.start_sufficient:self._debug("put on buffer") itemself._debug("put on buffer") =self._debug("put on buffer") rule.filter_tag(tag,self._debug("put on buffer") attrs)self._debug("put on buffer") filteredself._debug("put on buffer") =self._debug("put on buffer") "True"self._debug("put on buffer") ifself._debug("put on buffer") item[0]==STARTTAGself._debug("put on buffer") andself._debug("put on buffer") item[1]==tag:self._debug("put on buffer") foo,tag,attrsself._debug("put on buffer") =self._debug("put on buffer") itemself._debug("put on buffer") #self._debug("put on buffer") give'emself._debug("put on buffer") aself._debug("put on buffer") chanceself._debug("put on buffer") toself._debug("put on buffer") replaceself._debug("put on buffer") moreself._debug("put on buffer") thanself._debug("put on buffer") oneself._debug("put on buffer") attributeself._debug("put on buffer") continueself._debug("put on buffer") else:self._debug("put on buffer") breakself._debug("put on buffer") else:self._debug("put on buffer") #self._debug("putself._debug("put on buffer") onself._debug("put on buffer") buffer")self._debug("put on buffer") rulelist.append(rule)self._debug("put on buffer") ifself._debug("put on buffer") rulelist:self._debug("put on buffer") #self._debug("put on buffer") rememberself._debug("put on buffer") bufferself._debug("put on buffer") positionself._debug("put on buffer") forself._debug("put on buffer") endself._debug("put on buffer") tagself._debug("put on buffer") matchingself._debug("put on buffer") posself._debug("put on buffer") =self._debug("put on buffer") len(self.buf)self._debug("put on buffer") self.rulestack.append((pos,self._debug("put on buffer") rulelist))self._debug("put on buffer") ifself._debug("put on buffer") filtered:self._debug("put on buffer") self.buf_append_data(item)self._debug("put on buffer") elifself._debug("put on buffer") 
self.js_filter:self._debug("put on buffer") #self._debug("put on buffer") ifself._debug("put on buffer") itsself._debug("put on buffer") notself._debug("put on buffer") yetself._debug("put on buffer") filtered,self._debug("put on buffer") tryself._debug("put on buffer") filterself._debug("put on buffer") javascriptself._debug("put on buffer") self.jsStartElement(tag,self._debug("put on buffer") attrs)self._debug("put on buffer") else:self._debug("put on buffer") self.buf.append(item)self._debug("put on buffer") #self._debug("put on buffer") ifself._debug("put on buffer") ruleself._debug("put on buffer") stackself._debug("put on buffer") isself._debug("put on buffer") empty,self._debug("put on buffer") writeself._debug("put on buffer") outself._debug("put on buffer") theself._debug("put on buffer") bufferedself._debug("put on buffer") dataself._debug("put on buffer") ifself._debug("put on buffer") notself._debug("put on buffer") self.rulestackself._debug("put on buffer") andself._debug("put on buffer") notself._debug("put on buffer") self.js_filter:self._debug("put on buffer") self.buf2data()
| 16,230
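The meta branch above only fires for a <meta> whose http-equiv is pics-label, compared case-insensitively. A hedged sketch of just that predicate, with check_pics and FilterPics left out:

def is_pics_meta(tag, attrs):
    # case-insensitive match on the http-equiv attribute
    return tag == "meta" and \
           attrs.get('http-equiv', '').lower() == 'pics-label'

print is_pics_meta("meta", {'http-equiv': 'PICS-Label', 'content': 'x'})  # True
print is_pics_meta("meta", {'http-equiv': 'refresh'})                     # False
print is_pics_meta("body", {})                                            # False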
|
def endElement (self, tag): """We know the following: if a rule matches, it must be the one on the top of the stack. So we look only at the top rule.
|
defself._debug("endElement %s", `tag`) endElementself._debug("endElement %s", `tag`) (self,self._debug("endElement %s", `tag`) tag):self._debug("endElement %s", `tag`) """Weself._debug("endElement %s", `tag`) knowself._debug("endElement %s", `tag`) theself._debug("endElement %s", `tag`) following:self._debug("endElement %s", `tag`) ifself._debug("endElement %s", `tag`) aself._debug("endElement %s", `tag`) ruleself._debug("endElement %s", `tag`) matches,self._debug("endElement %s", `tag`) itself._debug("endElement %s", `tag`) mustself._debug("endElement %s", `tag`) beself._debug("endElement %s", `tag`) theself._debug("endElement %s", `tag`) oneself._debug("endElement %s", `tag`) onself._debug("endElement %s", `tag`) theself._debug("endElement %s", `tag`) topself._debug("endElement %s", `tag`) ofself._debug("endElement %s", `tag`) theself._debug("endElement %s", `tag`) stack.self._debug("endElement %s", `tag`) Soself._debug("endElement %s", `tag`) weself._debug("endElement %s", `tag`) lookself._debug("endElement %s", `tag`) onlyself._debug("endElement %s", `tag`) atself._debug("endElement %s", `tag`) theself._debug("endElement %s", `tag`) topself._debug("endElement %s", `tag`) rule.
| 16,231
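The docstring above relies on tag nesting: because tags close in reverse order of opening, the only rule group that can match an incoming end tag is the one pushed most recently. A tiny illustration of that invariant, with made-up data:

rulestack = [(0, ['outer-rule']), (3, ['inner-rule'])]
pos, rules = rulestack[-1]   # inspect only the top of the stack
print pos, rules             # 3 ['inner-rule']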
|
def jsStartElement (self, tag, attrs): """Check popups for onmouseout and onmouseover. Inline extern javascript sources""" changed = 0 self.js_src = None self.js_output = 0 self.js_popup = 0 for name in ('onmouseover', 'onmouseout'): if attrs.has_key(name) and self.jsPopup(attrs, name): #self._debug("JS: del %s from %s", `name`, `tag`) del attrs[name] changed = 1 if tag=='form': name = attrs.get('name', attrs.get('id')) self.jsForm(name, attrs.get('action', ''), attrs.get('target', '')) elif tag=='script': lang = attrs.get('language', '').lower() url = attrs.get('src', '') scrtype = attrs.get('type', '').lower() is_js = scrtype=='text/javascript' or \ lang.startswith('javascript') or \ not (lang or scrtype) if is_js and url: return self.jsScriptSrc(url, lang) self.buf.append([STARTTAG, tag, attrs])
|
defself._debug("JS: del %s from %s", `name`, `tag`) jsStartElementself._debug("JS: del %s from %s", `name`, `tag`) (self,self._debug("JS: del %s from %s", `name`, `tag`) tag,self._debug("JS: del %s from %s", `name`, `tag`) attrs):self._debug("JS: del %s from %s", `name`, `tag`) """Checkself._debug("JS: del %s from %s", `name`, `tag`) popupsself._debug("JS: del %s from %s", `name`, `tag`) forself._debug("JS: del %s from %s", `name`, `tag`) onmouseoutself._debug("JS: del %s from %s", `name`, `tag`) andself._debug("JS: del %s from %s", `name`, `tag`) onmouseover.self._debug("JS: del %s from %s", `name`, `tag`) Inlineself._debug("JS: del %s from %s", `name`, `tag`) externself._debug("JS: del %s from %s", `name`, `tag`) javascriptself._debug("JS: del %s from %s", `name`, `tag`) sources"""self._debug("JS: del %s from %s", `name`, `tag`) changedself._debug("JS: del %s from %s", `name`, `tag`) =self._debug("JS: del %s from %s", `name`, `tag`) 0self._debug("JS: del %s from %s", `name`, `tag`) self.js_srcself._debug("JS: del %s from %s", `name`, `tag`) =self._debug("JS: del %s from %s", `name`, `tag`) Noneself._debug("JS: del %s from %s", `name`, `tag`) self.js_outputself._debug("JS: del %s from %s", `name`, `tag`) =self._debug("JS: del %s from %s", `name`, `tag`) 0self._debug("JS: del %s from %s", `name`, `tag`) self.js_popupself._debug("JS: del %s from %s", `name`, `tag`) =self._debug("JS: del %s from %s", `name`, `tag`) 0self._debug("JS: del %s from %s", `name`, `tag`) forself._debug("JS: del %s from %s", `name`, `tag`) nameself._debug("JS: del %s from %s", `name`, `tag`) inself._debug("JS: del %s from %s", `name`, `tag`) ('onmouseover',self._debug("JS: del %s from %s", `name`, `tag`) 'onmouseout'):self._debug("JS: del %s from %s", `name`, `tag`) ifself._debug("JS: del %s from %s", `name`, `tag`) attrs.has_key(name)self._debug("JS: del %s from %s", `name`, `tag`) andself._debug("JS: del %s from %s", `name`, `tag`) self.jsPopup(attrs,self._debug("JS: del %s from %s", `name`, `tag`) name):self._debug("JS: del %s from %s", `name`, `tag`) #self._debug("JS:self._debug("JS: del %s from %s", `name`, `tag`) delself._debug("JS: del %s from %s", `name`, `tag`) %sself._debug("JS: del %s from %s", `name`, `tag`) fromself._debug("JS: del %s from %s", `name`, `tag`) %s",self._debug("JS: del %s from %s", `name`, `tag`) `name`,self._debug("JS: del %s from %s", `name`, `tag`) `tag`)self._debug("JS: del %s from %s", `name`, `tag`) delself._debug("JS: del %s from %s", `name`, `tag`) attrs[name]self._debug("JS: del %s from %s", `name`, `tag`) changedself._debug("JS: del %s from %s", `name`, `tag`) =self._debug("JS: del %s from %s", `name`, `tag`) 1self._debug("JS: del %s from %s", `name`, `tag`) ifself._debug("JS: del %s from %s", `name`, `tag`) tag=='form':self._debug("JS: del %s from %s", `name`, `tag`) nameself._debug("JS: del %s from %s", `name`, `tag`) =self._debug("JS: del %s from %s", `name`, `tag`) attrs.get('name',self._debug("JS: del %s from %s", `name`, `tag`) attrs.get('id'))self._debug("JS: del %s from %s", `name`, `tag`) self.jsForm(name,self._debug("JS: del %s from %s", `name`, `tag`) attrs.get('action',self._debug("JS: del %s from %s", `name`, `tag`) ''),self._debug("JS: del %s from %s", `name`, `tag`) attrs.get('target',self._debug("JS: del %s from %s", `name`, `tag`) ''))self._debug("JS: del %s from %s", `name`, `tag`) elifself._debug("JS: del %s from %s", `name`, `tag`) tag=='script':self._debug("JS: del %s from %s", `name`, `tag`) langself._debug("JS: del %s from %s", `name`, `tag`) 
=self._debug("JS: del %s from %s", `name`, `tag`) attrs.get('language',self._debug("JS: del %s from %s", `name`, `tag`) '').lower()self._debug("JS: del %s from %s", `name`, `tag`) urlself._debug("JS: del %s from %s", `name`, `tag`) =self._debug("JS: del %s from %s", `name`, `tag`) attrs.get('src',self._debug("JS: del %s from %s", `name`, `tag`) '')self._debug("JS: del %s from %s", `name`, `tag`) scrtypeself._debug("JS: del %s from %s", `name`, `tag`) =self._debug("JS: del %s from %s", `name`, `tag`) attrs.get('type',self._debug("JS: del %s from %s", `name`, `tag`) '').lower()self._debug("JS: del %s from %s", `name`, `tag`) is_jsself._debug("JS: del %s from %s", `name`, `tag`) =self._debug("JS: del %s from %s", `name`, `tag`) scrtype=='text/javascript'self._debug("JS: del %s from %s", `name`, `tag`) orself._debug("JS: del %s from %s", `name`, `tag`) \self._debug("JS: del %s from %s", `name`, `tag`) lang.startswith('javascript')self._debug("JS: del %s from %s", `name`, `tag`) orself._debug("JS: del %s from %s", `name`, `tag`) \self._debug("JS: del %s from %s", `name`, `tag`) notself._debug("JS: del %s from %s", `name`, `tag`) (langself._debug("JS: del %s from %s", `name`, `tag`) orself._debug("JS: del %s from %s", `name`, `tag`) scrtype)self._debug("JS: del %s from %s", `name`, `tag`) ifself._debug("JS: del %s from %s", `name`, `tag`) is_jsself._debug("JS: del %s from %s", `name`, `tag`) andself._debug("JS: del %s from %s", `name`, `tag`) url:self._debug("JS: del %s from %s", `name`, `tag`) returnself._debug("JS: del %s from %s", `name`, `tag`) self.jsScriptSrc(url,self._debug("JS: del %s from %s", `name`, `tag`) lang)self._debug("JS: del %s from %s", `name`, `tag`) self.buf.append([STARTTAG,self._debug("JS: del %s from %s", `name`, `tag`) tag,self._debug("JS: del %s from %s", `name`, `tag`) attrs])
| 16,232
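The script-type test in jsStartElement treats a script as javascript when the type attribute says so, when the language attribute starts with 'javascript', or when neither attribute is given (the HTML default). The same test, pulled out as a standalone predicate:

def is_javascript(attrs):
    lang = attrs.get('language', '').lower()
    scrtype = attrs.get('type', '').lower()
    return scrtype == 'text/javascript' or \
           lang.startswith('javascript') or \
           not (lang or scrtype)

print is_javascript({})                             # True: default is JS
print is_javascript({'language': 'JavaScript1.2'})  # True
print is_javascript({'type': 'text/vbscript'})      # False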
|
def jsPopup (self, attrs, name): """check if attrs[name] javascript opens a popup window""" #self._debug("JS: jsPopup") val = resolve_html_entities(attrs[name]) if not val: return self.js_env.attachListener(self) try: self.js_env.executeScriptAsFunction(val, 0.0) except jslib.error, msg: pass self.js_env.detachListener(self) res = self.js_popup self.js_popup = 0 return res
|
defself._debug("JS: jsPopup") jsPopupself._debug("JS: jsPopup") (self,self._debug("JS: jsPopup") attrs,self._debug("JS: jsPopup") name):self._debug("JS: jsPopup") """checkself._debug("JS: jsPopup") ifself._debug("JS: jsPopup") attrs[name]self._debug("JS: jsPopup") javascriptself._debug("JS: jsPopup") opensself._debug("JS: jsPopup") aself._debug("JS: jsPopup") popupself._debug("JS: jsPopup") window"""self._debug("JS: jsPopup") #self._debug("JS:self._debug("JS: jsPopup") jsPopup")self._debug("JS: jsPopup") valself._debug("JS: jsPopup") =self._debug("JS: jsPopup") resolve_html_entities(attrs[name])self._debug("JS: jsPopup") ifself._debug("JS: jsPopup") notself._debug("JS: jsPopup") val:self._debug("JS: jsPopup") returnself._debug("JS: jsPopup") self.js_env.attachListener(self)self._debug("JS: jsPopup") try:self._debug("JS: jsPopup") self.js_env.executeScriptAsFunction(val,self._debug("JS: jsPopup") 0.0)self._debug("JS: jsPopup") exceptself._debug("JS: jsPopup") jslib.error,self._debug("JS: jsPopup") msg:self._debug("JS: jsPopup") passself._debug("JS: jsPopup") self.js_env.detachListener(self)self._debug("JS: jsPopup") resself._debug("JS: jsPopup") =self._debug("JS: jsPopup") self.js_popupself._debug("JS: jsPopup") self.js_popupself._debug("JS: jsPopup") =self._debug("JS: jsPopup") 0self._debug("JS: jsPopup") returnself._debug("JS: jsPopup") res
| 16,233
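jsPopup follows an attach/execute/detach handshake: the parser registers itself as listener, runs the handler code, then reads and resets the js_popup flag a listener callback may have set. A sketch of the pattern with a fake engine standing in for the real js_env (all names here are stand-ins):

class FakeJSEnv(object):
    """Stand-in for the real JS environment."""
    def __init__(self):
        self.listener = None
    def attachListener(self, listener):
        self.listener = listener
    def detachListener(self, listener):
        self.listener = None
    def executeScriptAsFunction(self, script, version):
        # crude popup stand-in; a real engine calls back on window.open
        if 'open(' in script:
            self.listener.js_popup = 1

class PopupChecker(object):
    js_popup = 0
    def check(self, env, script):
        env.attachListener(self)
        env.executeScriptAsFunction(script, 0.0)
        env.detachListener(self)
        res, self.js_popup = self.js_popup, 0   # read flag, then reset
        return res

print PopupChecker().check(FakeJSEnv(), "window.open('ad.html')")  # 1
print PopupChecker().check(FakeJSEnv(), "return true")             # 0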
|
def jsForm (self, name, action, target): """when hitting a (named) form, notify the JS engine about that""" if not name: return #self._debug("jsForm %s action %s %s", `name`, `action`, `target`) self.js_env.addForm(name, action, target)
|
defself._debug("jsForm %s action %s %s", `name`, `action`, `target`) jsFormself._debug("jsForm %s action %s %s", `name`, `action`, `target`) (self,self._debug("jsForm %s action %s %s", `name`, `action`, `target`) name,self._debug("jsForm %s action %s %s", `name`, `action`, `target`) action,self._debug("jsForm %s action %s %s", `name`, `action`, `target`) target):self._debug("jsForm %s action %s %s", `name`, `action`, `target`) """whenself._debug("jsForm %s action %s %s", `name`, `action`, `target`) hittingself._debug("jsForm %s action %s %s", `name`, `action`, `target`) aself._debug("jsForm %s action %s %s", `name`, `action`, `target`) (named)self._debug("jsForm %s action %s %s", `name`, `action`, `target`) form,self._debug("jsForm %s action %s %s", `name`, `action`, `target`) notifyself._debug("jsForm %s action %s %s", `name`, `action`, `target`) theself._debug("jsForm %s action %s %s", `name`, `action`, `target`) JSself._debug("jsForm %s action %s %s", `name`, `action`, `target`) engineself._debug("jsForm %s action %s %s", `name`, `action`, `target`) aboutself._debug("jsForm %s action %s %s", `name`, `action`, `target`) that"""self._debug("jsForm %s action %s %s", `name`, `action`, `target`) ifself._debug("jsForm %s action %s %s", `name`, `action`, `target`) notself._debug("jsForm %s action %s %s", `name`, `action`, `target`) name:self._debug("jsForm %s action %s %s", `name`, `action`, `target`) returnself._debug("jsForm %s action %s %s", `name`, `action`, `target`) #self._debug("jsFormself._debug("jsForm %s action %s %s", `name`, `action`, `target`) %sself._debug("jsForm %s action %s %s", `name`, `action`, `target`) actionself._debug("jsForm %s action %s %s", `name`, `action`, `target`) %sself._debug("jsForm %s action %s %s", `name`, `action`, `target`) %s",self._debug("jsForm %s action %s %s", `name`, `action`, `target`) `name`,self._debug("jsForm %s action %s %s", `name`, `action`, `target`) `action`,self._debug("jsForm %s action %s %s", `name`, `action`, `target`) `target`)self._debug("jsForm %s action %s %s", `name`, `action`, `target`) self.js_env.addForm(name,self._debug("jsForm %s action %s %s", `name`, `action`, `target`) action,self._debug("jsForm %s action %s %s", `name`, `action`, `target`) target)
| 16,234
|
def jsScriptData (self, data, url, ver): """Callback for loading <script src=""> data in the background If downloading is finished, data is None""" assert self.state=='wait' if data is None: if not self.js_script: warn(PARSER, "HtmlParser[%d]: empty JS src %s", self.level, url) else: self.buf.append([STARTTAG, "script", {'type': 'text/javascript'}]) script = "\n<!--\n%s\n//-->\n"%escape_js(self.js_script) self.buf.append([DATA, script]) # Note: <script src=""> could be missing an end tag, # but now we need one. Look later for a duplicate </script>. self.buf.append([ENDTAG, "script"]) self.js_script = '' self.state = 'parse' #self._debug("switching back to parse with") self._debugbuf() else: #self._debug("JS read %d <= %s", len(data), url) self.js_script += data
|
defself._debug("switching back to parse with") jsScriptDataself._debug("switching back to parse with") (self,self._debug("switching back to parse with") data,self._debug("switching back to parse with") url,self._debug("switching back to parse with") ver):self._debug("switching back to parse with") """Callbackself._debug("switching back to parse with") forself._debug("switching back to parse with") loadingself._debug("switching back to parse with") <scriptself._debug("switching back to parse with") src="">self._debug("switching back to parse with") dataself._debug("switching back to parse with") inself._debug("switching back to parse with") theself._debug("switching back to parse with") backgroundself._debug("switching back to parse with") Ifself._debug("switching back to parse with") downloadingself._debug("switching back to parse with") isself._debug("switching back to parse with") finished,self._debug("switching back to parse with") dataself._debug("switching back to parse with") isself._debug("switching back to parse with") None"""self._debug("switching back to parse with") assertself._debug("switching back to parse with") self.state=='wait'self._debug("switching back to parse with") ifself._debug("switching back to parse with") dataself._debug("switching back to parse with") isself._debug("switching back to parse with") None:self._debug("switching back to parse with") ifself._debug("switching back to parse with") notself._debug("switching back to parse with") self.js_script:self._debug("switching back to parse with") warn(PARSER,self._debug("switching back to parse with") "HtmlParser[%d]:self._debug("switching back to parse with") emptyself._debug("switching back to parse with") JSself._debug("switching back to parse with") srcself._debug("switching back to parse with") %s",self._debug("switching back to parse with") self.level,self._debug("switching back to parse with") url)self._debug("switching back to parse with") else:self._debug("switching back to parse with") self.buf.append([STARTTAG,self._debug("switching back to parse with") "script",self._debug("switching back to parse with") {'type':self._debug("switching back to parse with") 'text/javascript'}])self._debug("switching back to parse with") scriptself._debug("switching back to parse with") =self._debug("switching back to parse with") "\n<!--\n%s\n//-->\n"%escape_js(self.js_script)self._debug("switching back to parse with") self.buf.append([DATA,self._debug("switching back to parse with") script])self._debug("switching back to parse with") #self._debug("switching back to parse with") Note:self._debug("switching back to parse with") <scriptself._debug("switching back to parse with") src="">self._debug("switching back to parse with") couldself._debug("switching back to parse with") beself._debug("switching back to parse with") missingself._debug("switching back to parse with") anself._debug("switching back to parse with") endself._debug("switching back to parse with") tag,self._debug("switching back to parse with") #self._debug("switching back to parse with") butself._debug("switching back to parse with") nowself._debug("switching back to parse with") weself._debug("switching back to parse with") needself._debug("switching back to parse with") one.self._debug("switching back to parse with") Lookself._debug("switching back to parse with") laterself._debug("switching back to parse with") forself._debug("switching back to parse with") aself._debug("switching back to parse with") duplicateself._debug("switching back to parse with") 
</script>.self._debug("switching back to parse with") self.buf.append([ENDTAG,self._debug("switching back to parse with") "script"])self._debug("switching back to parse with") self.js_scriptself._debug("switching back to parse with") =self._debug("switching back to parse with") ''self._debug("switching back to parse with") self.stateself._debug("switching back to parse with") =self._debug("switching back to parse with") 'parse'self._debug("switching back to parse with") #self._debug("switchingself._debug("switching back to parse with") backself._debug("switching back to parse with") toself._debug("switching back to parse with") parseself._debug("switching back to parse with") with")self._debug("switching back to parse with") self._debugbuf()self._debug("switching back to parse with") else:self._debug("switching back to parse with") #self._debug("JSself._debug("switching back to parse with") readself._debug("switching back to parse with") %dself._debug("switching back to parse with") <=self._debug("switching back to parse with") %s",self._debug("switching back to parse with") len(data),self._debug("switching back to parse with") url)self._debug("switching back to parse with") self.js_scriptself._debug("switching back to parse with") +=self._debug("switching back to parse with") data
| 16,235
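When the background download finishes, jsScriptData re-emits the fetched source as an inline script wrapped in an HTML comment so non-JS browsers do not render it. The string construction in isolation, with escape_js replaced by a no-op placeholder:

def escape_js(s):
    return s   # placeholder; the real escape_js does the actual escaping

js_script = "alert('hi')"
script = "\n<!--\n%s\n//-->\n" % escape_js(js_script)
print repr(script)   # "\n<!--\nalert('hi')\n//-->\n"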
|
def jsScriptData (self, data, url, ver): """Callback for loading <script src=""> data in the background If downloading is finished, data is None""" assert self.state=='wait' if data is None: if not self.js_script: warn(PARSER, "HtmlParser[%d]: empty JS src %s", self.level, url) else: self.buf.append([STARTTAG, "script", {'type': 'text/javascript'}]) script = "\n<!--\n%s\n//-->\n"%escape_js(self.js_script) self.buf.append([DATA, script]) # Note: <script src=""> could be missing an end tag, # but now we need one. Look later for a duplicate </script>. self.buf.append([ENDTAG, "script"]) self.js_script = '' self.state = 'parse' #self._debug("switching back to parse with") self._debugbuf() else: #self._debug("JS read %d <= %s", len(data), url) self.js_script += data
|
defself._debug("JS read %d <= %s", len(data), url) jsScriptDataself._debug("JS read %d <= %s", len(data), url) (self,self._debug("JS read %d <= %s", len(data), url) data,self._debug("JS read %d <= %s", len(data), url) url,self._debug("JS read %d <= %s", len(data), url) ver):self._debug("JS read %d <= %s", len(data), url) """Callbackself._debug("JS read %d <= %s", len(data), url) forself._debug("JS read %d <= %s", len(data), url) loadingself._debug("JS read %d <= %s", len(data), url) <scriptself._debug("JS read %d <= %s", len(data), url) src="">self._debug("JS read %d <= %s", len(data), url) dataself._debug("JS read %d <= %s", len(data), url) inself._debug("JS read %d <= %s", len(data), url) theself._debug("JS read %d <= %s", len(data), url) backgroundself._debug("JS read %d <= %s", len(data), url) Ifself._debug("JS read %d <= %s", len(data), url) downloadingself._debug("JS read %d <= %s", len(data), url) isself._debug("JS read %d <= %s", len(data), url) finished,self._debug("JS read %d <= %s", len(data), url) dataself._debug("JS read %d <= %s", len(data), url) isself._debug("JS read %d <= %s", len(data), url) None"""self._debug("JS read %d <= %s", len(data), url) assertself._debug("JS read %d <= %s", len(data), url) self.state=='wait'self._debug("JS read %d <= %s", len(data), url) ifself._debug("JS read %d <= %s", len(data), url) dataself._debug("JS read %d <= %s", len(data), url) isself._debug("JS read %d <= %s", len(data), url) None:self._debug("JS read %d <= %s", len(data), url) ifself._debug("JS read %d <= %s", len(data), url) notself._debug("JS read %d <= %s", len(data), url) self.js_script:self._debug("JS read %d <= %s", len(data), url) warn(PARSER,self._debug("JS read %d <= %s", len(data), url) "HtmlParser[%d]:self._debug("JS read %d <= %s", len(data), url) emptyself._debug("JS read %d <= %s", len(data), url) JSself._debug("JS read %d <= %s", len(data), url) srcself._debug("JS read %d <= %s", len(data), url) %s",self._debug("JS read %d <= %s", len(data), url) self.level,self._debug("JS read %d <= %s", len(data), url) url)self._debug("JS read %d <= %s", len(data), url) else:self._debug("JS read %d <= %s", len(data), url) self.buf.append([STARTTAG,self._debug("JS read %d <= %s", len(data), url) "script",self._debug("JS read %d <= %s", len(data), url) {'type':self._debug("JS read %d <= %s", len(data), url) 'text/javascript'}])self._debug("JS read %d <= %s", len(data), url) scriptself._debug("JS read %d <= %s", len(data), url) =self._debug("JS read %d <= %s", len(data), url) "\n<!--\n%s\n//-->\n"%escape_js(self.js_script)self._debug("JS read %d <= %s", len(data), url) self.buf.append([DATA,self._debug("JS read %d <= %s", len(data), url) script])self._debug("JS read %d <= %s", len(data), url) #self._debug("JS read %d <= %s", len(data), url) Note:self._debug("JS read %d <= %s", len(data), url) <scriptself._debug("JS read %d <= %s", len(data), url) src="">self._debug("JS read %d <= %s", len(data), url) couldself._debug("JS read %d <= %s", len(data), url) beself._debug("JS read %d <= %s", len(data), url) missingself._debug("JS read %d <= %s", len(data), url) anself._debug("JS read %d <= %s", len(data), url) endself._debug("JS read %d <= %s", len(data), url) tag,self._debug("JS read %d <= %s", len(data), url) #self._debug("JS read %d <= %s", len(data), url) butself._debug("JS read %d <= %s", len(data), url) nowself._debug("JS read %d <= %s", len(data), url) weself._debug("JS read %d <= %s", len(data), url) needself._debug("JS read %d <= %s", len(data), url) one.self._debug("JS read %d <= 
%s", len(data), url) Lookself._debug("JS read %d <= %s", len(data), url) laterself._debug("JS read %d <= %s", len(data), url) forself._debug("JS read %d <= %s", len(data), url) aself._debug("JS read %d <= %s", len(data), url) duplicateself._debug("JS read %d <= %s", len(data), url) </script>.self._debug("JS read %d <= %s", len(data), url) self.buf.append([ENDTAG,self._debug("JS read %d <= %s", len(data), url) "script"])self._debug("JS read %d <= %s", len(data), url) self.js_scriptself._debug("JS read %d <= %s", len(data), url) =self._debug("JS read %d <= %s", len(data), url) ''self._debug("JS read %d <= %s", len(data), url) self.stateself._debug("JS read %d <= %s", len(data), url) =self._debug("JS read %d <= %s", len(data), url) 'parse'self._debug("JS read %d <= %s", len(data), url) #self._debug("switchingself._debug("JS read %d <= %s", len(data), url) backself._debug("JS read %d <= %s", len(data), url) toself._debug("JS read %d <= %s", len(data), url) parseself._debug("JS read %d <= %s", len(data), url) with")self._debug("JS read %d <= %s", len(data), url) self._debugbuf()self._debug("JS read %d <= %s", len(data), url) else:self._debug("JS read %d <= %s", len(data), url) #self._debug("JSself._debug("JS read %d <= %s", len(data), url) readself._debug("JS read %d <= %s", len(data), url) %dself._debug("JS read %d <= %s", len(data), url) <=self._debug("JS read %d <= %s", len(data), url) %s",self._debug("JS read %d <= %s", len(data), url) len(data),self._debug("JS read %d <= %s", len(data), url) url)self._debug("JS read %d <= %s", len(data), url) self.js_scriptself._debug("JS read %d <= %s", len(data), url) +=self._debug("JS read %d <= %s", len(data), url) data
| 16,236
|
def jsScript (self, script, ver, item): """execute given script with javascript version ver""" #self._debug("JS: jsScript %s %s", ver, `script`) assert self.state == 'parse' assert len(self.buf) >= 2 self.js_output = 0 self.js_env.attachListener(self) # start recursive html filter (used by jsProcessData) self.js_html = FilterHtmlParser(self.rules, self.pics, self.url, comments=self.comments, javascript=self.js_filter, level=self.level+1) # execute self.js_env.executeScript(unescape_js(script), ver) self.js_env.detachListener(self) # wait for recursive filter to finish self.jsEndScript(item)
|
defself._debug("JS: jsScript %s %s", ver, `script`) jsScriptself._debug("JS: jsScript %s %s", ver, `script`) (self,self._debug("JS: jsScript %s %s", ver, `script`) script,self._debug("JS: jsScript %s %s", ver, `script`) ver,self._debug("JS: jsScript %s %s", ver, `script`) item):self._debug("JS: jsScript %s %s", ver, `script`) """executeself._debug("JS: jsScript %s %s", ver, `script`) givenself._debug("JS: jsScript %s %s", ver, `script`) scriptself._debug("JS: jsScript %s %s", ver, `script`) withself._debug("JS: jsScript %s %s", ver, `script`) javascriptself._debug("JS: jsScript %s %s", ver, `script`) versionself._debug("JS: jsScript %s %s", ver, `script`) ver"""self._debug("JS: jsScript %s %s", ver, `script`) #self._debug("JS:self._debug("JS: jsScript %s %s", ver, `script`) jsScriptself._debug("JS: jsScript %s %s", ver, `script`) %sself._debug("JS: jsScript %s %s", ver, `script`) %s",self._debug("JS: jsScript %s %s", ver, `script`) ver,self._debug("JS: jsScript %s %s", ver, `script`) `script`)self._debug("JS: jsScript %s %s", ver, `script`) assertself._debug("JS: jsScript %s %s", ver, `script`) self.stateself._debug("JS: jsScript %s %s", ver, `script`) ==self._debug("JS: jsScript %s %s", ver, `script`) 'parse'self._debug("JS: jsScript %s %s", ver, `script`) assertself._debug("JS: jsScript %s %s", ver, `script`) len(self.buf)self._debug("JS: jsScript %s %s", ver, `script`) >=self._debug("JS: jsScript %s %s", ver, `script`) 2self._debug("JS: jsScript %s %s", ver, `script`) self.js_outputself._debug("JS: jsScript %s %s", ver, `script`) =self._debug("JS: jsScript %s %s", ver, `script`) 0self._debug("JS: jsScript %s %s", ver, `script`) self.js_env.attachListener(self)self._debug("JS: jsScript %s %s", ver, `script`) #self._debug("JS: jsScript %s %s", ver, `script`) startself._debug("JS: jsScript %s %s", ver, `script`) recursiveself._debug("JS: jsScript %s %s", ver, `script`) htmlself._debug("JS: jsScript %s %s", ver, `script`) filterself._debug("JS: jsScript %s %s", ver, `script`) (usedself._debug("JS: jsScript %s %s", ver, `script`) byself._debug("JS: jsScript %s %s", ver, `script`) jsProcessData)self._debug("JS: jsScript %s %s", ver, `script`) self.js_htmlself._debug("JS: jsScript %s %s", ver, `script`) =self._debug("JS: jsScript %s %s", ver, `script`) FilterHtmlParser(self.rules,self._debug("JS: jsScript %s %s", ver, `script`) self.pics,self._debug("JS: jsScript %s %s", ver, `script`) self.url,self._debug("JS: jsScript %s %s", ver, `script`) comments=self.comments,self._debug("JS: jsScript %s %s", ver, `script`) javascript=self.js_filter,self._debug("JS: jsScript %s %s", ver, `script`) level=self.level+1)self._debug("JS: jsScript %s %s", ver, `script`) #self._debug("JS: jsScript %s %s", ver, `script`) executeself._debug("JS: jsScript %s %s", ver, `script`) self.js_env.executeScript(unescape_js(script),self._debug("JS: jsScript %s %s", ver, `script`) ver)self._debug("JS: jsScript %s %s", ver, `script`) self.js_env.detachListener(self)self._debug("JS: jsScript %s %s", ver, `script`) #self._debug("JS: jsScript %s %s", ver, `script`) waitself._debug("JS: jsScript %s %s", ver, `script`) forself._debug("JS: jsScript %s %s", ver, `script`) recursiveself._debug("JS: jsScript %s %s", ver, `script`) filterself._debug("JS: jsScript %s %s", ver, `script`) toself._debug("JS: jsScript %s %s", ver, `script`) finishself._debug("JS: jsScript %s %s", ver, `script`) self.jsEndScript(item)
| 16,237
|
def jsEndScript (self, item): #self._debug("JS: endScript") assert len(self.buf) >= 2 if self.js_output: try: self.js_html.feed('') self.js_html.flush() except FilterWait: self.state = 'wait' self.waited = 'True' make_timer(0.1, lambda : self.jsEndScript(item)) return self.js_html._debugbuf() assert not self.js_html.inbuf.getvalue() assert not self.js_html.waitbuf assert len(self.buf) >= 2 self.buf[-2:-2] = [[DATA, self.js_html.outbuf.getvalue()]]+self.js_html.buf self.js_html = None if (self.js_popup + self.js_output) > 0: # delete old script del self.buf[-1] del self.buf[-1] elif not self.filterEndElement(item[1]): self.buf.append(item) #self._debug("JS: switching back to parse with") self._debugbuf() self.state = 'parse'
|
defself._debug("JS: endScript") jsEndScriptself._debug("JS: endScript") (self,self._debug("JS: endScript") item):self._debug("JS: endScript") #self._debug("JS:self._debug("JS: endScript") endScript")self._debug("JS: endScript") assertself._debug("JS: endScript") len(self.buf)self._debug("JS: endScript") >=self._debug("JS: endScript") 2self._debug("JS: endScript") ifself._debug("JS: endScript") self.js_output:self._debug("JS: endScript") try:self._debug("JS: endScript") self.js_html.feed('')self._debug("JS: endScript") self.js_html.flush()self._debug("JS: endScript") exceptself._debug("JS: endScript") FilterWait:self._debug("JS: endScript") self.stateself._debug("JS: endScript") =self._debug("JS: endScript") 'wait'self._debug("JS: endScript") self.waitedself._debug("JS: endScript") =self._debug("JS: endScript") 'True'self._debug("JS: endScript") make_timer(0.1,self._debug("JS: endScript") lambdaself._debug("JS: endScript") :self._debug("JS: endScript") self.jsEndScript(item))self._debug("JS: endScript") returnself._debug("JS: endScript") self.js_html._debugbuf()self._debug("JS: endScript") assertself._debug("JS: endScript") notself._debug("JS: endScript") self.js_html.inbuf.getvalue()self._debug("JS: endScript") assertself._debug("JS: endScript") notself._debug("JS: endScript") self.js_html.waitbufself._debug("JS: endScript") assertself._debug("JS: endScript") len(self.buf)self._debug("JS: endScript") >=self._debug("JS: endScript") 2self._debug("JS: endScript") self.buf[-2:-2]self._debug("JS: endScript") =self._debug("JS: endScript") [[DATA,self._debug("JS: endScript") self.js_html.outbuf.getvalue()]]+self.js_html.bufself._debug("JS: endScript") self.js_htmlself._debug("JS: endScript") =self._debug("JS: endScript") Noneself._debug("JS: endScript") ifself._debug("JS: endScript") (self.js_popupself._debug("JS: endScript") +self._debug("JS: endScript") self.js_output)self._debug("JS: endScript") >self._debug("JS: endScript") 0:self._debug("JS: endScript") #self._debug("JS: endScript") deleteself._debug("JS: endScript") oldself._debug("JS: endScript") scriptself._debug("JS: endScript") delself._debug("JS: endScript") self.buf[-1]self._debug("JS: endScript") delself._debug("JS: endScript") self.buf[-1]self._debug("JS: endScript") elifself._debug("JS: endScript") notself._debug("JS: endScript") self.filterEndElement(item[1]):self._debug("JS: endScript") self.buf.append(item)self._debug("JS: endScript") #self._debug("JS:self._debug("JS: endScript") switchingself._debug("JS: endScript") backself._debug("JS: endScript") toself._debug("JS: endScript") parseself._debug("JS: endScript") with")self._debug("JS: endScript") self._debugbuf()self._debug("JS: endScript") self.stateself._debug("JS: endScript") =self._debug("JS: endScript") 'parse'
| 16,238
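When the nested filter raises FilterWait, jsEndScript does not block: it flips state to 'wait' and re-schedules itself via make_timer. The retry idiom, emulated here with threading.Timer standing in for the project's make_timer:

import threading

def make_timer(delay, callback):
    # stand-in for the project's make_timer helper
    timer = threading.Timer(delay, callback)
    timer.start()
    return timer

attempts = [0]

def finish():
    attempts[0] += 1
    if attempts[0] < 3:               # pretend FilterWait was raised twice
        make_timer(0.1, finish)       # try again a bit later
    else:
        print "finished after %d attempts" % attempts[0]

finish()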
|
def jsEndScript (self, item): #self._debug("JS: endScript") assert len(self.buf) >= 2 if self.js_output: try: self.js_html.feed('') self.js_html.flush() except FilterWait: self.state = 'wait' self.waited = 'True' make_timer(0.1, lambda : self.jsEndScript(item)) return self.js_html._debugbuf() assert not self.js_html.inbuf.getvalue() assert not self.js_html.waitbuf assert len(self.buf) >= 2 self.buf[-2:-2] = [[DATA, self.js_html.outbuf.getvalue()]]+self.js_html.buf self.js_html = None if (self.js_popup + self.js_output) > 0: # delete old script del self.buf[-1] del self.buf[-1] elif not self.filterEndElement(item[1]): self.buf.append(item) #self._debug("JS: switching back to parse with") self._debugbuf() self.state = 'parse'
|
defself._debug("JS: switching back to parse with") jsEndScriptself._debug("JS: switching back to parse with") (self,self._debug("JS: switching back to parse with") item):self._debug("JS: switching back to parse with") #self._debug("JS:self._debug("JS: switching back to parse with") endScript")self._debug("JS: switching back to parse with") assertself._debug("JS: switching back to parse with") len(self.buf)self._debug("JS: switching back to parse with") >=self._debug("JS: switching back to parse with") 2self._debug("JS: switching back to parse with") ifself._debug("JS: switching back to parse with") self.js_output:self._debug("JS: switching back to parse with") try:self._debug("JS: switching back to parse with") self.js_html.feed('')self._debug("JS: switching back to parse with") self.js_html.flush()self._debug("JS: switching back to parse with") exceptself._debug("JS: switching back to parse with") FilterWait:self._debug("JS: switching back to parse with") self.stateself._debug("JS: switching back to parse with") =self._debug("JS: switching back to parse with") 'wait'self._debug("JS: switching back to parse with") self.waitedself._debug("JS: switching back to parse with") =self._debug("JS: switching back to parse with") 'True'self._debug("JS: switching back to parse with") make_timer(0.1,self._debug("JS: switching back to parse with") lambdaself._debug("JS: switching back to parse with") :self._debug("JS: switching back to parse with") self.jsEndScript(item))self._debug("JS: switching back to parse with") returnself._debug("JS: switching back to parse with") self.js_html._debugbuf()self._debug("JS: switching back to parse with") assertself._debug("JS: switching back to parse with") notself._debug("JS: switching back to parse with") self.js_html.inbuf.getvalue()self._debug("JS: switching back to parse with") assertself._debug("JS: switching back to parse with") notself._debug("JS: switching back to parse with") self.js_html.waitbufself._debug("JS: switching back to parse with") assertself._debug("JS: switching back to parse with") len(self.buf)self._debug("JS: switching back to parse with") >=self._debug("JS: switching back to parse with") 2self._debug("JS: switching back to parse with") self.buf[-2:-2]self._debug("JS: switching back to parse with") =self._debug("JS: switching back to parse with") [[DATA,self._debug("JS: switching back to parse with") self.js_html.outbuf.getvalue()]]+self.js_html.bufself._debug("JS: switching back to parse with") self.js_htmlself._debug("JS: switching back to parse with") =self._debug("JS: switching back to parse with") Noneself._debug("JS: switching back to parse with") ifself._debug("JS: switching back to parse with") (self.js_popupself._debug("JS: switching back to parse with") +self._debug("JS: switching back to parse with") self.js_output)self._debug("JS: switching back to parse with") >self._debug("JS: switching back to parse with") 0:self._debug("JS: switching back to parse with") #self._debug("JS: switching back to parse with") deleteself._debug("JS: switching back to parse with") oldself._debug("JS: switching back to parse with") scriptself._debug("JS: switching back to parse with") delself._debug("JS: switching back to parse with") self.buf[-1]self._debug("JS: switching back to parse with") delself._debug("JS: switching back to parse with") self.buf[-1]self._debug("JS: switching back to parse with") elifself._debug("JS: switching back to parse with") notself._debug("JS: switching back to parse with") self.filterEndElement(item[1]):self._debug("JS: 
switching back to parse with") self.buf.append(item)self._debug("JS: switching back to parse with") #self._debug("JS:self._debug("JS: switching back to parse with") switchingself._debug("JS: switching back to parse with") backself._debug("JS: switching back to parse with") toself._debug("JS: switching back to parse with") parseself._debug("JS: switching back to parse with") with")self._debug("JS: switching back to parse with") self._debugbuf()self._debug("JS: switching back to parse with") self.stateself._debug("JS: switching back to parse with") =self._debug("JS: switching back to parse with") 'parse'
| 16,239
|
def flush (self): """ Flush data of decoders (if any) and filters and write it to the client. return True if flush was successful. """ assert None == wc.log.debug(wc.LOG_PROXY, "%s HttpServer.flush", self) if not self.statuscode and self.method != 'CONNECT': wc.log.warn(wc.LOG_PROXY, "%s flush without status", self) return True data = self.flush_coders(self.decoders) try: for stage in FilterStages: data = wc.filter.applyfilter(stage, data, "finish", self.attrs) except wc.filter.FilterWait, msg: assert None == wc.log.debug(wc.LOG_PROXY, "%s FilterWait %s", self, msg) # the filter still needs some data # to save CPU time make connection unreadable for a while self.set_unreadable(1.0) return False except wc.filter.FilterRating, msg: assert None == wc.log.debug(wc.LOG_PROXY, "%s FilterRating from content %s", self, msg) self._show_rating_deny(str(msg)) return True data = self.flush_coders(self.encoders, data=data) # the client might already have closed if not self.client: return if self.defer_data: self.defer_data = False self.client.server_response(self, self.response, self.statuscode, self.headers) if not self.client: return # note that self.client still could be a ClientServerMatchMaker if data and self.statuscode != 407 and hasattr(self.client, "server_content"): self.client.server_content(data) return True
|
def flush (self): """ Flush data of decoders (if any) and filters and write it to the client. return True if flush was successful. """ assert None == wc.log.debug(wc.LOG_PROXY, "%s HttpServer.flush", self) if not self.statuscode and self.method != 'CONNECT': wc.log.warn(wc.LOG_PROXY, "%s flush without status", self) return True data = self.flush_coders(self.decoders) try: for stage in FilterStages: data = wc.filter.applyfilter(stage, data, "finish", self.attrs) except wc.filter.FilterWait, msg: assert None == wc.log.debug(wc.LOG_PROXY, "%s FilterWait %s", self, msg) # the filter still needs some data # to save CPU time make connection unreadable for a while self.set_unreadable(1.0) return False except wc.filter.FilterRating, msg: assert None == wc.log.debug(wc.LOG_PROXY, "%s FilterRating from content %s", self, msg) self._show_rating_deny(str(msg)) return True data = self.flush_coders(self.encoders, data=data) # the client might already have closed if not self.client: return if self.defer_data: self.defer_data = False self.client.server_response(self, self.response, self.statuscode, self.headers) if not self.client: return # note that self.client still could be a ClientServerMatchMaker if data and self.statuscode != 407 and \ hasattr(self.client, "server_content"): self.client.server_content(data) return True
| 16,240
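flush uses the `assert None == wc.log.debug(...)` trick: debug returns None, so the assert always passes, and running Python with -O strips the whole statement, making the tracing free in optimized mode. A self-contained demonstration of the idiom (debug here is a stand-in for wc.log.debug):

def debug(fmt, *args):
    print fmt % args          # stand-in for wc.log.debug; returns None

# The call runs only while asserts are enabled; `python -O` drops it.
assert None == debug("%s HttpServer.flush", "server")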
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
|
class FixedHTMLTranslator (html4css1.HTMLTranslator): def visit_image (self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
| 16,241
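The scale branch in visit_image treats scale as a percentage applied to both dimensions and rounds to whole pixels. The arithmetic on its own, with made-up numbers:

width, height, scale = 640, 480, 50
width = int(round(width * (float(scale) / 100)))
height = int(round(height * (float(scale) / 100)))
print width, height   # 320 240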
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
|
def visit_image(self, node): """ Like super.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
| 16,242
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
| 16,243
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
| 16,244
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = str(im.size[0]) if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
| 16,245
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = str(im.size[1]) del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
| 16,246
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im for att_name in 'width', 'height': if atts.has_key(att_name): match = re.match(r'([0-9.]+)(\S*)$', atts[att_name]) assert match atts[att_name] = '%s%s' % ( float(match.group(1)) * (float(node['scale']) / 100), match.group(2)) style = [] for att_name in 'width', 'height': if atts.has_key(att_name): if re.match(r'^[0-9.]+$', atts[att_name]): atts[att_name] += 'px' style.append('%s: %s;' % (att_name, atts[att_name])) del atts[att_name] if style: atts['style'] = ' '.join(style) atts['alt'] = node.get('alt', atts['src']) if (isinstance(node.parent, nodes.TextElement) or (isinstance(node.parent, nodes.reference) and not isinstance(node.parent.parent, nodes.TextElement))): suffix = '' else: suffix = '\n' if node.has_key('align'): if node['align'] == 'center': if suffix: self.body.append('<div align="center" class="align-center">') self.context.append('</div>\n') suffix = '' else: atts['align'] = 'middle' self.context.append('') else: atts['align'] = node['align'] self.context.append('') atts['class'] = 'align-%s' % node['align'] else: self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
| 16,247
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
|
def visit_image(self, node): """ Like html4css1.visit_image(), but with align="middle" enforcement. """ atts = node.non_default_attributes() # The XHTML standard only allows align="middle" if atts.get('align') == u"center": atts['align'] = u"middle" if atts.has_key('classes'): del atts['classes'] # prevent duplication with node attrs atts['src'] = atts['uri'] del atts['uri'] if atts.has_key('scale'): if html4css1.Image and not (atts.has_key('width') and atts.has_key('height')): try: im = html4css1.Image.open(str(atts['src'])) except (IOError, # Source image can't be found or opened UnicodeError): # PIL doesn't like Unicode paths. pass else: if not atts.has_key('width'): atts['width'] = im.size[0] if not atts.has_key('height'): atts['height'] = im.size[1] del im if atts.has_key('width'): atts['width'] = int(round(atts['width'] * (float(atts['scale']) / 100))) if atts.has_key('height'): atts['height'] = int(round(atts['height'] * (float(atts['scale']) / 100))) del atts['scale'] if not atts.has_key('alt'): atts['alt'] = atts['src'] if isinstance(node.parent, nodes.TextElement): self.context.append('') else: div_atts = self.image_div_atts(node) self.body.append(self.starttag({}, 'div', '', **div_atts)) self.context.append('</div>\n') self.body.append(self.emptytag(node, 'img', '', **atts))
| 16,248
|
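The six visit_image rows above all mutate the same scaling logic: when only a scale percentage is given, the intrinsic size is read via PIL and width/height are multiplied by scale/100. A minimal standalone sketch of that arithmetic follows; the PIL import and the sample attribute dict are illustrative assumptions, not the docutils API.

# Sketch of the scale computation exercised by the visit_image rows.
# The sizes must stay numeric: storing str(im.size[0]) here (as two of
# the mutations above do) makes the multiplication below raise TypeError.
try:
    from PIL import Image        # assumption: Pillow is available
except ImportError:
    Image = None

def scaled_size(atts):
    if 'scale' in atts:
        if Image and not ('width' in atts and 'height' in atts):
            try:
                im = Image.open(atts['src'])
            except (IOError, UnicodeError):
                pass             # missing file or odd path: keep going
            else:
                atts.setdefault('width', im.size[0])
                atts.setdefault('height', im.size[1])
        factor = float(atts.pop('scale')) / 100
        for name in ('width', 'height'):
            if name in atts:
                atts[name] = int(round(atts[name] * factor))
    return atts

print(scaled_size({'src': 'logo.png', 'width': 200, 'height': 100, 'scale': 50}))
# {'src': 'logo.png', 'width': 100, 'height': 50}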
def get_topframe_bashing (self): return """<script type="text/javascript">
|
def get_topframe_bashing (self): return """<script type="text/javascript">
| 16,249
|
def get_topframe_bashing (self): return """<script type="text/javascript">
|
def get_topframe_bashing (self): return """<script type="text/javascript">
| 16,250
|
def _test(): p = HtmlPrinter() p.feed("<hTml>") p.feed("<a href>") p.feed("<a href=''>") p.feed('<a href="">') p.feed("<a href='a'>") p.feed('<a href="a">') p.feed("<a href=a>") p.feed("<a href='\"'>") p.feed("<a href=\"'\">") p.feed("<a href=' '>") p.feed("<a href=a href=b>") p.feed("<a/>") p.feed("<a href/>") p.feed("<a href=a />") p.feed("</a>") p.feed("<?bla foo?>") p.feed("<?bla?>") p.feed("<!-- - comment -->") p.feed("<!---->") p.feed("<!DOCTYPE \"vla foo>") p.flush()
|
def _test(): p = HtmlPrinter() p.feed("<a href>") p.feed("<a href=''>") p.feed('<a href="">') p.feed("<a href='a'>") p.feed('<a href="a">') p.feed("<a href=a>") p.feed("<a href='\"'>") p.feed("<a href=\"'\">") p.feed("<a href=' '>") p.feed("<a href=a href=b>") p.feed("<a/>") p.feed("<a href/>") p.feed("<a href=a />") p.feed("</a>") p.feed("<?bla foo?>") p.feed("<?bla?>") p.feed("<!-- - comment -->") p.feed("<!---->") p.feed("<!DOCTYPE \"vla foo>") p.flush()
| 16,251
|
def _test(): p = HtmlPrinter() p.feed("<hTml>") p.feed("<a href>") p.feed("<a href=''>") p.feed('<a href="">') p.feed("<a href='a'>") p.feed('<a href="a">') p.feed("<a href=a>") p.feed("<a href='\"'>") p.feed("<a href=\"'\">") p.feed("<a href=' '>") p.feed("<a href=a href=b>") p.feed("<a/>") p.feed("<a href/>") p.feed("<a href=a />") p.feed("</a>") p.feed("<?bla foo?>") p.feed("<?bla?>") p.feed("<!-- - comment -->") p.feed("<!---->") p.feed("<!DOCTYPE \"vla foo>") p.flush()
|
def _test(): p = HtmlPrinter() p.feed("<hTml>") p.feed("<a href>") p.feed("<a href=a>") p.feed("<a href='\"'>") p.feed("<a href=\"'\">") p.feed("<a href=' '>") p.feed("<a href=a href=b>") p.feed("<a/>") p.feed("<a href/>") p.feed("<a href=a />") p.feed("</a>") p.feed("<?bla foo?>") p.feed("<?bla?>") p.feed("<!-- - comment -->") p.feed("<!---->") p.feed("<!DOCTYPE \"vla foo>") p.flush()
| 16,252
|
def _test(): p = HtmlPrinter() p.feed("<hTml>") p.feed("<a href>") p.feed("<a href=''>") p.feed('<a href="">') p.feed("<a href='a'>") p.feed('<a href="a">') p.feed("<a href=a>") p.feed("<a href='\"'>") p.feed("<a href=\"'\">") p.feed("<a href=' '>") p.feed("<a href=a href=b>") p.feed("<a/>") p.feed("<a href/>") p.feed("<a href=a />") p.feed("</a>") p.feed("<?bla foo?>") p.feed("<?bla?>") p.feed("<!-- - comment -->") p.feed("<!---->") p.feed("<!DOCTYPE \"vla foo>") p.flush()
|
def _test(): p = HtmlPrinter() p.feed("<hTml>") p.feed("<a href>") p.feed("<a href=''>") p.feed('<a href="">') p.feed("<a href='a'>") p.feed('<a href="a">') p.feed("<a href=a>") p.flush()
| 16,253
|
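The _test rows feed the printer a battery of attribute-quoting edge cases: bare attributes, empty values, quotes inside quotes, repeated attributes, and XML-style empty tags. The same battery can be run against the stdlib parser; html.parser is an assumption here, standing in for the HtmlPrinter used in the rows.

# Sketch: the same quoting edge cases against Python's stdlib HTMLParser.
from html.parser import HTMLParser

class Printer(HTMLParser):
    def handle_starttag(self, tag, attrs):
        print(tag, attrs)        # one line per tag with parsed attributes

p = Printer()
for chunk in ('<a href>', "<a href=''>", "<a href='\"'>",
              '<a href=a href=b>', '<a href=a />'):
    p.feed(chunk)
p.close()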
def _broken (): p = HtmlPrinter() p.feed(""" """) p.flush()
|
def _broken (): p = HtmlPrinter() p.feed("") p.flush()
| 16,254
|
def _broken (): p = HtmlPrinter() p.feed(""" """) p.flush()
|
def _broken (): p = HtmlPrinter() p.feed(""" """) p.flush()
| 16,255
|
def _broken (): p = HtmlPrinter() for c in '<!-- -->': p.feed(c) p.flush()
|
def _broken (): p = HtmlPrinter() for c in '<a/>': p.feed(c) p.flush()
| 16,256
|
def remove_headers (headers, to_remove): """utility function to remove entries from RFC822 headers""" for h in to_remove: if headers.has_key(h): debug(BRING_IT_ON, "removing header", `h`) del headers[h]
|
def remove_headers (headers, to_remove): """utility function to remove entries from RFC822 headers""" for h in to_remove: if headers.has_key(h): del headers[h]
| 16,257
|
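remove_headers itself is a three-line utility; a plain dict can stand in for the rfc822-style header object, with has_key replaced by the in operator in modern Python.

# remove_headers with a plain dict standing in for the header object.
def remove_headers(headers, to_remove):
    for h in to_remove:
        if h in headers:
            del headers[h]

hdrs = {'Content-Length': '123', 'Transfer-Encoding': 'chunked',
        'Host': 'example.com'}
remove_headers(hdrs, ['Content-Length', 'Transfer-Encoding'])
print(hdrs)   # {'Host': 'example.com'}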
def feed (self, data): """feed some data to the parser""" if self.state=='parse': # look if we must replay something if self.waited: self.waited = 0 waitbuf, self.waitbuf = self.waitbuf, [] self.replay(waitbuf) if self.state!='parse': return data = self.inbuf.getvalue() self.inbuf.close() self.inbuf = StringIO() if data: # only feed non-empty data #self._debug(NIGHTMARE, "feed", `data`) HtmlParser.feed(self, data) else: #self._debug(NIGHTMARE, "feed") pass else: # wait state --> put in input buffer #self._debug(NIGHTMARE, "wait") self.inbuf.write(data)
|
def feed (self, data): """feed some data to the parser""" if self.state=='parse': # look if we must replay something if self.waited: self.waited = 0 waitbuf, self.waitbuf = self.waitbuf, [] self.replay(waitbuf) if self.state!='parse': return data = self.inbuf.getvalue() self.inbuf.close() self.inbuf = StringIO() if data: # only feed non-empty data #self._debug(NIGHTMARE, "feed", `data`) self.parser.feed(data) else: #self._debug(NIGHTMARE, "feed") pass else: # wait state --> put in input buffer #self._debug(NIGHTMARE, "wait") self.inbuf.write(data)
| 16,258
|
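The feed rows implement a two-state wrapper: while the filter is in 'wait' state incoming data is buffered, and once it returns to 'parse' the buffer is drained before new data is fed. A minimal sketch of that pattern, with illustrative names rather than the proxy's real classes:

# Minimal wait/parse buffering pattern from the feed rows.
from io import StringIO

class BufferingFeeder(object):
    def __init__(self, parser):
        self.parser = parser          # anything with a .feed(data) method
        self.state = 'parse'
        self.inbuf = StringIO()

    def feed(self, data):
        if self.state == 'parse':
            buffered = self.inbuf.getvalue()   # replay what piled up
            self.inbuf = StringIO()
            data = buffered + data
            if data:
                self.parser.feed(data)
        else:
            self.inbuf.write(data)             # wait state: stash for later

class EchoParser(object):
    def feed(self, data):
        print('parsed:', data)

f = BufferingFeeder(EchoParser())
f.state = 'wait'
f.feed('<p>')            # buffered, nothing parsed yet
f.state = 'parse'
f.feed('hello</p>')      # parsed: <p>hello</p>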
def flush (self): #self._debug(HURT_ME_PLENTY, "flush") # flushing in wait state raises a filter exception if self.state=='wait': raise FilterException("HtmlFilter[%d]: still waiting for data"%self.level) HtmlParser.flush(self)
|
def flush (self): #self._debug(HURT_ME_PLENTY, "flush") # flushing in wait state raises a filter exception if self.state=='wait': raise FilterException("HtmlFilter[%d]: still waiting for data"%self.level) HtmlParser.flush(self)
| 16,259
|
def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, run_timers from wc.filter import FilterException attrs = wc.filter.initStateObjects(url=fname) filtered = "" data = f.read(1024) while data: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) except FilterException, msg: pass data = f.read(1024) i = 1 while 1: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, "", 'finish', attrs) break except FilterException, msg: print "Test: finish: exception:", msg proxy_poll(timeout=max(0, run_timers())) i+=1 if i==99: print "Test: oooooops" break print filtered
|
def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.NIGHTMARE) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, run_timers from wc.filter import FilterException attrs = wc.filter.initStateObjects(url=fname) filtered = "" data = f.read(1024) while data: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) except FilterException, msg: pass data = f.read(1024) i = 1 while 1: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, "", 'finish', attrs) break except FilterException, msg: print "Test: finish: exception:", msg proxy_poll(timeout=max(0, run_timers())) i+=1 if i==99: print "Test: oooooops" break print filtered
| 16,260
|
def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, run_timers from wc.filter import FilterException attrs = wc.filter.initStateObjects(url=fname) filtered = "" data = f.read(1024) while data: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) except FilterException, msg: pass data = f.read(1024) i = 1 while 1: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, "", 'finish', attrs) break except FilterException, msg: print "Test: finish: exception:", msg proxy_poll(timeout=max(0, run_timers())) i+=1 if i==99: print "Test: oooooops" break print filtered
|
def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, run_timers from wc.filter import FilterException attrs = wc.filter.initStateObjects(url=fname) filtered = "" data = f.read(1024) while data: try: filtered += wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) except FilterException, msg: pass data = f.read(1024) i = 1 while 1: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, "", 'finish', attrs) break except FilterException, msg: print "Test: finish: exception:", msg proxy_poll(timeout=max(0, run_timers())) i+=1 if i==99: print "Test: oooooops" break print filtered
| 16,261
|
def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, run_timers from wc.filter import FilterException attrs = wc.filter.initStateObjects(url=fname) filtered = "" data = f.read(1024) while data: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) except FilterException, msg: pass data = f.read(1024) i = 1 while 1: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, "", 'finish', attrs) break except FilterException, msg: print "Test: finish: exception:", msg proxy_poll(timeout=max(0, run_timers())) i+=1 if i==99: print "Test: oooooops" break print filtered
|
def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, run_timers from wc.filter import FilterException attrs = wc.filter.initStateObjects(url=fname) filtered = "" data = f.read(1024) while data: try: filtered += wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) except FilterException, msg: pass data = f.read(1024) i = 1 while 1: try: filtered += wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, "", 'finish', attrs) break except FilterException, msg: print "Test: finish: exception:", msg proxy_poll(timeout=max(0, run_timers())) i+=1 if i==99: print "Test: oooooops" break print filtered
| 16,262
|
def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, run_timers from wc.filter import FilterException attrs = wc.filter.initStateObjects(url=fname) filtered = "" data = f.read(1024) while data: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) except FilterException, msg: pass data = f.read(1024) i = 1 while 1: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, "", 'finish', attrs) break except FilterException, msg: print "Test: finish: exception:", msg proxy_poll(timeout=max(0, run_timers())) i+=1 if i==99: print "Test: oooooops" break print filtered
|
def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, run_timers from wc.filter import FilterException attrs = wc.filter.initStateObjects(url=fname) filtered = "" data = f.read(1024) while data: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) except FilterException, msg: pass data = f.read(1024) i = 1 while 1: try: filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, "", 'finish', attrs) break except FilterException, msg: print >>sys.stderr, "Test: finish: exception:", msg proxy_poll(timeout=max(0, run_timers())) i+=1 if i==99: print "Test: oooooops" break print filtered
| 16,263
|
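The _main mutations in rows 16,261 and 16,262 come down to accumulating the filter output with += versus overwriting it with =; with plain assignment only the last 1024-byte chunk survives the read loop. A toy version makes the difference visible; the identity filter below is a stand-in for wc.filter.applyfilter.

# Toy read/filter loop from _main, showing why += matters.
from io import BytesIO

def applyfilter(data):        # stand-in for wc.filter.applyfilter
    return data

f = BytesIO(b'x' * 3000)
filtered = b''
data = f.read(1024)
while data:
    filtered += applyfilter(data)   # '=' here would keep only the last chunk
    data = f.read(1024)
print(len(filtered))   # 3000 with +=, 952 with =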
def server_set_encoding_headers (server, filename=None): """ Set encoding headers. """ rewrite = server.is_rewrite() bytes_remaining = get_content_length(server.headers) to_remove = sets.Set() # remove content length if rewrite: to_remove.add('Content-Length') # add decoders if server.headers.has_key('Transfer-Encoding'): # chunked encoded tenc = server.headers['Transfer-Encoding'] if tenc != 'chunked': wc.log.warn(wc.LOG_PROXY, "unknown transfer encoding %r, assuming chunked encoding", tenc) server.decoders.append(UnchunkStream.UnchunkStream(server)) server.encoders.append(ChunkStream.ChunkStream(server)) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'chunked encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None elif rewrite: # To make pipelining possible, enable chunked encoding. server.headers['Transfer-Encoding'] = "chunked\r" if server.headers.has_key("Content-Length"): to_remove.add("Content-Length") server.encoders.append(ChunkStream.ChunkStream(server)) remove_headers(server.headers, to_remove) # only decompress on rewrite if not rewrite: return bytes_remaining # Compressed content (uncompress only for rewriting modules) encoding = server.headers.get('Content-Encoding', '').lower() # note: do not gunzip .gz files if encoding in ('gzip', 'x-gzip', 'deflate') and \ (filename is None or not filename.endswith(".gz")): if encoding == 'deflate': server.decoders.append(DeflateStream.DeflateStream()) else: server.decoders.append(GunzipStream.GunzipStream()) # remove encoding because we unzip the stream to_remove = ['Content-Encoding'] # remove no-transform cache control if server.headers.get('Cache-Control', '').lower() == 'no-transform': to_remove.append('Cache-Control') remove_headers(server.headers, to_remove) # add warning server.headers['Warning'] = "214 Transformation applied\r" elif encoding and encoding!='identity': wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding) # do not disable filtering for unknown content-encodings # this could result in a DoS attack (server sending garbage # as content-encoding) if not server.headers.has_key('Content-Length'): server.headers['Connection'] = 'close\r' return bytes_remaining
|
def server_set_encoding_headers (server, filename=None): """ Set encoding headers. """ rewrite = server.is_rewrite() bytes_remaining = get_content_length(server.headers) to_remove = sets.Set() # remove content length if rewrite: to_remove.add('Content-Length') # add decoders if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None elif rewrite: # To make pipelining possible, enable chunked encoding. server.headers['Transfer-Encoding'] = "chunked\r" if server.headers.has_key("Content-Length"): to_remove.add("Content-Length") server.encoders.append(ChunkStream.ChunkStream(server)) remove_headers(server.headers, to_remove) # only decompress on rewrite if not rewrite: return bytes_remaining # Compressed content (uncompress only for rewriting modules) encoding = server.headers.get('Content-Encoding', '').lower() # note: do not gunzip .gz files if encoding in ('gzip', 'x-gzip', 'deflate') and \ (filename is None or not filename.endswith(".gz")): if encoding == 'deflate': server.decoders.append(DeflateStream.DeflateStream()) else: server.decoders.append(GunzipStream.GunzipStream()) # remove encoding because we unzip the stream to_remove = ['Content-Encoding'] # remove no-transform cache control if server.headers.get('Cache-Control', '').lower() == 'no-transform': to_remove.append('Cache-Control') remove_headers(server.headers, to_remove) # add warning server.headers['Warning'] = "214 Transformation applied\r" elif encoding and encoding!='identity': wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding) # do not disable filtering for unknown content-encodings # this could result in a DoS attack (server sending garbage # as content-encoding) if not server.headers.has_key('Content-Length'): server.headers['Connection'] = 'close\r' return bytes_remaining
to_remove.add("Content-Length") bytes_remaining = None server.decoders.append(UnchunkStream.UnchunkStream(server))if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None server.encoders.append(ChunkStream.ChunkStream(server))if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None ifif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None server.headers.has_key("Content-Length"):if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None wc.log.warn(wc.LOG_PROXY,if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc 
= tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None 'chunkedif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None encodingif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None shouldif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None notif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: 
wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None haveif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None Content-Length')if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None to_remove.add("Content-Length")if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None bytes_remainingif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None =if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs 
= server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None Noneif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None elifif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None rewrite:if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None #if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': 
server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None Toif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None makeif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None pipeliningif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None possible,if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None enableif server.headers.has_key('Transfer-Encoding'): 
to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None chunkedif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None encoding.if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None server.headers['Transfer-Encoding']if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None =if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): 
server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None "chunked\r"if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None ifif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None server.headers.has_key("Content-Length"):if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None to_remove.add("Content-Length")if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not 
have Content-Length') to_remove.add("Content-Length") bytes_remaining = None server.encoders.append(ChunkStream.ChunkStream(server))if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None remove_headers(server.headers,if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None to_remove)if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None #if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if 
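# Note: a message carrying both Transfer-Encoding and Content-Length is
# ill-formed; RFC 2616, section 4.4 requires the Content-Length to be
# ignored in that case. That is why it is added to to_remove above and
# bytes_remaining is reset to None: the body length is then delimited by
# the chunked coding, not by a byte count.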
# only decompress on rewrite
if not rewrite:
    return bytes_remaining
# Compressed content (uncompress only for rewriting modules)
encoding = server.headers.get('Content-Encoding', '').lower()
# note: do not gunzip .gz files
if encoding in ('gzip', 'x-gzip', 'deflate') and \
   (filename is None or
in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None notif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None filename.endswith(".gz")):if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None ifif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None encodingif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') 
to_remove.add("Content-Length") bytes_remaining = None ==if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None 'deflate':if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None server.decoders.append(DeflateStream.DeflateStream())if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None else:if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None server.decoders.append(GunzipStream.GunzipStream())if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] 
if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None #if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None removeif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None encodingif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None becauseif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if 
server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None weif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None unzipif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None theif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None streamif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None to_removeif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc 
= tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None =if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None ['Content-Encoding']if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None #if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None removeif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer 
encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None no-transformif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None cacheif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None controlif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None ifif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None server.headers.get('Cache-Control',if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc 
in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None '').lower()if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None ==if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None 'no-transform':if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None to_remove.append('Cache-Control')if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': 
server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None remove_headers(server.headers,if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None to_remove)if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None #if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None addif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None warningif 
server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None server.headers['Warning']if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None =if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None "214if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None Transformationif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc 
in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None applied\r"if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None elifif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None encodingif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None andif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') 
to_remove.add("Content-Length") bytes_remaining = None encoding!='identity':if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None wc.log.warn(wc.LOG_PROXY,if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None _("unsupportedif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None encoding:if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None %r"),if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if 
tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None encoding)if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None #if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None doif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None notif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): 
wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None disableif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None filteringif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None forif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None unknownif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(UnchunkStream.UnchunkStream(server)) elif tenc in ('x-gzip', 'gzip'): server.decoders.append(GunzipStream.GunzipStream(server)) elif tenc == 'deflate': server.decoders.append(DeflateStream.DeflateStream(server)) else: wc.log.warn(wc.LOG_PROXY, "unsupported transfer encoding in %r", tencs) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'Transfer-Encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None content-encodingsif server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] 
        if not tenc or tenc == 'identity':
            continue
        if tenc == 'chunked':
            server.decoders.append(UnchunkStream.UnchunkStream(server))
        elif tenc in ('x-gzip', 'gzip'):
            server.decoders.append(GunzipStream.GunzipStream(server))
        elif tenc == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream(server))
        else:
            wc.log.warn(wc.LOG_PROXY,
                "unsupported transfer encoding in %r", tencs)
    if server.headers.has_key("Content-Length"):
        wc.log.warn(wc.LOG_PROXY,
            'Transfer-Encoding should not have Content-Length')
        to_remove.add("Content-Length")
    bytes_remaining = None
    # this could result in a DoS attack (server sending garbage
    # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
| 16,264
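The row above centers on parsing a comma-separated Transfer-Encoding header value. A minimal standalone sketch of that parsing logic (the header values here are made-up examples):

# Sketch of the Transfer-Encoding parsing used above: split on commas,
# strip whitespace, and drop any ";param" suffix before dispatching.
def parse_transfer_encodings(value):
    encodings = []
    for tenc in value.lower().split(","):
        tenc = tenc.strip()
        if ";" in tenc:
            tenc = tenc.split(";", 1)[0]  # drop parameters like ";q=1"
        if tenc and tenc != 'identity':
            encodings.append(tenc)
    return encodings

assert parse_transfer_encodings("gzip, chunked") == ['gzip', 'chunked']
assert parse_transfer_encodings("Identity, deflate;q=1") == ['deflate']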
|
def server_set_encoding_headers (server, filename=None):
    """
    Set encoding headers.
    """
    rewrite = server.is_rewrite()
    bytes_remaining = get_content_length(server.headers)
    to_remove = sets.Set()
    # remove content length
    if rewrite:
        to_remove.add('Content-Length')
    # add decoders
    if server.headers.has_key('Transfer-Encoding'):
        # chunked encoded
        tenc = server.headers['Transfer-Encoding']
        if tenc != 'chunked':
            wc.log.warn(wc.LOG_PROXY,
                "unknown transfer encoding %r, assuming chunked encoding",
                tenc)
        server.decoders.append(UnchunkStream.UnchunkStream(server))
        server.encoders.append(ChunkStream.ChunkStream(server))
        if server.headers.has_key("Content-Length"):
            wc.log.warn(wc.LOG_PROXY,
                'chunked encoding should not have Content-Length')
            to_remove.add("Content-Length")
        bytes_remaining = None
    elif rewrite:
        # To make pipelining possible, enable chunked encoding.
        server.headers['Transfer-Encoding'] = "chunked\r"
        if server.headers.has_key("Content-Length"):
            to_remove.add("Content-Length")
        server.encoders.append(ChunkStream.ChunkStream(server))
    remove_headers(server.headers, to_remove)
    # only decompress on rewrite
    if not rewrite:
        return bytes_remaining
    # Compressed content (uncompress only for rewriting modules)
    encoding = server.headers.get('Content-Encoding', '').lower()
    # note: do not gunzip .gz files
    if encoding in ('gzip', 'x-gzip', 'deflate') and \
       (filename is None or not filename.endswith(".gz")):
        if encoding == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream())
        else:
            server.decoders.append(GunzipStream.GunzipStream())
        # remove encoding because we unzip the stream
        to_remove = ['Content-Encoding']
        # remove no-transform cache control
        if server.headers.get('Cache-Control', '').lower() == 'no-transform':
            to_remove.append('Cache-Control')
        remove_headers(server.headers, to_remove)
        # add warning
        server.headers['Warning'] = "214 Transformation applied\r"
    elif encoding and encoding != 'identity':
        wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding)
        # do not disable filtering for unknown content-encodings
        # this could result in a DoS attack (server sending garbage
        # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
|
def server_set_encoding_headers (server, filename=None):
    """
    Set encoding headers.
    """
    rewrite = server.is_rewrite()
    bytes_remaining = get_content_length(server.headers)
    to_remove = sets.Set()
    # remove content length
    if rewrite:
        to_remove.add('Content-Length')
    # add decoders
    if server.headers.has_key('Transfer-Encoding'):
        # chunked encoded
        tenc = server.headers['Transfer-Encoding']
        if tenc != 'chunked':
            wc.log.warn(wc.LOG_PROXY,
                "unknown transfer encoding %r, assuming chunked encoding",
                tenc)
        server.decoders.append(UnchunkStream.UnchunkStream(server))
        server.encoders.append(ChunkStream.ChunkStream(server))
        if server.headers.has_key("Content-Length"):
            wc.log.warn(wc.LOG_PROXY,
                'chunked encoding should not have Content-Length')
            to_remove.add("Content-Length")
        bytes_remaining = None
    elif rewrite:
        # To make pipelining possible, enable chunked encoding.
        server.headers['Transfer-Encoding'] = "chunked\r"
        if server.headers.has_key("Content-Length"):
            to_remove.add("Content-Length")
        server.encoders.append(ChunkStream.ChunkStream(server))
    remove_headers(server.headers, to_remove)
    # only decompress on rewrite
    if not rewrite:
        return bytes_remaining
    # Compressed content (uncompress only for rewriting modules)
    encoding = server.headers.get('Content-Encoding', '').lower()
    # note: do not gunzip .gz files
    if encoding in ('gzip', 'x-gzip', 'deflate') and \
       (filename is None or not filename.endswith(".gz")):
        if encoding == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream())
        else:
            server.decoders.append(GunzipStream.GunzipStream())
        # remove encoding because we unzip the stream
        to_remove = ['Content-Encoding']
        # remove no-transform cache control
        if server.headers.get('Cache-Control', '').lower() == 'no-transform':
            to_remove.append('Cache-Control')
        remove_headers(server.headers, to_remove)
        # add warning
        server.headers['Warning'] = "214 Transformation applied\r"
    elif encoding and encoding != 'identity':
        wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding)
        # do not disable filtering for unknown content-encodings
        # this could result in a DoS attack (server sending garbage
        # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
| 16,265
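The gzip/deflate branch in the pair above picks a decoder per Content-Encoding. A minimal stdlib sketch of the gzip case, not the proxy's incremental GunzipStream:

# gzip round-trip using the standard library; the proxy instead feeds
# data through GunzipStream chunk by chunk as it arrives.
import gzip

payload = gzip.compress(b"hello")
assert gzip.decompress(payload) == b"hello"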
|
def server_set_encoding_headers (server, filename=None):
    """
    Set encoding headers.
    """
    rewrite = server.is_rewrite()
    bytes_remaining = get_content_length(server.headers)
    to_remove = sets.Set()
    # remove content length
    if rewrite:
        to_remove.add('Content-Length')
    # add decoders
    if server.headers.has_key('Transfer-Encoding'):
        # chunked encoded
        tenc = server.headers['Transfer-Encoding']
        if tenc != 'chunked':
            wc.log.warn(wc.LOG_PROXY,
                "unknown transfer encoding %r, assuming chunked encoding",
                tenc)
        server.decoders.append(UnchunkStream.UnchunkStream(server))
        server.encoders.append(ChunkStream.ChunkStream(server))
        if server.headers.has_key("Content-Length"):
            wc.log.warn(wc.LOG_PROXY,
                'chunked encoding should not have Content-Length')
            to_remove.add("Content-Length")
        bytes_remaining = None
    elif rewrite:
        # To make pipelining possible, enable chunked encoding.
        server.headers['Transfer-Encoding'] = "chunked\r"
        if server.headers.has_key("Content-Length"):
            to_remove.add("Content-Length")
        server.encoders.append(ChunkStream.ChunkStream(server))
    remove_headers(server.headers, to_remove)
    # only decompress on rewrite
    if not rewrite:
        return bytes_remaining
    # Compressed content (uncompress only for rewriting modules)
    encoding = server.headers.get('Content-Encoding', '').lower()
    # note: do not gunzip .gz files
    if encoding in ('gzip', 'x-gzip', 'deflate') and \
       (filename is None or not filename.endswith(".gz")):
        if encoding == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream())
        else:
            server.decoders.append(GunzipStream.GunzipStream())
        # remove encoding because we unzip the stream
        to_remove = ['Content-Encoding']
        # remove no-transform cache control
        if server.headers.get('Cache-Control', '').lower() == 'no-transform':
            to_remove.append('Cache-Control')
        remove_headers(server.headers, to_remove)
        # add warning
        server.headers['Warning'] = "214 Transformation applied\r"
    elif encoding and encoding != 'identity':
        wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding)
        # do not disable filtering for unknown content-encodings
        # this could result in a DoS attack (server sending garbage
        # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
|
def server_set_encoding_headers (server, filename=None):
    """
    Set encoding headers.
    """
    rewrite = server.is_rewrite()
    bytes_remaining = get_content_length(server.headers)
    to_remove = sets.Set()
    # remove content length
    if rewrite:
        to_remove.add('Content-Length')
    # add decoders
    if server.headers.has_key('Transfer-Encoding'):
        # chunked encoded
        tenc = server.headers['Transfer-Encoding']
        if tenc != 'chunked':
            wc.log.warn(wc.LOG_PROXY,
                "unknown transfer encoding %r, assuming chunked encoding",
                tenc)
        server.decoders.append(UnchunkStream.UnchunkStream(server))
        server.encoders.append(ChunkStream.ChunkStream(server))
        if server.headers.has_key("Content-Length"):
            wc.log.warn(wc.LOG_PROXY,
                'chunked encoding should not have Content-Length')
            to_remove.add("Content-Length")
        bytes_remaining = None
    elif rewrite:
        # To make pipelining possible, enable chunked encoding.
        server.headers['Transfer-Encoding'] = "chunked\r"
        if server.headers.has_key("Content-Length"):
            to_remove.add("Content-Length")
        server.encoders.append(ChunkStream.ChunkStream(server))
    remove_headers(server.headers, to_remove)
    # only decompress on rewrite
    if not rewrite:
        return bytes_remaining
    # Compressed content (uncompress only for rewriting modules)
    encoding = server.headers.get('Content-Encoding', '').lower()
    # note: do not gunzip .gz files
    if encoding in ('gzip', 'x-gzip', 'deflate') and \
       (filename is None or not filename.endswith(".gz")):
        if encoding == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream())
        else:
            server.decoders.append(GunzipStream.GunzipStream())
        # remove encoding because we unzip the stream
        to_remove = ['Content-Encoding']
        # remove no-transform cache control
        if server.headers.get('Cache-Control', '').lower() == 'no-transform':
            to_remove.append('Cache-Control')
        remove_headers(server.headers, to_remove)
        # add warning
        server.headers['Warning'] = "214 Transformation applied\r"
    elif encoding and encoding != 'identity':
        wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding)
        # do not disable filtering for unknown content-encodings
        # this could result in a DoS attack (server sending garbage
        # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
| 16,266
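The function above drops Content-Length whenever it switches the response to chunked encoding, since a chunked body carries its own length markers. A minimal sketch of a single-chunk HTTP/1.1 encoder, not the proxy's ChunkStream:

# A stale Content-Length would describe the unchunked payload,
# so it must be removed once chunked framing takes over.
def chunk(payload):
    return b"%x\r\n" % len(payload) + payload + b"\r\n" + b"0\r\n\r\n"

assert chunk(b"hello") == b"5\r\nhello\r\n0\r\n\r\n"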
|
def server_set_encoding_headers (server, filename=None):
    """
    Set encoding headers.
    """
    rewrite = server.is_rewrite()
    bytes_remaining = get_content_length(server.headers)
    to_remove = sets.Set()
    # remove content length
    if rewrite:
        to_remove.add('Content-Length')
    # add decoders
    if server.headers.has_key('Transfer-Encoding'):
        # chunked encoded
        tenc = server.headers['Transfer-Encoding']
        if tenc != 'chunked':
            wc.log.warn(wc.LOG_PROXY,
                "unknown transfer encoding %r, assuming chunked encoding",
                tenc)
        server.decoders.append(UnchunkStream.UnchunkStream(server))
        server.encoders.append(ChunkStream.ChunkStream(server))
        if server.headers.has_key("Content-Length"):
            wc.log.warn(wc.LOG_PROXY,
                'chunked encoding should not have Content-Length')
            to_remove.add("Content-Length")
        bytes_remaining = None
    elif rewrite:
        # To make pipelining possible, enable chunked encoding.
        server.headers['Transfer-Encoding'] = "chunked\r"
        if server.headers.has_key("Content-Length"):
            to_remove.add("Content-Length")
        server.encoders.append(ChunkStream.ChunkStream(server))
    remove_headers(server.headers, to_remove)
    # only decompress on rewrite
    if not rewrite:
        return bytes_remaining
    # Compressed content (uncompress only for rewriting modules)
    encoding = server.headers.get('Content-Encoding', '').lower()
    # note: do not gunzip .gz files
    if encoding in ('gzip', 'x-gzip', 'deflate') and \
       (filename is None or not filename.endswith(".gz")):
        if encoding == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream())
        else:
            server.decoders.append(GunzipStream.GunzipStream())
        # remove encoding because we unzip the stream
        to_remove = ['Content-Encoding']
        # remove no-transform cache control
        if server.headers.get('Cache-Control', '').lower() == 'no-transform':
            to_remove.append('Cache-Control')
        remove_headers(server.headers, to_remove)
        # add warning
        server.headers['Warning'] = "214 Transformation applied\r"
    elif encoding and encoding != 'identity':
        wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding)
        # do not disable filtering for unknown content-encodings
        # this could result in a DoS attack (server sending garbage
        # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
|
def server_set_encoding_headers (server, filename=None):
    """
    Set encoding headers.
    """
    rewrite = server.is_rewrite()
    bytes_remaining = get_content_length(server.headers)
    to_remove = sets.Set()
    # remove content length
    if rewrite:
        to_remove.add('Content-Length')
    # add decoders
    if server.headers.has_key('Transfer-Encoding'):
        # chunked encoded
        tenc = server.headers['Transfer-Encoding']
        if tenc != 'chunked':
            wc.log.warn(wc.LOG_PROXY,
                "unknown transfer encoding %r, assuming chunked encoding",
                tenc)
        server.decoders.append(UnchunkStream.UnchunkStream(server))
        server.encoders.append(ChunkStream.ChunkStream(server))
        if server.headers.has_key("Content-Length"):
            wc.log.warn(wc.LOG_PROXY,
                'chunked encoding should not have Content-Length')
            to_remove.add("Content-Length")
        bytes_remaining = None
    elif rewrite:
        # To make pipelining possible, enable chunked encoding.
        server.headers['Transfer-Encoding'] = "chunked\r"
        if server.headers.has_key("Content-Length"):
            to_remove.add("Content-Length")
        server.encoders.append(ChunkStream.ChunkStream(server))
    remove_headers(server.headers, to_remove)
    # only decompress on rewrite
    if not rewrite:
        return bytes_remaining
    # Compressed content (uncompress only for rewriting modules)
    encoding = server.headers.get('Content-Encoding', '').lower()
    # note: do not gunzip .gz files
    if encoding in ('gzip', 'x-gzip', 'deflate') and \
       (filename is None or not filename.endswith(".gz")):
        if encoding == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream())
        else:
            server.decoders.append(GunzipStream.GunzipStream())
        # remove encoding because we unzip the stream
        to_remove = ['Content-Encoding']
        # remove no-transform cache control
        if server.headers.get('Cache-Control', '').lower() == 'no-transform':
            to_remove.append('Cache-Control')
        remove_headers(server.headers, to_remove)
        # add warning
        server.headers['Warning'] = "214 Transformation applied\r"
    elif encoding and encoding != 'identity':
        wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding)
        # do not disable filtering for unknown content-encodings
        # this could result in a DoS attack (server sending garbage
        # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
| 16,267
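The "214 Transformation applied" warning set above is how RFC 2616 asks a transforming proxy to announce that it changed the payload, here after stripping Content-Encoding and decompressing. A plain-dict sketch of that header rewrite:

# Remove the encoding headers the proxy has made stale, then announce
# the transformation; plain dict stands in for the proxy's header object.
headers = {'Content-Encoding': 'gzip', 'Cache-Control': 'no-transform'}
for name in ('Content-Encoding', 'Cache-Control'):
    headers.pop(name, None)
headers['Warning'] = '214 Transformation applied'
assert 'Content-Encoding' not in headers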
|
def server_set_encoding_headers (server, filename=None):
    """
    Set encoding headers.
    """
    rewrite = server.is_rewrite()
    bytes_remaining = get_content_length(server.headers)
    to_remove = sets.Set()
    # remove content length
    if rewrite:
        to_remove.add('Content-Length')
    # add decoders
    if server.headers.has_key('Transfer-Encoding'):
        # chunked encoded
        tenc = server.headers['Transfer-Encoding']
        if tenc != 'chunked':
            wc.log.warn(wc.LOG_PROXY,
                "unknown transfer encoding %r, assuming chunked encoding",
                tenc)
        server.decoders.append(UnchunkStream.UnchunkStream(server))
        server.encoders.append(ChunkStream.ChunkStream(server))
        if server.headers.has_key("Content-Length"):
            wc.log.warn(wc.LOG_PROXY,
                'chunked encoding should not have Content-Length')
            to_remove.add("Content-Length")
        bytes_remaining = None
    elif rewrite:
        # To make pipelining possible, enable chunked encoding.
        server.headers['Transfer-Encoding'] = "chunked\r"
        if server.headers.has_key("Content-Length"):
            to_remove.add("Content-Length")
        server.encoders.append(ChunkStream.ChunkStream(server))
    remove_headers(server.headers, to_remove)
    # only decompress on rewrite
    if not rewrite:
        return bytes_remaining
    # Compressed content (uncompress only for rewriting modules)
    encoding = server.headers.get('Content-Encoding', '').lower()
    # note: do not gunzip .gz files
    if encoding in ('gzip', 'x-gzip', 'deflate') and \
       (filename is None or not filename.endswith(".gz")):
        if encoding == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream())
        else:
            server.decoders.append(GunzipStream.GunzipStream())
        # remove encoding because we unzip the stream
        to_remove = ['Content-Encoding']
        # remove no-transform cache control
        if server.headers.get('Cache-Control', '').lower() == 'no-transform':
            to_remove.append('Cache-Control')
        remove_headers(server.headers, to_remove)
        # add warning
        server.headers['Warning'] = "214 Transformation applied\r"
    elif encoding and encoding != 'identity':
        wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding)
        # do not disable filtering for unknown content-encodings
        # this could result in a DoS attack (server sending garbage
        # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
|
def server_set_encoding_headers (server, filename=None): """ Set encoding headers. """ rewrite = server.is_rewrite() bytes_remaining = get_content_length(server.headers) to_remove = sets.Set() # remove content length if rewrite: to_remove.add('Content-Length') # add decoders if server.headers.has_key('Transfer-Encoding'): # chunked encoded tenc = server.headers['Transfer-Encoding'] if tenc != 'chunked': wc.log.warn(wc.LOG_PROXY, "unknown transfer encoding %r, assuming chunked encoding", tenc) server.decoders.append(UnchunkStream.UnchunkStream(server)) server.encoders.append(ChunkStream.ChunkStream(server)) if server.headers.has_key("Content-Length"): wc.log.warn(wc.LOG_PROXY, 'chunked encoding should not have Content-Length') to_remove.add("Content-Length") bytes_remaining = None elif rewrite: # To make pipelining possible, enable chunked encoding. server.headers['Transfer-Encoding'] = "chunked\r" if server.headers.has_key("Content-Length"): to_remove.add("Content-Length") server.encoders.append(ChunkStream.ChunkStream(server)) remove_headers(server.headers, to_remove) # only decompress on rewrite if not rewrite: return bytes_remaining # Compressed content (uncompress only for rewriting modules) encoding = server.headers.get('Content-Encoding', '').lower() # note: do not gunzip .gz files if encoding in ('gzip', 'x-gzip', 'deflate') and \ (filename is None or not filename.endswith(".gz")): if encoding == 'deflate': server.decoders.append(DeflateStream.DeflateStream()) else: server.decoders.append(GunzipStream.GunzipStream()) # remove encoding because we unzip the stream to_remove = ['Content-Encoding'] # remove no-transform cache control if server.headers.get('Cache-Control', '').lower() == 'no-transform': to_remove.add('Cache-Control') # add warning server.headers['Warning'] = "214 Transformation applied\r" elif encoding and encoding!='identity': wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding) # do not disable filtering for unknown content-encodings # this could result in a DoS attack (server sending garbage # as content-encoding) if not server.headers.has_key('Content-Length'): server.headers['Connection'] = 'close\r' return bytes_remaining
| 16,268
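One difference in the pair above is to_remove.append(...) versus to_remove.add(...): at that point to_remove has been rebound from a sets.Set() to a plain list, so only one of the two methods exists on each object. A quick illustration using the builtin set instead of the deprecated sets module:

# sets grow via .add, lists via .append; calling the wrong one
# raises AttributeError at runtime.
to_remove = set(['Content-Length'])
to_remove.add('Transfer-Encoding')   # set method
to_remove = ['Content-Encoding']     # rebound to a list
to_remove.append('Cache-Control')    # list method
assert to_remove == ['Content-Encoding', 'Cache-Control']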
|
def server_set_encoding_headers (server, filename=None):
    """
    Set encoding headers.
    """
    rewrite = server.is_rewrite()
    bytes_remaining = get_content_length(server.headers)
    to_remove = sets.Set()
    # remove content length
    if rewrite:
        to_remove.add('Content-Length')
    # add decoders
    if server.headers.has_key('Transfer-Encoding'):
        # chunked encoded
        tenc = server.headers['Transfer-Encoding']
        if tenc != 'chunked':
            wc.log.warn(wc.LOG_PROXY,
                "unknown transfer encoding %r, assuming chunked encoding",
                tenc)
        server.decoders.append(UnchunkStream.UnchunkStream(server))
        server.encoders.append(ChunkStream.ChunkStream(server))
        if server.headers.has_key("Content-Length"):
            wc.log.warn(wc.LOG_PROXY,
                'chunked encoding should not have Content-Length')
            to_remove.add("Content-Length")
        bytes_remaining = None
    elif rewrite:
        # To make pipelining possible, enable chunked encoding.
        server.headers['Transfer-Encoding'] = "chunked\r"
        if server.headers.has_key("Content-Length"):
            to_remove.add("Content-Length")
        server.encoders.append(ChunkStream.ChunkStream(server))
    remove_headers(server.headers, to_remove)
    # only decompress on rewrite
    if not rewrite:
        return bytes_remaining
    # Compressed content (uncompress only for rewriting modules)
    encoding = server.headers.get('Content-Encoding', '').lower()
    # note: do not gunzip .gz files
    if encoding in ('gzip', 'x-gzip', 'deflate') and \
       (filename is None or not filename.endswith(".gz")):
        if encoding == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream())
        else:
            server.decoders.append(GunzipStream.GunzipStream())
        # remove encoding because we unzip the stream
        to_remove = ['Content-Encoding']
        # remove no-transform cache control
        if server.headers.get('Cache-Control', '').lower() == 'no-transform':
            to_remove.append('Cache-Control')
        remove_headers(server.headers, to_remove)
        # add warning
        server.headers['Warning'] = "214 Transformation applied\r"
    elif encoding and encoding != 'identity':
        wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding)
        # do not disable filtering for unknown content-encodings
        # this could result in a DoS attack (server sending garbage
        # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
|
def server_set_encoding_headers (server, filename=None):
    """
    Set encoding headers.
    """
    rewrite = server.is_rewrite()
    bytes_remaining = get_content_length(server.headers)
    to_remove = sets.Set()
    # remove content length
    if rewrite:
        to_remove.add('Content-Length')
    # add decoders
    if server.headers.has_key('Transfer-Encoding'):
        # chunked encoded
        tenc = server.headers['Transfer-Encoding']
        if tenc != 'chunked':
            wc.log.warn(wc.LOG_PROXY,
                "unknown transfer encoding %r, assuming chunked encoding",
                tenc)
        server.decoders.append(UnchunkStream.UnchunkStream(server))
        server.encoders.append(ChunkStream.ChunkStream(server))
        if server.headers.has_key("Content-Length"):
            wc.log.warn(wc.LOG_PROXY,
                'chunked encoding should not have Content-Length')
            to_remove.add("Content-Length")
        bytes_remaining = None
    elif rewrite:
        # To make pipelining possible, enable chunked encoding.
        server.headers['Transfer-Encoding'] = "chunked\r"
        if server.headers.has_key("Content-Length"):
            to_remove.add("Content-Length")
        server.encoders.append(ChunkStream.ChunkStream(server))
    remove_headers(server.headers, to_remove)
    # only decompress on rewrite
    if not rewrite:
        return bytes_remaining
    # Compressed content (uncompress only for rewriting modules)
    encoding = server.headers.get('Content-Encoding', '').lower()
    # note: do not gunzip .gz files
    if encoding in ('gzip', 'x-gzip', 'deflate') and \
       (filename is None or not filename.endswith(".gz")):
        if encoding == 'deflate':
            server.decoders.append(DeflateStream.DeflateStream())
        else:
            server.decoders.append(GunzipStream.GunzipStream())
        # remove encoding because we unzip the stream
        to_remove = ['Content-Encoding']
        # remove no-transform cache control
        if server.headers.get('Cache-Control', '').lower() == 'no-transform':
            to_remove.append('Cache-Control')
        remove_headers(server.headers, to_remove)
        # add warning
        server.headers['Warning'] = "214 Transformation applied\r"
    elif encoding and encoding != 'identity':
        wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding)
        # do not disable filtering for unknown content-encodings
        # this could result in a DoS attack (server sending garbage
        # as content-encoding)
    if not server.headers.has_key('Content-Length'):
        server.headers['Connection'] = 'close\r'
    return bytes_remaining
| 16,269
|
def hosts2map (hosts):
    """return a set of named hosts, and a list of subnets
       (host/netmask adresses).
       Only IPv4 host/netmasks are supported.
    """
    hostset = Set()
    nets = []
    for host in hosts:
        if _host_bitmask_re.match(host):
            host, mask = host.split("/")
            mask = int(mask)
            if not is_valid_bitmask(mask):
                error(PROXY, "bitmask %d is not a valid network mask", mask)
                continue
            if not is_valid_ipv4(host):
                error(PROXY, "host %s is not a valid ip address", host)
                continue
            nets.append(dq2net(host, suffix2mask(mask)))
        elif _host_netmask_re.match(host):
            host, mask = host.split("/")
            if not is_valid_ipv4(host):
                error(PROXY, "host %s is not a valid ip address", host)
                continue
            if not is_valid_ipv4(mask):
                error(PROXY, "mask %s is not a valid ip network mask", mask)
                continue
            nets.append(dq2net(host, dq2mask(mask)))
        elif is_valid_ip(host):
            hostset.add(expand_ip(host))
        else:
            try:
                ips = resolve_host(host)
                for i in ips:
                    hostset.add(i)
            except socket.gaierror:
                pass
    return (hostset, nets)
|
def hosts2map (hosts):
    """return a set of named hosts, and a list of subnets
       (host/netmask adresses).
       Only IPv4 host/netmasks are supported.
    """
    hostset = Set()
    nets = []
    for host in hosts:
        if _host_bitmask_re.match(host):
            host, mask = host.split("/")
            mask = int(mask)
            if not is_valid_bitmask(mask):
                error(PROXY, "bitmask %d is not a valid network mask", mask)
                continue
            if not is_valid_ipv4(host):
                error(PROXY, "host %s is not a valid ip address", host)
                continue
            nets.append(dq2net(host, suffix2mask(mask)))
        elif _host_netmask_re.match(host):
            host, mask = host.split("/")
            if not is_valid_ipv4(host):
                error(PROXY, "host %s is not a valid ip address", host)
                continue
            if not is_valid_ipv4(mask):
                error(PROXY, "mask %s is not a valid ip network mask", mask)
                continue
            nets.append(dq2net(host, dq2mask(mask)))
        elif is_valid_ip(host):
            hostset.add(expand_ip(host)[0])
        else:
            try:
                ips = resolve_host(host)
                for i in ips:
                    hostset.add(i)
            except socket.gaierror:
                pass
    return (hostset, nets)
| 16,270
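The fix in the pair above indexes into the return value of expand_ip. A sketch of why that matters, assuming expand_ip returns a tuple whose first element is the expanded address string (expand_ip is not shown in this section, so the return shape is an assumption):

# hypothetical stand-in for expand_ip
def expand_ip(host):
    return (host, None)

hostset = set()
hostset.add(expand_ip("127.0.0.1")[0])  # fixed: store the address string
assert "127.0.0.1" in hostset           # a raw tuple would not match here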
|
def hosts2map (hosts):
    """return a set of named hosts, and a list of subnets
       (host/netmask adresses).
       Only IPv4 host/netmasks are supported.
    """
    hostset = Set()
    nets = []
    for host in hosts:
        if _host_bitmask_re.match(host):
            host, mask = host.split("/")
            mask = int(mask)
            if not is_valid_bitmask(mask):
                error(PROXY, "bitmask %d is not a valid network mask", mask)
                continue
            if not is_valid_ipv4(host):
                error(PROXY, "host %s is not a valid ip address", host)
                continue
            nets.append(dq2net(host, suffix2mask(mask)))
        elif _host_netmask_re.match(host):
            host, mask = host.split("/")
            if not is_valid_ipv4(host):
                error(PROXY, "host %s is not a valid ip address", host)
                continue
            if not is_valid_ipv4(mask):
                error(PROXY, "mask %s is not a valid ip network mask", mask)
                continue
            nets.append(dq2net(host, dq2mask(mask)))
        elif is_valid_ip(host):
            hostset.add(expand_ip(host))
        else:
            try:
                ips = resolve_host(host)
                for i in ips:
                    hostset.add(i)
            except socket.gaierror:
                pass
    return (hostset, nets)
|
def hosts2map (hosts):
    """return a set of named hosts, and a list of subnets
       (host/netmask adresses).
       Only IPv4 host/netmasks are supported.
    """
    hostset = Set()
    nets = []
    for host in hosts:
        if _host_bitmask_re.match(host):
            host, mask = host.split("/")
            mask = int(mask)
            if not is_valid_bitmask(mask):
                error(PROXY, "bitmask %d is not a valid network mask", mask)
                continue
            if not is_valid_ipv4(host):
                error(PROXY, "host %s is not a valid ip address", host)
                continue
            nets.append(dq2net(host, suffix2mask(mask)))
        elif _host_netmask_re.match(host):
            host, mask = host.split("/")
            if not is_valid_ipv4(host):
                error(PROXY, "host %s is not a valid ip address", host)
                continue
            if not is_valid_ipv4(mask):
                error(PROXY, "mask %s is not a valid ip network mask", mask)
                continue
            nets.append(dq2net(host, dq2mask(mask)))
        elif is_valid_ip(host):
            hostset.add(expand_ip(host))
        else:
            try:
                hostset |= resolve_host(host)
            except socket.gaierror:
                pass
    return (hostset, nets)
| 16,271
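The fix above replaces an element-by-element loop with a set union. Note that |= requires another set on the right-hand side, while .update() would accept any iterable; a sketch with a hypothetical resolve_host returning a set:

# hypothetical stand-in returning a set of resolved addresses
def resolve_host(host):
    return set(["10.0.0.1", "10.0.0.2"])

hostset = set(["127.0.0.1"])
hostset |= resolve_host("example.invalid")  # in-place union
assert hostset == set(["127.0.0.1", "10.0.0.1", "10.0.0.2"])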
|
def getval (form, key):
    """return a formfield value"""
    if not form.has_key(key):
        return u''
    item = form[key]
    if isinstance(item, list):
        item = item[0]
    elif hasattr(item, "value"):
        item = item.value
    return item.decode(charset)
|
def getval (form, key):
    """return a formfield value"""
    if not form.has_key(key):
        return u''
    return get_item_value(form[key])
| 16,272
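The fix above moves the list/field-storage handling into a shared helper. get_item_value is not shown in this section, so the following is a plausible reconstruction from the bugged inline version, not the project's actual code:

# Unwrap a form item: take the first element of a list, then the
# .value attribute of a field-storage object, then decode.
def get_item_value(item, charset="utf-8"):
    if isinstance(item, list):
        item = item[0]
    if hasattr(item, "value"):
        item = item.value
    return item.decode(charset)

assert get_item_value([b"abc"]) == u"abc"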
|
def getlist (form, key):
    """return a list of formfield values"""
    if not form.has_key(key):
        return []
    item = form[key]
    if isinstance(item, list):
        l = [x.value for x in item]
    elif hasattr(item, "value"):
        l = [item.value]
    else:
        l = [item]
    return [x.decode(charset) for x in l]
|
def getlist (form, key): """return a list of formfield values""" if not form.has_key(key): return [] return get_item_list(form[key]) def get_prefix_vals (form, prefix): """return a list of (key, value) pairs where ``prefix+key'' is a valid form field""" res = [] for key, item in form.items(): if key.startswith(prefix): res.append((key[len(prefix):], get_item_value(item))) return res
| 16,273
|
def check_headers (self): """add missing content-type and/or encoding headers""" # 304 Not Modified does not send any type or encoding info, # because this info was cached if self.statuscode == '304': return # check content-type against our own guess i = self.document.find('?') if i>0: document = self.document[:i] else: document = self.document gm = mimetypes.guess_type(document, None) ct = self.headers.get('Content-Type', None) if self.mime: if ct != self.mime: warn(PROXY, i18n._("set Content-Type from %s to %s in %s"), `str(ct)`, `self.mime`, `self.url`) self.headers['Content-Type'] = "%s\r"%self.mime elif gm[0]: # guessed an own content type if ct is None: warn(PROXY, i18n._("add Content-Type %s to %s"), `gm[0]`, `self.url`) self.headers['Content-Type'] = "%s\r"%gm[0] # fix some content types elif not ct.startswith(gm[0]) and \ gm[0] in _fix_content_types: warn(PROXY, i18n._("change Content-Type from %s to %s in %s"), `ct`, `gm[0]`, `self.url`) self.headers['Content-Type'] = "%s\r"%gm[0] if gm[1] and gm[1] in _fix_content_encodings: ce = self.headers.get('Content-Encoding', None) # guessed an own encoding type if ce is None: self.headers['Content-Encoding'] = "%s\r"%gm[1] warn(PROXY, i18n._("add Content-Encoding %s to %s"), `gm[1]`, `self.url`) elif ce != gm[1]: warn(PROXY, i18n._("change Content-Encoding from %s to %s in %s"), `ce`, `gm[1]`, `self.url`) self.headers['Content-Encoding'] = "%s\r"%gm[1] # hmm, fix application/x-httpd-php* if self.headers.get('Content-Type', '').lower().startswith('application/x-httpd-php'): warn(PROXY, i18n._("fix x-httpd-php Content-Type")) self.headers['Content-Type'] = 'text/html\r'
|
def check_headers (self): """add missing content-type and/or encoding headers""" # 304 Not Modified does not send any type or encoding info, # because this info was cached if self.statuscode == '304': return # check content-type against our own guess i = self.document.find('?') if i>0: document = self.document[:i] else: document = self.document gm = mimetypes.guess_type(document, None) ct = self.headers.get('Content-Type', None) if self.mime: if ct is None: warn(PROXY, i18n._("set Content-Type from %s to %s in %s"), `str(ct)`, `self.mime`, `self.url`) self.headers['Content-Type'] = "%s\r"%self.mime elif gm[0]: # guessed an own content type if ct is None: warn(PROXY, i18n._("add Content-Type %s to %s"), `gm[0]`, `self.url`) self.headers['Content-Type'] = "%s\r"%gm[0] # fix some content types elif not ct.startswith(gm[0]) and \ gm[0] in _fix_content_types: warn(PROXY, i18n._("change Content-Type from %s to %s in %s"), `ct`, `gm[0]`, `self.url`) self.headers['Content-Type'] = "%s\r"%gm[0] if gm[1] and gm[1] in _fix_content_encodings: ce = self.headers.get('Content-Encoding', None) # guessed an own encoding type if ce is None: self.headers['Content-Encoding'] = "%s\r"%gm[1] warn(PROXY, i18n._("add Content-Encoding %s to %s"), `gm[1]`, `self.url`) elif ce != gm[1]: warn(PROXY, i18n._("change Content-Encoding from %s to %s in %s"), `ce`, `gm[1]`, `self.url`) self.headers['Content-Encoding'] = "%s\r"%gm[1] # hmm, fix application/x-httpd-php* if self.headers.get('Content-Type', '').lower().startswith('application/x-httpd-php'): warn(PROXY, i18n._("fix x-httpd-php Content-Type")) self.headers['Content-Type'] = 'text/html\r'
| 16,274
|
def __init__ (self, opts): self.js_filter = opts['javascript'] and jslib self.js_html = None self.js_src = False self.js_script = '' if self.js_filter: self.js_env = jslib.new_jsenv() self.js_output = 0 self.js_popup = 0
|
def __init__ (self, opts): self.javascript = opts['javascript'] and jslib self.js_html = None self.js_src = False self.js_script = '' if self.js_filter: self.js_env = jslib.new_jsenv() self.js_output = 0 self.js_popup = 0
| 16,275
|
def __init__ (self, opts): self.js_filter = opts['javascript'] and jslib self.js_html = None self.js_src = False self.js_script = '' if self.js_filter: self.js_env = jslib.new_jsenv() self.js_output = 0 self.js_popup = 0
|
def __init__ (self, opts): self.js_filter = opts['javascript'] and jslib self.js_html = None self.js_src = False self.js_script = '' if self.javascript: self.js_env = jslib.new_jsenv() self.js_output = 0 self.js_popup = 0
| 16,276
|
def flush (self): self._debug("flush") if self.waited > 100: # waited too long; stop js background downloader and # switch back to parse error(FILTER, "waited too long for %s"%self.state[1]) if self.js_env.hasListener(self): self.js_env.detachListener(self) self.js_html = None self.state = ('parse',) self.feed("") # will replay() buffered data elif self.state[0]=='wait': # flushing in wait state raises a filter exception self.waited += 1 raise FilterWait("HtmlParser[%d,wait]: waited %d times for %s"%\ (self.level, self.waited, self.state[1])) self.parser.flush()
|
deferror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None flusherror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None (self):error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self._debug("flush")error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None iferror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.waitederror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None >error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None 100:error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None #error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None waitederror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None tooerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None long;error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None stoperror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None jserror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None backgrounderror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None downloadererror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None anderror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None #error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None switcherror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None backerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None toerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None parseerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None error(FILTER,error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None "waitederror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None tooerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None longerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None forerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None %s"%self.state[1])error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None iferror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.js_env.hasListener(self):error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.js_env.detachListener(self)error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.js_htmlerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None =error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None Noneerror(FILTER, "waited too long for %s"%self.state[1]) 
self.js_client.finish() self.js_html = None self.stateerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None =error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None ('parse',)error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.feed("")error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None #error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None willerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None replay()error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None bufferederror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None dataerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None eliferror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.state[0]=='wait':error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None #error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None flushingerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None inerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None waiterror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None stateerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None raiseserror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None aerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None filtererror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None exceptionerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.waitederror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None +=error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None 1error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None raiseerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None FilterWait("HtmlParser[%d,wait]:error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None waitederror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None %derror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None timeserror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None forerror(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None %s"%\error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None (self.level,error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.waited,error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.state[1]))error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None self.parser.flush()
| 16,277
|
def flush (self): self._debug("flush") if self.waited > 100: # waited too long; stop js background downloader and # switch back to parse error(FILTER, "waited too long for %s"%self.state[1]) if self.js_env.hasListener(self): self.js_env.detachListener(self) self.js_html = None self.state = ('parse',) self.feed("") # will replay() buffered data elif self.state[0]=='wait': # flushing in wait state raises a filter exception self.waited += 1 raise FilterWait("HtmlParser[%d,wait]: waited %d times for %s"%\ (self.level, self.waited, self.state[1])) self.parser.flush()
|
def flush (self): self._debug("flush") if self.waited > 100: # waited too long; stop js background downloader and # switch back to parse self.state = ('parse',) self.feed("") # will replay() buffered data elif self.state[0]=='wait': # flushing in wait state raises a filter exception self.waited += 1 raise FilterWait("HtmlParser[%d,wait]: waited %d times for %s"%\ (self.level, self.waited, self.state[1])) self.parser.flush()
| 16,278
|
def flush (self): self._debug("flush") if self.waited > 100: # waited too long; stop js background downloader and # switch back to parse error(FILTER, "waited too long for %s"%self.state[1]) if self.js_env.hasListener(self): self.js_env.detachListener(self) self.js_html = None self.state = ('parse',) self.feed("") # will replay() buffered data elif self.state[0]=='wait': # flushing in wait state raises a filter exception self.waited += 1 raise FilterWait("HtmlParser[%d,wait]: waited %d times for %s"%\ (self.level, self.waited, self.state[1])) self.parser.flush()
|
def flush (self): self._debug("flush") if self.waited > 100: # waited too long; stop js background downloader and # switch back to parse error(FILTER, "waited too long for %s"%self.state[1]) if self.js_env.hasListener(self): self.js_env.detachListener(self) self.js_html = None self.state = ('parse',) self.feed("") # will replay() buffered data elif self.state[0]=='wait': # flushing in wait state raises a filter exception self.waited += 1 raise FilterWait("HtmlParser[%d,wait]: waited %d times for %s"%\ (self.level, self.waited, self.state[1])) self.parser.flush()
| 16,279
|
def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state[0]=='wait': self.waitbuf.append(item) return rulelist = [] filtered = False if tag=="meta" and \ attrs.get('http-equiv', '').lower() =='pics-label': labels = resolve_html_entities(attrs.get('content', '')) # note: if there are no pics rules, this loop is empty for rule in self.pics: msg = check_pics(rule, labels) if msg: raise FilterPics(msg) # first labels match counts self.pics = [] elif tag=="body": # headers finished if self.pics: # no pics data found self.pics = [] elif tag=="base" and attrs.has_key('href'): self.base_url = strip_quotes(attrs['href']) self._debug("using base url %s", `self.base_url`) # search for and prevent known security flaws in HTML self.security.scan_start_tag(tag, attrs, self)
|
def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state[0]=='wait': self.waitbuf.append(item) return rulelist = [] filtered = False if tag=="meta" and \ attrs.get('http-equiv', '').lower() =='pics-label': labels = resolve_html_entities(attrs.get('content', '')) # note: if there are no pics rules, this loop is empty for rule in self.pics: msg = check_pics(rule, labels) if msg: raise FilterPics(msg) # first labels match counts self.pics = [] elif tag=="body": # headers finished if self.pics: # no pics data found self.pics = [] elif tag=="base" and attrs.has_key('href'): self.base_url = strip_quotes(attrs['href']) self._debug("using base url %s", `self.base_url`) # search for and prevent known security flaws in HTML self.security.scan_start_tag(tag, attrs, self)
| 16,280
|
def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state[0]=='wait': self.waitbuf.append(item) return rulelist = [] filtered = False if tag=="meta" and \ attrs.get('http-equiv', '').lower() =='pics-label': labels = resolve_html_entities(attrs.get('content', '')) # note: if there are no pics rules, this loop is empty for rule in self.pics: msg = check_pics(rule, labels) if msg: raise FilterPics(msg) # first labels match counts self.pics = [] elif tag=="body": # headers finished if self.pics: # no pics data found self.pics = [] elif tag=="base" and attrs.has_key('href'): self.base_url = strip_quotes(attrs['href']) self._debug("using base url %s", `self.base_url`) # search for and prevent known security flaws in HTML self.security.scan_start_tag(tag, attrs, self)
|
def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state[0]=='wait': self.waitbuf.append(item) return rulelist = [] filtered = False if tag=="meta" and \ attrs.get('http-equiv', '').lower() =='pics-label': labels = resolve_html_entities(attrs.get('content', '')) # note: if there are no pics rules, this loop is empty for rule in self.pics: msg = check_pics(rule, labels) if msg: raise FilterPics(msg) # first labels match counts self.pics = [] elif tag=="body": # headers finished if self.pics: # no pics data found self.pics = [] elif tag=="base" and attrs.has_key('href'): self.base_url = strip_quotes(attrs['href']) self._debug("using base url %s", `self.base_url`) # search for and prevent known security flaws in HTML self.security.scan_start_tag(tag, attrs, self)
| 16,281
|
def endElement (self, tag): """We know the following: if a rule matches, it must be the one on the top of the stack. So we look only at the top rule.
|
def endElement (self, tag): """We know the following: if a rule matches, it must be the one on the top of the stack. So we look only at the top rule.
| 16,282
|
def jsScriptSrc (self, url, language): """Start a background download for <script src=""> tags""" assert self.state[0]=='parse', "non-parse state %s" % str(self.state) ver = get_js_ver(language) if self.base_url: url = urlparse.urljoin(self.base_url, url) else: url = urlparse.urljoin(self.url, url) url = norm_url(url) if _has_ws(url): warn(PARSER, "HtmlParser[%d]: broken JS url %s at %s", self.level, `url`, `self.url`) return self.state = ('wait', url) self.waited = 1 self.js_src = True client = HttpProxyClient(self.jsScriptData, (url, ver)) ClientServerMatchmaker(client, "GET %s HTTP/1.1" % url, #request WcMessage(StringIO('')), #headers '', #content {'nofilter': None}, # nofilter 'identity', # compress mime = "application/x-javascript", )
|
def jsScriptSrc (self, url, language): """Start a background download for <script src=""> tags""" assert self.state[0]=='parse', "non-parse state %s" % str(self.state) ver = get_js_ver(language) if self.base_url: url = urlparse.urljoin(self.base_url, url) else: url = urlparse.urljoin(self.url, url) url = norm_url(url) if _has_ws(url): warn(PARSER, "HtmlParser[%d]: broken JS url %s at %s", self.level, `url`, `self.url`) return self.state = ('wait', url) self.waited = 1 self.js_src = True client = HttpProxyClient(self.jsScriptData, (url, ver)) ClientServerMatchmaker(client, "GET %s HTTP/1.1" % url, #request WcMessage(StringIO('')), #headers '', #content {'nofilter': None}, # nofilter 'identity', # compress mime = "application/x-javascript", )
| 16,283
|
def jsScript (self, script, ver, item): """execute given script with javascript version ver""" self._debug("JS: jsScript %s %s", ver, `script`) assert self.state[0]=='parse', "non-parse state %s" % str(self.state) assert len(self.buf) >= 2, "too small buffer %s" % str(self.buf) self.js_output = 0 self.js_env.attachListener(self) # start recursive html filter (used by jsProcessData) self.js_html = FilterHtmlParser(self.rules, self.pics, self.url, comments=self.comments, javascript=self.js_filter, level=self.level+1) # execute self.js_env.executeScript(unescape_js(script), ver) self.js_env.detachListener(self) # wait for recursive filter to finish self.jsEndScript(item)
|
def jsScript (self, script, ver, item): """execute given script with javascript version ver""" self._debug("JS: jsScript %s %s", ver, `script`) assert self.state[0]=='parse', "non-parse state %s" % str(self.state) assert len(self.buf) >= 2, "too small buffer %s" % str(self.buf) self.js_output = 0 self.js_env.attachListener(self) # start recursive html filter (used by jsProcessData) self.js_html = FilterHtmlParser(self.rules, self.pics, self.url, comments=self.comments, javascript=self.javascript, level=self.level+1) # execute self.js_env.executeScript(unescape_js(script), ver) self.js_env.detachListener(self) # wait for recursive filter to finish self.jsEndScript(item)
| 16,284
|
def fileno(self): return self.socket.fileno()
|
def fileno(self): return self.socket.fileno()
| 16,285
|
def spliturl (url): """split url in a tuple (scheme, hostname, port, document) where hostname is always lowercased""" # XXX this relies on scheme==http! scheme, netloc = splittype(url) host, document = splithost(netloc) if not host: hostname = "localhost" port = config['port'] else: hostname, port = splitport(host) if port is None: port = 80 else: port = int(port) return scheme, hostname.lower(), port, document
|
def spliturl (url): """split url in a tuple (scheme, hostname, port, document) where hostname is always lowercased""" # XXX this relies on scheme==http! scheme, netloc = splittype(url) host, document = splithost(netloc) port = 80 if host: host = host.lower() host, port = splitnport(host, 80) return scheme, host, port, document
| 16,286
|
def _form_removerule (rule): curfolder.rules.remove(rule) global currule currule = None info.append("Rule removed")
|
def _form_removerule (rule): curfolder.rules.remove(rule) global currule currule = None info.append(i18n._("Rule removed"))
| 16,287
|
def _form_rewrite_addattr (form): name = getval(form, "attrname").strip() if not name: error.append(i18n._("Empty attribute name")) return value = getval(form, "attrval") currule.attrs[name] = value info.append("Attribute added")
|
def _form_rewrite_addattr (form): name = getval(form, "attrname").strip() if not name: error.append(i18n._("Empty attribute name")) return value = getval(form, "attrval") currule.attrs[name] = value info.append(i18n._("Rewrite attribute added"))
| 16,288
|
def _form_rewrite_removeattrs (form): toremove = getlist(form, 'delattr') if toremove: for attr in toremove: del currule.attrs[attr] info.append("Attributes removed")
|
def _form_rewrite_removeattrs (form): toremove = getlist(form, 'delattr') if toremove: for attr in toremove: del currule.attrs[attr] info.append(i18n._("Rewrite attributes removed"))
| 16,289
|
def _form_rule_titledesc (form): title = getval(form, 'rule_title') if not title: error.append("Empty rule title") return if title!=currule.title: currule.title = title info.append("Rule title changed") desc = getval(form, 'rule_description') if desc!=currule.desc: currule.desc = desc info.append("Rule description changed")
|
def _form_rule_titledesc (form): title = getval(form, 'rule_title') if not title: error.append(i18n._("Empty rule title")) return if title!=currule.title: currule.title = title info.append("Rule title changed") desc = getval(form, 'rule_description') if desc!=currule.desc: currule.desc = desc info.append("Rule description changed")
| 16,290
|
def _form_rule_titledesc (form): title = getval(form, 'rule_title') if not title: error.append("Empty rule title") return if title!=currule.title: currule.title = title info.append("Rule title changed") desc = getval(form, 'rule_description') if desc!=currule.desc: currule.desc = desc info.append("Rule description changed")
|
def _form_rule_titledesc (form): title = getval(form, 'rule_title') if not title: error.append("Empty rule title") return if title!=currule.title: currule.title = title info.append(i18n._("Rule title changed")) desc = getval(form, 'rule_description') if desc!=currule.desc: currule.desc = desc info.append("Rule description changed")
| 16,291
|
def _form_rule_titledesc (form): title = getval(form, 'rule_title') if not title: error.append("Empty rule title") return if title!=currule.title: currule.title = title info.append("Rule title changed") desc = getval(form, 'rule_description') if desc!=currule.desc: currule.desc = desc info.append("Rule description changed")
|
def _form_rule_titledesc (form): title = getval(form, 'rule_title') if not title: error.append("Empty rule title") return if title!=currule.title: currule.title = title info.append("Rule title changed") desc = getval(form, 'rule_description') if desc!=currule.desc: currule.desc = desc info.append(i18n._("Rule description changed"))
| 16,292
|
def _form_rule_matchurl (form): matchurl = getval(form, 'rule_matchurl').strip() if matchurl!=currule.matchurl: currule.matchurl = matchurl info.append("Rule match url changed") dontmatchurl = getval(form, 'rule_dontmatchurl').strip() if dontmatchurl!=currule.dontmatchurl: currule.dontmatchurl = dontmatchurl info.append("Rule dontmatch url changed")
|
def _form_rule_matchurl (form): matchurl = getval(form, 'rule_matchurl').strip() if matchurl!=currule.matchurl: currule.matchurl = matchurl info.append(i18n._("Rule match url changed")) dontmatchurl = getval(form, 'rule_dontmatchurl').strip() if dontmatchurl!=currule.dontmatchurl: currule.dontmatchurl = dontmatchurl info.append("Rule dontmatch url changed")
| 16,293
|
def _form_rule_matchurl (form): matchurl = getval(form, 'rule_matchurl').strip() if matchurl!=currule.matchurl: currule.matchurl = matchurl info.append("Rule match url changed") dontmatchurl = getval(form, 'rule_dontmatchurl').strip() if dontmatchurl!=currule.dontmatchurl: currule.dontmatchurl = dontmatchurl info.append("Rule dontmatch url changed")
|
def _form_rule_matchurl (form): matchurl = getval(form, 'rule_matchurl').strip() if matchurl!=currule.matchurl: currule.matchurl = matchurl info.append("Rule match url changed") dontmatchurl = getval(form, 'rule_dontmatchurl').strip() if dontmatchurl!=currule.dontmatchurl: currule.dontmatchurl = dontmatchurl info.append(i18n._("Rule dontmatch url changed"))
| 16,294
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append("Rule url port changed") path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append("Rule url path changed") parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append("Rule url parameters changed") query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append(i18n._("Rule url scheme changed")) host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append("Rule url port changed") path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append("Rule url path changed") parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append("Rule url parameters changed") query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
| 16,295
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append("Rule url port changed") path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append("Rule url path changed") parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append("Rule url parameters changed") query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append(i18n._("Rule url host changed")) port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append("Rule url port changed") path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append("Rule url path changed") parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append("Rule url parameters changed") query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
| 16,296
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append("Rule url port changed") path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append("Rule url path changed") parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append("Rule url parameters changed") query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append(i18n._("Rule url port changed")) path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append("Rule url path changed") parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append("Rule url parameters changed") query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
| 16,297
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append("Rule url port changed") path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append("Rule url path changed") parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append("Rule url parameters changed") query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append("Rule url port changed") path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append(i18n._("Rule url path changed")) parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append("Rule url parameters changed") query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
| 16,298
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append("Rule url port changed") path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append("Rule url path changed") parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append("Rule url parameters changed") query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
|
def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_urlport').strip() if port!=currule.port: currule.port = port info.append("Rule url port changed") path = getval(form, 'rule_urlpath').strip() if path!=currule.path: currule.path = path info.append("Rule url path changed") parameters = getval(form, 'rule_urlparameters').strip() if parameters!=currule.parameters: currule.parameters = parameters info.append(i18n._("Rule url parameters changed")) query = getval(form, 'rule_urlquery').strip() if query!=currule.query: currule.query = query info.append("Rule url query changed") fragment = getval(form, 'rule_urlfragment').strip() if fragment!=currule.fragment: currule.fragment = fragment info.append("Rule url fragment changed")
| 16,299
|