| rem | add | context |
|---|---|---|
debug(BRING_IT_ON, "removing header", `h`) | def remove_headers (headers, to_remove): """utility function to remove entries from RFC822 headers""" for h in to_remove: if headers.has_key(h): debug(BRING_IT_ON, "removing header", `h`) del headers[h] | |
HtmlParser.feed(self, data) | self.parser.feed(data) | def feed (self, data): """feed some data to the parser""" if self.state=='parse': # look if we must replay something if self.waited: self.waited = 0 waitbuf, self.waitbuf = self.waitbuf, [] self.replay(waitbuf) if self.state!='parse': return data = self.inbuf.getvalue() self.inbuf.close() self.inbuf = StringIO() if dat... |
HtmlParser.flush(self) | self.parser.flush() | def flush (self): #self._debug(HURT_ME_PLENTY, "flush") # flushing in wait state raises a filter exception if self.state=='wait': raise FilterException("HtmlFilter[%d]: still waiting for data"%self.level) HtmlParser.flush(self) |
wc.set_debuglevel(wc.HURT_ME_PLENTY) | wc.set_debuglevel(wc.NIGHTMARE) | def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, r... |
filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) | filtered += wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, data, 'filter', attrs) | def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, r... |
filtered = wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, | filtered += wc.filter.applyfilter(wc.filter.FILTER_RESPONSE_MODIFY, | def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, r... |
print "Test: finish: exception:", msg | print >>sys.stderr, "Test: finish: exception:", msg | def _main(): fname = sys.argv[1] if fname=="-": f = sys.stdin else: f = file(fname) import wc, time wc.config = wc.Configuration() # set debug level wc.set_debuglevel(wc.HURT_ME_PLENTY) wc.config['filters'] = ['Replacer', 'Rewriter', 'BinaryCharFilter'] wc.config.init_filter_modules() from wc.proxy import proxy_poll, r... |
if server.headers.has_key('Transfer-Encoding'): to_remove.add('Transfer-Encoding') tencs = server.headers['Transfer-Encoding'].lower() for tenc in tencs.split(","): tenc = tenc.strip() if ";" in tenc: tenc = tenc.split(";", 1)[0] if not tenc or tenc == 'identity': continue if tenc == 'chunked': server.decoders.append(U... | def server_set_encoding_headers (server, filename=None): """ Set encoding headers. """ rewrite = server.is_rewrite() bytes_remaining = get_content_length(server.headers) to_remove = sets.Set() # remove content length if rewrite: to_remove.add('Content-Length') # add decoders if server.headers.has_key('Transfer-Encoding... | |
if server.headers.has_key('Transfer-Encoding'): tenc = server.headers['Transfer-Encoding'] if tenc != 'chunked': wc.log.warn(wc.LOG_PROXY, "unknown transfer encoding %r, assuming chunked encoding", tenc) server.decoders.append(UnchunkStream.UnchunkStream(server)) server.encoders.append(ChunkStream.ChunkStream(server))... | def server_set_encoding_headers (server, filename=None): """ Set encoding headers. """ rewrite = server.is_rewrite() bytes_remaining = get_content_length(server.headers) to_remove = sets.Set() # remove content length if rewrite: to_remove.add('Content-Length') # add decoders if server.headers.has_key('Transfer-Encoding... | |
if not server.headers.has_key('Content-Length'): server.headers['Connection'] = 'close\r' | def server_set_encoding_headers (server, filename=None): """ Set encoding headers. """ rewrite = server.is_rewrite() bytes_remaining = get_content_length(server.headers) to_remove = sets.Set() # remove content length if rewrite: to_remove.add('Content-Length') # add decoders if server.headers.has_key('Transfer-Encoding... | |
encoding = server.headers.get('Content-Encoding', '').lower() if encoding in ('gzip', 'x-gzip', 'deflate') and \ (filename is None or not filename.endswith(".gz")): if encoding == 'deflate': server.decoders.append(DeflateStream.DeflateStream()) else: server.decoders.append(GunzipStream.GunzipStream()) to_remove = ['C... | if server.headers.has_key('Content-Encoding'): to_remove.add('Content-Encoding') cencs = server.headers['Content-Encoding'].lower() for cenc in cencs.split(","): cenc = cenc.strip() if ";" in cenc: cenc = cenc.split(";", 1)[0] if not cenc or cenc == 'identity': continue if filename is not None and \ (filename.endswith(... | def server_set_encoding_headers (server, filename=None): """ Set encoding headers. """ rewrite = server.is_rewrite() bytes_remaining = get_content_length(server.headers) to_remove = sets.Set() # remove content length if rewrite: to_remove.add('Content-Length') # add decoders if server.headers.has_key('Transfer-Encoding... |
to_remove.append('Cache-Control') remove_headers(server.headers, to_remove) | to_remove.add('Cache-Control') | def server_set_encoding_headers (server, filename=None): """ Set encoding headers. """ rewrite = server.is_rewrite() bytes_remaining = get_content_length(server.headers) to_remove = sets.Set() # remove content length if rewrite: to_remove.add('Content-Length') # add decoders if server.headers.has_key('Transfer-Encoding... |
elif encoding and encoding!='identity': wc.log.warn(wc.LOG_PROXY, _("unsupported encoding: %r"), encoding) if not server.headers.has_key('Content-Length'): server.headers['Connection'] = 'close\r' | remove_headers(server.headers, to_remove) | def server_set_encoding_headers (server, filename=None): """ Set encoding headers. """ rewrite = server.is_rewrite() bytes_remaining = get_content_length(server.headers) to_remove = sets.Set() # remove content length if rewrite: to_remove.add('Content-Length') # add decoders if server.headers.has_key('Transfer-Encoding... |
hostset.add(expand_ip(host)) | hostset.add(expand_ip(host)[0]) | def hosts2map (hosts): """return a set of named hosts, and a list of subnets (host/netmask adresses). Only IPv4 host/netmasks are supported. """ hostset = Set() nets = [] for host in hosts: if _host_bitmask_re.match(host): host, mask = host.split("/") mask = int(mask) if not is_valid_bitmask(mask): error(PROXY, "bitmas... |
ips = resolve_host(host) for i in ips: hostset.add(i) | hostset |= resolve_host(host) | def hosts2map (hosts): """return a set of named hosts, and a list of subnets (host/netmask adresses). Only IPv4 host/netmasks are supported. """ hostset = Set() nets = [] for host in hosts: if _host_bitmask_re.match(host): host, mask = host.split("/") mask = int(mask) if not is_valid_bitmask(mask): error(PROXY, "bitmas... |
item = form[key] if isinstance(item, list): item = item[0] elif hasattr(item, "value"): item = item.value return item.decode(charset) | return get_item_value(form[key]) | def getval (form, key): """return a formfield value""" if not form.has_key(key): return u'' item = form[key] if isinstance(item, list): item = item[0] elif hasattr(item, "value"): item = item.value return item.decode(charset) |
item = form[key] if isinstance(item, list): l = [x.value for x in item] elif hasattr(item, "value"): l = [item.value] else: l = [item] return [ x.decode(charset) for x in l ] | return get_item_list(form[key]) def get_prefix_vals (form, prefix): """return a list of (key, value) pairs where ``prefix+key'' is a valid form field""" res = [] for key, item in form.items(): if key.startswith(prefix): res.append(key[len(prefix):], get_item_value(item)) return res | def getlist (form, key): """return a list of formfield values""" if not form.has_key(key): return [] item = form[key] if isinstance(item, list): l = [x.value for x in item] elif hasattr(item, "value"): l = [item.value] else: l = [item] return [ x.decode(charset) for x in l ] |
if ct != self.mime: | if ct is None: | def check_headers (self): """add missing content-type and/or encoding headers""" # 304 Not Modified does not send any type or encoding info, # because this info was cached if self.statuscode == '304': return # check content-type against our own guess i = self.document.find('?') if i>0: document = self.document[:i] else... |
self.js_filter = opts['javascript'] and jslib | self.javascript = opts['javascript'] and jslib | def __init__ (self, opts): self.js_filter = opts['javascript'] and jslib self.js_html = None self.js_src = False self.js_script = '' if self.js_filter: self.js_env = jslib.new_jsenv() self.js_output = 0 self.js_popup = 0 |
if self.js_filter: | if self.javascript: | def __init__ (self, opts): self.js_filter = opts['javascript'] and jslib self.js_html = None self.js_src = False self.js_script = '' if self.js_filter: self.js_env = jslib.new_jsenv() self.js_output = 0 self.js_popup = 0 |
error(FILTER, "waited too long for %s"%self.state[1]) self.js_client.finish() self.js_html = None | def flush (self): self._debug("flush") if self.waited > 100: # waited too long; stop js background downloader and # switch back to parse error(FILTER, "waited too long for %s"%self.state[1]) if self.js_env.hasListener(self): self.js_env.detachListener(self) self.js_html = None self.state = ('parse',) self.feed("") # wi... | |
error(FILTER, "waited too long for %s"%self.state[1]) if self.js_env.hasListener(self): self.js_env.detachListener(self) self.js_html = None | def flush (self): self._debug("flush") if self.waited > 100: # waited too long; stop js background downloader and # switch back to parse error(FILTER, "waited too long for %s"%self.state[1]) if self.js_env.hasListener(self): self.js_env.detachListener(self) self.js_html = None self.state = ('parse',) self.feed("") # wi... | |
elif self.js_filter: | elif self.javascript: | def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state[0]=='wait': self.waitbuf.append... |
if not self.rulestack and not self.js_filter: | if not self.rulestack and not self.javascript: | def startElement (self, tag, attrs): """We get a new start tag. New rules could be appended to the pending rules. No rules can be removed from the list.""" # default data self._debug("startElement %s", `tag`) tag = check_spelling(tag, self.url) item = [STARTTAG, tag, attrs] if self.state[0]=='wait': self.waitbuf.append... |
if self.js_filter and tag=='script': | if self.javascript and tag=='script': | def endElement (self, tag): """We know the following: if a rule matches, it must be the one on the top of the stack. So we look only at the top rule. |
client = HttpProxyClient(self.jsScriptData, (url, ver)) ClientServerMatchmaker(client, "GET %s HTTP/1.1" % url, WcMessage(StringIO('')), '', | self.js_client = HttpProxyClient(self.jsScriptData, (url, ver)) ClientServerMatchmaker(self.js_client, "GET %s HTTP/1.1" % url, WcMessage(StringIO('')), '', | def jsScriptSrc (self, url, language): """Start a background download for <script src=""> tags""" assert self.state[0]=='parse', "non-parse state %s" % str(self.state) ver = get_js_ver(language) if self.base_url: url = urlparse.urljoin(self.base_url, url) else: url = urlparse.urljoin(self.url, url) url = norm_url(url) ... |
comments=self.comments, javascript=self.js_filter, level=self.level+1) | comments=self.comments, javascript=self.javascript, level=self.level+1) | def jsScript (self, script, ver, item): """execute given script with javascript version ver""" self._debug("JS: jsScript %s %s", ver, `script`) assert self.state[0]=='parse', "non-parse state %s" % str(self.state) assert len(self.buf) >= 2, "too small buffer %s" % str(self.buf) self.js_output = 0 self.js_env.attachList... |
from urllib import splittype, splithost, splitport | from urllib import splittype, splithost, splitnport | def fileno(self): return self.socket.fileno() |
if not host: hostname = "localhost" port = config['port'] else: hostname, port = splitport(host) if port is None: port = 80 else: port = int(port) return scheme, hostname.lower(), port, document | port = 80 if host: host = host.lower() host, port = splitnport(host, 80) return scheme, host, port, document | def spliturl (url): """split url in a tuple (scheme, hostname, port, document) where hostname is always lowercased""" # XXX this relies on scheme==http! scheme, netloc = splittype(url) host, document = splithost(netloc) if not host: hostname = "localhost" port = config['port'] else: hostname, port = splitport(host) if ... |
info.append("Rule removed") | info.append(i18n._("Rule removed")) | def _form_removerule (rule): curfolder.rules.remove(rule) global currule currule = None info.append("Rule removed") |
info.append("Attribute added") | info.append(i18n._("Rewrite attribute added")) | def _form_rewrite_addattr (form): name = getval(form, "attrname").strip() if not name: error.append(i18n._("Empty attribute name")) return value = getval(form, "attrval") currule.attrs[name] = value info.append("Attribute added") |
info.append("Attributes removed") | info.append(i18n._("Rewrite attributes removed")) | def _form_rewrite_removeattrs (form): toremove = getlist(form, 'delattr') if toremove: for attr in toremove: del currule.attrs[attr] info.append("Attributes removed") |
error.append("Empty rule title") | error.append(i18n._("Empty rule title")) | def _form_rule_titledesc (form): title = getval(form, 'rule_title') if not title: error.append("Empty rule title") return if title!=currule.title: currule.title = title info.append("Rule title changed") desc = getval(form, 'rule_description') if desc!=currule.desc: currule.desc = desc info.append("Rule description chan... |
info.append("Rule title changed") | info.append(i18n._("Rule title changed")) | def _form_rule_titledesc (form): title = getval(form, 'rule_title') if not title: error.append("Empty rule title") return if title!=currule.title: currule.title = title info.append("Rule title changed") desc = getval(form, 'rule_description') if desc!=currule.desc: currule.desc = desc info.append("Rule description chan... |
info.append("Rule description changed") | info.append(i18n._("Rule description changed")) | def _form_rule_titledesc (form): title = getval(form, 'rule_title') if not title: error.append("Empty rule title") return if title!=currule.title: currule.title = title info.append("Rule title changed") desc = getval(form, 'rule_description') if desc!=currule.desc: currule.desc = desc info.append("Rule description chan... |
info.append("Rule match url changed") | info.append(i18n._("Rule match url changed")) | def _form_rule_matchurl (form): matchurl = getval(form, 'rule_matchurl').strip() if matchurl!=currule.matchurl: currule.matchurl = matchurl info.append("Rule match url changed") dontmatchurl = getval(form, 'rule_dontmatchurl').strip() if dontmatchurl!=currule.dontmatchurl: currule.dontmatchurl = dontmatchurl info.appen... |
info.append("Rule dontmatch url changed") | info.append(i18n._("Rule dontmatch url changed")) | def _form_rule_matchurl (form): matchurl = getval(form, 'rule_matchurl').strip() if matchurl!=currule.matchurl: currule.matchurl = matchurl info.append("Rule match url changed") dontmatchurl = getval(form, 'rule_dontmatchurl').strip() if dontmatchurl!=currule.dontmatchurl: currule.dontmatchurl = dontmatchurl info.appen... |
info.append("Rule url scheme changed") | info.append(i18n._("Rule url scheme changed")) | def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_... |
info.append("Rule url host changed") | info.append(i18n._("Rule url host changed")) | def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_... |
info.append("Rule url port changed") | info.append(i18n._("Rule url port changed")) | def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_... |
info.append("Rule url path changed") | info.append(i18n._("Rule url path changed")) | def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_... |
info.append("Rule url parameters changed") | info.append(i18n._("Rule url parameters changed")) | def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_... |
info.append("Rule url query changed") | info.append(i18n._("Rule url query changed")) | def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_... |
info.append("Rule url fragment changed") | info.append(i18n._("Rule url fragment changed")) | def _form_rule_urlparts (form): scheme = getval(form, 'rule_urlscheme').strip() if scheme!=currule.scheme: currule.scheme = scheme info.append("Rule url scheme changed") host = getval(form, 'rule_urlhost').strip() if host!=currule.host: currule.host = host info.append("Rule url host changed") port = getval(form, 'rule_... |
info.append("Rule blocked url changed") | info.append(i18n._("Rule blocked url changed")) | def _form_apply_block (form): _form_rule_urlparts(form) url = getval(form, 'rule_blockedurl').strip() if url!=currule.url: currule.url = url info.append("Rule blocked url changed") |
error.append("Empty header rule name") | error.append(i18n._("Empty header rule name")) | def _form_apply_header (form): _form_rule_matchurl(form) name = getval(form, 'rule_headername').strip() if not name: error.append("Empty header rule name") elif name!=currule.name: currule.name = name info.append("Rule header name changed") value = getval(form, 'rule_headervalue').strip() if value!=currule.value: curru... |
info.append("Rule header name changed") | info.append(i18n._("Rule header name changed")) | def _form_apply_header (form): _form_rule_matchurl(form) name = getval(form, 'rule_headername').strip() if not name: error.append("Empty header rule name") elif name!=currule.name: currule.name = name info.append("Rule header name changed") value = getval(form, 'rule_headervalue').strip() if value!=currule.value: curru... |
info.append("Rule header value changed") | info.append(i18n._("Rule header value changed")) | def _form_apply_header (form): _form_rule_matchurl(form) name = getval(form, 'rule_headername').strip() if not name: error.append("Empty header rule name") elif name!=currule.name: currule.name = name info.append("Rule header name changed") value = getval(form, 'rule_headervalue').strip() if value!=currule.value: curru... |
error.append("Invalid image width value") | error.append(i18n._("Invalid image width value")) | def _form_apply_image (form): _form_rule_matchurl(form) width = getval(form, 'rule_imgwidth').strip() try: width = int(width) except ValueError: error.append("Invalid image width value") return if width!=currule.width: currule.width = width info.append("Rule image width changed") height = getval(form, 'rule_imgheight')... |
info.append("Rule image width changed") | info.append(i18n._("Rule image width changed")) | def _form_apply_image (form): _form_rule_matchurl(form) width = getval(form, 'rule_imgwidth').strip() try: width = int(width) except ValueError: error.append("Invalid image width value") return if width!=currule.width: currule.width = width info.append("Rule image width changed") height = getval(form, 'rule_imgheight')... |
error.append("Invalid image height value") | error.append(i18n._("Invalid image height value")) | def _form_apply_image (form): _form_rule_matchurl(form) width = getval(form, 'rule_imgwidth').strip() try: width = int(width) except ValueError: error.append("Invalid image width value") return if width!=currule.width: currule.width = width info.append("Rule image width changed") height = getval(form, 'rule_imgheight')... |
info.append("Rule image height changed") | info.append(i18n._("Rule image height changed")) | def _form_apply_image (form): _form_rule_matchurl(form) width = getval(form, 'rule_imgwidth').strip() try: width = int(width) except ValueError: error.append("Invalid image width value") return if width!=currule.width: currule.width = width info.append("Rule image width changed") height = getval(form, 'rule_imgheight')... |
print "XXX apply pics" | for service in pics_services: if form.has_key("service_%s"%service): if not currule.ratings.has_key(service): currule.ratings[service] = {} for category in pics_categories[service]: currule.ratings[service][category] = 0 info.append(i18n._("PICS service %s enabled") % \ pics_data[service]['name']) else: if currule.rati... | def _form_apply_pics (form): _form_rule_matchurl(form) print "XXX apply pics" |
info.append("Rule replace search changed") | info.append(i18n._("Rule replace search changed")) | def _form_apply_replace (form): _form_rule_matchurl(form) # note: do not strip() the search and replace form values search = getval(form, 'rule_search') if search!=currule.search: currule.search = search info.append("Rule replace search changed") replace = getval(form, 'rule_replace') if replace!=currule.replace: curru... |
info.append("Rule replacement changed") | info.append(i18n._("Rule replacement changed")) | def _form_apply_replace (form): _form_rule_matchurl(form) # note: do not strip() the search and replace form values search = getval(form, 'rule_search') if search!=currule.search: currule.search = search info.append("Rule replace search changed") replace = getval(form, 'rule_replace') if replace!=currule.replace: curru... |
extra = self.persistent and "persistent " or "" | extra = "" if self.persistent: extra += "persistent " if self.server: extra += "server " | def __repr__ (self): """object representation""" extra = self.persistent and "persistent " or "" if self.request: try: extra += self.request.split()[1] except IndexError: extra += '???'+self.request else: extra += 'being read' return '<%s:%-8s %s>' % ('client', self.state, extra) |
assert self.server, "%s server_content(%s) had no server" % \ | assert self.server, "%s server_content(%r) had no server" % \ | def server_content (self, data): """The server received some content. Write it to the client.""" assert self.server, "%s server_content(%s) had no server" % \ (self, data) if data: self.write(data) |
super(HttpClient, self).handle_close() | def handle_close (self): """The client closed the connection, so cancel the server connection""" wc.log.debug(wc.LOG_PROXY, '%s handle_close', self) self.send_buffer = '' super(HttpClient, self).handle_close() if self.server: self.server.client_abort() self.server = None # If there isn't a server, then it's in the proc... | |
self.headers['Host'] += "%s:%d\r"%(hostname, port) | self.headers['Host'] = "%s:%d\r"%(hostname, port) | def __init__ (self, client, request, headers, content, nofilter, compress, mime=None): self.client = client self.server = None self.request = request self.headers = headers self.compress = compress self.content = content self.nofilter = nofilter self.mime = mime debug(PROXY, "ClientServer: %s", `self.request`) self.met... |
data = pickle.load(fp) | rating_cache = pickle.load(fp) | def rating_cache_load (): """load cached rating data from disk or return an empty cache if no cached data is found""" if os.path.isfile(rating_cachefile): fp = file(rating_cachefile) data = pickle.load(fp) fp.close() # remove invalid entries for url in data: if not is_valid_url(url): error(FILTER, "Invalid rating url %... |
for url in data: | toremove = [] for url in rating_cache: | def rating_cache_load (): """load cached rating data from disk or return an empty cache if no cached data is found""" if os.path.isfile(rating_cachefile): fp = file(rating_cachefile) data = pickle.load(fp) fp.close() # remove invalid entries for url in data: if not is_valid_url(url): error(FILTER, "Invalid rating url %... |
del data[url] return data return {} | toremove.append(url) if toremove: for url in toremove: del rating_cache[url] rating_cache_write() | def rating_cache_load (): """load cached rating data from disk or return an empty cache if no cached data is found""" if os.path.isfile(rating_cachefile): fp = file(rating_cachefile) data = pickle.load(fp) fp.close() # remove invalid entries for url in data: if not is_valid_url(url): error(FILTER, "Invalid rating url %... |
rating_cache = rating_cache_load() | rating_cache = {} rating_cache_load() | def rating_cache_load (): """load cached rating data from disk or return an empty cache if no cached data is found""" if os.path.isfile(rating_cachefile): fp = file(rating_cachefile) data = pickle.load(fp) fp.close() # remove invalid entries for url in data: if not is_valid_url(url): error(FILTER, "Invalid rating url %... |
_attrs = {} | _attrs = wc.containers.ListDict() | def dict_attrs (attrs): _attrs = {} for name in attrs.getQNames(): _attrs[name] = attrs.getValueByQName(name) return _attrs |
self.prefixuri = {} self.uriprefix = {} | self.ns_current = [] self.ns_stack = [] | def __init__ (self, xmlrules, htmlrules, url, localhost): """ Init rules and buffers. """ # filter rules self.xmlrules = xmlrules self.htmlrules = htmlrules self.url = url # XML namespaces {name -> uri} and {uri -> name} self.prefixuri = {} self.uriprefix = {} # already filtered XML data self.outbuf = StringIO() self.t... |
print "XXX setDocumentLocator", locator | print >>sys.stderr, "XXX setDocumentLocator", locator | def setDocumentLocator (self, locator): print "XXX setDocumentLocator", locator |
self.prefixuri[prefix] = uri self.uriprefix[uri] = prefix | ns = (prefix, uri) self.ns_stack.append(ns) self.ns_current.append(ns) | def startPrefixMapping (self, prefix, uri): self.prefixuri[prefix] = uri self.uriprefix[uri] = prefix |
if prefix in self.prefixuri: uri = self.prefixuri[prefix] del self.uriprefix[uri] del self.prefixuri[prefix] else: self.error("Removing unknown prefix mapping %r" % prefix) | if not self.ns_stack or self.ns_stack[-1][0] != prefix: self.error("Removing unknown prefix mapping (%r)" % prefix) del self.ns_stack[-1] def find_namespace (self, uri): for prefix, nsuri in reversed(self.ns_stack): if nsuri == uri: return (prefix, uri) return None | def endPrefixMapping (self, prefix): if prefix in self.prefixuri: uri = self.prefixuri[prefix] del self.uriprefix[uri] del self.prefixuri[prefix] else: self.error("Removing unknown prefix mapping %r" % prefix) |
if not self.stack: for prefix, uri in self.prefixuri.items(): if prefix: attrs[u"xmlns:%s" % prefix] = uri else: attrs[u"xmlns"] = uri | for prefix, uri in self.ns_current: if prefix: attrs["xmlns:%s" % prefix] = uri else: attrs["xmlns"] = uri self.ns_current = [] | def startElement (self, name, attrs): attrs = dict_attrs(attrs) if not self.stack: for prefix, uri in self.prefixuri.items(): if prefix: attrs[u"xmlns:%s" % prefix] = uri else: attrs[u"xmlns"] = uri self.stack.append((wc.filter.xmlfilt.STARTTAG, name, attrs)) item = [wc.filter.xmlfilt.STARTTAG, name, attrs] self.tagbuf... |
if name[0]: ns = self.uriprefix[name[0]] if ns: name = u"%s:%s" % (ns, name[1]) else: name = name[1] else: name = name[1] self.startElement(name, attrs) | tag = name[1] namespace = self.find_namespace(name[0]) if namespace and namespace[0]: tag = u"%s:%s" % (namespace[0], name[1]) self.startElement(tag, attrs) | def startElementNS (self, name, qname, attrs): if name[0]: ns = self.uriprefix[name[0]] if ns: name = u"%s:%s" % (ns, name[1]) else: name = name[1] else: name = name[1] self.startElement(name, attrs) |
if name[0]: ns = self.uriprefix[name[0]] if ns: name = u"%s:%s" % (ns, name[1]) else: name = name[1] else: name = name[1] self.endElement(name) | tag = name[1] namespace = self.find_namespace(name[0]) if namespace and namespace[0]: tag = u"%s:%s" % (namespace[0], name[1]) self.endElement(tag) | def endElementNS (self, name, qname): if name[0]: ns = self.uriprefix[name[0]] if ns: name = u"%s:%s" % (ns, name[1]) else: name = name[1] else: name = name[1] self.endElement(name) |
print "XXX skippedEntity", name | print >>sys.stderr, "XXX skippedEntity", name | def skippedEntity (self, name): print "XXX skippedEntity", name |
print "XXX notationDecl", name, publicId, systemId | print >>sys.stderr, "XXX notationDecl", name, publicId, systemId | def notationDecl (self, name, publicId, systemId): print "XXX notationDecl", name, publicId, systemId |
print "XXX unparsedEntityDecl", name, publicId, systemId, ndata | print >>sys.stderr, "XXX unparsedEntityDecl", name, publicId, systemId, ndata | def unparsedEntityDecl (self, name, publicId, systemId, ndata): print "XXX unparsedEntityDecl", name, publicId, systemId, ndata |
if not path: | if not path or path=='/': | def norm_url (url): """replace empty paths with / and normalize them""" urlparts = list(urlparse.urlparse(url)) path = urlparts[2] if not path: urlparts[2] = '/' else: # XXX only windows and posix?? # collapse redundant path segments urlparts[2] = os.path.normpath(path).replace('\\', '/') if path.endswith('/'): urlpart... |
if not self.mimelist: return True | def applies_to_mime (self, mime): if mime is None: return False if not self.mimelist: return True for ro in self.mimelist: if ro.match(mime): return True return False | |
import profile, wc | import profile | def _main (): """USAGE: test/run.sh test/pconfig.py""" from test import initlog initlog("test/logging.conf") import profile, wc profile.run("config = wc.Configuration()", "filter.prof") |
os.makedirs(os.path.dirname("downloads/"+target)) | d = os.path.dirname("downloads/"+target) if not os.path.isdir(d): os.makedirs(d) | def geturl (basedir, file, fun, saveas=None): if saveas is not None: target = saveas else: target = file if os.path.exists("downloads/"+target): print "downloads/%s already exists"%target else: print "downloading", basedir+file os.makedirs(os.path.dirname("downloads/"+target)) urldata = urllib2.urlopen(basedir+file) f ... |
print "remove old data..." | print "remove old extracted data..." | def remove_old_data (): print "remove old data..." for d in ("extracted", "config/blacklists_new"): if os.path.isdir(d): rm_rf(d) |
print "read data..." read_blacklists("config/blacklists") | def remove_old_data (): print "remove old data..." for d in ("extracted", "config/blacklists_new"): if os.path.isdir(d): rm_rf(d) | |
auth = ",".join(creds['NTLM'][0]) | attrs = { 'host': creds['NTLM'][0]['host'], 'domain': creds['NTLM'][0]['domain'], 'type': NTLMSSP_CHALLENGE, } auth = ",".join(get_challenges(**attrs)) | def process_headers (self): """read and filter client request headers""" # Two newlines ends headers i = self.recv_buffer.find('\r\n\r\n') if i < 0: return i += 4 # Skip over newline terminator # the first 2 chars are the newline of request fp = StringIO(self.read(i)[2:]) msg = WcMessage(fp) # put unparsed data (if any... |
raise RatingParseError("Invalid rating url %s." % repr(url)) | raise wc.filter.rating.RatingParseError( "Invalid rating url %s." % repr(url)) | def check_url (self, url): if not wc.url.is_safe_url(url): raise RatingParseError("Invalid rating url %s." % repr(url)) |
self.ratings = [] | self.ratings = {} for category in wc.filter.rating.categories: self.ratings[category.name] = None | def __init__ (self, sid=None, titles=None, descriptions=None, disable=0, matchurls=None, nomatchurls=None): super(RatingRule, self).__init__(sid=sid, titles=titles, descriptions=descriptions, disable=disable, matchurls=matchurls, nomatchurls=nomatchurls) # list of RuleRating objects self.ratings = [] self.url = "" |
self.ratings.append((self._category, self._data)) | self.ratings[self._category] = self._data | def end_data (self, name): super(RatingRule, self).end_data(name) if name == 'category': assert self._category self.ratings.append((self._category, self._data)) pass elif name == 'url': self.url = self._data |
if mime is not None and not attrs['mime'].startswith(mime): | origmime = attrs['mime'] if mime is not None and origmime is not None and \ not origmime.startswith(mime): | def recognize (self, buf, attrs): # note: recognizing a mime type fixes exploits like # CVE-2002-0025 and CVE-2002-0024 wc.log.debug(wc.LOG_FILTER, "MIME recognize %d bytes of data", buf.tell()) try: mime = wc.magic.classify(buf) wc.log.debug(wc.LOG_FILTER, "MIME recognized %r", mime) if mime is not None and not attrs[... |
attrs['mime'], mime, attrs['url']) | origmime, mime, attrs['url']) | def recognize (self, buf, attrs): # note: recognizing a mime type fixes exploits like # CVE-2002-0025 and CVE-2002-0024 wc.log.debug(wc.LOG_FILTER, "MIME recognize %d bytes of data", buf.tell()) try: mime = wc.magic.classify(buf) wc.log.debug(wc.LOG_FILTER, "MIME recognized %r", mime) if mime is not None and not attrs[... |
self['noproxyfor'] = [{}, [], {}] self['allowedhosts'] = [{}, [], {}] | self['noproxyfor'] = None self['allowedhosts'] = None | def reset (self): """Reset to default values""" self['port'] = 8080 self['proxyuser'] = "" self['proxypass'] = "" self['parentproxy'] = "" self['parentproxyport'] = 3128 self['parentproxyuser'] = "" self['parentproxypass'] = "" self['logfile'] = "" self['strict_whitelist'] = 0 self['debuglevel'] = 0 self['rules'] = [] ... |
if self.config['noproxyfor']: | if self.config['noproxyfor'] is not None: | def start_element (self, name, attrs): if name=='webcleaner': for key,val in attrs.items(): self.config[str(key)] = unxmlify(val) for key in ('port','parentproxyport', 'debuglevel','colorize','showerrors', 'strict_whitelist'): self.config[key] = int(self.config[key]) for key in ('version', 'parentproxy', 'logfile', 'pr... |
if self.config['allowedhosts']: | else: self.config['noproxyfor'] = [{}, [], {}] if self.config['allowedhosts'] is not None: | def start_element (self, name, attrs): if name=='webcleaner': for key,val in attrs.items(): self.config[str(key)] = unxmlify(val) for key in ('port','parentproxyport', 'debuglevel','colorize','showerrors', 'strict_whitelist'): self.config[key] = int(self.config[key]) for key in ('version', 'parentproxy', 'logfile', 'pr... |
if not attrs.has_key('buffer') or attrs['buffer'].closed: | if not attrs.has_key('buffer'): return data if attrs['blocked']: return '' if attrs['buffer'].closed: | def filter (self, data, **attrs): if not attrs.has_key('buffer') or attrs['buffer'].closed: # we do not block this image # or we do not have enough buffer data yet return data buf = attrs['buffer'] buf.write(data) if buf.tell() > self.min_bufsize: if self.check_sizes(buf, attrs['sizes']): # size is ok data = buf.getval... |
if self.check_sizes(buf, attrs['sizes']): data = buf.getvalue() buf.close() return data | attrs['blocked'] = not self.check_sizes(buf, attrs['sizes'], attrs['url']) data = buf.getvalue() buf.close() if attrs['blocked']: return self.blockdata return data | def filter (self, data, **attrs): if not attrs.has_key('buffer') or attrs['buffer'].closed: # we do not block this image # or we do not have enough buffer data yet return data buf = attrs['buffer'] buf.write(data) if buf.tell() > self.min_bufsize: if self.check_sizes(buf, attrs['sizes']): # size is ok data = buf.getval... |
if not attrs.has_key('buffer') or attrs['buffer'].closed: | if not attrs.has_key('buffer'): return data if attrs['blocked']: return '' if attrs['buffer'].closed: | def finish (self, data, **attrs): if not attrs.has_key('buffer') or attrs['buffer'].closed: # we do not block this image return data buf = attrs['buffer'] buf.write(data) if self.check_sizes(buf, attrs['sizes']): # size is ok data = buf.getvalue() buf.close() return data return '' |
if self.check_sizes(buf, attrs['sizes']): data = buf.getvalue() buf.close() return data return '' | attrs['blocked'] = not self.check_sizes(buf, attrs['sizes'], attrs['url']) data = buf.getvalue() buf.close() if attrs['blocked']: return self.blockdata return data | def finish (self, data, **attrs): if not attrs.has_key('buffer') or attrs['buffer'].closed: # we do not block this image return data buf = attrs['buffer'] buf.write(data) if self.check_sizes(buf, attrs['sizes']): # size is ok data = buf.getvalue() buf.close() return data return '' |
def check_sizes (self, buf, sizes): | def check_sizes (self, buf, sizes, url): | def check_sizes (self, buf, sizes): try: buf.seek(0) img = Image.open(buf, 'r') for size, formats in sizes: if size==img.size: # size matches, look for format restriction if not formats: return False elif img.format.lower() in formats: return False except IOError: exception(FILTER, "Could not get image size") return Tr... |
exception(FILTER, "Could not get image size") | exception(FILTER, "Could not get image size from %s", url) | def check_sizes (self, buf, sizes): try: buf.seek(0) img = Image.open(buf, 'r') for size, formats in sizes: if size==img.size: # size matches, look for format restriction if not formats: return False elif img.format.lower() in formats: return False except IOError: exception(FILTER, "Could not get image size") return Tr... |
replace = re.sub(r"\$(\d)", r"\\1", replace) | replace = re.sub(r"\${?(\d)}?", r"\\\1", replace) | def convert_adzapper_replace (replace): # replace Perl back references with Python ones replace = re.sub(r"\$(\d)", r"\\1", replace) return replace |
wc.log.info(wc.LOG_PROXY, '%s connect timed out', self) | wc.log.debug(wc.LOG_PROXY, '%s connect timed out', self) | def check_connect (self, addr): """ Check if the connection is etablished. See also http://cr.yp.to/docs/connect.html and connect(2) manpage. """ wc.log.debug(wc.LOG_PROXY, '%s check connect', self) self.connect_checks += 1 if self.connect_checks >= 50: wc.log.info(wc.LOG_PROXY, '%s connect timed out', self) self.handl... |
os.system('invoke-rc.d webcleaner start') | from wc import daemon daemon.start(parent_exit=0) | def onCmdProxyStart (self, sender, sel, ptr): os.system('invoke-rc.d webcleaner start') debug(GUI, "webcleaner start") return 1 |
os.system('invoke-rc.d webcleaner stop') | from wc import daemon daemon.stop() | def onCmdProxyStop (self, sender, sel, ptr): os.system('invoke-rc.d webcleaner stop') debug(GUI, "webcleaner stop") return 1 |
os.system('invoke-rc.d webcleaner restart') | from wc import daemon daemon.restart(parent_exit=0) | def onCmdProxyRestart (self, sender, sel, ptr): os.system('invoke-rc.d webcleaner restart') debug(GUI, "webcleaner restart") return 1 |
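
Several rows above (the `remove_headers` utility in the first row and the `server_set_encoding_headers` edits) share one pattern: header names to drop are collected in a set while the message is inspected, then deleted in a single pass at the end. Below is a minimal runnable sketch of that pattern; it assumes a plain `dict` in place of WebCleaner's `WcMessage`, and `set_encoding_headers` is an illustrative name, not the proxy's actual API.

```python
def remove_headers(headers, to_remove):
    """Delete entries from RFC 822-style headers.

    Mirrors the remove_headers utility in the rows above; here
    `headers` is assumed to be a plain dict rather than WcMessage.
    """
    for name in to_remove:
        if name in headers:
            del headers[name]


def set_encoding_headers(headers, rewrite=False):
    """Sketch of the server_set_encoding_headers flow: collect header
    names in a set while inspecting the message, then remove them all
    in one pass."""
    to_remove = set()
    if rewrite:
        # a rewriting filter invalidates the original Content-Length
        to_remove.add('Content-Length')
    if 'Transfer-Encoding' in headers:
        # the real proxy attaches unchunk/chunk stream coders here
        to_remove.add('Transfer-Encoding')
    remove_headers(headers, to_remove)


if __name__ == '__main__':
    hdrs = {'Content-Length': '123', 'Transfer-Encoding': 'chunked'}
    set_encoding_headers(hdrs, rewrite=True)
    print(hdrs)  # -> {} : both headers were removed
```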