_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q263500
Client.disconnect
validation
def disconnect(self):
    """Shut down and close the server socket, then mark us disconnected."""
    logger.info(u'Disconnecting')
    self.sock.shutdown(socket.SHUT_RDWR)
    self.sock.close()
    self.state = DISCONNECTED
python
{ "resource": "" }
q263501
Client.send
validation
def send(self, command, timeout=5):
    """Send a command to the server

    :param string command: command to send
    :raises SendTimeoutError: if the socket is not writable within ``timeout``
    """
    logger.info(u'Sending %s' % command)
    # Wait until the socket is writable; select returns it in the
    # second (writable) list.
    _, ready, _ = select.select([], [self.sock], [], timeout)
    if not ready:
        raise SendTimeoutError()
    ready[0].sendall(command + '\n')
python
{ "resource": "" }
q263502
Client._readline
validation
def _readline(self):
    """Read a line from the server.

    Bytes are pulled from the socket one at a time until a ``\n``
    character is seen or ``self._stop`` is set.

    :return: the read line
    :rtype: string
    """
    line = ''
    while True:
        ready, _, _ = select.select([self.sock], [], [], 0.5)
        if self._stop:
            break
        if not ready:
            # Nothing arrived within the 0.5s poll window; check
            # the stop flag again and keep waiting.
            continue
        byte = ready[0].recv(1)
        if byte == '\n':
            break
        line += unicode(byte, self.encoding)
    return line
python
{ "resource": "" }
q263503
Client._readblock
validation
def _readblock(self):
    """Read a block from the server.

    Lines are accumulated until a line consisting of a single ``.``
    terminates the block.

    :return: the read block
    :rtype: string
    """
    collected = []
    while not self._stop:
        current = self._readline()
        if current == '.':
            break
        collected.append(current)
    return ''.join(collected)
python
{ "resource": "" }
q263504
Client._readxml
validation
def _readxml(self):
    """Read a block and return the result as XML.

    ``<s>``/``</s>`` markers are entity-escaped first so they do not
    confuse the XML parser.

    :return: block as xml, or ``None`` if parsing fails
    :rtype: xml.etree.ElementTree
    """
    raw = re.sub(r'<(/?)s>', r'&lt;\1s&gt;', self._readblock())
    try:
        return XML(raw)
    except ParseError:
        return None
python
{ "resource": "" }
q263505
cli
validation
def cli(id):
    """Analyse an OpenStreetMap changeset."""
    analysis = Analyse(id)
    analysis.full_analysis()
    click.echo(
        'Created: %s. Modified: %s. Deleted: %s' % (
            analysis.create, analysis.modify, analysis.delete
        )
    )
    if analysis.is_suspect:
        click.echo('The changeset {} is suspect! Reasons: {}'.format(
            id,
            ', '.join(analysis.suspicion_reasons)
        ))
    else:
        click.echo('The changeset %s is not suspect!' % id)
python
{ "resource": "" }
q263506
get_user_details
validation
def get_user_details(user_id):
    """Get information about number of changesets, blocks and mapping days of
    a user, using both the OSM API and the Mapbox comments APIself.

    Returns a list of suspicion reason strings (possibly empty).
    """
    reasons = []
    try:
        url = OSM_USERS_API.format(user_id=requests.compat.quote(user_id))
        user_request = requests.get(url)
        if user_request.status_code == 200:
            user_data = user_request.content
            # Element.getchildren() was removed in Python 3.9; list(elem)
            # yields the same child elements.
            xml_data = list(list(ET.fromstring(user_data))[0])
            changesets = [i for i in xml_data if i.tag == 'changesets'][0]
            blocks = [i for i in xml_data if i.tag == 'blocks'][0]
            if int(changesets.get('count')) <= 5:
                reasons.append('New mapper')
            elif int(changesets.get('count')) <= 30:
                url = MAPBOX_USERS_API.format(
                    user_id=requests.compat.quote(user_id)
                )
                user_request = requests.get(url)
                if user_request.status_code == 200:
                    mapping_days = int(
                        user_request.json().get('extra').get('mapping_days')
                    )
                    if mapping_days <= 5:
                        reasons.append('New mapper')
            if int(list(blocks)[0].get('count')) > 1:
                reasons.append('User has multiple blocks')
    except Exception as e:
        # Best-effort: a failed lookup must not abort the analysis.
        message = 'Could not verify user of the changeset: {}, {}'
        print(message.format(user_id, str(e)))
    return reasons
python
{ "resource": "" }
q263507
changeset_info
validation
def changeset_info(changeset):
    """Return a dictionary with id, user, user_id, bounds, date of creation
    and all the tags of the changeset.

    Args:
        changeset: the XML string of the changeset.
    """
    # Element.getchildren() was removed in Python 3.9; iterating the
    # element (or list(element)) yields the same children.
    tags = list(changeset)
    keys = [tag.attrib.get('k') for tag in tags]
    keys += ['id', 'user', 'uid', 'bbox', 'created_at']
    values = [tag.attrib.get('v') for tag in tags]
    values += [
        changeset.get('id'), changeset.get('user'), changeset.get('uid'),
        get_bounds(changeset), changeset.get('created_at')
    ]
    return dict(zip(keys, values))
python
{ "resource": "" }
q263508
get_changeset
validation
def get_changeset(changeset):
    """Get the changeset using the OSM API and return the content as a XML
    ElementTree.

    Args:
        changeset: the id of the changeset.
    """
    url = 'https://www.openstreetmap.org/api/0.6/changeset/{}/download'.format(
        changeset
    )
    response = requests.get(url)
    return ET.fromstring(response.content)
python
{ "resource": "" }
q263509
get_metadata
validation
def get_metadata(changeset):
    """Get the metadata of a changeset using the OSM API and return it as a
    XML ElementTree.

    Args:
        changeset: the id of the changeset.
    """
    url = 'https://www.openstreetmap.org/api/0.6/changeset/{}'.format(changeset)
    # Element.getchildren() was removed in Python 3.9; indexing the root
    # element returns the same first child.
    return ET.fromstring(requests.get(url).content)[0]
python
{ "resource": "" }
q263510
ChangesetList.get_area
validation
def get_area(self, geojson):
    """Read the first feature from the geojson and return it as a Polygon
    object.

    Args:
        geojson: path to a GeoJSON file.
    """
    # Use a context manager so the file handle is always closed
    # (the original leaked the object returned by open()).
    with open(geojson, 'r') as geojson_file:
        data = json.load(geojson_file)
    self.area = Polygon(data['features'][0]['geometry']['coordinates'][0])
python
{ "resource": "" }
q263511
ChangesetList.filter
validation
def filter(self):
    """Filter the changesets that intersects with the geojson geometry."""
    # Element.getchildren() was removed in Python 3.9; iterating the
    # element yields its children directly.
    self.content = [
        ch
        for ch in self.xml
        if get_bounds(ch).intersects(self.area)
    ]
python
{ "resource": "" }
q263512
Analyse.set_fields
validation
def set_fields(self, changeset):
    """Set the fields of this class with the metadata of the analysed
    changeset.
    """
    get = changeset.get
    self.id = int(get('id'))
    self.user = get('user')
    self.uid = get('uid')
    self.editor = get('created_by', None)
    self.review_requested = get('review_requested', False)
    self.host = get('host', 'Not reported')
    self.bbox = get('bbox').wkt
    self.comment = get('comment', 'Not reported')
    self.source = get('source', 'Not reported')
    self.imagery_used = get('imagery_used', 'Not reported')
    self.date = datetime.strptime(get('created_at'), '%Y-%m-%dT%H:%M:%SZ')
    # Analysis state starts clean for every changeset.
    self.suspicion_reasons = []
    self.is_suspect = False
    self.powerfull_editor = False
python
{ "resource": "" }
q263513
Analyse.label_suspicious
validation
def label_suspicious(self, reason):
    """Record ``reason`` as grounds for suspicion and flag the changeset."""
    self.is_suspect = True
    self.suspicion_reasons.append(reason)
python
{ "resource": "" }
q263514
Analyse.full_analysis
validation
def full_analysis(self):
    """Run every analysis step: count, verify_words and verify_user."""
    self.count()
    self.verify_words()
    self.verify_user()
    # A changeset the author asked to have reviewed is flagged as such.
    if self.review_requested == 'yes':
        self.label_suspicious('Review requested')
python
{ "resource": "" }
q263515
Analyse.verify_words
validation
def verify_words(self):
    """Verify the fields source, imagery_used and comment of the changeset
    for some suspect words.
    """
    if self.comment:
        if find_words(self.comment, self.suspect_words, self.excluded_words):
            self.label_suspicious('suspect_word')

    # source and imagery_used get the same substring screening against
    # the illegal sources list; one label per offending field.
    for field in (self.source, self.imagery_used):
        if not field:
            continue
        lowered = field.lower()
        if any(word in lowered for word in self.illegal_sources):
            self.label_suspicious('suspect_word')

    # De-duplicate reasons accumulated above.
    self.suspicion_reasons = list(set(self.suspicion_reasons))
python
{ "resource": "" }
q263516
Analyse.verify_editor
validation
def verify_editor(self):
    """Verify if the software used in the changeset is a powerfull_editor."""
    powerful_editors = [
        'josm', 'level0', 'merkaartor', 'qgis', 'arcgis', 'upload.py',
        'osmapi', 'Services_OpenStreetMap'
    ]
    if self.editor is not None:
        lowered = self.editor.lower()
        if any(known in lowered for known in powerful_editors):
            self.powerfull_editor = True

        if 'iD' in self.editor:
            trusted_hosts = [
                'www.openstreetmap.org/id',
                'www.openstreetmap.org/edit',
                'improveosm.org',
                'strava.github.io/iD',
                'preview.ideditor.com/release',
                'preview.ideditor.com/master',
                'hey.mapbox.com/iD-internal',
                'projets.pavie.info/id-indoor',
                'maps.mapcat.com/edit',
                'id.softek.ir'
            ]
            # Strip the scheme and trailing slash before comparing hosts.
            host = self.host.split('://')[-1].strip('/')
            if host not in trusted_hosts:
                self.label_suspicious('Unknown iD instance')
    else:
        # Unknown software is treated as powerful and suspicious.
        self.powerfull_editor = True
        self.label_suspicious('Software editor was not declared')
python
{ "resource": "" }
q263517
Analyse.count
validation
def count(self):
    """Count the number of elements created, modified and deleted by the
    changeset and analyses if it is a possible import, mass modification or
    a mass deletion.
    """
    xml = get_changeset(self.id)
    # Element.getchildren() was removed in Python 3.9; iterating the
    # element yields the same children.
    actions = [action.tag for action in xml]
    self.create = actions.count('create')
    self.modify = actions.count('modify')
    self.delete = actions.count('delete')
    self.verify_editor()

    try:
        if (self.create / len(actions) > self.percentage and
                self.create > self.create_threshold and
                (self.powerfull_editor or self.create > self.top_threshold)):
            self.label_suspicious('possible import')
        elif (self.modify / len(actions) > self.percentage and
                self.modify > self.modify_threshold):
            self.label_suspicious('mass modification')
        elif ((self.delete / len(actions) > self.percentage and
                self.delete > self.delete_threshold) or
                self.delete > self.top_threshold):
            self.label_suspicious('mass deletion')
    except ZeroDivisionError:
        # A redacted changeset has no actions at all.
        print('It seems this changeset was redacted')
python
{ "resource": "" }
q263518
_unwrap_stream
validation
def _unwrap_stream(uri, timeout, scanner, requests_session):
    """
    Get a stream URI from a playlist URI, ``uri``.

    Unwraps nested playlists until something that's not a playlist is found or
    the ``timeout`` is reached.
    """
    original_uri = uri
    visited = set()
    deadline = time.time() + timeout

    while time.time() < deadline:
        # A URI we already unwrapped means the playlists form a cycle;
        # bail out instead of looping until the deadline.
        if uri in visited:
            logger.info(
                'Unwrapping stream from URI (%s) failed: '
                'playlist referenced itself', uri)
            return None
        else:
            visited.add(uri)

        logger.debug('Unwrapping stream from URI: %s', uri)

        try:
            scan_timeout = deadline - time.time()
            if scan_timeout < 0:
                logger.info(
                    'Unwrapping stream from URI (%s) failed: '
                    'timed out in %sms', uri, timeout)
                return None
            scan_result = scanner.scan(uri, timeout=scan_timeout)
        except exceptions.ScannerError as exc:
            logger.debug('GStreamer failed scanning URI (%s): %s', uri, exc)
            scan_result = None

        # Non-text, non-application MIME types are assumed to be playable
        # streams rather than playlists.
        if scan_result is not None and not (
                scan_result.mime.startswith('text/') or
                scan_result.mime.startswith('application/')):
            logger.debug(
                'Unwrapped potential %s stream: %s', scan_result.mime, uri)
            return uri

        download_timeout = deadline - time.time()
        if download_timeout < 0:
            logger.info(
                'Unwrapping stream from URI (%s) failed: timed out in %sms',
                uri, timeout)
            return None
        content = http.download(
            requests_session, uri, timeout=download_timeout)

        if content is None:
            logger.info(
                'Unwrapping stream from URI (%s) failed: '
                'error downloading URI %s', original_uri, uri)
            return None

        uris = playlists.parse(content)
        if not uris:
            logger.debug(
                'Failed parsing URI (%s) as playlist; found potential stream.',
                uri)
            return uri

        # TODO Test streams and return first that seems to be playable
        logger.debug(
            'Parsed playlist (%s) and found new URI: %s', uri, uris[0])
        uri = uris[0]
python
{ "resource": "" }
q263519
Worker.serve
validation
def serve(self, sock, request_handler, error_handler, debug=False,
          request_timeout=60, ssl=None, request_max_size=None,
          reuse_port=False, loop=None, protocol=HttpProtocol, backlog=100,
          **kwargs):
    """Start asynchronous HTTP Server on an individual process.

    :param request_handler: Sanic request handler with middleware
    :param error_handler: Sanic error handler with middleware
    :param debug: enables debug output (slows server)
    :param request_timeout: time in seconds
    :param ssl: SSLContext
    :param sock: Socket for the server to accept connections from
    :param request_max_size: size in bytes, `None` for no limit
    :param reuse_port: `True` for multiple workers
    :param loop: asyncio compatible event loop
    :param protocol: subclass of asyncio protocol class
    :return: Nothing
    """
    if debug:
        loop.set_debug(debug)

    # Each accepted connection gets a fresh protocol instance built
    # from this factory.
    protocol_factory = partial(
        protocol,
        loop=loop,
        connections=self.connections,
        signal=self.signal,
        request_handler=request_handler,
        error_handler=error_handler,
        request_timeout=request_timeout,
        request_max_size=request_max_size,
    )

    server_coroutine = loop.create_server(
        protocol_factory,
        host=None,
        port=None,
        ssl=ssl,
        reuse_port=reuse_port,
        sock=sock,
        backlog=backlog
    )

    # Instead of pulling time at the end of every request,
    # pull it once per minute
    loop.call_soon(partial(update_current_time, loop))
    return server_coroutine
python
{ "resource": "" }
q263520
Pantheon.spawn
validation
def spawn(self, generations):
    """Grow this Pantheon by multiplying Gods."""
    egg_donors = [g for g in self.gods.values() if g.chromosomes == 'XX']
    sperm_donors = [g for g in self.gods.values() if g.chromosomes == 'XY']

    for i in range(generations):
        print("\nGENERATION %d\n" % (i + 1))
        gen_xx = []
        gen_xy = []

        for egg_donor in egg_donors:
            sperm_donor = random.choice(sperm_donors)
            brood = self.breed(egg_donor, sperm_donor)

            for child in brood:
                if child.divinity > human:
                    # divine offspring join the Pantheon
                    self.add_god(child)
                if child.chromosomes == 'XX':
                    gen_xx.append(child)
                else:
                    gen_xy.append(child)

        # elder gods leave the breeding pool
        egg_donors = [ed for ed in egg_donors if ed.generation > (i - 2)]
        sperm_donors = [sd for sd in sperm_donors if sd.generation > (i - 3)]

        # mature offspring join the breeding pool
        egg_donors += gen_xx
        sperm_donors += gen_xy
python
{ "resource": "" }
q263521
Pantheon.breed
validation
def breed(self, egg_donor, sperm_donor):
    """Get it on."""
    offspring = []
    try:
        # 20% chance of twins.
        num_children = npchoice([1, 2], 1, p=[0.8, 0.2])[0]
        for _ in range(num_children):
            child = God(egg_donor, sperm_donor)
            offspring.append(child)
            send_birth_announcement(egg_donor, sperm_donor, child)
    except ValueError:
        print("Breeding error occurred. Likely the generator ran out of names.")
    return offspring
python
{ "resource": "" }
q263522
cosine
validation
def cosine(vec1, vec2):
    """Compare vectors. Borrowed from A. Parish.

    Returns the cosine similarity, or 0.0 when either vector has zero norm.
    """
    n1, n2 = norm(vec1), norm(vec2)
    if n1 > 0 and n2 > 0:
        return dot(vec1, vec2) / (n1 * n2)
    return 0.0
python
{ "resource": "" }
q263523
God.set_gender
validation
def set_gender(self, gender=None):
    """This model recognizes that sex chromosomes don't always line up with
    gender. Assign M, F, or NB according to the probabilities in p_gender.
    """
    if gender and gender in genders:
        self.gender = gender
        return
    # Chromosomes drive the probability table, so make sure they exist.
    if not self.chromosomes:
        self.set_chromosomes()
    self.gender = npchoice(genders, 1, p=p_gender[self.chromosomes])[0]
python
{ "resource": "" }
q263524
God.set_inherited_traits
validation
def set_inherited_traits(self, egg_donor, sperm_donor):
    """Accept either strings or Gods as inputs.

    String donors trigger asexual reproduction; anything else is treated
    as a God object and reproduces sexually.
    """
    # isinstance is the idiomatic, subclass-friendly type check
    # (the original compared type() to str directly).
    if isinstance(egg_donor, str):
        self.reproduce_asexually(egg_donor, sperm_donor)
    else:
        self.reproduce_sexually(egg_donor, sperm_donor)
python
{ "resource": "" }
q263525
God.reproduce_asexually
validation
def reproduce_asexually(self, egg_word, sperm_word):
    """Produce two gametes, an egg and a sperm, from the input strings.
    Combine them to produce a genome a la sexual reproduction.
    """
    egg = self.generate_gamete(egg_word)
    sperm = self.generate_gamete(sperm_word)

    # Eliminate duplicates when merging the gametes.
    self.genome = list(set(egg + sperm))
    self.generation = 1
    self.divinity = god
python
{ "resource": "" }
q263526
God.reproduce_sexually
validation
def reproduce_sexually(self, egg_donor, sperm_donor):
    """Produce two gametes, an egg and a sperm, from input Gods. Combine
    them to produce a genome a la sexual reproduction. Assign divinity
    according to probabilities in p_divinity. The more divine the parents,
    the more divine their offspring.
    """
    egg = self.generate_gamete(random.choice(egg_donor.genome))
    sperm = self.generate_gamete(random.choice(sperm_donor.genome))

    self.genome = list(set(egg + sperm))  # Eliminate duplicates
    self.parents = [egg_donor.name, sperm_donor.name]
    self.generation = max(egg_donor.generation, sperm_donor.generation) + 1
    # Combined parental divinity indexes the offspring probability table.
    combined = egg_donor.divinity + sperm_donor.divinity
    self.divinity = int(npchoice(divinities, 1, p=p_divinity[combined])[0])
python
{ "resource": "" }
q263527
God.generate_gamete
validation
def generate_gamete(self, egg_or_sperm_word):
    """Extract 23 'chromosomes' aka words from 'gene pool' aka list of
    tokens by searching the list of tokens for words that are related to
    the given egg_or_sperm_word.
    """
    # 10% of gametes draw from the mutant (secondary) token pool.
    p_rate_of_mutation = [0.9, 0.1]
    if npchoice([0, 1], 1, p=p_rate_of_mutation)[0] == 1:
        pool = tokens.secondary_tokens
    else:
        pool = tokens.primary_tokens
    return get_matches(egg_or_sperm_word, pool, 23)
python
{ "resource": "" }
q263528
God.print_parents
validation
def print_parents(self):
    """Print parents' names and epithets."""
    if self.gender == female:
        title = 'Daughter'
    elif self.gender == male:
        title = 'Son'
    else:
        title = 'Child'

    p1, p2 = self.parents
    # NOTE(review): reproduce_sexually stores parent *names* (strings) in
    # self.parents, while .name/.epithet below expect God objects —
    # confirm which callers populate self.parents before relying on this.
    template = '%s of %s, the %s, and %s, the %s.'
    print(template % (title, p1.name, p1.epithet, p2.name, p2.epithet))
python
{ "resource": "" }
q263529
Stage.instance
validation
def instance(self, counter=None, pipeline_counter=None):
    """Returns all the information regarding a specific stage run

    See the `Go stage instance documentation`__ for examples.

    .. __: http://api.go.cd/current/#get-stage-instance

    Args:
      counter (int): The stage instance to fetch.
        If falsey returns the latest stage instance from :meth:`history`.
      pipeline_counter (int): The pipeline instance for which to fetch
        the stage. If falsey returns the latest pipeline instance.

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    pipeline_counter = pipeline_counter or self.pipeline_counter
    pipeline_instance = None

    # Without a pipeline counter, resolve the latest pipeline instance.
    if not pipeline_counter:
        pipeline_instance = self.server.pipeline(self.pipeline_name).instance()
        self.pipeline_counter = int(pipeline_instance['counter'])

    # Without a stage counter, find this stage in the pipeline instance
    # and recurse with both counters resolved.
    if not counter:
        if pipeline_instance is None:
            pipeline_instance = (
                self.server
                .pipeline(self.pipeline_name)
                .instance(pipeline_counter)
            )
        for stage in pipeline_instance['stages']:
            if stage['name'] == self.stage_name:
                return self.instance(
                    counter=int(stage['counter']),
                    pipeline_counter=pipeline_counter
                )

    return self._get('/instance/{pipeline_counter:d}/{counter:d}'
                     .format(pipeline_counter=pipeline_counter,
                             counter=counter))
python
{ "resource": "" }
q263530
Server.request
validation
def request(self, path, data=None, headers=None, method=None):
    """Performs a HTTP request to the Go server

    Args:
      path (str): The full path on the Go server to request.
        This includes any query string attributes.
      data (str, dict, bool, optional): If any data is present this
        request will become a POST request.
      headers (dict, optional): Headers to set for this particular
        request

    Raises:
      HTTPError: when the HTTP request fails.

    Returns:
      file like object: The response from a
        :func:`urllib2.urlopen` call
    """
    # urlopen requires bytes for the request body.
    if isinstance(data, str):
        data = data.encode('utf-8')

    response = urlopen(
        self._request(path, data=data, headers=headers, method=method))
    self._set_session_cookie(response)
    return response
python
{ "resource": "" }
q263531
Server.add_logged_in_session
validation
def add_logged_in_session(self, response=None):
    """Make the request appear to be coming from a browser

    This is to interact with older parts of Go that doesn't have a
    proper API call to be made. What will be done:

    1. If no response passed in a call to `go/api/pipelines.xml` is
       made to get a valid session
    2. `JSESSIONID` will be populated from this request
    3. A request to `go/pipelines` will be so the
       `authenticity_token` (CSRF) can be extracted. It will then
       silently be injected into `post_args` on any POST calls that
       doesn't start with `go/api` from this point.

    Args:
      response: a :class:`Response` object from a previously successful
        API call. So we won't have to query `go/api/pipelines.xml`
        unnecessarily.

    Raises:
      HTTPError: when the HTTP request fails.
      AuthenticationFailed: when failing to get the `session_id`
        or the `authenticity_token`.
    """
    if not response:
        response = self.get('go/api/pipelines.xml')

    self._set_session_cookie(response)
    if not self._session_id:
        raise AuthenticationFailed('No session id extracted from request.')

    # Scrape the CSRF token out of the pipelines page markup.
    response = self.get('go/pipelines')
    match = re.search(
        r'name="authenticity_token".+?value="([^"]+)',
        response.read().decode('utf-8')
    )
    if match:
        self._authenticity_token = match.group(1)
    else:
        raise AuthenticationFailed('Authenticity token not found on page')
python
{ "resource": "" }
q263532
flatten
validation
def flatten(d):
    """Return a dict as a list of lists.

    >>> flatten({"a": "b"})
    [['a', 'b']]
    >>> flatten({"a": [1, 2, 3]})
    [['a', [1, 2, 3]]]
    >>> flatten({"a": {"b": "c"}})
    [['a', 'b', 'c']]
    >>> flatten({"a": {"b": {"c": "e"}}})
    [['a', 'b', 'c', 'e']]
    >>> flatten({"a": {"b": "c", "d": "e"}})
    [['a', 'b', 'c'], ['a', 'd', 'e']]
    >>> flatten({"a": {"b": "c", "d": "e"}, "b": {"c": "d"}})
    [['a', 'b', 'c'], ['a', 'd', 'e'], ['b', 'c', 'd']]
    """
    # A non-dict leaf becomes a single one-element row.
    if not isinstance(d, dict):
        return [[d]]

    rows = []
    for key, value in d.items():
        # Each key, value pair contributes one row per flattened tail.
        for tail in flatten(value):
            rows.append([key] + tail)
    return rows
python
{ "resource": "" }
q263533
Pipeline.instance
validation
def instance(self, counter=None):
    """Returns all the information regarding a specific pipeline run

    See the `Go pipeline instance documentation`__ for examples.

    .. __: http://api.go.cd/current/#get-pipeline-instance

    Args:
      counter (int): The pipeline instance to fetch.
        If falsey returns the latest pipeline instance from :meth:`history`.

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    if counter:
        return self._get('/instance/{counter:d}'.format(counter=counter))

    # No counter given: fall back to the newest entry in the history.
    history = self.history()
    if not history:
        return history
    return Response._from_json(history['pipelines'][0])
python
{ "resource": "" }
q263534
Pipeline.schedule
validation
def schedule(self, variables=None, secure_variables=None, materials=None,
             return_new_instance=False, backoff_time=1.0):
    """Schedule a pipeline run

    Aliased as :meth:`run`, :meth:`schedule`, and :meth:`trigger`.

    Args:
      variables (dict, optional): Variables to set/override
      secure_variables (dict, optional): Secure variables to set/override
      materials (dict, optional): Material revisions to be used for
        this pipeline run. The exact format for this is a bit iffy,
        have a look at the official
        `Go pipeline scheduling documentation`__ or inspect a call
        from triggering manually in the UI.
      return_new_instance (bool): Returns a :meth:`history` compatible
        response for the newly scheduled instance. This is primarily so
        users easily can get the new instance number. **Note:** This is
        done in a very naive way, it just checks that the instance number
        is higher than before the pipeline was triggered.
      backoff_time (float): How long between each check for
        :arg:`return_new_instance`.

    .. __: http://api.go.cd/current/#scheduling-pipelines

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    scheduling_args = dict(
        variables=variables,
        secure_variables=secure_variables,
        material_fingerprint=materials,
        headers={"Confirm": True},
    )
    # Drop unset arguments so they are not sent to the API at all.
    scheduling_args = {k: v for k, v in scheduling_args.items()
                       if v is not None}

    # TODO: Replace this with whatever is the official way as soon as
    # gocd#990 is fixed.
    # https://github.com/gocd/gocd/issues/990
    if not return_new_instance:
        return self._post('/schedule', ok_status=202, **scheduling_args)

    # Remember the newest counter so we can detect the new instance.
    pipelines = self.history()['pipelines']
    last_run = pipelines[0]['counter'] if pipelines else None

    response = self._post('/schedule', ok_status=202, **scheduling_args)
    if not response:
        return response

    max_tries = 10
    while max_tries > 0:
        current = self.instance()
        if not last_run and current:
            return current
        elif last_run and current['counter'] > last_run:
            return current
        else:
            time.sleep(backoff_time)
            max_tries -= 1

    # I can't come up with a scenario in testing where this would happen,
    # but it seems better than returning None.
    return response
python
{ "resource": "" }
q263535
Pipeline.console_output
validation
def console_output(self, instance=None):
    """Yields the output and metadata from all jobs in the pipeline

    Args:
      instance: The result of a :meth:`instance` call, if not supplied
        the latest of the pipeline will be used.

    Yields:
      tuple: (metadata (dict), output (str)).

      metadata contains:
        - pipeline
        - pipeline_counter
        - stage
        - stage_counter
        - job
        - job_result
    """
    if instance is None:
        instance = self.instance()

    for stage in instance['stages']:
        for job in stage['jobs']:
            # Only finished jobs have a console log worth yielding.
            if job['result'] not in self.final_results:
                continue

            artifact = self.artifact(
                instance['counter'],
                stage['name'],
                job['name'],
                stage['counter']
            )
            output = artifact.get('cruise-output/console.log')

            yield (
                {
                    'pipeline': self.name,
                    'pipeline_counter': instance['counter'],
                    'stage': stage['name'],
                    'stage_counter': stage['counter'],
                    'job': job['name'],
                    'job_result': job['result'],
                },
                output.body
            )
python
{ "resource": "" }
q263536
TemplateConfig.edit
validation
def edit(self, config, etag):
    """Update template config for specified template name.

    .. __: https://api.go.cd/current/#edit-template-config

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    headers = self._default_headers()
    # Only send If-Match when the caller supplied an etag.
    if etag is not None:
        headers["If-Match"] = etag

    return self._request(self.name,
                         ok_status=None,
                         data=self._json_encode(config),
                         headers=headers,
                         method="PUT")
python
{ "resource": "" }
q263537
TemplateConfig.create
validation
def create(self, config):
    """Create template config for specified template name.

    .. __: https://api.go.cd/current/#create-template-config

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    # Guard against posting a config that belongs to another template.
    assert config["name"] == self.name, "Given config is not for this template"

    return self._request("",
                         ok_status=None,
                         data=self._json_encode(config),
                         headers=self._default_headers())
python
{ "resource": "" }
q263538
TemplateConfig.delete
validation
def delete(self):
    """Delete template config for specified template name.

    .. __: https://api.go.cd/current/#delete-a-template

    Returns:
      Response: :class:`gocd.api.response.Response` object
    """
    return self._request(self.name,
                         ok_status=None,
                         data=None,
                         headers=self._default_headers(),
                         method="DELETE")
python
{ "resource": "" }
q263539
PipelineGroups.pipelines
validation
def pipelines(self):
    """Returns a set of all pipelines from the last response

    Returns:
      set: Response success: all the pipelines available in the response
           Response failure: an empty set
    """
    if not self.response:
        return set()
    if self._pipelines is None:
        # Cache the names so repeated calls don't re-walk the payload.
        self._pipelines = {
            pipeline['name']
            for group in self.response.payload
            for pipeline in group['pipelines']
        }
    return self._pipelines
python
{ "resource": "" }
q263540
Artifact.get_directory
validation
def get_directory(self, path_to_directory, timeout=30, backoff=0.4, max_wait=4):
    """Gets an artifact directory by its path.

    See the `Go artifact directory documentation`__ for example responses.

    .. __: http://api.go.cd/current/#get-artifact-directory

    .. note::
      Go zips the directory in the background and answers 202 Accepted
      until the zip is ready. This method therefore retries for up to
      ``timeout`` seconds, using an exponential backoff (starting at
      ``backoff`` seconds and capped at ``max_wait`` between attempts),
      and then returns the last response whether it is 200 or still 202.
      To handle the retrying yourself use :meth:`get` with a '.zip'
      suffix on the directory.

    Args:
      path_to_directory (str): The path to the directory to get.
        It can be nested eg ``target/dist.zip``
      timeout (int): Total seconds to keep retrying on 202 responses
      backoff (float): Initial delay between retries
      max_wait (int): The max time between retries

    Returns:
      Response: :class:`gocd.api.response.Response` object
        A successful response is a zip-file.
    """
    response = None
    started_at = None
    time_elapsed = 0
    attempt = 0

    while time_elapsed < timeout:
        response = self._get('{0}.zip'.format(path_to_directory))
        if response:
            break

        if started_at is None:
            started_at = time.time()
        # Exponential backoff capped at max_wait between retries.
        time.sleep(min(backoff * (2 ** attempt), max_wait))
        attempt += 1
        time_elapsed = time.time() - started_at

    return response
python
{ "resource": "" }
q263541
config_loader
validation
def config_loader(app, **kwargs_config):
    """Configuration loader.

    Adds support for loading templates from the Flask application's
    instance folder (``<instance_folder>/templates``).
    """
    # This is the only place customize the Flask application right after
    # it has been created, but before all extensions etc are loaded.
    local_templates_path = os.path.join(app.instance_path, 'templates')
    if os.path.exists(local_templates_path):
        # Let's customize the template loader to look into packages
        # and application templates folders.
        app.jinja_loader = ChoiceLoader([
            FileSystemLoader(local_templates_path),
            app.jinja_loader,
        ])

    app.jinja_options = dict(
        app.jinja_options,
        cache_size=1000,
        bytecode_cache=BytecodeCache(app)
    )

    invenio_config_loader(app, **kwargs_config)
python
{ "resource": "" }
q263542
app_class
validation
def app_class():
    """Create Flask application class.

    Invenio-Files-REST needs to patch the Werkzeug form parsing in order to
    support streaming large file uploads. This is done by subclassing the
    Flask application class.
    """
    try:
        pkg_resources.get_distribution('invenio-files-rest')
        from invenio_files_rest.app import Flask as FlaskBase
    except pkg_resources.DistributionNotFound:
        from flask import Flask as FlaskBase

    # Add Host header validation via APP_ALLOWED_HOSTS configuration variable.
    class Request(TrustedHostsMixin, FlaskBase.request_class):
        pass

    class Flask(FlaskBase):
        request_class = Request

    return Flask
python
{ "resource": "" }
q263543
InvenioApp.init_app
validation
def init_app(self, app, **kwargs):
    """Initialize application object.

    :param app: An instance of :class:`~flask.Flask`.
    """
    # Init the configuration
    self.init_config(app)

    # Enable Rate limiter
    self.limiter = Limiter(app, key_func=get_ipaddr)

    # Enable secure HTTP headers
    if app.config['APP_ENABLE_SECURE_HEADERS']:
        self.talisman = Talisman(
            app, **app.config.get('APP_DEFAULT_SECURE_HEADERS', {})
        )

    # Enable PING view
    if app.config['APP_HEALTH_BLUEPRINT_ENABLED']:
        blueprint = Blueprint('invenio_app_ping', __name__)

        @blueprint.route('/ping')
        def ping():
            """Load balancer ping view."""
            return 'OK'

        ping.talisman_view_options = {'force_https': False}
        app.register_blueprint(blueprint)

    requestid_header = app.config.get('APP_REQUESTID_HEADER')
    if requestid_header:
        @app.before_request
        def set_request_id():
            """Extracts a request id from an HTTP header."""
            request_id = request.headers.get(requestid_header)
            if request_id:
                # Capped at 200 to protect against malicious clients
                # sending very large headers.
                g.request_id = request_id[:200]

    # If installed register the Flask-DebugToolbar extension
    try:
        from flask_debugtoolbar import DebugToolbarExtension
        app.extensions['flask-debugtoolbar'] = DebugToolbarExtension(app)
    except ImportError:
        app.logger.debug('Flask-DebugToolbar extension not installed.')

    # Register self
    app.extensions['invenio-app'] = self
python
{ "resource": "" }
q263544
InvenioApp.init_config
validation
def init_config(self, app):
    """Initialize configuration.

    Copies all ``APP_*`` and ``RATELIMIT_*`` defaults from the ``config``
    module into ``app.config`` (without overriding values already set), and
    in debug mode relaxes the Content-Security-Policy so inline scripts
    (e.g. the debug toolbar) work.

    :param app: An instance of :class:`~flask.Flask`.
    """
    config_apps = ['APP_', 'RATELIMIT_']
    # CSP source allowing inline scripts/styles, needed for debug tooling.
    flask_talisman_debug_mode = ["'unsafe-inline'"]
    for k in dir(config):
        if any([k.startswith(prefix) for prefix in config_apps]):
            app.config.setdefault(k, getattr(config, k))

    if app.config['DEBUG']:
        app.config.setdefault('APP_DEFAULT_SECURE_HEADERS', {})
        headers = app.config['APP_DEFAULT_SECURE_HEADERS']
        # ensure `content_security_policy` is not set to {}
        # (an explicit {} means "disable CSP" and must be left alone)
        if headers.get('content_security_policy') != {}:
            headers.setdefault('content_security_policy', {})
        csp = headers['content_security_policy']
        # ensure `default-src` is not set to []
        if csp.get('default-src') != []:
            csp.setdefault('default-src', [])
        # add default `content_security_policy` value when debug
        csp['default-src'] += flask_talisman_debug_mode
python
{ "resource": "" }
q263545
camel2word
validation
def camel2word(string):
    """Convert a CamelCase name to "Normal case".

    >>> camel2word('CamelCase')
    'Camel case'
    >>> camel2word('CaseWithSpec')
    'Case with spec'
    """
    head, tail = string[0], string[1:]
    # Each interior capital becomes " " + its lowercase form.
    spaced_tail = re.sub(r'([A-Z])', lambda m: ' ' + m.group(1).lower(), tail)
    return head + spaced_tail
python
{ "resource": "" }
q263546
SpecPlugin.format_seconds
validation
def format_seconds(self, n_seconds):
    """Format a time in seconds, highlighting the numbers via ``self.ok``."""
    highlight = self.ok
    if n_seconds < 60:
        return "%s seconds" % highlight("%.3f" % n_seconds)
    # Split into whole minutes plus the fractional remainder.
    n_minutes, remainder = divmod(n_seconds, 60)
    return "%s minutes %s seconds" % (
        highlight("%d" % n_minutes), highlight("%.3f" % remainder))
python
{ "resource": "" }
q263547
ppdict
validation
def ppdict(dict_to_print, br='\n', html=False, key_align='l', sort_keys=True,
           key_preffix='', key_suffix='', value_prefix='', value_suffix='',
           left_margin=3, indent=2):
    """Indent representation of a dict.

    Renders ``dict_to_print`` as a brace-delimited, one-entry-per-line
    string. String keys/values are wrapped in single quotes; keys are padded
    to the longest key width. Returns ``'{}'`` for an empty/falsy dict.

    :param br: line separator between rendered lines.
    :param html: wrap output in ``<code>`` and use ``&nbsp;`` for spaces.
    :param key_align: 'l' (default) or 'r' key alignment.
    :param sort_keys: render keys in sorted order.
    :param left_margin: spaces prepended to every output line.
    :param indent: spaces before each key.
    """
    if dict_to_print:
        if sort_keys:
            # Rebuild as an OrderedDict in sorted-key order.
            dic = dict_to_print.copy()
            keys = list(dic.keys())
            keys.sort()
            dict_to_print = OrderedDict()
            for k in keys:
                dict_to_print[k] = dic[k]
        tmp = ['{']
        # Quote string keys/values; other types pass through unchanged.
        ks = [type(x) == str and "'%s'" % x or x for x in dict_to_print.keys()]
        vs = [type(x) == str and "'%s'" % x or x for x in dict_to_print.values()]
        max_key_len = max([len(str(x)) for x in ks])
        for i in range(len(ks)):
            # Dict-literal trick: when key_align == 'r', the key True collides
            # with 1 and overwrites it, so [1] yields the rjust variant.
            k = {1: str(ks[i]).ljust(max_key_len),
                 key_align == 'r': str(ks[i]).rjust(max_key_len)}[1]
            v = vs[i]
            tmp.append(' ' * indent + '{}{}{}:{}{}{},'.format(key_preffix, k, key_suffix,
                                                              value_prefix, v, value_suffix))
        tmp[-1] = tmp[-1][:-1]  # remove the ',' in the last item
        tmp.append('}')
        if left_margin:
            tmp = [' ' * left_margin + x for x in tmp]
        if html:
            return '<code>{}</code>'.format(br.join(tmp).replace(' ', '&nbsp;'))
        else:
            return br.join(tmp)
    else:
        return '{}'
python
{ "resource": "" }
q263548
_assert_contains
validation
def _assert_contains(haystack, needle, invert, escape=False): """ Test for existence of ``needle`` regex within ``haystack``. Say ``escape`` to escape the ``needle`` if you aren't really using the regex feature & have special characters in it. """ myneedle = re.escape(needle) if escape else needle matched = re.search(myneedle, haystack, re.M) if (invert and matched) or (not invert and not matched): raise AssertionError("'%s' %sfound in '%s'" % ( needle, "" if invert else "not ", haystack ))
python
{ "resource": "" }
q263549
flag_inner_classes
validation
def flag_inner_classes(obj):
    """
    Mutates any attributes on ``obj`` which are classes, with link to ``obj``.

    Adds a convenience accessor which instantiates ``obj`` and then calls its
    ``setup`` method.

    Recurses on those objects as well.
    """
    for _, inner_cls in class_members(obj):
        # Link each nested class back to its enclosing class, and install the
        # lazy parent-instance accessor.
        inner_cls._parent = obj
        inner_cls._parent_inst = None
        inner_cls.__getattr__ = my_getattr
        flag_inner_classes(inner_cls)
python
{ "resource": "" }
q263550
pvpc_calc_tcu_cp_feu_d
validation
def pvpc_calc_tcu_cp_feu_d(df, verbose=True, convert_kwh=True):
    """Compute TCU, CP and daily FEU (PD_) columns for each tariff.

    Skips all work when the TCU columns already exist.

    :param df: hourly PVPC dataframe with the COLS_PVPC component columns
        per tariff (assumed €/MWh when ``convert_kwh`` — TODO confirm).
    :param verbose: print each tariff group head while processing.
    :param convert_kwh: convert component columns from €/MWh to €/kWh first.
    :return: the dataframe with the derived columns added.
    """
    if 'TCU' + TARIFAS[0] not in df.columns:
        # Convert from €/MWh to €/kWh:
        if convert_kwh:
            cols_mwh = [c + t for c in COLS_PVPC for t in TARIFAS if c != 'COF']
            df[cols_mwh] = df[cols_mwh].applymap(lambda x: x / 1000.)

        # Group columns by tariff and derive TCU, CP and daily price:
        gb_t = df.groupby(lambda x: TARIFAS[np.argmax([t in x for t in TARIFAS])], axis=1)
        for k, g in gb_t:
            if verbose:
                print('TARIFA {}'.format(k))
                print(g.head())

            # TCU computation: total minus the TEU access-toll component.
            df['TCU{}'.format(k)] = g[k] - g['TEU{}'.format(k)]

            # CP computation: sum of the remaining cost components.
            # cols_cp = [c + k for c in ['FOS', 'FOM', 'INT', 'PCAP', 'PMH', 'SAH']]
            cols_cp = [c + k for c in COLS_PVPC if c not in ['', 'COF', 'TEU']]
            df['CP{}'.format(k)] = g[cols_cp].sum(axis=1)

            # PERD computation --> not possible this way, since the base
            # values already include losses (PERD):
            # dfs_pvpc[k]['PERD{}'.format(k)] = dfs_pvpc[k]['TCU{}'.format(k)] / dfs_pvpc[k]['CP{}'.format(k)]
            # dfs_pvpc[k]['PERD{}'.format(k)] = dfs_pvpc[k]['INT{}'.format(k)] / 1.92

            # Daily FEU: COF-weighted mean of TCU per TEU group, plus TEU.
            cols_k = ['TEU' + k, 'TCU' + k, 'COF' + k]
            g = df[cols_k].groupby('TEU' + k)
            pr = g.apply(lambda x: x['TCU' + k].dot(x['COF' + k]) / x['COF' + k].sum())
            pr.name = 'PD_' + k

            df = df.join(pr, on='TEU' + k, rsuffix='_r')
            df['PD_' + k] += df['TEU' + k]
    return df
python
{ "resource": "" }
q263551
SplunkLogger._compress
validation
def _compress(self, input_str):
    """
    Compress the log message in order to send less bytes to the wire.
    """
    # Gzip into an in-memory buffer and hand back the raw bytes.
    buffer_ = cStringIO.StringIO()
    gz_file = gzip.GzipFile(fileobj=buffer_, mode='wb')
    gz_file.write(input_str)
    gz_file.close()
    return buffer_.getvalue()
python
{ "resource": "" }
q263552
SpecSelector.registerGoodClass
validation
def registerGoodClass(self, class_):
    """
    Internal bookkeeping to handle nested classes
    """
    # Record the class itself, then recurse into any valid inner classes.
    self._valid_classes.append(class_)
    for _, inner in class_members(class_):
        if self.isValidClass(inner):
            self.registerGoodClass(inner)
python
{ "resource": "" }
q263553
SpecSelector.isValidClass
validation
def isValidClass(self, class_):
    """
    Needs to be its own method so it can be called from both wantClass and
    registerGoodClass.
    """
    module = inspect.getmodule(class_)
    # A class is eligible when its module was registered directly, or when
    # the module's file path was registered by name.
    eligible = module in self._valid_modules
    if not eligible and hasattr(module, '__file__'):
        eligible = module.__file__ in self._valid_named_modules
    return eligible and not private(class_)
python
{ "resource": "" }
q263554
PVPC.get_resample_data
validation
def get_resample_data(self):
    """Return the daily- and monthly-resampled PVPC dataframes (cached).

    Lazily computes ``_pvpc_mean_daily`` / ``_pvpc_mean_monthly`` from
    ``self.data['data']`` the first time they are requested.
    """
    if self.data is not None:
        hourly = self.data['data']
        if self._pvpc_mean_daily is None:
            self._pvpc_mean_daily = hourly.resample('D').mean()
        if self._pvpc_mean_monthly is None:
            self._pvpc_mean_monthly = hourly.resample('MS').mean()
    return self._pvpc_mean_daily, self._pvpc_mean_monthly
python
{ "resource": "" }
q263555
sanitize_path
validation
def sanitize_path(path):
    """Performs sanitation of the path after validating

    :param path: path to sanitize
    :return: path
    :raises:
        - InvalidPath if the path doesn't start with a slash
    """
    # The bare root path is already canonical.
    if path == '/':
        return path

    if not path.startswith('/'):
        raise InvalidPath('The path must start with a slash')

    # Collapse runs of slashes, then drop any trailing ones.
    collapsed = re.sub(r'/+', '/', path)
    return collapsed.rstrip('/')
python
{ "resource": "" }
q263556
_validate_schema
validation
def _validate_schema(obj): """Ensures the passed schema instance is compatible :param obj: object to validate :return: obj :raises: - IncompatibleSchema if the passed schema is of an incompatible type """ if obj is not None and not isinstance(obj, Schema): raise IncompatibleSchema('Schema must be of type {0}'.format(Schema)) return obj
python
{ "resource": "" }
q263557
route
validation
def route(bp, *args, **kwargs):
    """Journey route decorator

    Enables simple serialization, deserialization and validation of Flask
    routes with the help of Marshmallow.

    :param bp: :class:`flask.Blueprint` object
    :param args: args to pass along to `Blueprint.route`
    :param kwargs:
        - :strict_slashes: Enable / disable strict slashes (default False)
        - :validate: Enable / disable body/query validation (default True)
        - :_query: Unmarshal Query string into this schema
        - :_body: Unmarshal JSON body into this schema
        - :marshal_with: Serialize the output with this schema
    :raises:
        - ValidationError if the query parameters or JSON body fails validation
    """
    # Pop Journey-specific options before forwarding kwargs to bp.route().
    kwargs['strict_slashes'] = kwargs.pop('strict_slashes', False)
    body = _validate_schema(kwargs.pop('_body', None))
    query = _validate_schema(kwargs.pop('_query', None))
    output = _validate_schema(kwargs.pop('marshal_with', None))
    validate = kwargs.pop('validate', True)

    def decorator(f):
        @bp.route(*args, **kwargs)
        @wraps(f)
        def wrapper(*inner_args, **inner_kwargs):
            """If a schema (_body and/or _query) was supplied to the route
            decorator, the deserialized :class`marshmallow.Schema` object is
            injected into the decorated function's kwargs."""
            try:
                if query is not None:
                    query.strict = validate
                    url = furl(request.url)
                    inner_kwargs['_query'] = query.load(data=url.args)
                if body is not None:
                    body.strict = validate
                    json_data = request.get_json()
                    if json_data is None:
                        # Set json_data to empty dict if body is empty, so it gets picked up by the validator
                        json_data = {}
                    inner_kwargs['_body'] = body.load(data=json_data)
            except ValidationError as err:
                # 422 Unprocessable Entity with the per-field error messages.
                return jsonify(err.messages), 422
            if output:
                data = output.dump(f(*inner_args, **inner_kwargs))
                return jsonify(data[0])
            return f(*inner_args, **inner_kwargs)

        # NOTE(review): the decorator returns the original f (not wrapper);
        # only the route registration sees the wrapping — confirm intended.
        return f

    return decorator
python
{ "resource": "" }
q263558
BlueprintBundle.attach_bp
validation
def attach_bp(self, bp, description=''):
    """Attaches a flask.Blueprint to the bundle

    :param bp: :class:`flask.Blueprint` object
    :param description: Optional description string
    :raises:
        - InvalidBlueprint if the Blueprint is not of type `flask.Blueprint`
    """
    if isinstance(bp, Blueprint):
        self.blueprints.append((bp, description))
    else:
        raise InvalidBlueprint('Blueprints attached to the bundle must be of type {0}'.format(Blueprint))
python
{ "resource": "" }
q263559
DottedRule.move_dot
validation
def move_dot(self):
    """Returns the DottedRule that results from moving the dot."""
    # Build a fresh rule of the same class with the dot advanced by one.
    rule_cls = self.__class__
    return rule_cls(self.production, self.pos + 1, self.lookahead)
python
{ "resource": "" }
q263560
Grammar.first
validation
def first(self, symbols):
    """Computes the intermediate FIRST set using symbols."""
    # A sequence containing epsilon itself has FIRST = {epsilon}.
    if EPSILON in symbols:
        return set([EPSILON])

    result = set()
    nullable_so_far = True
    for symbol in symbols:
        symbol_first = self._first[symbol]
        result |= symbol_first - set([EPSILON])
        if EPSILON not in symbol_first:
            # This symbol cannot vanish, so later symbols don't contribute.
            nullable_so_far = False
            break
    if nullable_so_far:
        # Every symbol (or the empty sequence) can derive epsilon.
        result.add(EPSILON)
    return result
python
{ "resource": "" }
q263561
Grammar._compute_first
validation
def _compute_first(self):
    """Computes the FIRST set for every symbol in the grammar.

    Iterates to a fixed point: keeps folding production RHS FIRST sets into
    each nonterminal's FIRST until no set grows.

    Tenatively based on _compute_first in PLY.
    """
    # FIRST of a terminal is the terminal itself.
    for terminal in self.terminals:
        self._first[terminal].add(terminal)
    self._first[END_OF_INPUT].add(END_OF_INPUT)

    while True:
        changed = False

        for nonterminal, productions in self.nonterminals.items():
            for production in productions:
                new_first = self.first(production.rhs)
                if new_first - self._first[nonterminal]:
                    self._first[nonterminal] |= new_first
                    changed = True

        if not changed:
            # Fixed point reached: no FIRST set grew this pass.
            break
python
{ "resource": "" }
q263562
Grammar._compute_follow
validation
def _compute_follow(self):
    """Computes the FOLLOW set for every non-terminal in the grammar.

    Iterates to a fixed point, propagating FIRST of each production suffix
    (and, for nullable suffixes, FOLLOW of the producing nonterminal) into
    the FOLLOW of every nonterminal on a RHS.

    Tenatively based on _compute_follow in PLY.
    """
    # The start symbol is always followed by end-of-input.
    self._follow[self.start_symbol].add(END_OF_INPUT)

    while True:
        changed = False

        for nonterminal, productions in self.nonterminals.items():
            for production in productions:
                for i, symbol in enumerate(production.rhs):
                    if symbol not in self.nonterminals:
                        # Terminals have no FOLLOW set.
                        continue

                    # What can start right after `symbol` in this production.
                    first = self.first(production.rhs[i + 1:])
                    new_follow = first - set([EPSILON])
                    if EPSILON in first or i == (len(production.rhs) - 1):
                        # Nullable (or empty) suffix: the producer's FOLLOW
                        # also follows `symbol`.
                        new_follow |= self._follow[nonterminal]

                    if new_follow - self._follow[symbol]:
                        self._follow[symbol] |= new_follow
                        changed = True

        if not changed:
            break
python
{ "resource": "" }
q263563
Grammar.initial_closure
validation
def initial_closure(self):
    """Computes the initial closure using the START_foo production."""
    # Seed the closure with the start production, dot at position 0,
    # looking ahead to end-of-input.
    return self.closure([DottedRule(self.start, 0, END_OF_INPUT)])
python
{ "resource": "" }
q263564
Grammar.goto
validation
def goto(self, rules, symbol):
    """Computes the next closure for rules based on the symbol we got.

    Args:
      rules - an iterable of DottedRules
      symbol - a string denoting the symbol we've just seen

    Returns: frozenset of DottedRules
    """
    # Advance the dot in every rule that expects `symbol` next, then close.
    advanced = {rule.move_dot()
                for rule in rules
                if not rule.at_end and rule.rhs[rule.pos] == symbol}
    return self.closure(advanced)
python
{ "resource": "" }
q263565
Grammar.closure
validation
def closure(self, rules):
    """Fills out the entire closure based on some initial dotted rules.

    Standard worklist algorithm: for every rule whose dot sits before a
    nonterminal, add that nonterminal's productions (with appropriate
    lookaheads from FIRST of the rest of the rule) until nothing new appears.

    Args:
      rules - an iterable of DottedRules

    Returns: frozenset of DottedRules
    """
    closure = set()

    todo = set(rules)
    while todo:
        rule = todo.pop()
        closure.add(rule)
        # If the dot is at the end, there's no need to process it.
        if rule.at_end:
            continue

        symbol = rule.rhs[rule.pos]
        for production in self.nonterminals[symbol]:
            for first in self.first(rule.rest):
                if EPSILON in production.rhs:
                    # Move immediately to the end if the production
                    # goes to epsilon
                    new_rule = DottedRule(production, 1, first)
                else:
                    new_rule = DottedRule(production, 0, first)

                if new_rule not in closure:
                    todo.add(new_rule)

    return frozenset(closure)
python
{ "resource": "" }
q263566
Journey.init_app
validation
def init_app(self, app):
    """Initializes Journey extension

    Registers every attached bundle's blueprints on ``app`` and records the
    processed bundle metadata in ``self._registered_bundles``.

    :param app: App passed from constructor or directly to init_app
    :raises:
        - NoBundlesAttached if no bundles has been attached attached
    """
    if len(self._attached_bundles) == 0:
        raise NoBundlesAttached("At least one bundle must be attached before initializing Journey")

    for bundle in self._attached_bundles:
        processed_bundle = {
            'path': bundle.path,
            'description': bundle.description,
            'blueprints': []
        }

        for (bp, description) in bundle.blueprints:
            # Register the BP
            blueprint = self._register_blueprint(app, bp, bundle.path,
                                                 self.get_bp_path(bp), description)

            # Finally, attach the blueprints to its parent
            processed_bundle['blueprints'].append(blueprint)

        self._registered_bundles.append(processed_bundle)
python
{ "resource": "" }
q263567
Journey.routes_simple
validation
def routes_simple(self):
    """Returns simple info about registered blueprints

    :return: Tuple containing endpoint, path and allowed methods for each route
    """
    # Flatten bundles -> blueprints -> routes into (endpoint, url, methods).
    return [
        (route['endpoint'],
         bundle['path'] + blueprint['path'] + route['path'],
         route['methods'])
        for bundle in self._registered_bundles
        for blueprint in bundle['blueprints']
        for route in blueprint['routes']
    ]
python
{ "resource": "" }
q263568
Journey._bundle_exists
validation
def _bundle_exists(self, path): """Checks if a bundle exists at the provided path :param path: Bundle path :return: bool """ for attached_bundle in self._attached_bundles: if path == attached_bundle.path: return True return False
python
{ "resource": "" }
q263569
Journey.attach_bundle
validation
def attach_bundle(self, bundle):
    """Attaches a bundle object

    :param bundle: :class:`flask_journey.BlueprintBundle` object
    :raises:
        - IncompatibleBundle if the bundle is not of type `BlueprintBundle`
        - ConflictingPath if a bundle already exists at bundle.path
        - MissingBlueprints if the bundle doesn't contain any blueprints
    """
    # Guard clauses, checked in the same order as documented above.
    if not isinstance(bundle, BlueprintBundle):
        raise IncompatibleBundle('BlueprintBundle object passed to attach_bundle must be of type {0}'
                                 .format(BlueprintBundle))
    if len(bundle.blueprints) == 0:
        raise MissingBlueprints("Bundles must contain at least one flask.Blueprint")
    if self._bundle_exists(bundle.path):
        raise ConflictingPath("Duplicate bundle path {0}".format(bundle.path))
    if self._journey_path == bundle.path == '/':
        raise ConflictingPath("Bundle path and Journey path cannot both be {0}".format(bundle.path))

    self._attached_bundles.append(bundle)
python
{ "resource": "" }
q263570
Journey._register_blueprint
validation
def _register_blueprint(self, app, bp, bundle_path, child_path, description):
    """Register and return info about the registered blueprint

    :param bp: :class:`flask.Blueprint` object
    :param bundle_path: the URL prefix of the bundle
    :param child_path: blueprint relative to the bundle path
    :return: Dict with info about the blueprint
    """
    # Full mount point = journey prefix + bundle prefix + blueprint path.
    base_path = sanitize_path(self._journey_path + bundle_path + child_path)
    app.register_blueprint(bp, url_prefix=base_path)
    info = {
        'name': bp.name,
        'path': child_path,
        'import_name': bp.import_name,
        'description': description,
        'routes': self.get_blueprint_routes(app, base_path)
    }
    return info
python
{ "resource": "" }
q263571
Journey.get_blueprint_routes
validation
def get_blueprint_routes(app, base_path):
    """Returns detailed information about registered blueprint routes matching the `BlueprintBundle` path

    :param app: App instance to obtain rules from
    :param base_path: Base path to return detailed route info for
    :return: List of route detail dicts
    """
    matched = []
    for rule in app.url_map.iter_rules():
        if not rule.rule.startswith(base_path):
            continue
        # Store the path relative to the bundle's base path.
        matched.append({
            'path': rule.rule[len(base_path):],
            'endpoint': rule.endpoint,
            'methods': list(rule.methods)
        })
    return matched
python
{ "resource": "" }
q263572
ParserBase.compute_precedence
validation
def compute_precedence(terminals, productions, precedence_levels):
    """Computes the precedence of terminal and production.

    The precedence of a terminal is it's level in the PRECEDENCE tuple. For
    a production, the precedence is the right-most terminal (if it exists).
    The default precedence is DEFAULT_PREC - (LEFT, 0).

    Returns:
      precedence - dict[terminal | production] = (assoc, level)
    """
    precedence = collections.OrderedDict()

    for terminal in terminals:
        precedence[terminal] = DEFAULT_PREC

    # Higher levels bind tighter: first tuple entry gets the largest number.
    level_precs = range(len(precedence_levels), 0, -1)
    for i, level in zip(level_precs, precedence_levels):
        # level is (assoc, symbol, symbol, ...).
        assoc = level[0]
        for symbol in level[1:]:
            precedence[symbol] = (assoc, i)

    for production, prec_symbol in productions:
        if prec_symbol is None:
            # Default: inherit precedence from the right-most terminal
            # on the RHS (or DEFAULT_PREC when there is none).
            prod_terminals = [symbol for symbol in production.rhs
                              if symbol in terminals] or [None]
            precedence[production] = precedence.get(prod_terminals[-1],
                                                    DEFAULT_PREC)
        else:
            # Explicit %prec-style override.
            precedence[production] = precedence.get(prec_symbol,
                                                    DEFAULT_PREC)

    return precedence
python
{ "resource": "" }
q263573
ParserBase.make_tables
validation
def make_tables(grammar, precedence):
    """Generates the ACTION and GOTO tables for the grammar.

    Builds the LR item-set closures, labels each closure with an integer
    state, and fills ACTION/GOTO, resolving shift/reduce conflicts with
    the supplied precedence table.

    Returns:
      action - dict[state][lookahead] = (action, ...)
      goto - dict[state][just_reduced] = new_state
    """
    ACTION = {}
    GOTO = {}

    labels = {}

    def get_label(closure):
        # Assign each distinct closure a stable integer state id.
        if closure not in labels:
            labels[closure] = len(labels)
        return labels[closure]

    def resolve_shift_reduce(lookahead, s_action, r_action):
        # Prefer reduce on higher precedence, or equal precedence with
        # left associativity; otherwise shift.
        s_assoc, s_level = precedence[lookahead]
        r_assoc, r_level = precedence[r_action[1]]

        if s_level < r_level:
            return r_action
        elif s_level == r_level and r_assoc == LEFT:
            return r_action
        else:
            return s_action

    initial, closures, goto = grammar.closures()
    for closure in closures:
        label = get_label(closure)

        for rule in closure:
            new_action, lookahead = None, rule.lookahead

            if not rule.at_end:
                # Dot before a terminal with a goto target => shift.
                symbol = rule.rhs[rule.pos]
                is_terminal = symbol in grammar.terminals
                has_goto = symbol in goto[closure]
                if is_terminal and has_goto:
                    next_state = get_label(goto[closure][symbol])
                    new_action, lookahead = ('shift', next_state), symbol
            elif rule.production == grammar.start and rule.at_end:
                # Completed start production => accept.
                new_action = ('accept',)
            elif rule.at_end:
                # Completed ordinary production => reduce.
                new_action = ('reduce', rule.production)

            if new_action is None:
                continue

            prev_action = ACTION.get((label, lookahead))
            if prev_action is None or prev_action == new_action:
                ACTION[label, lookahead] = new_action
            else:
                # Conflict: only shift/reduce pairs are resolvable.
                types = (prev_action[0], new_action[0])
                if types == ('shift', 'reduce'):
                    chosen = resolve_shift_reduce(lookahead,
                                                  prev_action,
                                                  new_action)
                elif types == ('reduce', 'shift'):
                    chosen = resolve_shift_reduce(lookahead,
                                                  new_action,
                                                  prev_action)
                else:
                    raise TableConflictError(prev_action, new_action)

                ACTION[label, lookahead] = chosen

        # GOTO entries exist only for nonterminals.
        for symbol in grammar.nonterminals:
            if symbol in goto[closure]:
                GOTO[label, symbol] = get_label(goto[closure][symbol])

    return get_label(initial), ACTION, GOTO
python
{ "resource": "" }
q263574
parse_definite_clause
validation
def parse_definite_clause(s):
    "Return the antecedents and the consequent of a definite clause."
    assert is_definite_clause(s)
    # A lone symbol is a fact: no antecedents.
    if is_symbol(s.op):
        return [], s
    antecedent, consequent = s.args
    return conjuncts(antecedent), consequent
python
{ "resource": "" }
q263575
tt_check_all
validation
def tt_check_all(kb, alpha, symbols, model):
    "Auxiliary routine to implement tt_entails."
    if symbols:
        # Branch on the first unassigned symbol; entailment needs both sides.
        P, rest = symbols[0], symbols[1:]
        return (tt_check_all(kb, alpha, rest, extend(model, P, True)) and
                tt_check_all(kb, alpha, rest, extend(model, P, False)))
    # Complete model: only models satisfying the KB constrain alpha.
    if not pl_true(kb, model):
        return True
    result = pl_true(alpha, model)
    assert result in (True, False)
    return result
python
{ "resource": "" }
q263576
prop_symbols
validation
def prop_symbols(x):
    "Return a list of all propositional symbols in x."
    if not isinstance(x, Expr):
        return []
    if is_prop_symbol(x.op):
        return [x]
    # Collect symbols from all sub-expressions, deduplicated.
    collected = set()
    for arg in x.args:
        collected.update(prop_symbols(arg))
    return list(collected)
python
{ "resource": "" }
q263577
pl_true
validation
def pl_true(exp, model={}):
    """Return True if the propositional logic expression is true in the model,
    and False if it is false. If the model does not specify the value for
    every proposition, this may return None to indicate 'not obvious';
    this may happen even when the expression is tautological.

    :param exp: an Expr built from proposition symbols and the operators
        ~ | & >> << <=> ^
    :param model: mapping from proposition symbols to truth values; it is
        only read, never mutated, so the mutable default is safe here.
    :raises ValueError: on an unknown operator.
    """
    op, args = exp.op, exp.args
    if exp == TRUE:
        return True
    elif exp == FALSE:
        return False
    elif is_prop_symbol(op):
        # Unassigned symbols yield None ("not obvious").
        return model.get(exp)
    elif op == '~':
        p = pl_true(args[0], model)
        if p is None:
            return None
        else:
            return not p
    elif op == '|':
        # True if any disjunct is True; None if undecided, else False.
        result = False
        for arg in args:
            p = pl_true(arg, model)
            if p is True:
                return True
            if p is None:
                result = None
        return result
    elif op == '&':
        # False if any conjunct is False; None if undecided, else True.
        result = True
        for arg in args:
            p = pl_true(arg, model)
            if p is False:
                return False
            if p is None:
                result = None
        return result
    p, q = args
    if op == '>>':
        # Implication rewritten as ~p | q.
        return pl_true(~p | q, model)
    elif op == '<<':
        return pl_true(p | ~q, model)
    pt = pl_true(p, model)
    if pt is None:
        return None
    qt = pl_true(q, model)
    if qt is None:
        return None
    if op == '<=>':
        return pt == qt
    elif op == '^':
        return pt != qt
    else:
        # Fixed: was Python-2-only `raise ValueError, msg` with the
        # expression fused onto the message without a separator.
        raise ValueError('illegal operator in logic expression: ' + str(exp))
python
{ "resource": "" }
q263578
dpll
validation
def dpll(clauses, symbols, model):
    "See if the clauses are true in a partial model."
    unknown_clauses = []  ## clauses with an unknown truth value
    for c in clauses:
        val = pl_true(c, model)
        if val == False:
            # A falsified clause dooms this whole branch.
            return False
        if val != True:
            unknown_clauses.append(c)
    if not unknown_clauses:
        # Every clause already satisfied: model found.
        return model
    # Heuristic 1: assign a pure symbol (appears with one polarity only).
    P, value = find_pure_symbol(symbols, unknown_clauses)
    if P:
        return dpll(clauses, removeall(P, symbols), extend(model, P, value))
    # Heuristic 2: propagate a unit clause (forced assignment).
    P, value = find_unit_clause(clauses, model)
    if P:
        return dpll(clauses, removeall(P, symbols), extend(model, P, value))
    # Otherwise branch on the first remaining symbol, trying True then False.
    P, symbols = symbols[0], symbols[1:]
    return (dpll(clauses, symbols, extend(model, P, True)) or
            dpll(clauses, symbols, extend(model, P, False)))
python
{ "resource": "" }
q263579
is_variable
validation
def is_variable(x):
    "A variable is an Expr with no args and a lowercase symbol as the op."
    if not isinstance(x, Expr):
        return False
    return not x.args and is_var_symbol(x.op)
python
{ "resource": "" }
q263580
PropKB.retract
validation
def retract(self, sentence):
    "Remove the sentence's clauses from the KB."
    # Convert to CNF and drop each resulting clause that is present.
    for clause in conjuncts(to_cnf(sentence)):
        if clause in self.clauses:
            self.clauses.remove(clause)
python
{ "resource": "" }
q263581
SettingDict.refresh
validation
def refresh(self):
    """
    Updates the cache with setting values from the database.
    """
    # `values_list('name', 'value')` doesn't work because `value` is not a
    # setting (base class) field, it's a setting value (subclass) field. So
    # we have to get real instances.
    pairs = [(setting.name, setting.value) for setting in self.queryset.all()]
    super(SettingDict, self).update(pairs)
    self.empty_cache = False
python
{ "resource": "" }
q263582
alphabeta_search
validation
def alphabeta_search(state, game, d=4, cutoff_test=None, eval_fn=None):
    """Search game to determine best action; use alpha-beta pruning.
    This version cuts off search and uses an evaluation function.

    :param state: current game state.
    :param game: game object providing to_move/actions/result/utility/terminal_test.
    :param d: depth cutoff used by the default cutoff_test.
    :param cutoff_test: optional (state, depth) -> bool override.
    :param eval_fn: optional heuristic evaluator; defaults to true utility.
    :return: the action maximizing the minimax value for the player to move.
    """

    player = game.to_move(state)

    def max_value(state, alpha, beta, depth):
        if cutoff_test(state, depth):
            return eval_fn(state)
        v = -infinity
        for a in game.actions(state):
            v = max(v, min_value(game.result(state, a),
                                 alpha, beta, depth+1))
            if v >= beta:
                # Beta cutoff: MIN will never allow this branch.
                return v
            alpha = max(alpha, v)
        return v

    def min_value(state, alpha, beta, depth):
        if cutoff_test(state, depth):
            return eval_fn(state)
        v = infinity
        for a in game.actions(state):
            v = min(v, max_value(game.result(state, a),
                                 alpha, beta, depth+1))
            if v <= alpha:
                # Alpha cutoff: MAX will never allow this branch.
                return v
            beta = min(beta, v)
        return v

    # Body of alphabeta_search starts here:
    # The default test cuts off at depth d or at a terminal state
    cutoff_test = (cutoff_test or
                   (lambda state,depth: depth>d or game.terminal_test(state)))
    eval_fn = eval_fn or (lambda state: game.utility(state, player))
    return argmax(game.actions(state),
                  lambda a: min_value(game.result(state, a),
                                      -infinity, infinity, 0))
python
{ "resource": "" }
q263583
TicTacToe.utility
validation
def utility(self, state, player):
    "Return the value to player; 1 for win, -1 for loss, 0 otherwise."
    # state.utility is stored from 'X''s point of view (see compute_utility),
    # so it is negated for the other player. Note if_ evaluates both branches
    # eagerly; that is harmless here since both are plain numbers.
    return if_(player == 'X', state.utility, -state.utility)
python
{ "resource": "" }
q263584
TicTacToe.compute_utility
validation
def compute_utility(self, board, move, player):
    "If X wins with this move, return 1; if O return -1; else return 0."
    # Check the four line directions through `move`; any completed line wins.
    for delta in ((0, 1), (1, 0), (1, -1), (1, 1)):
        if self.k_in_row(board, move, player, delta):
            return if_(player == 'X', +1, -1)
    return 0
python
{ "resource": "" }
q263585
TicTacToe.k_in_row
validation
def k_in_row(self, board, move, player, delta):
    """Return true if there is a line through move on board for player.

    :param board: dict mapping (x, y) squares to player marks.
    :param move: the (x, y) square the line must pass through.
    :param player: the mark ('X' or 'O') to count.
    :param delta: (delta_x, delta_y) direction of the line.

    Fixed: the original signature used Python-2-only tuple parameter
    unpacking ``(delta_x, delta_y)`` (removed by PEP 3113); the tuple is
    now unpacked inside the body, so positional callers are unaffected.
    """
    delta_x, delta_y = delta
    x, y = move
    n = 0  # n is number of moves in row
    # Walk forward along the line while squares belong to player...
    while board.get((x, y)) == player:
        n += 1
        x, y = x + delta_x, y + delta_y
    # ...then backward from the move.
    x, y = move
    while board.get((x, y)) == player:
        n += 1
        x, y = x - delta_x, y - delta_y
    n -= 1  # Because we counted move itself twice
    return n >= self.k
python
{ "resource": "" }
q263586
update
validation
def update(x, **entries):
    """Update a dict, or an object with slots, according to `entries` dict.

    >>> update({'a': 1}, a=10, b=20)
    {'a': 10, 'b': 20}
    >>> update(Struct(a=1), a=10, b=20)
    Struct(a=10, b=20)
    """
    # Dicts are updated directly; other objects through their __dict__.
    target = x if isinstance(x, dict) else x.__dict__
    target.update(entries)
    return x
python
{ "resource": "" }
q263587
weighted_sample_with_replacement
validation
def weighted_sample_with_replacement(seq, weights, n):
    """Pick n samples from seq at random, with replacement, with the
    probability of each element in proportion to its corresponding
    weight."""
    # Build one sampler closure, then draw from it n times.
    draw = weighted_sampler(seq, weights)
    return [draw() for _ in range(n)]
python
{ "resource": "" }
q263588
weighted_sampler
validation
def weighted_sampler(seq, weights):
    "Return a random-sample function that picks from seq weighted by weights."
    # Cumulative weight totals; a uniform draw over [0, total] then maps to
    # an index via binary search.
    running = 0
    totals = []
    for w in weights:
        running += w
        totals.append(running)

    def sample():
        point = random.uniform(0, totals[-1])
        return seq[bisect.bisect(totals, point)]

    return sample
python
{ "resource": "" }
q263589
printf
validation
def printf(format, *args):
    """Format args with the first argument as format string, and write.
    Return the last arg, or format itself if there are no args."""
    rendered = str(format) % args
    sys.stdout.write(rendered)
    # if_ takes thunks so args[-1] is only touched when args is non-empty.
    return if_(args, lambda: args[-1], lambda: format)
python
{ "resource": "" }
q263590
name
validation
def name(object):
    "Try to find some reasonable name for the object."
    # Try, in order: a `name` attribute, `__name__`, the class's `__name__`,
    # and finally the plain string form.
    for candidate in (getattr(object, 'name', 0),
                      getattr(object, '__name__', 0),
                      getattr(getattr(object, '__class__', 0), '__name__', 0)):
        if candidate:
            return candidate
    return str(object)
python
{ "resource": "" }
q263591
AIMAFile
validation
def AIMAFile(components, mode='r'):
    """Open a file based at the AIMA root directory.

    :param components: path components relative to the AIMA root.
    :param mode: file mode passed straight to ``open``.

    Fixed: replaced the ``apply(...)`` builtin (removed in Python 3) with
    argument unpacking via ``os.path.join(directory, *components)``.
    """
    import utils
    directory = os.path.dirname(utils.__file__)
    return open(os.path.join(directory, *components), mode)
python
{ "resource": "" }
q263592
NaiveBayesLearner
validation
def NaiveBayesLearner(dataset):
    """Just count how many times each value of each input
    attribute occurs, conditional on the target value. Count the
    different target values too."""

    targetvals = dataset.values[dataset.target]
    # Prior distribution over target values.
    target_dist = CountingProbDist(targetvals)
    # One conditional distribution per (target value, input attribute).
    attr_dists = dict(((gv, attr), CountingProbDist(dataset.values[attr]))
                      for gv in targetvals
                      for attr in dataset.inputs)
    for example in dataset.examples:
        targetval = example[dataset.target]
        target_dist.add(targetval)
        for attr in dataset.inputs:
            attr_dists[targetval, attr].add(example[attr])

    def predict(example):
        """Predict the target value for example. Consider each possible value,
        and pick the most likely by looking at each attribute independently."""
        def class_probability(targetval):
            # P(target) * product of P(attr value | target), assuming
            # conditional independence of attributes (naive Bayes).
            return (target_dist[targetval]
                    * product(attr_dists[targetval, attr][example[attr]]
                              for attr in dataset.inputs))
        return argmax(targetvals, class_probability)

    return predict
python
{ "resource": "" }
q263593
information_content
validation
def information_content(values):
    "Number of bits to represent the probability distribution in values."
    # Shannon entropy over the normalized, zero-free distribution.
    probabilities = normalize(removeall(0, values))
    return -sum(p * log2(p) for p in probabilities)
python
{ "resource": "" }
q263594
NeuralNetLearner
validation
def NeuralNetLearner(dataset, sizes):
    """Layered feed-forward network.

    NOTE(review): this is an unfinished stub — ``weights`` is never filled
    in and ``predict`` simply calls ``unimplemented()``.

    :param dataset: training data (unused so far).
    :param sizes: per-layer neuron counts; activations are zero-initialized.
    """
    activations = map(lambda n: [0.0 for i in range(n)], sizes)
    weights = []

    def predict(example):
        unimplemented()

    return predict
python
{ "resource": "" }
q263595
EnsembleLearner
validation
def EnsembleLearner(learners):
    """Given a list of learning algorithms, have them vote."""
    def train(dataset):
        # Train every base learner on the same dataset.
        trained = [learn(dataset) for learn in learners]
        def predict(example):
            # Majority vote across the trained predictors.
            return mode(p(example) for p in trained)
        return predict
    return train
python
{ "resource": "" }
q263596
WeightedMajority
validation
def WeightedMajority(predictors, weights):
    "Return a predictor that takes a weighted vote."
    def predict(example):
        # Each predictor's vote is weighted by its corresponding weight.
        votes = (predictor(example) for predictor in predictors)
        return weighted_mode(votes, weights)
    return predict
python
{ "resource": "" }
q263597
replicated_dataset
validation
def replicated_dataset(dataset, weights, n=None):
    "Copy dataset, replicating each example in proportion to its weight."
    # Falsy n (None or 0) falls back to the original example count.
    n = n or len(dataset.examples)
    replica = copy.copy(dataset)
    replica.examples = weighted_replicate(dataset.examples, weights, n)
    return replica
python
{ "resource": "" }
q263598
leave1out
validation
def leave1out(learner, dataset):
    "Leave one out cross-validation over the dataset."
    # k-fold with k == number of examples == one held-out example per fold.
    n_examples = len(dataset.examples)
    return cross_validation(learner, dataset, k=n_examples)
python
{ "resource": "" }
q263599
SyntheticRestaurant
validation
def SyntheticRestaurant(n=20):
    """Generate a DataSet with n examples.

    Each example's input attributes are drawn uniformly at random and its
    target is labeled by the Fig[18,2] decision tree (assumed to be the
    restaurant tree from AIMA Fig. 18.2 — confirm against the Fig table).
    """
    def gen():
        # Note: relies on Python 2 map() returning a mutable list.
        example = map(random.choice, restaurant.values)
        example[restaurant.target] = Fig[18,2](example)
        return example
    return RestaurantDataSet([gen() for i in range(n)])
python
{ "resource": "" }