_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q274400
Argument.add_to_parser
test
def add_to_parser(self, parser):
    """Register this argument on an argparse.ArgumentParser.

    @param parser An argparse.ArgumentParser instance
    """
    parser.add_argument(*self._get_args(), **self._get_kwargs())
python
{ "resource": "" }
q274401
BaseSubParser.add_to_parser
test
def add_to_parser(self, subparsers):
    """
    Adds this SubParser to the subparsers created by the
    argparse.ArgumentParser.add_subparsers method.

    @param subparsers Normally a _SubParsersAction instance created by
                      argparse.ArgumentParser.add_subparsers method
    """
    parser = subparsers.add_parser(*self._get_args(), **self._get_kwargs())
    parser.set_defaults(**self.get_defaults())
    # Attach argument groups first, then plain arguments, then any nested
    # subparsers this parser declares.
    for name, group in self.base_argument_groups:
        group.add_to_parser(parser)
    for name, arg in self.base_arguments:
        arg.add_to_parser(parser)
    self.add_subparsers(parser)
python
{ "resource": "" }
q274402
SubParsersMixin.set_subparsers_args
test
def set_subparsers_args(self, *args, **kwargs):
    """Remember the args/kwargs that will later be forwarded to
    argparse.ArgumentParser.add_subparsers when this parser's subparsers
    group is created.
    """
    self.subparsers_args = args
    self.subparsers_kwargs = kwargs
python
{ "resource": "" }
q274403
SubParsersMixin.add_subparsers
test
def add_subparsers(self, parser):
    """
    Adds the subparsers to an argparse.ArgumentParser

    @param parser An argparse.ArgumentParser instance
    """
    # NOTE(review): the subparser group is attached to *self*, not to
    # `parser` -- confirm `sgroup.add_to_parser(self)` is intentional.
    sgroup = getattr(self, "subparser_group", None)
    if sgroup:
        sgroup.add_to_parser(self)
    if not self.subparsers:
        return
    # Fall back to the defaults when no explicit args/kwargs were stored
    # via set_subparsers_args().
    args = self.subparsers_args or self.get_default_subparsers_args()
    kwargs = self.subparsers_kwargs or self.get_default_subparsers_kwargs()
    subs = parser.add_subparsers(*args, **kwargs)
    for subparser in self.subparsers:
        subparser.add_to_parser(subs)
python
{ "resource": "" }
q274404
Revert._file_in_patch
test
def _file_in_patch(self, filename, patch):
    """
    Checks if a backup file of the filename in the current patch
    exists and raises a QuiltError if not.
    """
    # Backup dir for this patch under .pc; `+` presumably joins path
    # components on the project's Directory/File types -- TODO confirm.
    pc_dir = self.quilt_pc + patch.get_name()
    file = pc_dir + File(filename)
    if not file.exists():
        raise QuiltError("File %s is not in patch %s" % (filename,
                                                         patch.get_name()))
python
{ "resource": "" }
q274405
Revert._file_in_next_patches
test
def _file_in_next_patches(self, filename, patch):
    """
    Checks if a backup file of the filename exists in any applied patch
    after `patch`; raises QuiltError when a later patch modifies it.
    """
    if not self.db.is_patch(patch):
        # no patches applied
        return
    patches = self.db.patches_after(patch)
    # NOTE: the loop variable deliberately shadows the `patch` parameter;
    # the parameter is not used again after this point.
    for patch in patches:
        file = self.quilt_pc + File(os.path.join(patch.get_name(),
                                                 filename))
        if file.exists():
            raise QuiltError("File %s is modified by patch %s" % (
                             filename, patch.get_name()))
python
{ "resource": "" }
q274406
Revert.revert_file
test
def revert_file(self, filename, patch_name=None):
    """
    Revert not added changes of filename.

    If patch_name is None or empty the topmost patch will be used.
    Raises QuiltError when no patch is available, when the file is not in
    the patch, or when a later applied patch also modifies the file.
    """
    file = File(filename)
    if patch_name:
        patch = Patch(patch_name)
    else:
        patch = self.db.top_patch()
    if not patch:
        raise QuiltError("No patch available. Nothing to revert.")
    # Both checks raise on failure; order matters (current patch first).
    self._file_in_patch(filename, patch)
    self._file_in_next_patches(filename, patch)
    pc_dir = self.quilt_pc + patch.get_name()
    pc_file = pc_dir + file
    if not file.exists() and pc_file.is_empty():
        # new and empty file will be reverted
        pc_file.delete()
        self.file_reverted(file, patch)
        return
    with TmpDirectory(prefix="pquilt-") as tmpdir:
        # apply current patch in temporary directory to revert changes of
        # file that aren't committed in the patch
        tmp_file = self._apply_patch_temporary(tmpdir, pc_file, patch)
        if tmp_file and tmp_file.exists() and not tmp_file.is_empty():
            diff = Diff(file, tmp_file)
            if diff.equal(self.cwd):
                # working copy already matches the patched state
                self.file_unchanged(file, patch)
                return
            dir = file.get_directory()
            if not dir:
                dir = Directory(os.getcwd())
            else:
                dir.create()
            tmp_file.copy(dir)
            self.file_reverted(file, patch)
        else:
            self.file_unchanged(file, patch)
python
{ "resource": "" }
q274407
Import.import_patch
test
def import_patch(self, patch_name, new_name=None):
    """
    Import patch into the patch queue

    The patch is inserted as the next unapplied patch.  When `new_name`
    is given the patch file is copied under that (possibly nested) name
    inside the patches directory.
    """
    if new_name:
        dir_name = os.path.dirname(new_name)
        name = os.path.basename(new_name)
        dest_dir = self.quilt_patches + Directory(dir_name)
        dest_dir.create()
    else:
        name = os.path.basename(patch_name)
        dest_dir = self.quilt_patches
    patch_file = File(patch_name)
    dest_file = dest_dir + File(name)
    patch_file.copy(dest_file)
    self._import_patches([name])
python
{ "resource": "" }
q274408
Import.import_patches
test
def import_patches(self, patches):
    """
    Import several patches into the patch queue

    Each patch file is copied into the patches directory under its
    basename, then registered in one batch.
    """
    dest_dir = self.quilt_patches
    patch_names = []
    for patch in patches:
        patch_name = os.path.basename(patch)
        patch_file = File(patch)
        dest_file = dest_dir + File(patch_name)
        patch_file.copy(dest_file)
        patch_names.append(patch_name)
    self._import_patches(patch_names)
python
{ "resource": "" }
q274409
WayHandler.way
test
def way(self, w):
    """Process each way.

    Only ways whose id was pre-collected in `self.way_ids` are kept;
    nodes with invalid locations are skipped with a debug log entry.
    """
    if w.id not in self.way_ids:
        return
    way_points = []
    for n in w.nodes:
        try:
            way_points.append(Point(n.location.lon, n.location.lat))
        except o.InvalidLocationError:
            logging.debug('InvalidLocationError at way %s node %s',
                          w.id, n.ref)
    self.ways[w.id] = Way(w.id, way_points)
python
{ "resource": "" }
q274410
NodeHandler.missing_node_ids
test
def missing_node_ids(self):
    """Yield the ids of required nodes that were absent from the OSM data."""
    found = self.nodes.keys()
    return (node_id for node_id in self.node_ids if node_id not in found)
python
{ "resource": "" }
q274411
NodeHandler.node
test
def node(self, n):
    """Process each node.

    Stores a Node (id, lon, lat, tags) for every node whose id was
    pre-collected in `self.node_ids`; invalid locations are logged and
    skipped.
    """
    if n.id not in self.node_ids:
        return
    try:
        self.nodes[n.id] =\
            Node(n.id,
                 n.location.lon,
                 n.location.lat,
                 {t.k: t.v for t in n.tags})
    except o.InvalidLocationError:
        logging.debug('InvalidLocationError at node %s', n.id)
python
{ "resource": "" }
q274412
build_route
test
def build_route(relation):
    """Extract information of one route.

    Returns a Route for relations tagged type=route, or None otherwise.
    The leading '#' of a color tag is stripped for GTFS.
    """
    if relation.tags.get('type') != 'route':
        # Build route only for relations of type `route`
        return
    short_name = create_route_short_name(relation)
    color = relation.tags.get('color')
    return\
        Route(relation.id,
              short_name,
              create_route_long_name(relation, short_name),
              map_osm_route_type_to_gtfs(relation.tags.get('route')),
              'https://www.openstreetmap.org/relation/{}'.format(relation.id),
              color.strip('#') if color else '',
              get_agency_id(relation))
python
{ "resource": "" }
q274413
create_route_long_name
test
def create_route_long_name(relation, short_name):
    """Create a meaningful route name.

    Preference order: "<from>-to-<to>" when both endpoint tags exist,
    then the `name` tag, the `alt_name` tag, and finally a generated
    "OSM Route No. <id>".  A leading `short_name` prefix is stripped so
    the long name does not repeat it.
    """
    origin = relation.tags.get('from')
    destination = relation.tags.get('to')
    if origin and destination:
        return "{0}-to-{1}".format(origin, destination)

    long_name = (relation.tags.get('name')
                 or relation.tags.get('alt_name')
                 or "OSM Route No. {}".format(relation.id))
    # Drop route_short_name from this one if it contains it
    if short_name and long_name.startswith(short_name):
        return long_name[len(short_name):]
    return long_name
python
{ "resource": "" }
q274414
get_agency_id
test
def get_agency_id(relation):
    """Construct a stable numeric agency id from the relation's operator tag.

    Returns -1 when no operator is tagged; otherwise the SHA-256 digest of
    the operator name reduced to an 8-digit integer, so the same operator
    always maps to the same id.
    """
    operator = relation.tags.get('operator')
    if not operator:
        return -1
    digest = hashlib.sha256(operator.encode('utf-8')).hexdigest()
    return int(digest, 16) % 10**8
python
{ "resource": "" }
q274415
TransitDataExporter.process
test
def process(self): """Process the files and collect necessary data.""" # Extract relations self.rh = RelationHandler() self.rh.apply_file(self.filename) logging.debug('Found %d public transport relations.', len(self.rh.relations)) # Collect ids of interest node_ids, stop_node_ids, way_ids, reverse_map = self.__collect_ids() # Extract nodes self.nh = NodeHandler(node_ids) self.nh.apply_file(self.filename, locations=True) count = 0 for idx, missing_node_id in enumerate(self.nh.missing_node_ids): count += 1 logging.warning( '[no data] missing stop node. rel: https://osm.org/relation/%s node: https://osm.org/node/%s.', reverse_map[missing_node_id], missing_node_id) if count: logging.warning( '%d nodes that appear in relations are missing.', count) else: logging.debug('Lucky you! All relation member nodes were found.') # Extract ways self.wh = WayHandler(way_ids) self.wh.apply_file(self.filename, locations=True)
python
{ "resource": "" }
q274416
RelationHandler.relation
test
def relation(self, rel):
    """Process each relation.

    Keeps only visible, non-deleted, newest-version relations that are
    either transit routes or public_transport=stop_area areas; stores a
    flattened Relation plus its version.
    """
    rel_type = rel.tags.get('type')
    # Skip deleted/invisible/stale versions and irrelevant types.
    if any([rel.deleted,
            not rel.visible,
            not self.is_new_version(rel),
            rel_type not in ['route', 'public_transport']]):
        return
    route_tag = rel.tags.get('route')
    if rel_type == 'route' and route_tag not in self.transit_route_types:
        return
    public_transport = rel.tags.get('public_transport')
    if rel_type == 'public_transport' and public_transport != 'stop_area':
        return
    self.relations[rel.id] = \
        Relation(rel.id,
                 {'type': rel_type,
                  'public_transport': public_transport,
                  'route': route_tag,
                  'operator': rel.tags.get('operator'),
                  'color': rel.tags.get('color'),
                  'ref': rel.tags.get('ref'),
                  'from': rel.tags.get('from'),
                  'to': rel.tags.get('to'),
                  'name': rel.tags.get('name'),
                  'alt_name': rel.tags.get('alt_name'),
                  'url': rel.tags.get('url'),
                  'contact_website': rel.tags.get('contact:website')},
                 [(member.type, member.ref, member.role)
                  for member in rel.members])
    self.versions[rel.id] = rel.version
python
{ "resource": "" }
q274417
create_dummy_data
test
def create_dummy_data(routes, stops):
    """Create `calendar`, `stop_times`, `trips` and `frequencies`.

    :param routes: iterable of route records
    :param stops: iterable of stop records; stops without a route_id are
        ignored
    :return: DummyData namedtuple
    """
    # Build stops per route auxiliary map.
    # (The original also built an unused `stops_map` dict -- removed.)
    stops_per_route = defaultdict(list)
    for s in stops:
        if not s.route_id:
            continue
        stops_per_route[s.route_id].append(s)
    calendar = _create_dummy_calendar()
    trips = _create_dummy_trips(routes, stops_per_route, calendar)
    stop_times = _create_dummy_stoptimes(trips, stops_per_route)
    frequencies = _create_dummy_frequencies(trips)
    return DummyData(calendar, stop_times, trips, frequencies)
python
{ "resource": "" }
q274418
patch_agencies
test
def patch_agencies(agencies):
    """Fill the fields that are necessary for passing transitfeed checks.

    Yields a synthetic "Unknown agency" entry first, then every input
    agency with empty url/timezone fields replaced by defaults.
    """
    # First return the unknown agency entry
    yield Agency(-1, 'http://hiposfer.com', 'Unknown agency', 'Europe/Berlin')
    # Then return the rest.
    for agency_id, agency_url, agency_name, agency_timezone in agencies:
        url = agency_url or 'http://hiposfer.com'
        # Set everything to one time zone to get rid of transitfeeds error.
        timezone = agency_timezone or 'Europe/Berlin'
        yield Agency(agency_id, url, agency_name, timezone)
python
{ "resource": "" }
q274419
_create_dummy_trip_stoptimes
test
def _create_dummy_trip_stoptimes(trip_id, stops, first_service_time):
    """Create station stop times for each trip.

    Yields one GTFS stop_times dict per stop, spacing arrivals by the
    travel time from the previous stop plus a fixed 30s dwell.  Hours are
    rendered manually so times past midnight can exceed 23 (GTFS-style).
    NOTE(review): the day-wrap arithmetic (`diff = last_departure_hour`)
    looks suspicious -- it adds the whole previous hour value rather than
    24; confirm against expected GTFS output.
    """
    waiting = datetime.timedelta(seconds=30)
    arrival = first_service_time
    last_departure = first_service_time
    last_departure_hour = (arrival + waiting).hour
    last_stop = None
    departure_hour = None
    arrival_hour = None
    for stop_sequence, stop in enumerate(stops):
        # Avoid time travels
        arrival = last_departure + get_time_from_last_stop(last_stop, stop)
        departure = arrival + waiting
        # Cover the case when the arrival time falls into the next day
        if arrival.hour < last_departure_hour:
            diff = last_departure_hour
            arrival_hour = arrival.hour + diff
            departure_hour = departure.hour + diff
            last_departure_hour = departure.hour + diff
        else:
            arrival_hour = arrival.hour
            departure_hour = departure.hour
            last_departure_hour = departure.hour
        # Cover the case when adding waiting time to the arrival time
        # falls into the next day
        if departure.hour < arrival.hour:
            diff = last_departure_hour
            departure_hour = departure.hour + diff
            last_departure_hour = departure.hour + diff
        yield {'trip_id': trip_id,
               'arrival_time': '{:02}:{}'.format(
                   arrival_hour, arrival.strftime('%M:%S')),
               'departure_time': '{:02}:{}'.format(
                   departure_hour, departure.strftime('%M:%S')),
               'stop_id': stop.stop_id,
               'stop_sequence': stop_sequence}
        last_stop = stop
        last_departure = departure
python
{ "resource": "" }
q274420
GTFSWriter.write_zipped
test
def write_zipped(self, filepath):
    """Write the GTFS feed as a zip archive at `filepath`.

    Each in-memory buffer becomes `<name>.txt` inside the archive,
    UTF-8 encoded; registered files are copied in under their names.
    """
    with zipfile.ZipFile(filepath, mode='w',
                         compression=zipfile.ZIP_DEFLATED) as zfile:
        for name, buffer in self._buffers.items():
            payload = buffer.getvalue().encode('utf-8')
            zfile.writestr('{}.txt'.format(name), payload)
        for name, path in self._files.items():
            zfile.write(path, arcname=name)
python
{ "resource": "" }
q274421
GTFSWriter.write_unzipped
test
def write_unzipped(self, destination):
    """Write GTFS text files into the `destination` directory.

    Buffers become UTF-8 `<name>.txt` files; registered files are copied.
    """
    for name, buffer in self._buffers.items():
        target = os.path.join(destination, '{}.txt'.format(name))
        with open(target, 'w', encoding='utf-8') as out:
            out.write(buffer.getvalue())
    for name, path in self._files.items():
        shutil.copy(path, os.path.join(destination, name))
python
{ "resource": "" }
q274422
build_agency
test
def build_agency(relation, nodes):
    """Extract agency information from a route relation.

    Returns None when the relation has no `operator` tag; otherwise an
    Agency whose id is the operator name hashed to an 8-digit integer.
    """
    # TODO: find out the operator for routes without operator tag.
    # See: http://wiki.openstreetmap.org/wiki/Key:operator
    # (per that page, common operators in an area are often left untagged,
    # so many routes legitimately have no operator tag)
    operator = relation.tags.get('operator')
    if not operator:
        return None
    url = relation.tags.get('url') or relation.tags.get('contact_website')
    digest = hashlib.sha256(operator.encode('utf8')).hexdigest()
    agency_id = int(digest, 16) % 10**8
    return Agency(agency_id, url, operator, '')
python
{ "resource": "" }
q274423
extract_stops
test
def extract_stops(relation, nodes, visited_stop_ids, stop_to_station_map):
    """Extract stops in a relation.

    Yields a Stop for every member node with role 'stop' or 'halt' that is
    present in `nodes` and not yet seen; `visited_stop_ids` is updated in
    place so stops shared between relations are emitted only once.
    """
    # member_role: stop, halt, platform, terminal, etc.
    for member_type, member_id, member_role in relation.member_info:
        if member_id in visited_stop_ids:
            continue
        if member_id not in nodes or member_role not in ('stop', 'halt'):
            continue
        visited_stop_ids.add(member_id)
        node = nodes[member_id]
        # `member_id in nodes` is guaranteed here, so the original
        # `... if member_id in nodes else ''` fallbacks were dead code.
        yield Stop(
            member_id,
            node.tags.get('name') or
            "Unnamed {} stop.".format(relation.tags.get('route')),
            node.lon,
            node.lat,
            relation.id,
            _map_wheelchair(node.tags.get('wheelchair')),
            '',  # location_type: plain stop
            stop_to_station_map.get(member_id, ''))
python
{ "resource": "" }
q274424
build_shape
test
def build_shape(relation, nodes, ways):
    """Extract shape of one route.

    Yields one Shape per member node found in `nodes`, numbered by a
    running sequence index.  Way members are currently skipped.
    """
    sequence_index = 0
    for member_type, member_id, member_role in relation.member_info:
        if member_id in nodes:
            yield Shape(relation.id,
                        nodes[member_id].lat,
                        nodes[member_id].lon,
                        sequence_index)
            sequence_index += 1
        # Do we need to consider ways too? It dramatically increases the
        # number of shapes.
        elif member_id in ways:
            continue
            # for point in ways[member_id].points:
            #     shape = Shape(
            #         relation.id,
            #         point.lat,
            #         point.lon,
            #         sequence_index)
            #     sequence_index += 1
        else:
            # Ignore excessive logging for now.
            pass
python
{ "resource": "" }
q274425
U2FDevice.get_supported_versions
test
def get_supported_versions(self):
    """
    Gets a list of supported U2F versions from the device.

    The result is cached on the instance after the first query.
    """
    if not hasattr(self, '_versions'):
        try:
            self._versions = [self.send_apdu(INS_GET_VERSION).decode()]
        except exc.APDUError as e:
            # v0 didn't support the instruction.
            # 0x6d00 = "instruction not supported" status word.
            self._versions = ['v0'] if e.code == 0x6d00 else []
    return self._versions
python
{ "resource": "" }
q274426
U2FDevice.send_apdu
test
def send_apdu(self, ins, p1=0, p2=0, data=b''):
    """
    Sends an APDU to the device, and waits for a response.

    @param ins  instruction byte
    @param p1   first parameter byte
    @param p2   second parameter byte
    @param data request payload (bytes; an int is converted to one byte,
                None becomes empty)
    @return the response payload with the 2-byte status word stripped
    @raises exc.DeviceError on transport failure, exc.APDUError when the
            status word is not APDU_OK
    """
    if data is None:
        data = b''
    elif isinstance(data, int):
        data = int2byte(data)
    size = len(data)
    # 3-byte length field (l0 l1 l2) plus two trailing zero bytes --
    # extended-length APDU framing, presumably; confirm against the spec.
    l0 = size >> 16 & 0xff
    l1 = size >> 8 & 0xff
    l2 = size & 0xff
    apdu_data = struct.pack('B B B B B B B %is B B' % size,
                            0, ins, p1, p2, l0, l1, l2, data, 0x00, 0x00)
    try:
        resp = self._do_send_apdu(apdu_data)
    except Exception as e:
        # TODO Use six.reraise if/when Six becomes an agreed dependency.
        raise exc.DeviceError(e)
    # Last two bytes are the big-endian status word.
    status = struct.unpack('>H', resp[-2:])[0]
    data = resp[:-2]
    if status != APDU_OK:
        raise exc.APDUError(status)
    return data
python
{ "resource": "" }
q274427
authenticate
test
def authenticate(devices, params, facet, check_only):
    """
    Interactively authenticates a AuthenticateRequest using an attached
    U2F device.

    Polls every attached device until one produces a signature; exits the
    process with status 0 (check-only success) or 1 (no usable device).
    """
    # Drop devices that cannot be opened; iterate a copy since we mutate.
    for device in devices[:]:
        try:
            device.open()
        except Exception:
            # Narrowed from a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.
            devices.remove(device)
    try:
        prompted = False
        while devices:
            removed = []
            for device in devices:
                try:
                    return u2f.authenticate(device, params, facet, check_only)
                except exc.APDUError as e:
                    if e.code == APDU_USE_NOT_SATISFIED:
                        # Device present but waiting for user touch.
                        if check_only:
                            sys.stderr.write('\nCorrect U2F device present!\n')
                            sys.exit(0)
                        if not prompted:
                            sys.stderr.write('\nTouch the flashing U2F device '
                                             'to authenticate...\n')
                            prompted = True
                    else:
                        removed.append(device)
                except exc.DeviceError:
                    removed.append(device)
            devices = [d for d in devices if d not in removed]
            for d in removed:
                d.close()
            time.sleep(0.25)
    finally:
        for device in devices:
            device.close()
    sys.stderr.write('\nThe required U2F device is not present!\n')
    sys.exit(1)
python
{ "resource": "" }
q274428
register
test
def register(device, data, facet):
    """
    Register a U2F device

    data = {
        "version": "U2F_V2",
        "challenge": string, //b64 encoded challenge
        "appId": string, //app_id
    }

    @return dict with websafe-b64 'registrationData' and 'clientData'
    @raises ValueError for an unsupported U2F version
    """
    if isinstance(data, string_types):
        data = json.loads(data)
    if data['version'] != VERSION:
        raise ValueError('Unsupported U2F version: %s' % data['version'])
    # appId defaults to the facet; verify_facet raises on mismatch.
    app_id = data.get('appId', facet)
    verify_facet(app_id, facet)
    app_param = sha256(app_id.encode('utf8')).digest()
    client_data = {
        'typ': 'navigator.id.finishEnrollment',
        'challenge': data['challenge'],
        'origin': facet
    }
    client_data = json.dumps(client_data)
    client_param = sha256(client_data.encode('utf8')).digest()
    # Enrollment request = challenge param | application param.
    request = client_param + app_param
    # P1=0x03: enforce-user-presence-and-sign (FIDO U2F raw messages).
    p1 = 0x03
    p2 = 0
    response = device.send_apdu(INS_ENROLL, p1, p2, request)
    return {
        'registrationData': websafe_encode(response),
        'clientData': websafe_encode(client_data)
    }
python
{ "resource": "" }
q274429
authenticate
test
def authenticate(device, data, facet, check_only=False):
    """
    Signs an authentication challenge

    data = {
        'version': "U2F_V2",
        'challenge': websafe_encode(self.challenge),
        'appId': self.binding.app_id,
        'keyHandle': websafe_encode(self.binding.key_handle)
    }

    @return dict with websafe-b64 'clientData', 'signatureData' and the
            original 'keyHandle'
    @raises ValueError for an unsupported U2F version
    """
    if isinstance(data, string_types):
        data = json.loads(data)
    if data['version'] != VERSION:
        raise ValueError('Unsupported U2F version: %s' % data['version'])
    # appId defaults to the facet; verify_facet raises on mismatch.
    app_id = data.get('appId', facet)
    verify_facet(app_id, facet)
    app_param = sha256(app_id.encode('utf8')).digest()
    key_handle = websafe_decode(data['keyHandle'])
    # Client data
    client_data = {
        'typ': 'navigator.id.getAssertion',
        'challenge': data['challenge'],
        'origin': facet
    }
    client_data = json.dumps(client_data)
    client_param = sha256(client_data.encode('utf8')).digest()
    # Request = challenge param | app param | key handle length | key handle.
    request = client_param + app_param + int2byte(
        len(key_handle)) + key_handle
    # P1: 0x07 = check-only, 0x03 = enforce-user-presence-and-sign
    # (FIDO U2F raw message format).
    p1 = 0x07 if check_only else 0x03
    p2 = 0
    response = device.send_apdu(INS_SIGN, p1, p2, request)
    return {
        'clientData': websafe_encode(client_data),
        'signatureData': websafe_encode(response),
        'keyHandle': data['keyHandle']
    }
python
{ "resource": "" }
q274430
register
test
def register(devices, params, facet):
    """
    Interactively registers a single U2F device, given the
    RegistrationRequest.

    Prompts the user, then polls every attached device until one completes
    registration; exits the process with status 1 when none can.
    """
    # Drop devices that fail to open; iterate a copy since we mutate.
    for device in devices[:]:
        try:
            device.open()
        except Exception:
            # Narrowed from a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.
            devices.remove(device)
    sys.stderr.write('\nTouch the U2F device you wish to register...\n')
    try:
        while devices:
            removed = []
            for device in devices:
                try:
                    return u2f.register(device, params, facet)
                except exc.APDUError as e:
                    if e.code == APDU_USE_NOT_SATISFIED:
                        # Waiting for user touch; keep polling.
                        pass
                    else:
                        removed.append(device)
                except exc.DeviceError:
                    removed.append(device)
            devices = [d for d in devices if d not in removed]
            for d in removed:
                d.close()
            time.sleep(0.25)
    finally:
        for device in devices:
            device.close()
    sys.stderr.write('\nUnable to register with any U2F device.\n')
    sys.exit(1)
python
{ "resource": "" }
q274431
u2str
test
def u2str(data):
    """Recursively converts unicode objects to UTF-8 encoded byte strings."""
    if isinstance(data, text_type):
        return data.encode('utf-8')
    if isinstance(data, dict):
        return {u2str(key): u2str(value) for key, value in data.items()}
    if isinstance(data, list):
        return [u2str(element) for element in data]
    return data
python
{ "resource": "" }
q274432
wrap_function
test
def wrap_function(func=None, error_threshold=None, reraise_exception=True, save_current_stack_trace=True): ''' Wraps a function with reporting to errors backend ''' # This if/else allows wrap_function to behave like a normal decorator when # used like: # @wrap_function # def some_func(): # # However, it also allows wrap_function to also be passed keyword arguments # like the following: # @wrap_function(error_threshold=3, reraise_exception=False) # def some_func(): if func: return flawless.client.client._wrap_function_with_error_decorator( func=func, error_threshold=error_threshold, reraise_exception=reraise_exception, save_current_stack_trace=save_current_stack_trace) else: return functools.partial(flawless.client.client._wrap_function_with_error_decorator, error_threshold=error_threshold, reraise_exception=reraise_exception, save_current_stack_trace=save_current_stack_trace)
python
{ "resource": "" }
q274433
wrap_class
test
def wrap_class(cls, error_threshold=None):
    ''' Wraps a class with reporting to errors backend by decorating each
    function of the class. Decorators are injected under the classmethod
    decorator if they exist. '''
    methods = inspect.getmembers(cls, inspect.ismethod) + inspect.getmembers(cls, inspect.isfunction)
    for method_name, method in methods:
        # For bound classmethods (im_self set) decorate the underlying
        # function, then re-wrap with classmethod below.
        wrapped_method = flawless.client.client._wrap_function_with_error_decorator(
            method if not im_self(method) else im_func(method),
            save_current_stack_trace=False,
            error_threshold=error_threshold,
        )
        if im_self(method):
            wrapped_method = classmethod(wrapped_method)
        setattr(cls, method_name, wrapped_method)
    return cls
python
{ "resource": "" }
q274434
FlawlessServiceBaseClass._matches_filepath_pattern
test
def _matches_filepath_pattern(self, filepath):
    '''Return True when `filepath` matches any configured blame pattern.

    An empty pattern list means "blame everything", so everything matches.'''
    if not self.only_blame_patterns:
        return True
    return any(pattern.match(filepath)
               for pattern in self.only_blame_patterns)
python
{ "resource": "" }
q274435
FlawlessThriftServiceHandler._get_email
test
def _get_email(self, email):
    '''Given an email address, check the email_remapping table to see if the
    email should be sent to a different address. This function also handles
    overriding the email domain if ignore_vcs_email_domain is set or the
    domain was missing.

    Returns None for falsy or @-less input.'''
    if not email or "@" not in email:
        return None
    if email in self.email_remapping.remap:
        return self.email_remapping.remap[email]
    # BUGFIX: maxsplit must be 1 -- with the original `split("@", 2)`, an
    # address containing two '@' characters produced three fields and the
    # two-way unpack raised ValueError.
    prefix, domain = email.split("@", 1)
    if prefix in self.email_remapping.remap:
        return self.email_remapping.remap[prefix]
    if "." not in domain or config.ignore_vcs_email_domain:
        return "%s@%s" % (prefix, config.email_domain_name)
    return email
python
{ "resource": "" }
q274436
FlawlessThriftServiceHandler._get_entry
test
def _get_entry(self, entry, entry_tree):
    '''Helper function for retrieving a particular entry from the prefix
    trees; returns None when no equal entry exists.'''
    candidates = entry_tree[entry.filename]
    return next((candidate for candidate in candidates if entry == candidate),
                None)
python
{ "resource": "" }
q274437
markdown_to_reST
test
def markdown_to_reST(text):
    '''This is not a general purpose converter. Only converts this readme'''
    # Convert parameters to italics and prepend a newline
    text = re.sub(r"\n (\w+) - (.+)\n", r"\n\n *\g<1>* - \g<2>\n", text)
    # Parse [http://url](text), and just leave the url
    text = re.sub(r"\[([^\]]+)\]\([^)]+\)", r"\g<1>", text)
    # Disable formatting of numbered lists
    text = re.sub(r"\n(\d+). ", r"\n\\\g<1>. ", text)
    return text
python
{ "resource": "" }
q274438
serve
test
def serve(conf_path, storage_factory=None):
    """This method starts the server. There are two processes, one is an HTTP
    server that shows an admin interface and the second is a Thrift server
    that the client code calls.

    Arguments:
        `conf_path` - The path to your flawless.cfg file
        `storage_factory` - You can pass in your own storage class that
            implements StorageInterface. You must implement storage_cls if
            you want Flawless to be horizontally scalable, since by default
            it will just store everything on the local disk.
    """
    flawless.lib.config.init_config(conf_path)
    # Try and create datadir if it doesn't exist. For instance it might be in /tmp
    if not os.path.exists(config.data_dir_path):
        os.makedirs(config.data_dir_path)
    storage_factory = storage_factory or (lambda partition: DiskStorage(partition=partition))
    # Setup root logger
    root_logger = logging.getLogger()
    root_handler = logging.handlers.TimedRotatingFileHandler(
        filename=config.log_file, when='d', interval=1,
        backupCount=config.log_days_to_keep)
    root_logger.setLevel(getattr(logging, config.log_level))
    root_logger.addHandler(root_handler)
    # Fork: child runs the HTTP admin UI, parent runs the Thrift RPC server.
    child_pid = os.fork()
    if child_pid == 0:
        # Setup HTTP server
        handler = FlawlessWebServiceHandler(storage_factory=storage_factory)
        server = SimpleThreadedHTTPServer(('', config.http_port), SimpleRequestHTTPHandler)
        server.attach_service(handler)
        server.request_queue_size = 50
        try:
            server.serve_forever()
        except (KeyboardInterrupt, SystemExit):
            server.server_close()
    else:
        # Setup Thrift server
        handler = FlawlessThriftServiceHandler(storage_factory=storage_factory)
        processor = Flawless.Processor(handler)
        transport = TSocket.TServerSocket(port=config.port)
        tfactory = TTransport.TFramedTransportFactory()
        pfactory = TBinaryProtocol.TBinaryProtocolFactory()
        server = TServer.TThreadedServer(processor, transport, tfactory, pfactory)
        try:
            server.serve()
        except (KeyboardInterrupt, SystemExit):
            # Flush state, close the socket, then shut down the HTTP child.
            handler.errors_seen.sync()
            transport.close()
            os.kill(child_pid, signal.SIGINT)
python
{ "resource": "" }
q274439
record_error
test
def record_error(hostname, exc_info, preceding_stack=None, error_threshold=None, additional_info=None):
    ''' Helper function to record errors to the flawless backend.

    `exc_info` is a sys.exc_info() triple; `preceding_stack` is an optional
    list of (filename, lineno, funcname, text) rows prepended to the
    traceback.  Reports are rate-limited through a per-process LRU cache.
    '''
    stack = []
    exc_type, exc_value, sys_traceback = exc_info
    # Flatten the traceback linked list (outermost frame first).
    while sys_traceback is not None:
        stack.append(sys_traceback)
        sys_traceback = sys_traceback.tb_next
    stack_lines = []
    for row in preceding_stack or []:
        stack_lines.append(
            api_ttypes.StackLine(filename=os.path.abspath(row[0]), line_number=row[1],
                                 function_name=row[2], text=row[3])
        )
    for index, tb in enumerate(stack):
        filename = tb.tb_frame.f_code.co_filename
        func_name = tb.tb_frame.f_code.co_name
        lineno = tb.tb_lineno
        line = linecache.getline(filename, lineno, tb.tb_frame.f_globals)
        frame_locals = None
        # Only capture locals for the innermost NUM_FRAMES_TO_SAVE frames.
        if index >= (len(stack) - NUM_FRAMES_TO_SAVE):
            # Include some limits on max string length & number of variables
            # to keep things from getting out of hand
            frame_locals = dict((k, _myrepr(k, v)) for k, v in
                                list(tb.tb_frame.f_locals.items())[:MAX_LOCALS] if k != "self")
            if "self" in tb.tb_frame.f_locals and hasattr(tb.tb_frame.f_locals["self"], "__dict__"):
                frame_locals.update(dict(("self." + k, _myrepr(k, v)) for k, v in
                                         list(tb.tb_frame.f_locals["self"].__dict__.items())[:MAX_LOCALS]
                                         if k != "self"))
        stack_lines.append(
            api_ttypes.StackLine(filename=os.path.abspath(filename), line_number=lineno,
                                 function_name=func_name, text=line, frame_locals=frame_locals)
        )
    # Check LRU cache & potentially do not send error report if this client
    # has already reported this error several times.
    key = CachedErrorInfo.get_hash_key(stack_lines)
    info = ERROR_CACHE.get(key) or CachedErrorInfo()
    info.increment()
    ERROR_CACHE[key] = info
    if info.should_report():
        error_count = info.mark_reported()
        _send_request(
            api_ttypes.RecordErrorRequest(
                traceback=stack_lines,
                exception_message=repr(exc_value),
                exception_type=exc_type.__module__ + "." + exc_type.__name__,
                hostname=hostname,
                error_threshold=error_threshold,
                additional_info=additional_info,
                error_count=error_count,
            )
        )
python
{ "resource": "" }
q274440
url_to_image
test
def url_to_image(url):
    """
    Fetch an image from `url` and return its raw bytes wrapped in a
    file-like StringIO object.

    NOTE(review): the original docstring claimed this converts the
    response into a Pillow Image object, but the code returns the
    StringIO buffer, not Image.open(...) -- confirm callers' expectations
    before changing either the code or this contract.
    """
    r = requests.get(url)
    image = StringIO(r.content)
    return image
python
{ "resource": "" }
q274441
string_to_image
test
def string_to_image(image_string):
    """
    Convert raw image data (a byte string) into a Pillow Image object by
    wrapping it in a file-like buffer and handing it to Image.open.
    """
    image_filelike = StringIO(image_string)
    image = Image.open(image_filelike)
    return image
python
{ "resource": "" }
q274442
validate
test
def validate(validator):
    """Build a decorator that runs `validator(image, size)` before the
    wrapped resize function.

    The generated wrapper accepts ``validate=False`` to bypass the check;
    otherwise the validator must raise when the call is not allowed.
    NOTE(review): the original docstring claimed the validator is stored
    as ``func.validate``; the code does not actually do that.
    """
    def decorator(resize_func):
        """Bind the validator to one particular resize function."""
        @wraps(resize_func)
        def wrapper(image, size, validate=True):
            if validate:
                validator(image, size)
            return resize_func(image, size)
        return wrapper
    return decorator
python
{ "resource": "" }
q274443
_is_big_enough
test
def _is_big_enough(image, size):
    """Raise ImageSizeError when the image is smaller than `size`.

    NOTE(review): the check uses `and`, so the error fires only when
    *both* dimensions are too small -- confirm that is the intended
    contract before tightening it.
    """
    width_too_small = size[0] > image.size[0]
    height_too_small = size[1] > image.size[1]
    if width_too_small and height_too_small:
        raise ImageSizeError(image.size, size)
python
{ "resource": "" }
q274444
_width_is_big_enough
test
def _width_is_big_enough(image, width):
    """Raise ImageSizeError if the image is narrower than `width`."""
    actual_width = image.size[0]
    if actual_width < width:
        raise ImageSizeError(actual_width, width)
python
{ "resource": "" }
q274445
_height_is_big_enough
test
def _height_is_big_enough(image, height):
    """Raise ImageSizeError if the image is shorter than `height`."""
    actual_height = image.size[1]
    if actual_height < height:
        raise ImageSizeError(actual_height, height)
python
{ "resource": "" }
q274446
TaskImporter.parse_category
test
def parse_category(self, item, field_name, source_name): """ Converts the text category to a tasks.Category instance. """ # Get and checks for the corresponding slug slug = category_map.get(self.get_value(item, source_name), None) if not slug: return None # Load the category instance try: return Category.objects.get(slug=slug) except Category.DoesNotExist: pass
python
{ "resource": "" }
q274447
TaskImporter.parse_totals
test
def parse_totals(self, item, field_name, source_name):
    """
    Parse numeric fields, defaulting to 0 when the source value is
    missing or not a valid integer.
    """
    val = self.get_value(item, source_name)
    try:
        return int(val)
    except (TypeError, ValueError):
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        return 0
python
{ "resource": "" }
q274448
XMLImporter.get_items
test
def get_items(self):
    """Iterate over the items found in the XML source.

    Uses iterparse so large documents are never fully materialised;
    each matching element is cleared once it has been yielded.
    """
    for _event, element in ElementTree.iterparse(self.source):
        if element.tag != self.item_tag_name:
            continue
        yield element
        # Release the element's contents from memory
        element.clear()
python
{ "resource": "" }
q274449
Importer.save_error
test
def save_error(self, data, exception_info):
    """Record a failed item in `self.errors`.

    Each entry stores the offending data plus the formatted traceback
    from `exception_info` (a sys.exc_info() triple).
    """
    # TODO: what to do with errors? Let it flow? Write to a log file?
    traceback_text = ''.join(format_exception(*exception_info))
    self.errors.append({'data': data, 'exception': traceback_text})
python
{ "resource": "" }
q274450
Importer.parse
test
def parse(self):
    """
    Parses all data from the source, saving model instances.

    Items whose save fails are recorded via save_error instead of
    aborting the run; the source is unloaded afterwards.
    """
    # Checks if the source is loaded
    if not self.loaded:
        self.load(self.source)
    for item in self.get_items():
        # Parse the fields from the source into a dict
        data = self.parse_item(item)
        # Get the instance from the DB, or a new one
        instance = self.get_instance(data)
        # Feed instance with data
        self.feed_instance(data, instance)
        # Try to save the instance or keep the error
        try:
            self.save_item(item, data, instance)
        except Exception:
            # The original bound `as e` but never used it; the full
            # exc_info triple is captured for the error record instead.
            self.save_error(data, sys.exc_info())
    # Unload the source
    self.unload()
python
{ "resource": "" }
q274451
Importer.parse_item
test
def parse_item(self, item):
    """
    Turn a raw source item into a dictionary of field values.
    """
    parsed_data = {}
    for field_name in self.fields:
        # The field may live under a different identifier in the source
        # (an XML path, a CSV column name / position...); it defaults to
        # the field name itself.
        source_name = self.field_map.get(field_name, field_name)
        # Prefer a custom ``parse_<field_name>`` hook when one exists,
        # otherwise read the raw value straight from the item.
        custom_parser = getattr(self, 'parse_%s' % field_name, None)
        if custom_parser is not None:
            parsed_data[field_name] = custom_parser(item, field_name, source_name)
        else:
            parsed_data[field_name] = self.get_value(item, source_name)
    return parsed_data
python
{ "resource": "" }
q274452
Importer.get_instance
test
def get_instance(self, data):
    """
    Fetch the matching model instance from the database, or a fresh
    (unsaved) one when no match is found.
    """
    # Without unique fields there is no way to match: always a new row.
    if not self.unique_fields:
        return self.model()
    # Build the lookup from the unique field values.
    lookup = {name: data[name] for name in self.unique_fields}
    try:
        return self.model._default_manager.get(**lookup)
    except self.model.DoesNotExist:
        return self.model()
python
{ "resource": "" }
q274453
Importer.save_item
test
def save_item(self, item, data, instance, commit=True):
    """
    Persist ``instance`` to the database and return it.

    With ``commit=False`` the instance is returned unsaved.
    """
    if not commit:
        return instance
    instance.save()
    return instance
python
{ "resource": "" }
q274454
download_file
test
def download_file(url, dest):
    """
    Downloads a HTTP resource from `url` and save to `dest`.

    Capable of dealing with Gzip compressed content.
    """
    # Advertise gzip support so the server may send compressed content.
    request = urllib2.Request(url)
    request.add_header('Accept-encoding', 'gzip')
    response = urllib2.build_opener().open(request)
    data = response.read()
    # Decompress in memory when the server actually used gzip.
    if response.headers.get('content-encoding', '') == 'gzip':
        data = gzip.GzipFile(fileobj=StringIO.StringIO(data)).read()
    # ``with`` guarantees the file is closed even if the write fails
    # (the original leaked the handle on error).
    with open(dest, 'wb') as f:
        f.write(data)
python
{ "resource": "" }
q274455
CSVImporter.load
test
def load(self, source):
    """
    Open the CSV source for reading.

    Note: the ``source`` argument is ignored; the path already stored
    on ``self.source`` is the one that gets opened (binary mode).
    """
    opened = open(self.source, 'rb')
    self.source = opened
    self.loaded = True
python
{ "resource": "" }
q274456
CSVImporter.get_items
test
def get_items(self):
    """
    Iterate the CSV rows as dicts mapping column headers to values.
    """
    reader = csv.reader(self.source)
    # First line holds the column headers.
    # ``next(reader)`` instead of ``reader.next()`` so the code also
    # runs on Python 3, where the ``.next()`` method no longer exists.
    headers = next(reader)
    for row in reader:
        # Blank lines produce empty rows; skip them.
        if not row:
            continue
        yield dict(zip(headers, row))
python
{ "resource": "" }
q274457
AutograderSandbox.allow_network_access
test
def allow_network_access(self, value: bool):
    """
    Set whether the sandbox container may access the network.

    Raises ValueError if this sandbox instance is currently running.
    """
    # Network settings are applied when the container starts, so they
    # cannot be toggled while it is up.
    if self._is_running:
        raise ValueError(
            "Cannot change network access settings on a running sandbox")
    self._allow_network_access = value
python
{ "resource": "" }
q274458
AutograderSandbox.run_command
test
def run_command(self, args: List[str], max_num_processes: int=None, max_stack_size: int=None, max_virtual_memory: int=None, as_root: bool=False, stdin: FileIO=None, timeout: int=None, check: bool=False, truncate_stdout: int=None, truncate_stderr: int=None) -> 'CompletedCommand':
    """
    Runs a command inside the sandbox and returns the results.

    :param args: A list of strings that specify which command should
        be run inside the sandbox.
    :param max_num_processes: The maximum number of processes the
        command is allowed to spawn.
    :param max_stack_size: The maximum stack size, in bytes, allowed
        for the command.
    :param max_virtual_memory: The maximum amount of memory, in bytes,
        allowed for the command.
    :param as_root: Whether to run the command as a root user.
    :param stdin: A file object to be redirected as input to the
        command's stdin. If this is None, /dev/null is sent to the
        command's stdin.
    :param timeout: The time limit for the command.
    :param check: Causes CalledProcessError to be raised if the
        command exits nonzero or times out.
    :param truncate_stdout: When not None, stdout from the command
        will be truncated after this many bytes.
    :param truncate_stderr: When not None, stderr from the command
        will be truncated after this many bytes.
    """
    # Delegate to cmd_runner.py inside the container; it applies the
    # resource limits and reports results in a length-prefixed format.
    cmd = ['docker', 'exec', '-i', self.name, 'cmd_runner.py']
    # No caller-supplied stdin -> the runner feeds /dev/null instead.
    if stdin is None:
        cmd.append('--stdin_devnull')
    if max_num_processes is not None:
        cmd += ['--max_num_processes', str(max_num_processes)]
    if max_stack_size is not None:
        cmd += ['--max_stack_size', str(max_stack_size)]
    if max_virtual_memory is not None:
        cmd += ['--max_virtual_memory', str(max_virtual_memory)]
    if timeout is not None:
        cmd += ['--timeout', str(timeout)]
    if truncate_stdout is not None:
        cmd += ['--truncate_stdout', str(truncate_stdout)]
    if truncate_stderr is not None:
        cmd += ['--truncate_stderr', str(truncate_stderr)]
    # Drop to the sandbox user unless root was explicitly requested.
    if not as_root:
        cmd += ['--linux_user_id', str(self._linux_uid)]
    cmd += args
    if self.debug:
        print('running: {}'.format(cmd), flush=True)
    with tempfile.TemporaryFile() as f:
        try:
            # cmd_runner.py writes, in order:
            #   <json_len>\n<results json><stdout_len>\n<stdout bytes>
            #   <stderr_len>\n<stderr bytes>
            subprocess.run(cmd, stdin=stdin, stdout=f, stderr=subprocess.PIPE, check=True)
            f.seek(0)
            json_len = int(f.readline().decode().rstrip())
            results_json = json.loads(f.read(json_len).decode())
            # stdout/stderr are spooled to named temp files so large
            # outputs never have to live in memory.
            stdout_len = int(f.readline().decode().rstrip())
            stdout = tempfile.NamedTemporaryFile()
            stdout.write(f.read(stdout_len))
            stdout.seek(0)
            stderr_len = int(f.readline().decode().rstrip())
            stderr = tempfile.NamedTemporaryFile()
            stderr.write(f.read(stderr_len))
            stderr.seek(0)
            result = CompletedCommand(return_code=results_json['return_code'],
                                      timed_out=results_json['timed_out'],
                                      stdout=stdout,
                                      stderr=stderr,
                                      stdout_truncated=results_json['stdout_truncated'],
                                      stderr_truncated=results_json['stderr_truncated'])
            # ``check`` applies both to nonzero exits and to timeouts.
            if (result.return_code != 0 or results_json['timed_out']) and check:
                raise subprocess.CalledProcessError(
                    result.return_code, cmd,
                    output=result.stdout, stderr=result.stderr)
            return result
        except subprocess.CalledProcessError as e:
            # Dump whatever partial output was captured, then re-raise.
            f.seek(0)
            print(f.read())
            print(e.stderr)
            raise
python
{ "resource": "" }
q274459
AutograderSandbox.add_files
test
def add_files(self, *filenames: str, owner: str=SANDBOX_USERNAME, read_only: bool=False):
    """
    Copies the specified files into the working directory of this
    sandbox. The filenames specified can be absolute paths or relative
    paths to the current working directory.

    :param owner: The name of a user who should be granted ownership of
        the newly added files. Must be either
        autograder_sandbox.SANDBOX_USERNAME or 'root', otherwise
        ValueError will be raised.
    :param read_only: If true, the new files' permissions will be set to
        read-only.
    """
    if owner != SANDBOX_USERNAME and owner != 'root':
        raise ValueError('Invalid value for parameter "owner": {}'.format(owner))
    # Bundle the files into an in-memory tar archive and stream it into
    # the container via ``docker cp -`` (one process call for all files).
    with tempfile.TemporaryFile() as f, \
            tarfile.TarFile(fileobj=f, mode='w') as tar_file:
        for filename in filenames:
            # arcname flattens paths: only basenames land in the sandbox.
            tar_file.add(filename, arcname=os.path.basename(filename))
        f.seek(0)
        subprocess.check_call(
            ['docker', 'cp', '-', self.name + ':' + SANDBOX_WORKING_DIR_NAME],
            stdin=f)
    file_basenames = [os.path.basename(filename) for filename in filenames]
    # Files arrive owned by root; hand them to the sandbox user on request.
    if owner == SANDBOX_USERNAME:
        self._chown_files(file_basenames)
    if read_only:
        chmod_cmd = ['chmod', '444'] + file_basenames
        self.run_command(chmod_cmd, as_root=True)
python
{ "resource": "" }
q274460
AutograderSandbox.add_and_rename_file
test
def add_and_rename_file(self, filename: str, new_filename: str) -> None:
    """
    Copy ``filename`` into the sandbox working directory under the name
    ``new_filename`` and hand ownership to the sandbox user.
    """
    container_dir = self.name + ':' + SANDBOX_WORKING_DIR_NAME
    destination = os.path.join(container_dir, new_filename)
    subprocess.check_call(['docker', 'cp', filename, destination])
    self._chown_files([new_filename])
python
{ "resource": "" }
q274461
Enrollments.get_enrollments_for_course
test
def get_enrollments_for_course(self, course_id, params=None):
    """
    Return a list of all enrollments for the passed course_id.

    https://canvas.instructure.com/doc/api/enrollments.html#method.enrollments_api.index
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    url = COURSES_API.format(course_id) + "/enrollments"
    return [CanvasEnrollment(data=datum)
            for datum in self._get_paged_resource(url, params=params)]
python
{ "resource": "" }
q274462
Enrollments.get_enrollments_for_course_by_sis_id
test
def get_enrollments_for_course_by_sis_id(self, sis_course_id, params=None):
    """
    Return a list of all enrollments for the passed course sis id.
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    return self.get_enrollments_for_course(
        self._sis_id(sis_course_id, sis_field="course"), params)
python
{ "resource": "" }
q274463
Enrollments.get_enrollments_for_section
test
def get_enrollments_for_section(self, section_id, params=None):
    """
    Return a list of all enrollments for the passed section_id.

    https://canvas.instructure.com/doc/api/enrollments.html#method.enrollments_api.index
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    url = SECTIONS_API.format(section_id) + "/enrollments"
    return [CanvasEnrollment(data=datum)
            for datum in self._get_paged_resource(url, params=params)]
python
{ "resource": "" }
q274464
Enrollments.get_enrollments_for_section_by_sis_id
test
def get_enrollments_for_section_by_sis_id(self, sis_section_id, params=None):
    """
    Return a list of all enrollments for the passed section sis id.
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    return self.get_enrollments_for_section(
        self._sis_id(sis_section_id, sis_field="section"), params)
python
{ "resource": "" }
q274465
Enrollments.get_enrollments_for_regid
test
def get_enrollments_for_regid(self, regid, params=None, include_courses=True):
    """
    Return a list of enrollments for the passed user regid.

    https://canvas.instructure.com/doc/api/enrollments.html#method.enrollments_api.index
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    sis_user_id = self._sis_id(regid, sis_field="user")
    url = USERS_API.format(sis_user_id) + "/enrollments"
    courses = Courses() if include_courses else None
    enrollments = []
    for datum in self._get_paged_resource(url, params=params):
        enrollment = CanvasEnrollment(data=datum)
        if include_courses:
            course = courses.get_course(datum["course_id"])
            if course.sis_course_id is not None:
                enrollment.course = course
                # The following 3 lines are kept (not removed) to stay
                # backward compatible with older consumers.
                enrollment.course_url = course.course_url
                enrollment.course_name = course.name
                enrollment.sis_course_id = course.sis_course_id
        else:
            # Derive the course URL by stripping the trailing
            # "/users/<id>" from the enrollment's html_url.
            enrollment.course_url = re.sub(
                r'/users/\d+$', '', enrollment.html_url)
        enrollments.append(enrollment)
    return enrollments
python
{ "resource": "" }
q274466
Enrollments.enroll_user
test
def enroll_user(self, course_id, user_id, enrollment_type, params=None):
    """
    Enroll a user into a course.

    https://canvas.instructure.com/doc/api/enrollments.html#method.enrollments_api.create
    """
    url = COURSES_API.format(course_id) + "/enrollments"
    enrollment_params = params if params else {}
    enrollment_params["user_id"] = user_id
    enrollment_params["type"] = enrollment_type
    data = self._post_resource(url, {"enrollment": enrollment_params})
    return CanvasEnrollment(data=data)
python
{ "resource": "" }
q274467
Roles.get_roles_in_account
test
def get_roles_in_account(self, account_id, params=None):
    """
    List the roles for an account, for the passed Canvas account ID.

    https://canvas.instructure.com/doc/api/roles.html#method.role_overrides.api_index
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    url = ACCOUNTS_API.format(account_id) + "/roles"
    return [CanvasRole(data=datum)
            for datum in self._get_resource(url, params=params)]
python
{ "resource": "" }
q274468
Roles.get_roles_by_account_sis_id
test
def get_roles_by_account_sis_id(self, account_sis_id, params=None):
    """
    List the roles for an account, for the passed account SIS ID.
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    return self.get_roles_in_account(
        self._sis_id(account_sis_id, sis_field="account"), params)
python
{ "resource": "" }
q274469
Roles.get_effective_course_roles_in_account
test
def get_effective_course_roles_in_account(self, account_id):
    """
    List all course roles available to an account, for the passed
    Canvas account ID, including course roles inherited from parent
    accounts.
    """
    # "show_inherited" pulls in roles defined on ancestor accounts.
    inherited = {"show_inherited": "1"}
    return [role
            for role in self.get_roles_in_account(account_id, inherited)
            if role.base_role_type != "AccountMembership"]
python
{ "resource": "" }
q274470
Roles.get_role
test
def get_role(self, account_id, role_id):
    """
    Get information about a single role, for the passed Canvas
    account ID.

    https://canvas.instructure.com/doc/api/roles.html#method.role_overrides.show
    """
    url = "{}/roles/{}".format(ACCOUNTS_API.format(account_id), role_id)
    return CanvasRole(data=self._get_resource(url))
python
{ "resource": "" }
q274471
Roles.get_role_by_account_sis_id
test
def get_role_by_account_sis_id(self, account_sis_id, role_id):
    """
    Get information about a single role, for the passed account SIS ID.
    """
    account_id = self._sis_id(account_sis_id, sis_field="account")
    return self.get_role(account_id, role_id)
python
{ "resource": "" }
q274472
Courses.get_course
test
def get_course(self, course_id, params=None):
    """
    Return course resource for given canvas course id.

    https://canvas.instructure.com/doc/api/courses.html#method.courses.show
    """
    # Copy to avoid mutating the caller's dict/list (and the old shared
    # ``params={}`` default, which accumulated state across calls).
    params = dict(params) if params else {}
    include = list(params.get("include", []))
    # The term is always needed by consumers of this client.
    if "term" not in include:
        include.append("term")
    params["include"] = include
    url = COURSES_API.format(course_id)
    return CanvasCourse(data=self._get_resource(url, params=params))
python
{ "resource": "" }
q274473
Courses.get_course_by_sis_id
test
def get_course_by_sis_id(self, sis_course_id, params=None):
    """
    Return course resource for given sis id.
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    return self.get_course(
        self._sis_id(sis_course_id, sis_field="course"), params)
python
{ "resource": "" }
q274474
Courses.get_courses_in_account
test
def get_courses_in_account(self, account_id, params=None):
    """
    Returns a list of courses for the passed account ID.

    https://canvas.instructure.com/doc/api/accounts.html#method.accounts.courses_api
    """
    # Copy to avoid mutating the caller's dict (and the old shared
    # ``params={}`` default).
    params = dict(params) if params else {}
    # Canvas expects "true"/"" rather than a Python boolean here.
    if "published" in params:
        params["published"] = "true" if params["published"] else ""
    url = ACCOUNTS_API.format(account_id) + "/courses"
    return [CanvasCourse(data=data)
            for data in self._get_paged_resource(url, params=params)]
python
{ "resource": "" }
q274475
Courses.get_courses_in_account_by_sis_id
test
def get_courses_in_account_by_sis_id(self, sis_account_id, params=None):
    """
    Return a list of courses for the passed account SIS ID.
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    return self.get_courses_in_account(
        self._sis_id(sis_account_id, sis_field="account"), params)
python
{ "resource": "" }
q274476
Courses.get_published_courses_in_account
test
def get_published_courses_in_account(self, account_id, params=None):
    """
    Return a list of published courses for the passed account ID.
    """
    # Copy so the published flag never leaks into the caller's dict or
    # into the old shared ``params={}`` default (which made the flag
    # stick across unrelated calls).
    params = dict(params) if params else {}
    params["published"] = True
    return self.get_courses_in_account(account_id, params)
python
{ "resource": "" }
q274477
Courses.get_published_courses_in_account_by_sis_id
test
def get_published_courses_in_account_by_sis_id(self, sis_account_id,
                                               params=None):
    """
    Return a list of published courses for the passed account SIS ID.
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    return self.get_published_courses_in_account(
        self._sis_id(sis_account_id, sis_field="account"), params)
python
{ "resource": "" }
q274478
Courses.get_courses_for_regid
test
def get_courses_for_regid(self, regid, params=None):
    """
    Return a list of courses for the passed regid.

    https://canvas.instructure.com/doc/api/courses.html#method.courses.index
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    # Issue the request masqueraded as the user.
    self._as_user = regid
    try:
        data = self._get_resource("/api/v1/courses", params=params)
    finally:
        # Always clear the masquerade, even if the request fails
        # (previously an exception left it set for later calls).
        self._as_user = None
    courses = []
    for datum in data:
        if "sis_course_id" in datum:
            courses.append(CanvasCourse(data=datum))
        else:
            # Fall back to a full fetch when the listing lacks the SIS id.
            courses.append(self.get_course(datum["id"], params))
    return courses
python
{ "resource": "" }
q274479
Courses.create_course
test
def create_course(self, account_id, course_name):
    """
    Create a canvas course with the given subaccount id and course name.

    https://canvas.instructure.com/doc/api/courses.html#method.courses.create
    """
    url = ACCOUNTS_API.format(account_id) + "/courses"
    payload = {"course": {"name": course_name}}
    return CanvasCourse(data=self._post_resource(url, payload))
python
{ "resource": "" }
q274480
Courses.update_sis_id
test
def update_sis_id(self, course_id, sis_course_id):
    """
    Updates the SIS ID for the course identified by the passed
    course ID.

    https://canvas.instructure.com/doc/api/courses.html#method.courses.update
    """
    payload = {"course": {"sis_course_id": sis_course_id}}
    return CanvasCourse(
        data=self._put_resource(COURSES_API.format(course_id), payload))
python
{ "resource": "" }
q274481
Analytics.get_activity_by_account
test
def get_activity_by_account(self, account_id, term_id):
    """
    Returns participation data for the given account_id and term_id.

    https://canvas.instructure.com/doc/api/analytics.html#method.analytics_api.department_participation
    """
    url = ("/api/v1/accounts/sis_account_id:{}/analytics/"
           "terms/sis_term_id:{}/activity.json").format(account_id, term_id)
    return self._get_resource(url)
python
{ "resource": "" }
q274482
Analytics.get_grades_by_account
test
def get_grades_by_account(self, account_id, term_id):
    """
    Returns grade data for the given account_id and term_id.

    https://canvas.instructure.com/doc/api/analytics.html#method.analytics_api.department_grades
    """
    url = ("/api/v1/accounts/sis_account_id:{}/analytics/"
           "terms/sis_term_id:{}/grades.json").format(account_id, term_id)
    return self._get_resource(url)
python
{ "resource": "" }
q274483
Analytics.get_statistics_by_account
test
def get_statistics_by_account(self, account_id, term_id):
    """
    Returns statistics for the given account_id and term_id.

    https://canvas.instructure.com/doc/api/analytics.html#method.analytics_api.department_statistics
    """
    url = ("/api/v1/accounts/sis_account_id:{}/analytics/"
           "terms/sis_term_id:{}/statistics.json").format(account_id, term_id)
    return self._get_resource(url)
python
{ "resource": "" }
q274484
Analytics.get_activity_by_sis_course_id
test
def get_activity_by_sis_course_id(self, sis_course_id):
    """
    Returns participation data for the given sis_course_id.

    https://canvas.instructure.com/doc/api/analytics.html#method.analytics_api.course_participation
    """
    course_sis_id = self._sis_id(sis_course_id, sis_field="course")
    return self._get_resource(
        "/api/v1/courses/{}/analytics/activity.json".format(course_sis_id))
python
{ "resource": "" }
q274485
Analytics.get_assignments_by_sis_course_id
test
def get_assignments_by_sis_course_id(self, sis_course_id):
    """
    Returns assignment data for the given course_id.

    https://canvas.instructure.com/doc/api/analytics.html#method.analytics_api.course_assignments
    """
    course_sis_id = self._sis_id(sis_course_id, sis_field="course")
    return self._get_resource(
        "/api/v1/courses/{}/analytics/assignments.json".format(course_sis_id))
python
{ "resource": "" }
q274486
Analytics.get_student_summaries_by_sis_course_id
test
def get_student_summaries_by_sis_course_id(self, sis_course_id):
    """
    Returns per-student data for the given course_id.

    https://canvas.instructure.com/doc/api/analytics.html#method.analytics_api.course_student_summaries
    """
    course_sis_id = self._sis_id(sis_course_id, sis_field="course")
    return self._get_resource(
        "/api/v1/courses/{}/analytics/student_summaries.json".format(
            course_sis_id))
python
{ "resource": "" }
q274487
Analytics.get_student_activity_for_sis_course_id_and_sis_user_id
test
def get_student_activity_for_sis_course_id_and_sis_user_id(
        self, sis_user_id, sis_course_id):
    """
    Returns student activity data for the given user_id and course_id.

    https://canvas.instructure.com/doc/api/analytics.html#method.analytics_api.student_in_course_participation
    """
    course_sis_id = self._sis_id(sis_course_id, sis_field="course")
    url = ("/api/v1/courses/{}/analytics/users/"
           "sis_user_id:{}/activity.json").format(course_sis_id, sis_user_id)
    return self._get_resource(url)
python
{ "resource": "" }
q274488
Analytics.get_student_messaging_for_sis_course_id_and_sis_user_id
test
def get_student_messaging_for_sis_course_id_and_sis_user_id(
        self, sis_user_id, sis_course_id):
    """
    Returns student messaging data for the given user_id and course_id.

    https://canvas.instructure.com/doc/api/analytics.html#method.analytics_api.student_in_course_messaging
    """
    course_sis_id = self._sis_id(sis_course_id, sis_field="course")
    url = ("/api/v1/courses/{}/analytics/users/"
           "sis_user_id:{}/communication.json").format(
        course_sis_id, sis_user_id)
    return self._get_resource(url)
python
{ "resource": "" }
q274489
ExternalTools.get_external_tools_in_account
test
def get_external_tools_in_account(self, account_id, params=None):
    """
    Return external tools for the passed canvas account id.

    https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.index
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    url = ACCOUNTS_API.format(account_id) + "/external_tools"
    return list(self._get_paged_resource(url, params=params))
python
{ "resource": "" }
q274490
ExternalTools.get_external_tools_in_course
test
def get_external_tools_in_course(self, course_id, params=None):
    """
    Return external tools for the passed canvas course id.

    https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.index
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    url = COURSES_API.format(course_id) + "/external_tools"
    return list(self._get_paged_resource(url, params=params))
python
{ "resource": "" }
q274491
ExternalTools._create_external_tool
test
def _create_external_tool(self, context, context_id, json_data):
    """
    Create an external tool using the passed json_data.

    ``context`` is either COURSES_API or ACCOUNTS_API; ``context_id``
    is the matching Canvas course_id or account_id.

    https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.create
    """
    url = "{}/external_tools".format(context.format(context_id))
    return self._post_resource(url, body=json_data)
python
{ "resource": "" }
q274492
ExternalTools._update_external_tool
test
def _update_external_tool(self, context, context_id, external_tool_id,
                          json_data):
    """
    Update the external tool identified by external_tool_id with the
    passed json data.

    ``context`` is either COURSES_API or ACCOUNTS_API; ``context_id``
    is the matching course_id or account_id.

    https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.update
    """
    url = "{}/external_tools/{}".format(
        context.format(context_id), external_tool_id)
    return self._put_resource(url, body=json_data)
python
{ "resource": "" }
q274493
ExternalTools._delete_external_tool
test
def _delete_external_tool(self, context, context_id, external_tool_id):
    """
    Delete the external tool identified by external_tool_id.

    ``context`` is either COURSES_API or ACCOUNTS_API; ``context_id``
    is the matching course_id or account_id.

    https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.destroy
    """
    url = context.format(context_id) + "/external_tools/{}".format(
        external_tool_id)
    # The response body is unused (the old code bound it to an unused
    # local); _delete_resource raising is the failure signal.
    self._delete_resource(url)
    return True
python
{ "resource": "" }
q274494
check_required
test
def check_required(obj, required_parameters):
    """
    Verify that every required parameter is set (non-None) on ``obj``.

    :param obj: Object to inspect
    :param required_parameters: iterable of attribute names
    :raises DesignError: when a parameter is missing or None
    """
    for name in required_parameters:
        # getattr with a None default folds "missing" and "set to None"
        # into a single check.
        if getattr(obj, name, None) is None:
            raise DesignError("parameter '%s' must be set for '%s' object."
                              % (name, obj.base_type))
python
{ "resource": "" }
q274495
Users.get_user
test
def get_user(self, user_id):
    """
    Returns user profile data.

    https://canvas.instructure.com/doc/api/users.html#method.profile.settings
    """
    profile_url = "{}/profile".format(USERS_API.format(user_id))
    return CanvasUser(data=self._get_resource(profile_url))
python
{ "resource": "" }
q274496
Users.get_users_for_course
test
def get_users_for_course(self, course_id, params=None):
    """
    Returns a list of users for the given course id.
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    url = COURSES_API.format(course_id) + "/users"
    return [CanvasUser(data=datum)
            for datum in self._get_paged_resource(url, params=params)]
python
{ "resource": "" }
q274497
Users.get_users_for_sis_course_id
test
def get_users_for_sis_course_id(self, sis_course_id, params=None):
    """
    Returns a list of users for the given sis course id.
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    return self.get_users_for_course(
        self._sis_id(sis_course_id, sis_field="course"), params)
python
{ "resource": "" }
q274498
Users.create_user
test
def create_user(self, user, account_id=None):
    """
    Create and return a new user and pseudonym for an account.

    Falls back to the client's configured account when no account_id
    is given; raises MissingAccountID when neither is available.

    https://canvas.instructure.com/doc/api/users.html#method.users.create
    """
    if account_id is None:
        account_id = self._canvas_account_id
    if account_id is None:
        raise MissingAccountID()
    url = ACCOUNTS_API.format(account_id) + "/users"
    return CanvasUser(data=self._post_resource(url, user.post_data()))
python
{ "resource": "" }
q274499
Users.get_user_logins
test
def get_user_logins(self, user_id, params=None):
    """
    Return a user's logins for the given user_id.

    https://canvas.instructure.com/doc/api/logins.html#method.pseudonyms.index
    """
    # A mutable default ({}) is shared across calls; normalize instead.
    if params is None:
        params = {}
    url = USERS_API.format(user_id) + "/logins"
    return [Login(data=login_data)
            for login_data in self._get_paged_resource(url, params=params)]
python
{ "resource": "" }