Dataset schema:
  _id               stringlengths (2 to 7)
  title             stringlengths (1 to 88)
  partition         stringclasses (3 values)
  text              stringlengths (75 to 19.8k)
  language          stringclasses (1 value)
  meta_information  dict
q269200
cook_refs
test
def cook_refs(refs, n=4):
    '''Takes a list of reference sentences for a single segment
    and returns an object that encapsulates everything that BLEU
    needs to know about them.'''
    refs = [normalize(ref) for ref in refs]
    maxcounts = {}
    for ref in refs:
        counts = count_ngrams(ref, n)
        for (ngram, count) in list(counts.items()):
            maxcounts[ngram] = max(maxcounts.get(ngram, 0), count)
    return ([len(ref) for ref in refs], maxcounts)
python
{ "resource": "" }
q269201
cook_ref_set
test
def cook_ref_set(ref, n=4):
    '''Takes a reference sentence for a single segment and returns
    an object that encapsulates everything that BLEU needs to know
    about it. Also provides a set because bleualign wants it.'''
    ref = normalize(ref)
    counts = count_ngrams(ref, n)
    return (len(ref), counts, frozenset(counts))
python
{ "resource": "" }
q269202
erfcc
test
def erfcc(x):
    """Complementary error function."""
    z = abs(x)
    t = 1 / (1 + 0.5 * z)
    r = t * math.exp(-z * z - 1.26551223 + t * (1.00002368 + t * (.37409196 +
        t * (.09678418 + t * (-.18628806 + t * (.27886807 + t * (-1.13520398 +
        t * (1.48851587 + t * (-.82215223 + t * .17087277)))))))))
    if (x >= 0.):
        return r
    else:
        return 2. - r
python
{ "resource": "" }
q269203
align_texts
test
def align_texts(source_blocks, target_blocks, params=LanguageIndependent):
    """Creates the sentence alignment of two texts.

    Texts can consist of several blocks. Block boundaries cannot be crossed
    by sentence alignment links. Each block consists of a list that contains
    the lengths (in characters) of the sentences in this block.

    @param source_blocks: The list of blocks in the source text.
    @param target_blocks: The list of blocks in the target text.
    @param params: the sentence alignment parameters.
    @returns: A list of sentence alignment lists
    """
    if len(source_blocks) != len(target_blocks):
        raise ValueError("Source and target texts do not have the same number of blocks.")

    return [align_blocks(source_block, target_block, params)
            for source_block, target_block in zip(source_blocks, target_blocks)]
python
{ "resource": "" }
q269204
get_descriptors_in_module
test
def get_descriptors_in_module(mdl, submodule=True):
    r"""Get descriptors in module.

    Parameters:
        mdl(module): module to search
        submodule(bool): search recursively

    Returns:
        Iterator[Descriptor]
    """
    __all__ = getattr(mdl, "__all__", None)
    if __all__ is None:
        __all__ = dir(mdl)

    all_values = (getattr(mdl, name) for name in __all__ if name[:1] != "_")

    if submodule:
        for v in all_values:
            if is_descriptor_class(v):
                yield v
            if isinstance(v, ModuleType):
                for v in get_descriptors_in_module(v, submodule=True):
                    yield v
    else:
        for v in all_values:
            if is_descriptor_class(v):
                yield v
python
{ "resource": "" }
q269205
Calculator.register_json
test
def register_json(self, obj):
    """Register Descriptors from json descriptor objects.

    Parameters:
        obj(list or dict): descriptors to register
    """
    if not isinstance(obj, list):
        obj = [obj]

    self.register(Descriptor.from_json(j) for j in obj)
python
{ "resource": "" }
q269206
Calculator.register
test
def register(self, desc, version=None, ignore_3D=False):
    r"""Register descriptors.

    Descriptor-like:
        * Descriptor instance: self
        * Descriptor class: use Descriptor.preset() method
        * module: use Descriptor-likes in module
        * Iterable: use Descriptor-likes in Iterable

    Parameters:
        desc(Descriptor-like): descriptors to register
        version(str): version
        ignore_3D(bool): ignore 3D descriptors
    """
    if version is None:
        version = __version__

    version = StrictVersion(version)

    return self._register(desc, version, ignore_3D)
python
{ "resource": "" }
q269207
Calculator.echo
test
def echo(self, s, file=sys.stdout, end="\n"):
    """Output message.

    Parameters:
        s(str): message to output
        file(file-like): output to
        end(str): end mark of message

    Return:
        None
    """
    p = getattr(self, "_progress_bar", None)
    if p is not None:
        # pass `end` through; the original hard-coded "\n", silently
        # ignoring the parameter
        p.write(s, file=file, end=end)
        return

    print(s, file=file, end=end)
python
{ "resource": "" }
q269208
is_descriptor_class
test
def is_descriptor_class(desc, include_abstract=False):
    r"""Check whether desc is a calculatable descriptor class.

    Returns:
        bool
    """
    return (
        isinstance(desc, type)
        and issubclass(desc, Descriptor)
        and (True if include_abstract else not inspect.isabstract(desc))
    )
python
{ "resource": "" }
q269209
Descriptor.to_json
test
def to_json(self):
    """Convert to json serializable dictionary.

    Returns:
        dict: dictionary of descriptor
    """
    d, ps = self._to_json()
    if len(ps) == 0:
        return {"name": d}
    else:
        return {"name": d, "args": ps}
python
{ "resource": "" }
q269210
Descriptor.coord
test
def coord(self):
    """Get 3D coordinate.

    Returns:
        numpy.array[3, N]: coordinate matrix
    """
    if not self.require_3D:
        self.fail(AttributeError("use 3D coordinate in 2D descriptor"))

    return self._context.get_coord(self)
python
{ "resource": "" }
q269211
SurfaceArea.atomic_sa
test
def atomic_sa(self, i):
    r"""Calculate atomic surface area.

    :type i: int
    :param i: atom index

    :rtype: float
    """
    sa = 4.0 * np.pi * self.rads2[i]

    neighbors = self.neighbors.get(i)
    if neighbors is None:
        return sa

    XYZi = self.xyzs[i, np.newaxis].T
    sphere = self.sphere * self.rads[i] + XYZi
    N = sphere.shape[1]

    for j, _ in neighbors:
        XYZj = self.xyzs[j, np.newaxis].T
        d2 = (sphere - XYZj) ** 2
        mask = (d2[0] + d2[1] + d2[2]) > self.rads2[j]
        sphere = np.compress(mask, sphere, axis=1)

    return sa * sphere.shape[1] / N
python
{ "resource": "" }
q269212
SurfaceArea.surface_area
test
def surface_area(self):
    r"""Calculate all atomic surface area.

    :rtype: [float]
    """
    return [self.atomic_sa(i) for i in range(len(self.rads))]
python
{ "resource": "" }
q269213
SurfaceArea.from_mol
test
def from_mol(cls, mol, conformer=-1, solvent_radius=1.4, level=4):
    r"""Construct SurfaceArea from rdkit Mol type.

    :type mol: rdkit.Chem.Mol
    :param mol: input molecule

    :type conformer: int
    :param conformer: conformer id

    :type solvent_radius: float
    :param solvent_radius: solvent radius

    :type level: int
    :param level: mesh level

    :rtype: SurfaceArea
    """
    rs = atoms_to_numpy(lambda a: vdw_radii[a.GetAtomicNum()] + solvent_radius, mol)

    conf = mol.GetConformer(conformer)
    ps = np.array([list(conf.GetAtomPosition(i)) for i in range(mol.GetNumAtoms())])

    return cls(rs, ps, level)
python
{ "resource": "" }
q269214
_Descriptor_from_json
test
def _Descriptor_from_json(self, obj):
    """Create Descriptor instance from json dict.

    Parameters:
        obj(dict): descriptor dict

    Returns:
        Descriptor: descriptor
    """
    descs = getattr(self, "_all_descriptors", None)

    if descs is None:
        from mordred import descriptors
        descs = {cls.__name__: cls for cls in get_descriptors_in_module(descriptors)}
        descs[ConstDescriptor.__name__] = ConstDescriptor
        self._all_descriptors = descs

    return _from_json(obj, descs)
python
{ "resource": "" }
q269215
Result.fill_missing
test
def fill_missing(self, value=np.nan):
    r"""Replace missing values with "value".

    Parameters:
        value: value that replaces missing values

    Returns:
        Result
    """
    return self.__class__(
        self.mol,
        [(value if is_missing(v) else v) for v in self.values()],
        self.keys(),
    )
python
{ "resource": "" }
q269216
Result.drop_missing
test
def drop_missing(self):
    r"""Delete missing values.

    Returns:
        Result
    """
    newvalues = []
    newdescs = []
    for d, v in self.items():
        if not is_missing(v):
            newvalues.append(v)
            newdescs.append(d)

    return self.__class__(self.mol, newvalues, newdescs)
python
{ "resource": "" }
q269217
Result.items
test
def items(self):
    r"""Get items.

    Returns:
        Iterable[(Descriptor, value)]
    """
    return ((k, v) for k, v in zip(self.keys(), self.values()))
python
{ "resource": "" }
q269218
Result.asdict
test
def asdict(self, rawkey=False):
    r"""Convert Result to dict.

    Parameters:
        rawkey(bool):
            * True: dict key is Descriptor instance
            * False: dict key is str

    Returns:
        dict
    """
    if rawkey:
        return dict(self.items())
    else:
        return {str(k): v for k, v in self.items()}
python
{ "resource": "" }
q269219
Result.name
test
def name(self):
    r"""Access descriptor value by descriptor name or instance.

    >>> from mordred import Calculator, descriptors
    >>> from rdkit import Chem
    >>> result = Calculator(descriptors)(Chem.MolFromSmiles("C1CCCCC1"))
    >>> result.name["C2SP3"]
    6
    """
    if self._name_to_value is None:
        self._name_to_value = {str(d): v for d, v in zip(self._descriptors, self._values)}

    return GetValueByName(self._name_to_value)
python
{ "resource": "" }
q269220
log_calls
test
def log_calls(func):
    '''Decorator to log function calls.'''
    def wrapper(*args, **kargs):
        callStr = "%s(%s)" % (func.__name__,
                              ", ".join([repr(p) for p in args] +
                                        ["%s=%s" % (k, repr(v))
                                         for (k, v) in list(kargs.items())]))
        debug(">> %s", callStr)
        ret = func(*args, **kargs)
        debug("<< %s: %s", callStr, repr(ret))
        return ret
    return wrapper
python
{ "resource": "" }
q269221
synchronized
test
def synchronized(func):
    '''Decorator to synchronize function.'''
    func.__lock__ = threading.Lock()

    def synced_func(*args, **kargs):
        with func.__lock__:
            return func(*args, **kargs)

    return synced_func
python
{ "resource": "" }
q269222
progress
test
def progress(msg, *args):
    '''Show current progress message to stderr.

    This function will remember the previous message so that next time,
    it will clear the previous message before showing the next one.
    '''
    # Don't show any progress if the output is directed to a file.
    if not (sys.stdout.isatty() and sys.stderr.isatty()):
        return

    text = (msg % args)
    # progress.prev_message is a function attribute initialized at module
    # level in the full source.
    if progress.prev_message:
        sys.stderr.write(' ' * len(progress.prev_message) + '\r')
    sys.stderr.write(text + '\r')
    progress.prev_message = text
python
{ "resource": "" }
q269223
message
test
def message(msg, *args):
    '''Program message output.'''
    clear_progress()
    text = (msg % args)
    sys.stdout.write(text + '\n')
python
{ "resource": "" }
q269224
fail
test
def fail(message, exc_info=None, status=1, stacktrace=False):
    '''Utility function to handle runtime failures gracefully.
    Show concise information if possible, then terminate program.
    '''
    text = message
    if exc_info:
        text += str(exc_info)
    error(text)
    if stacktrace:
        error(traceback.format_exc())
    clean_tempfiles()
    if __name__ == '__main__':
        sys.exit(status)
    else:
        raise RuntimeError(status)
python
{ "resource": "" }
q269225
tempfile_get
test
def tempfile_get(target):
    '''Get a temp filename for atomic download.'''
    fn = '%s-%s.tmp' % (target, ''.join(random.Random().sample(
        "0123456789abcdefghijklmnopqrstuvwxyz", 15)))
    TEMP_FILES.add(fn)
    return fn
python
{ "resource": "" }
q269226
tempfile_set
test
def tempfile_set(tempfile, target):
    '''Atomically rename and clean tempfile'''
    if target:
        os.rename(tempfile, target)
    else:
        os.unlink(tempfile)

    # Check for the tempfile itself; the original tested `target`, which is
    # never added to TEMP_FILES.
    if tempfile in TEMP_FILES:
        TEMP_FILES.remove(tempfile)
python
{ "resource": "" }
q269227
clean_tempfiles
test
def clean_tempfiles():
    '''Clean up temp files'''
    for fn in TEMP_FILES:
        if os.path.exists(fn):
            os.unlink(fn)
python
{ "resource": "" }
q269228
S3URL.get_fixed_path
test
def get_fixed_path(self):
    '''Get the fixed part of the path without wildcard'''
    pi = self.path.split(PATH_SEP)
    fi = []
    for p in pi:
        if '*' in p or '?' in p:
            break
        fi.append(p)
    return PATH_SEP.join(fi)
python
{ "resource": "" }
q269229
BotoClient.get_legal_params
test
def get_legal_params(self, method):
    '''Given an API name, list all legal parameters using the boto3 service model.'''
    if method not in self.client.meta.method_to_api_mapping:
        # Injected methods. Ignore.
        return []
    api = self.client.meta.method_to_api_mapping[method]
    shape = self.client.meta.service_model.operation_model(api).input_shape
    if shape is None:
        # No params needed for this API.
        return []
    return shape.members.keys()
python
{ "resource": "" }
q269230
BotoClient.merge_opt_params
test
def merge_opt_params(self, method, kargs):
    '''Combine existing parameters with extra options supplied from
    command line options. Carefully merge special type of parameter
    if needed.
    '''
    for key in self.legal_params[method]:
        if not hasattr(self.opt, key) or getattr(self.opt, key) is None:
            continue
        if key in kargs and type(kargs[key]) == dict:
            assert(type(getattr(self.opt, key)) == dict)
            # Merge two dictionaries. items() works on both Python 2 and 3;
            # the original used the Python-2-only iteritems().
            for k, v in getattr(self.opt, key).items():
                kargs[key][k] = v
        else:
            # Overwrite values.
            kargs[key] = getattr(self.opt, key)
    return kargs
python
{ "resource": "" }
q269231
BotoClient.add_options
test
def add_options(parser):
    '''Add the whole list of API parameters into optparse.'''
    for param, param_type, param_doc in BotoClient.EXTRA_CLIENT_PARAMS:
        parser.add_option('--API-' + param, help=param_doc, type=param_type, dest=param)
python
{ "resource": "" }
q269232
TaskQueue.terminate
test
def terminate(self, exc_info=None):
    '''Terminate all threads by deleting the queue and forcing the child
    threads to quit.
    '''
    if exc_info:
        self.exc_info = exc_info
    try:
        while self.get_nowait():
            self.task_done()
    except Queue.Empty:
        pass
python
{ "resource": "" }
q269233
ThreadPool.add_task
test
def add_task(self, func_name, *args, **kargs):
    '''Utility function to add a single task into task queue'''
    self.tasks.put((func_name, 0, args, kargs))
python
{ "resource": "" }
q269234
ThreadPool.join
test
def join(self):
    '''Utility function to wait for all tasks to complete'''
    self.tasks.join()

    # Force each thread to break its loop.
    for worker in self.workers:
        self.tasks.put(None)

    # Wait for all threads to terminate.
    for worker in self.workers:
        worker.join()
        worker.s3 = None
python
{ "resource": "" }
q269235
ThreadPool.processed
test
def processed(self):
    '''Increase the processed task counter and show progress message'''
    self.processed_tasks += 1
    qsize = self.tasks.qsize()
    if qsize > 0:
        progress('[%d task(s) completed, %d remaining, %d thread(s)]',
                 self.processed_tasks, qsize, len(self.workers))
    else:
        progress('[%d task(s) completed, %d thread(s)]',
                 self.processed_tasks, len(self.workers))
python
{ "resource": "" }
q269236
S3Handler.s3_keys_from_env
test
def s3_keys_from_env():
    '''Retrieve S3 access keys from the environment, or None if not present.'''
    env = os.environ
    if S3_ACCESS_KEY_NAME in env and S3_SECRET_KEY_NAME in env:
        keys = (env[S3_ACCESS_KEY_NAME], env[S3_SECRET_KEY_NAME])
        debug("read S3 keys from environment")
        return keys
    else:
        return None
python
{ "resource": "" }
q269237
S3Handler.s3_keys_from_cmdline
test
def s3_keys_from_cmdline(opt):
    '''Retrieve S3 access keys from the command line, or None if not present.'''
    if opt.access_key != None and opt.secret_key != None:
        keys = (opt.access_key, opt.secret_key)
        debug("read S3 keys from commandline")
        return keys
    else:
        return None
python
{ "resource": "" }
q269238
S3Handler.s3_keys_from_s3cfg
test
def s3_keys_from_s3cfg(opt):
    '''Retrieve S3 access key settings from s3cmd's config file, if present;
    otherwise return None.'''
    try:
        if opt.s3cfg != None:
            s3cfg_path = "%s" % opt.s3cfg
        else:
            s3cfg_path = "%s/.s3cfg" % os.environ["HOME"]
        if not os.path.exists(s3cfg_path):
            return None
        config = ConfigParser.ConfigParser()
        config.read(s3cfg_path)
        keys = config.get("default", "access_key"), config.get("default", "secret_key")
        debug("read S3 keys from %s file", s3cfg_path)
        return keys
    except Exception as e:
        info("could not read S3 keys from %s file; skipping (%s)", s3cfg_path, e)
        return None
python
{ "resource": "" }
q269239
S3Handler.init_s3_keys
test
def init_s3_keys(opt):
    '''Initialize s3 access keys from environment variable or s3cfg config file.'''
    S3Handler.S3_KEYS = (S3Handler.s3_keys_from_cmdline(opt) or
                         S3Handler.s3_keys_from_env() or
                         S3Handler.s3_keys_from_s3cfg(opt))
python
{ "resource": "" }
q269240
S3Handler.connect
test
def connect(self):
    '''Connect to S3 storage'''
    try:
        if S3Handler.S3_KEYS:
            self.s3 = BotoClient(self.opt, S3Handler.S3_KEYS[0], S3Handler.S3_KEYS[1])
        else:
            self.s3 = BotoClient(self.opt)
    except Exception as e:
        raise RetryFailure('Unable to connect to s3: %s' % e)
python
{ "resource": "" }
q269241
S3Handler.list_buckets
test
def list_buckets(self):
    '''List all buckets'''
    result = []
    for bucket in self.s3.list_buckets().get('Buckets') or []:
        result.append({
            'name': S3URL.combine('s3', bucket['Name'], ''),
            'is_dir': True,
            'size': 0,
            'last_modified': bucket['CreationDate']
        })
    return result
python
{ "resource": "" }
q269242
S3Handler.s3walk
test
def s3walk(self, basedir, show_dir=None):
    '''Walk through an S3 directory. This function initiates a walk with a
    basedir. It also supports multiple wildcards.
    '''
    # Provide the default value from command line if no override.
    if not show_dir:
        show_dir = self.opt.show_dir

    # Trailing slash normalization: we want `ls 's3://foo/bar/'` to give the
    # same result as 's3://foo/bar'. Since we call partial_match() to check
    # wildcards, we need to ensure the number of slashes stays the same.
    if basedir[-1] == PATH_SEP:
        basedir = basedir[0:-1]

    s3url = S3URL(basedir)
    result = []

    pool = ThreadPool(ThreadUtil, self.opt)
    pool.s3walk(s3url, s3url.get_fixed_path(), s3url.path, result)
    pool.join()

    # automatic directory detection
    if not show_dir and len(result) == 1 and result[0]['is_dir']:
        path = result[0]['name']
        s3url = S3URL(path)
        result = []
        pool = ThreadPool(ThreadUtil, self.opt)
        pool.s3walk(s3url, s3url.get_fixed_path(), s3url.path, result)
        pool.join()

    def compare(x, y):
        '''Comparator for ls output'''
        result = -cmp(x['is_dir'], y['is_dir'])  # directories first
        if result != 0:
            return result
        return cmp(x['name'], y['name'])

    return sorted(result, key=cmp_to_key(compare))
python
{ "resource": "" }
q269243
S3Handler.local_walk
test
def local_walk(self, basedir):
    '''Walk through local directories from root basedir'''
    result = []
    for root, dirs, files in os.walk(basedir):
        for f in files:
            result.append(os.path.join(root, f))
    return result
python
{ "resource": "" }
q269244
S3Handler.source_expand
test
def source_expand(self, source):
    '''Expand the wildcards for an S3 path. This emulates the shell
    expansion for wildcards if the input is a local path.
    '''
    result = []

    if not isinstance(source, list):
        source = [source]

    for src in source:
        # XXX Hacky: We need to disable recursive when we expand the input
        # parameters, need to pass this as an override parameter if
        # provided.
        tmp = self.opt.recursive
        self.opt.recursive = False
        result += [f['name'] for f in self.s3walk(src, True)]
        self.opt.recursive = tmp

    if (len(result) == 0) and (not self.opt.ignore_empty_source):
        fail("[Runtime Failure] Source doesn't exist.")

    return result
python
{ "resource": "" }
q269245
S3Handler.put_single_file
test
def put_single_file(self, pool, source, target):
    '''Upload a single file or a directory by adding a task into queue'''
    if os.path.isdir(source):
        if self.opt.recursive:
            for f in (f for f in self.local_walk(source) if not os.path.isdir(f)):
                target_url = S3URL(target)
                # deal with ./ or ../ here by normalizing the path.
                joined_path = os.path.normpath(os.path.join(target_url.path,
                                                            os.path.relpath(f, source)))
                pool.upload(f, S3URL.combine('s3', target_url.bucket, joined_path))
        else:
            message('omitting directory "%s".' % source)
    else:
        pool.upload(source, target)
python
{ "resource": "" }
q269246
S3Handler.put_files
test
def put_files(self, source, target):
    '''Upload files to S3. This function can handle multiple file uploads if
    source is a list. It also works in recursive mode, which copies all files
    and keeps the directory structure under the given source directory.
    '''
    pool = ThreadPool(ThreadUtil, self.opt)
    if not isinstance(source, list):
        source = [source]

    if target[-1] == PATH_SEP:
        for src in source:
            self.put_single_file(pool, src, os.path.join(target, self.get_basename(src)))
    else:
        if len(source) == 1:
            self.put_single_file(pool, source[0], target)
        else:
            raise Failure('Target "%s" is not a directory (with a trailing slash).' % target)

    pool.join()
python
{ "resource": "" }
q269247
S3Handler.create_bucket
test
def create_bucket(self, source):
    '''Use the create_bucket API to create a new bucket'''
    s3url = S3URL(source)
    message('Creating %s', source)

    if not self.opt.dry_run:
        resp = self.s3.create_bucket(Bucket=s3url.bucket)
        if resp['ResponseMetadata']["HTTPStatusCode"] == 200:
            message('Done.')
        else:
            raise Failure('Unable to create bucket %s' % source)
python
{ "resource": "" }
q269248
S3Handler.update_privilege
test
def update_privilege(self, obj, target):
    '''Get privileges from metadata of the source in s3, and apply them to target'''
    if 'privilege' in obj['Metadata']:
        os.chmod(target, int(obj['Metadata']['privilege'], 8))
python
{ "resource": "" }
q269249
S3Handler.print_files
test
def print_files(self, source):
    '''Print out a series of files'''
    sources = self.source_expand(source)
    for source in sources:
        s3url = S3URL(source)
        response = self.s3.get_object(Bucket=s3url.bucket, Key=s3url.path)
        message('%s', response['Body'].read())
python
{ "resource": "" }
q269250
S3Handler.get_single_file
test
def get_single_file(self, pool, source, target):
    '''Download a single file or a directory by adding a task into queue'''
    if source[-1] == PATH_SEP:
        if self.opt.recursive:
            basepath = S3URL(source).path
            for f in (f for f in self.s3walk(source) if not f['is_dir']):
                pool.download(f['name'],
                              os.path.join(target,
                                           os.path.relpath(S3URL(f['name']).path, basepath)))
        else:
            message('omitting directory "%s".' % source)
    else:
        pool.download(source, target)
python
{ "resource": "" }
q269251
S3Handler.get_files
test
def get_files(self, source, target):
    '''Download files. This function can handle multiple files if the source
    S3 URL has wildcard characters. It also handles recursive mode by
    downloading all files and keeping the directory structure.
    '''
    pool = ThreadPool(ThreadUtil, self.opt)
    source = self.source_expand(source)

    if os.path.isdir(target):
        for src in source:
            self.get_single_file(pool, src,
                                 os.path.join(target, self.get_basename(S3URL(src).path)))
    else:
        if len(source) > 1:
            raise Failure('Target "%s" is not a directory.' % target)
        # Get file if it exists on s3 otherwise do nothing
        elif len(source) == 1:
            self.get_single_file(pool, source[0], target)
        else:
            # source_expand may return an empty list only if
            # ignore-empty-source is set to true
            pass

    pool.join()
python
{ "resource": "" }
q269252
S3Handler.cp_single_file
test
def cp_single_file(self, pool, source, target, delete_source):
    '''Copy a single file or a directory by adding a task into queue'''
    if source[-1] == PATH_SEP:
        if self.opt.recursive:
            basepath = S3URL(source).path
            for f in (f for f in self.s3walk(source) if not f['is_dir']):
                pool.copy(f['name'],
                          os.path.join(target,
                                       os.path.relpath(S3URL(f['name']).path, basepath)),
                          delete_source=delete_source)
        else:
            message('omitting directory "%s".' % source)
    else:
        pool.copy(source, target, delete_source=delete_source)
python
{ "resource": "" }
q269253
S3Handler.cp_files
test
def cp_files(self, source, target, delete_source=False):
    '''Copy files. This function can handle multiple files if the source
    S3 URL has wildcard characters. It also handles recursive mode by
    copying all files and keeping the directory structure.
    '''
    pool = ThreadPool(ThreadUtil, self.opt)
    source = self.source_expand(source)

    if target[-1] == PATH_SEP:
        for src in source:
            self.cp_single_file(pool, src,
                                os.path.join(target, self.get_basename(S3URL(src).path)),
                                delete_source)
    else:
        if len(source) > 1:
            raise Failure('Target "%s" is not a directory (with a trailing slash).' % target)
        # Copy file if it exists otherwise do nothing
        elif len(source) == 1:
            self.cp_single_file(pool, source[0], target, delete_source)
        else:
            # source_expand may return an empty list only if
            # ignore-empty-source is set to true
            pass

    pool.join()
python
{ "resource": "" }
q269254
S3Handler.del_files
test
def del_files(self, source):
    '''Delete files on S3'''
    src_files = []
    for obj in self.s3walk(source):
        if not obj['is_dir']:  # ignore directories
            src_files.append(obj['name'])

    pool = ThreadPool(ThreadUtil, self.opt)
    pool.batch_delete(src_files)
    pool.join()
python
{ "resource": "" }
q269255
S3Handler.relative_dir_walk
test
def relative_dir_walk(self, dir):
    '''Generic version of directory walk. Return file list without base path
    for comparison.
    '''
    result = []
    if S3URL.is_valid(dir):
        basepath = S3URL(dir).path
        for f in (f for f in self.s3walk(dir) if not f['is_dir']):
            result.append(os.path.relpath(S3URL(f['name']).path, basepath))
    else:
        for f in (f for f in self.local_walk(dir) if not os.path.isdir(f)):
            result.append(os.path.relpath(f, dir))
    return result
python
{ "resource": "" }
q269256
S3Handler.dsync_files
test
def dsync_files(self, source, target):
    '''Sync directory to directory.'''
    src_s3_url = S3URL.is_valid(source)
    dst_s3_url = S3URL.is_valid(target)

    source_list = self.relative_dir_walk(source)
    if len(source_list) == 0 or '.' in source_list:
        raise Failure('Sync command need to sync directory to directory.')

    sync_list = [(os.path.join(source, f), os.path.join(target, f)) for f in source_list]

    pool = ThreadPool(ThreadUtil, self.opt)
    if src_s3_url and not dst_s3_url:
        for src, dest in sync_list:
            pool.download(src, dest)
    elif not src_s3_url and dst_s3_url:
        for src, dest in sync_list:
            pool.upload(src, dest)
    elif src_s3_url and dst_s3_url:
        for src, dest in sync_list:
            pool.copy(src, dest)
    else:
        raise InvalidArgument('Cannot sync two local directories.')
    pool.join()

    if self.opt.delete_removed:
        target_list = self.relative_dir_walk(target)
        remove_list = [os.path.join(target, f)
                       for f in (set(target_list) - set(source_list))]

        if S3URL.is_valid(target):
            pool = ThreadPool(ThreadUtil, self.opt)
            pool.batch_delete(remove_list)
            pool.join()
        else:
            for f in remove_list:
                try:
                    os.unlink(f)
                    message('Delete %s', f)
                except:
                    pass
python
{ "resource": "" }
q269257
LocalMD5Cache.file_hash
test
def file_hash(self, filename, block_size=2**20):
    '''Calculate MD5 hash code for a local file'''
    m = hashlib.md5()
    with open(filename, 'rb') as f:
        while True:
            data = f.read(block_size)
            if not data:
                break
            m.update(data)
    return m.hexdigest()
python
{ "resource": "" }
q269258
LocalMD5Cache.get_md5
test
def get_md5(self):
    '''Get or calculate MD5 value of the local file.'''
    if self.md5 is None:
        self.md5 = self.file_hash(self.filename)
    return self.md5
python
{ "resource": "" }
q269259
ThreadUtil.mkdirs
test
def mkdirs(self, target):
    '''Ensure all directories are created for a given target file.'''
    path = os.path.dirname(target)
    if path and path != PATH_SEP and not os.path.isdir(path):
        # Multi-threading means there will be interleaved execution
        # between the check and creation of the directory.
        try:
            os.makedirs(path)
        except OSError as ose:
            if ose.errno != errno.EEXIST:
                raise Failure('Unable to create directory (%s)' % (path,))
python
{ "resource": "" }
q269260
ThreadUtil.sync_check
test
def sync_check(self, md5cache, remoteKey):
    '''Check MD5 for a local file and a remote file. Return True if they
    have the same md5 hash, otherwise False.
    '''
    if not remoteKey:
        return False
    if not os.path.exists(md5cache.filename):
        return False
    localmd5 = md5cache.get_md5()

    # check multiple md5 locations
    return ('ETag' in remoteKey and remoteKey['ETag'] == '"%s"' % localmd5) or \
           ('md5' in remoteKey and remoteKey['md5'] == localmd5) or \
           ('md5' in remoteKey['Metadata'] and remoteKey['Metadata']['md5'] == localmd5)
python
{ "resource": "" }
q269261
ThreadUtil.partial_match
test
def partial_match(self, path, filter_path):
    '''Partially match a path and a filter_path with wildcards.
    This function will return True if this path partially matches a
    filter path. This is used for walking through directories with
    multiple-level wildcards.
    '''
    if not path or not filter_path:
        return True

    # trailing slash normalization
    if path[-1] == PATH_SEP:
        path = path[0:-1]
    if filter_path[-1] == PATH_SEP:
        filter_path += '*'

    pi = path.split(PATH_SEP)
    fi = filter_path.split(PATH_SEP)

    # Here, if we are in recursive mode, we allow pi to be longer than fi.
    # Otherwise, the length of pi should be equal to or less than the
    # length of fi.
    min_len = min(len(pi), len(fi))
    matched = fnmatch.fnmatch(PATH_SEP.join(pi[0:min_len]), PATH_SEP.join(fi[0:min_len]))
    return matched and (self.opt.recursive or len(pi) <= len(fi))
python
{ "resource": "" }
q269262
ThreadUtil.s3walk
test
def s3walk(self, s3url, s3dir, filter_path, result):
    '''Thread worker for s3walk. Recursively walk into all subdirectories
    if they still match the filter path partially.
    '''
    paginator = self.s3.get_paginator('list_objects')
    filter_path_level = filter_path.count(PATH_SEP)

    for page in paginator.paginate(Bucket=s3url.bucket, Prefix=s3dir,
                                   Delimiter=PATH_SEP,
                                   PaginationConfig={'PageSize': 1000}):
        # Get subdirectories first.
        for obj in page.get('CommonPrefixes') or []:
            obj_name = obj['Prefix']
            if not self.partial_match(obj_name, filter_path):
                continue
            if self.opt.recursive or (obj_name.count(PATH_SEP) != filter_path_level + 1):
                self.pool.s3walk(s3url, obj_name, filter_path, result)
            else:
                self.conditional(result, {
                    'name': S3URL.combine(s3url.proto, s3url.bucket, obj_name),
                    'is_dir': True,
                    'size': 0,
                    'last_modified': None
                })

        # Then get all items in this folder.
        for obj in page.get('Contents') or []:
            obj_name = obj['Key']
            if not self.partial_match(obj_name, filter_path):
                continue
            if self.opt.recursive or obj_name.count(PATH_SEP) == filter_path_level:
                self.conditional(result, {
                    'name': S3URL.combine(s3url.proto, s3url.bucket, obj_name),
                    'is_dir': False,
                    'size': obj['Size'],
                    'last_modified': obj['LastModified']
                })
python
{ "resource": "" }
q269263
ThreadUtil.conditional
test
def conditional(self, result, obj):
    '''Check a file item against the given conditions.'''
    fileonly = (self.opt.last_modified_before is not None) or \
               (self.opt.last_modified_after is not None)

    if obj['is_dir']:
        if not fileonly:
            result.append(obj)
        return

    if (self.opt.last_modified_before is not None) and \
            obj['last_modified'] >= self.opt.last_modified_before:
        return
    if (self.opt.last_modified_after is not None) and \
            obj['last_modified'] <= self.opt.last_modified_after:
        return

    result.append(obj)
python
{ "resource": "" }
q269264
ThreadUtil.get_file_privilege
test
def get_file_privilege(self, source):
    '''Get privileges of a local file'''
    try:
        return str(oct(os.stat(source).st_mode)[-3:])
    except Exception as e:
        raise Failure('Could not get stat for %s, error_message = %s', source, e)
python
{ "resource": "" }
q269265
ThreadUtil.lookup
test
def lookup(self, s3url):
    '''Get the s3 object with the S3 URL. Return None if not exist.'''
    try:
        return self.s3.head_object(Bucket=s3url.bucket, Key=s3url.path)
    except BotoClient.ClientError as e:
        if e.response['ResponseMetadata']['HTTPStatusCode'] == 404:
            return None
        else:
            raise e
python
{ "resource": "" }
q269266
ThreadUtil.read_file_chunk
test
def read_file_chunk(self, source, pos, chunk):
    '''Read local file chunk'''
    if chunk == 0:
        return StringIO()
    data = None
    with open(source, 'rb') as f:
        f.seek(pos)
        data = f.read(chunk)
    if not data:
        raise Failure('Unable to read data from source: %s' % source)
    return StringIO(data)
python
{ "resource": "" }
q269267
ThreadUtil.upload
test
def upload(self, source, target, mpi=None, pos=0, chunk=0, part=0):
    '''Thread worker for upload operation.'''
    s3url = S3URL(target)
    obj = self.lookup(s3url)

    # Initialization: Set up multithreaded uploads.
    if not mpi:
        fsize = os.path.getsize(source)
        md5cache = LocalMD5Cache(source)

        # optional checks
        if self.opt.dry_run:
            message('%s => %s', source, target)
            return
        elif self.opt.sync_check and self.sync_check(md5cache, obj):
            message('%s => %s (synced)', source, target)
            return
        elif not self.opt.force and obj:
            raise Failure('File already exists: %s' % target)

        if fsize < self.opt.max_singlepart_upload_size:
            data = self.read_file_chunk(source, 0, fsize)
            self.s3.put_object(
                Bucket=s3url.bucket, Key=s3url.path, Body=data,
                Metadata={'md5': md5cache.get_md5(),
                          'privilege': self.get_file_privilege(source)})
            message('%s => %s', source, target)
            return

        # Here we need to have our own md5 value because multipart upload
        # calculates different md5 values.
        response = self.s3.create_multipart_upload(
            Bucket=s3url.bucket, Key=s3url.path,
            Metadata={'md5': md5cache.get_md5(),
                      'privilege': self.get_file_privilege(source)})
        upload_id = response['UploadId']

        for args in self.get_file_splits(upload_id, source, target, fsize,
                                         self.opt.multipart_split_size):
            self.pool.upload(*args)
        return

    data = self.read_file_chunk(source, pos, chunk)
    response = self.s3.upload_part(Bucket=s3url.bucket, Key=s3url.path,
                                   UploadId=mpi.id, Body=data, PartNumber=part)

    # Finalize
    if mpi.complete({'ETag': response['ETag'], 'PartNumber': part}):
        try:
            self.s3.complete_multipart_upload(
                Bucket=s3url.bucket, Key=s3url.path, UploadId=mpi.id,
                MultipartUpload={'Parts': mpi.sorted_parts()})
            message('%s => %s', source, target)
        except Exception as e:
            message('Unable to complete upload: %s', str(e))
            self.s3.abort_multipart_upload(Bucket=s3url.bucket, Key=s3url.path,
                                           UploadId=mpi.id)
            raise RetryFailure('Upload failed: Unable to complete upload %s.' % source)
python
{ "resource": "" }
q269268
ThreadUtil._verify_file_size
test
def _verify_file_size(self, obj, downloaded_file):
    '''Verify the file size of the downloaded file.'''
    file_size = os.path.getsize(downloaded_file)
    if int(obj['ContentLength']) != file_size:
        raise RetryFailure('Downloaded file size inconsistent: %s' % (repr(obj)))
python
{ "resource": "" }
q269269
ThreadUtil.write_file_chunk
test
def write_file_chunk(self, target, pos, chunk, body):
    '''Write local file chunk'''
    fd = os.open(target, os.O_CREAT | os.O_WRONLY)
    try:
        os.lseek(fd, pos, os.SEEK_SET)
        data = body.read(chunk)
        num_bytes_written = os.write(fd, data)
        if num_bytes_written != len(data):
            # Report len(data); the original message used sys.getsizeof(data),
            # which measures object overhead rather than the payload size
            # actually compared above.
            raise RetryFailure('Number of bytes written inconsistent: %s != %s'
                               % (num_bytes_written, len(data)))
    finally:
        os.close(fd)
python
{ "resource": "" }
q269270
ThreadUtil.copy
test
def copy(self, source, target, mpi=None, pos=0, chunk=0, part=0, delete_source=False):
    '''Copy a single file from source to target using boto S3 library.'''
    if self.opt.dry_run:
        message('%s => %s' % (source, target))
        return

    source_url = S3URL(source)
    target_url = S3URL(target)

    if not mpi:
        obj = self.lookup(source_url)
        fsize = int(obj['ContentLength'])

        if fsize < self.opt.max_singlepart_copy_size:
            self.s3.copy_object(
                Bucket=target_url.bucket, Key=target_url.path,
                CopySource={'Bucket': source_url.bucket, 'Key': source_url.path})
            message('%s => %s' % (source, target))
            if delete_source:
                self.delete(source)
            return

        response = self.s3.create_multipart_upload(
            Bucket=target_url.bucket, Key=target_url.path, Metadata=obj['Metadata'])
        upload_id = response['UploadId']

        for args in self.get_file_splits(upload_id, source, target, fsize,
                                         self.opt.multipart_split_size):
            self.pool.copy(*args, delete_source=delete_source)
        return

    response = self.s3.upload_part_copy(
        Bucket=target_url.bucket, Key=target_url.path,
        CopySource={'Bucket': source_url.bucket, 'Key': source_url.path},
        CopySourceRange='bytes=%d-%d' % (pos, pos + chunk - 1),
        UploadId=mpi.id, PartNumber=part)

    if mpi.complete({'ETag': response['CopyPartResult']['ETag'], 'PartNumber': part}):
        try:
            # Finalize copy operation.
            self.s3.complete_multipart_upload(
                Bucket=target_url.bucket, Key=target_url.path, UploadId=mpi.id,
                MultipartUpload={'Parts': mpi.sorted_parts()})
            if delete_source:
                self.delete(source)
            message('%s => %s' % (source, target))
        except Exception as e:
            message('Unable to complete upload: %s', str(e))
            # Abort on the target, where the multipart upload was created;
            # the original aborted on the source bucket/key.
            self.s3.abort_multipart_upload(Bucket=target_url.bucket,
                                           Key=target_url.path, UploadId=mpi.id)
            raise RetryFailure('Copy failed: Unable to complete copy %s.' % source)
python
{ "resource": "" }
q269271
CommandHandler.run
test
def run(self, args):
    '''Main entry to handle commands. Dispatch to individual command handler.'''
    if len(args) == 0:
        raise InvalidArgument('No command provided')
    cmd = args[0]
    if cmd + '_handler' in CommandHandler.__dict__:
        CommandHandler.__dict__[cmd + '_handler'](self, args)
    else:
        raise InvalidArgument('Unknown command %s' % cmd)
python
{ "resource": "" }
q269272
CommandHandler.validate
test
def validate(self, format, args):
    '''Validate input parameters with given format. This function also
    checks for wildcards for recursive mode.
    '''
    fmtMap = {
        'cmd': 'Command',
        's3': 's3 path',
        'local': 'local path'
    }
    fmts = format.split('|')
    if len(fmts) != len(args):
        raise InvalidArgument('Invalid number of parameters')
    for i, fmt in enumerate(fmts):
        valid = False
        for f in fmt.split(','):
            if f == 'cmd' and args[i] + '_handler' in CommandHandler.__dict__:
                valid = True
            if f == 's3' and S3URL.is_valid(args[i]):
                valid = True
            if f == 'local' and not S3URL.is_valid(args[i]):
                valid = True
        if not valid:
            raise InvalidArgument('Invalid parameter: %s, %s expected'
                                  % (args[i], fmtMap[fmt.split(',')[0]]))
python
{ "resource": "" }
q269273
CommandHandler.pretty_print
test
def pretty_print(self, objlist):
    '''Pretty print the result of s3walk. Here we calculate the maximum
    width of each column and align them.
    '''
    def normalize_time(timestamp):
        '''Normalize the timestamp format for pretty print.'''
        if timestamp is None:
            return ' ' * 16
        return TIMESTAMP_FORMAT % (timestamp.year, timestamp.month, timestamp.day,
                                   timestamp.hour, timestamp.minute)

    cwidth = [0, 0, 0]
    format = '%%%ds %%%ds %%-%ds'

    # Calculate maximum width for each column.
    result = []
    for obj in objlist:
        last_modified = normalize_time(obj['last_modified'])
        size = str(obj['size']) if not obj['is_dir'] else 'DIR'
        name = obj['name']
        item = (last_modified, size, name)
        for i, value in enumerate(item):
            if cwidth[i] < len(value):
                cwidth[i] = len(value)
        result.append(item)

    # Format output.
    for item in result:
        text = (format % tuple(cwidth)) % item
        message('%s', text.rstrip())
python
{ "resource": "" }
q269274
CommandHandler.ls_handler
test
def ls_handler(self, args):
    '''Handler for ls command'''
    if len(args) == 1:
        self.pretty_print(self.s3handler().list_buckets())
        return
    self.validate('cmd|s3', args)
    self.pretty_print(self.s3handler().s3walk(args[1]))
python
{ "resource": "" }
q269275
CommandHandler.mb_handler
test
def mb_handler(self, args):
    '''Handler for mb command'''
    if len(args) == 1:
        raise InvalidArgument('No s3 bucketname provided')
    self.validate('cmd|s3', args)
    self.s3handler().create_bucket(args[1])
python
{ "resource": "" }
q269276
CommandHandler.put_handler
test
def put_handler(self, args):
    '''Handler for put command'''
    # Special check for shell expansion
    if len(args) < 3:
        raise InvalidArgument('Invalid number of parameters')
    self.validate('|'.join(['cmd'] + ['local'] * (len(args) - 2) + ['s3']), args)

    source = args[1:-1]  # shell expansion
    target = args[-1]
    self.s3handler().put_files(source, target)
python
{ "resource": "" }
q269277
CommandHandler.get_handler
test
def get_handler(self, args):
    '''Handler for get command'''
    # Special case when we don't have target directory.
    if len(args) == 2:
        args += ['.']
    self.validate('cmd|s3|local', args)
    source = args[1]
    target = args[2]
    self.s3handler().get_files(source, target)
python
{ "resource": "" }
q269278
CommandHandler.cat_handler
test
def cat_handler(self, args):
    '''Handler for cat command'''
    self.validate('cmd|s3', args)
    source = args[1]
    self.s3handler().print_files(source)
python
{ "resource": "" }
q269279
CommandHandler.dsync_handler
test
def dsync_handler(self, args):
    '''Handler for dsync command.'''
    self.opt.recursive = True
    self.opt.sync_check = True
    self.opt.force = True

    self.validate('cmd|s3,local|s3,local', args)
    source = args[1]
    target = args[2]
    self.s3handler().dsync_files(source, target)
python
{ "resource": "" }
q269280
CommandHandler.cp_handler
test
def cp_handler(self, args):
    '''Handler for cp command'''
    self.validate('cmd|s3|s3', args)
    source = args[1]
    target = args[2]
    self.s3handler().cp_files(source, target)
python
{ "resource": "" }
q269281
CommandHandler.mv_handler
test
def mv_handler(self, args):
    '''Handler for mv command'''
    self.validate('cmd|s3|s3', args)
    source = args[1]
    target = args[2]
    self.s3handler().cp_files(source, target, delete_source=True)
python
{ "resource": "" }
q269282
CommandHandler.del_handler
test
def del_handler(self, args):
    '''Handler for del command'''
    self.validate('cmd|s3', args)
    source = args[1]
    self.s3handler().del_files(source)
python
{ "resource": "" }
q269283
CommandHandler.du_handler
test
def du_handler(self, args):
    '''Handler for size command'''
    for src, size in self.s3handler().size(args[1:]):
        message('%s\t%s' % (size, src))
python
{ "resource": "" }
q269284
CommandHandler._totalsize_handler
test
def _totalsize_handler(self, args):
    '''Handler of total_size command'''
    total_size = 0
    for src, size in self.s3handler().size(args[1:]):
        total_size += size
    message(str(total_size))
python
{ "resource": "" }
q269285
ExtendedOptParser.match_date
test
def match_date(self, value):
    '''Search for date information in the string'''
    m = self.REGEX_DATE.search(value)
    date = datetime.datetime.utcnow().date()
    if m:
        date = datetime.date(int(m.group(1)), int(m.group(2)), int(m.group(3)))
        value = self.REGEX_DATE.sub('', value)
    return (date, value)
python
{ "resource": "" }
q269286
ExtendedOptParser.match_time
test
def match_time(self, value):
    '''Search for time information in the string'''
    m = self.REGEX_TIME.search(value)
    time = datetime.datetime.utcnow().time()
    if m:
        time = datetime.time(int(m.group(1)), int(m.group(2)))
        value = self.REGEX_TIME.sub('', value)
    return (time, value)
python
{ "resource": "" }
q269287
ExtendedOptParser.match_delta
test
def match_delta(self, value):
    '''Search for timedelta information in the string'''
    m = self.REGEX_DELTA.search(value)
    delta = datetime.timedelta(days=0)
    if m:
        d = int(m.group(1))
        if m.group(3) == 'ago' or m.group(3) == 'before':
            d = -d
        if m.group(2) == 'minute':
            delta = datetime.timedelta(minutes=d)
        elif m.group(2) == 'hour':
            delta = datetime.timedelta(hours=d)
        elif m.group(2) == 'day':
            delta = datetime.timedelta(days=d)
        elif m.group(2) == 'week':
            delta = datetime.timedelta(weeks=d)
        value = self.REGEX_DELTA.sub('', value)
    return (delta, value)
python
{ "resource": "" }
q269288
ExtendedOptParser.check_dict
test
def check_dict(self, opt, value):
    '''Take json as dictionary parameter'''
    try:
        return json.loads(value)
    except:
        raise optparse.OptionValueError("Option %s: invalid dict value: %r" % (opt, value))
python
{ "resource": "" }
q269289
XiaomiGatewayDiscovery.discover_gateways
test
def discover_gateways(self):
    """Discover gateways using multicast"""
    _socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    _socket.settimeout(5.0)
    if self._interface != 'any':
        _socket.bind((self._interface, 0))

    for gateway in self._gateways_config:
        host = gateway.get('host')
        port = gateway.get('port')
        sid = gateway.get('sid')

        if not (host and port and sid):
            continue
        try:
            ip_address = socket.gethostbyname(host)
            if gateway.get('disable'):
                _LOGGER.info('Xiaomi Gateway %s is disabled by configuration', sid)
                self.disabled_gateways.append(ip_address)
                continue
            _LOGGER.info('Xiaomi Gateway %s configured at IP %s:%s',
                         sid, ip_address, port)

            self.gateways[ip_address] = XiaomiGateway(
                ip_address, port, sid, gateway.get('key'),
                self._device_discovery_retries, self._interface,
                gateway.get('proto'))
        except OSError as error:
            _LOGGER.error("Could not resolve %s: %s", host, error)

    try:
        _socket.sendto('{"cmd":"whois"}'.encode(),
                       (self.MULTICAST_ADDRESS, self.GATEWAY_DISCOVERY_PORT))

        while True:
            data, (ip_add, _) = _socket.recvfrom(1024)
            # The original tested `len(data) is None`, which is never true;
            # check for an empty datagram instead. The redundant second check
            # of self.gateways is also folded in here.
            if not data or ip_add in self.gateways:
                continue
            if ip_add in self.disabled_gateways:
                continue

            resp = json.loads(data.decode())
            if resp["cmd"] != 'iam':
                _LOGGER.error("Response does not match return cmd")
                continue

            if resp["model"] not in GATEWAY_MODELS:
                _LOGGER.error("Response must be gateway model")
                continue

            disabled = False
            gateway_key = None
            for gateway in self._gateways_config:
                sid = gateway.get('sid')
                if sid is None or sid == resp["sid"]:
                    gateway_key = gateway.get('key')
                if sid and sid == resp['sid'] and gateway.get('disable'):
                    disabled = True

            sid = resp["sid"]
            if disabled:
                _LOGGER.info("Xiaomi Gateway %s is disabled by configuration", sid)
                self.disabled_gateways.append(ip_add)
            else:
                _LOGGER.info('Xiaomi Gateway %s found at IP %s', sid, ip_add)
                self.gateways[ip_add] = XiaomiGateway(
                    ip_add, resp["port"], sid, gateway_key,
                    self._device_discovery_retries, self._interface,
                    resp["proto_version"] if "proto_version" in resp else None)

    except socket.timeout:
        _LOGGER.info("Gateway discovery finished in 5 seconds")
    _socket.close()
python
{ "resource": "" }
q269290
XiaomiGatewayDiscovery.listen
test
def listen(self):
    """Start listening."""
    _LOGGER.info('Creating Multicast Socket')
    self._mcastsocket = self._create_mcast_socket()
    self._listening = True
    thread = Thread(target=self._listen_to_msg, args=())
    self._threads.append(thread)
    thread.daemon = True
    thread.start()
python
{ "resource": "" }
q269291
XiaomiGateway.get_from_hub
test
def get_from_hub(self, sid):
    """Get data from gateway"""
    cmd = '{ "cmd":"read","sid":"' + sid + '"}'
    resp = (self._send_cmd(cmd, "read_ack") if int(self.proto[0:1]) == 1
            else self._send_cmd(cmd, "read_rsp"))
    _LOGGER.debug("read_ack << %s", resp)
    return self.push_data(resp)
python
{ "resource": "" }
q269292
XiaomiGateway.push_data
test
def push_data(self, data):
    """Push data broadcasted from gateway to device"""
    if not _validate_data(data):
        return False
    jdata = (json.loads(data['data']) if int(self.proto[0:1]) == 1
             else _list2map(data['params']))
    if jdata is None:
        return False
    sid = data['sid']
    for func in self.callbacks[sid]:
        func(jdata, data)
    return True
python
{ "resource": "" }
q269293
XiaomiGateway._get_key
test
def _get_key(self):
    """Get key using token from gateway"""
    init_vector = bytes(bytearray.fromhex('17996d093d28ddb3ba695a2e6f58562e'))
    encryptor = Cipher(algorithms.AES(self.key.encode()), modes.CBC(init_vector),
                       backend=default_backend()).encryptor()
    ciphertext = encryptor.update(self.token.encode()) + encryptor.finalize()
    if isinstance(ciphertext, str):  # For Python 2 compatibility
        return ''.join('{:02x}'.format(ord(x)) for x in ciphertext)
    return ''.join('{:02x}'.format(x) for x in ciphertext)
python
{ "resource": "" }
q269294
exception_handler
test
def exception_handler(job, *exc_info):
    """
    Called by RQ when there is a failure in a worker.

    NOTE: Make sure that in your RQ worker process, rollbar.init() has been
    called with handler='blocking'. The default handler, 'thread', does not
    work from inside an RQ worker.
    """
    # Report data about the job with the exception.
    job_info = job.to_dict()
    # job_info['data'] is the pickled representation of the job, and doesn't
    # json-serialize well. repr() works nicely.
    job_info['data'] = repr(job_info['data'])
    extra_data = {'job': job_info}
    payload_data = {'framework': 'rq'}
    rollbar.report_exc_info(exc_info, extra_data=extra_data, payload_data=payload_data)

    # continue to the next handler
    return True
python
{ "resource": "" }
q269295
includeme
test
def includeme(config):
    """
    Pyramid entry point
    """
    settings = config.registry.settings

    config.add_tween('rollbar.contrib.pyramid.rollbar_tween_factory', over=EXCVIEW)

    # run patch_debugtoolbar, unless they disabled it
    if asbool(settings.get('rollbar.patch_debugtoolbar', True)):
        patch_debugtoolbar(settings)

    def hook(request, data):
        data['framework'] = 'pyramid'

        if request:
            request.environ['rollbar.uuid'] = data['uuid']

            if request.matched_route:
                data['context'] = request.matched_route.name

    rollbar.BASE_DATA_HOOK = hook

    kw = parse_settings(settings)

    access_token = kw.pop('access_token')
    environment = kw.pop('environment', 'production')

    if kw.get('scrub_fields'):
        kw['scrub_fields'] = set([str.strip(x)
                                  for x in kw.get('scrub_fields').split('\n') if x])

    if kw.get('exception_level_filters'):
        r = DottedNameResolver()
        exception_level_filters = []
        for line in kw.get('exception_level_filters').split('\n'):
            if line:
                dotted_path, level = line.split()
                try:
                    cls = r.resolve(dotted_path)
                    exception_level_filters.append((cls, level))
                except ImportError:
                    log.error('Could not import %r' % dotted_path)
        kw['exception_level_filters'] = exception_level_filters

    kw['enabled'] = asbool(kw.get('enabled', True))

    rollbar.init(access_token, environment, **kw)
python
{ "resource": "" }
q269296
RollbarNotifierMiddleware._ensure_log_handler
test
def _ensure_log_handler(self):
    """
    If there's no log configuration, set up a default handler.
    """
    if log.handlers:
        return
    handler = logging.StreamHandler()
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)-5.5s [%(name)s][%(threadName)s] %(message)s')
    handler.setFormatter(formatter)
    log.addHandler(handler)
python
{ "resource": "" }
q269297
get_request
test
def get_request():
    """
    Get the current request object. Implementation varies by library
    support. Modified below when we know which framework is being used.
    """
    # TODO(cory): add in a generic _get_locals_request() which
    # will iterate up through the call stack and look for a variable
    # that appears to be a valid request object.
    for fn in (_get_bottle_request, _get_flask_request,
               _get_pyramid_request, _get_pylons_request):
        try:
            req = fn()
            if req is not None:
                return req
        except:
            pass

    return None
python
{ "resource": "" }
q269298
init
test
def init(access_token, environment='production', scrub_fields=None, url_fields=None, **kw):
    """
    Saves configuration variables in this module's SETTINGS.

    access_token: project access token. Get this from the Rollbar UI:
        - click "Settings" in the top nav
        - click "Projects" in the left nav
        - copy-paste the appropriate token.
    environment: environment name. Can be any string; suggestions: 'production',
    'development', 'staging', 'yourname'
    **kw: provided keyword arguments will override keys in SETTINGS.
    """
    global SETTINGS, agent_log, _initialized, _transforms, _serialize_transform, _threads

    if scrub_fields is not None:
        SETTINGS['scrub_fields'] = list(scrub_fields)
    if url_fields is not None:
        SETTINGS['url_fields'] = list(url_fields)

    # Merge the extra config settings into SETTINGS
    SETTINGS = dict_merge(SETTINGS, kw)
    if _initialized:
        # NOTE: Temp solution to not being able to re-init.
        # New versions of pyrollbar will support re-initialization
        # via the (not-yet-implemented) configure() method.
        if not SETTINGS.get('suppress_reinit_warning'):
            log.warning('Rollbar already initialized. Ignoring re-init.')
        return

    SETTINGS['access_token'] = access_token
    SETTINGS['environment'] = environment

    if SETTINGS.get('allow_logging_basic_config'):
        logging.basicConfig()

    if SETTINGS.get('handler') == 'agent':
        agent_log = _create_agent_log()

    # We will perform these transforms in order:
    # 1. Serialize the payload to be all python built-in objects
    # 2. Scrub the payloads based on the key suffixes in SETTINGS['scrub_fields']
    # 3. Scrub URLs in the payload for keys that end with 'url'
    # 4. Optional - If local variable gathering is enabled, transform the
    #    trace frame values using the ShortReprTransform.
    _serialize_transform = SerializableTransform(
        safe_repr=SETTINGS['locals']['safe_repr'],
        whitelist_types=SETTINGS['locals']['whitelisted_types'])
    _transforms = [
        ScrubRedactTransform(),
        _serialize_transform,
        ScrubTransform(suffixes=[(field,) for field in SETTINGS['scrub_fields']],
                       redact_char='*'),
        ScrubUrlTransform(suffixes=[(field,) for field in SETTINGS['url_fields']],
                          params_to_scrub=SETTINGS['scrub_fields'])
    ]

    # A list of key prefixes to apply our shortener transform to. The request
    # being included in the body key is old behavior and is being retained for
    # backwards compatibility.
    shortener_keys = [
        ('request', 'POST'),
        ('request', 'json'),
        ('body', 'request', 'POST'),
        ('body', 'request', 'json'),
    ]

    if SETTINGS['locals']['enabled']:
        shortener_keys.append(('body', 'trace', 'frames', '*', 'code'))
        shortener_keys.append(('body', 'trace', 'frames', '*', 'args', '*'))
        shortener_keys.append(('body', 'trace', 'frames', '*', 'kwargs', '*'))
        shortener_keys.append(('body', 'trace', 'frames', '*', 'locals', '*'))

    shortener_keys.extend(SETTINGS['shortener_keys'])

    shortener = ShortenerTransform(safe_repr=SETTINGS['locals']['safe_repr'],
                                   keys=shortener_keys,
                                   **SETTINGS['locals']['sizes'])
    _transforms.append(shortener)
    _threads = queue.Queue()

    events.reset()
    filters.add_builtin_filters(SETTINGS)

    _initialized = True
python
{ "resource": "" }
q269299
lambda_function
test
def lambda_function(f):
    """
    Decorator for making error handling on AWS Lambda easier
    """
    @functools.wraps(f)
    def wrapper(event, context):
        global _CURRENT_LAMBDA_CONTEXT
        _CURRENT_LAMBDA_CONTEXT = context
        try:
            result = f(event, context)
            return wait(lambda: result)
        except:
            cls, exc, trace = sys.exc_info()
            report_exc_info((cls, exc, trace.tb_next))
            wait()
            raise
    return wrapper
python
{ "resource": "" }