function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def __init__(self, title=None, **kwargs):
    """Collect model include/exclude filters from kwargs, forward the rest upward."""
    self.models = list(kwargs.pop('models', []))
    self.exclude = list(kwargs.pop('exclude', []))
    # Legacy option names, retained so existing dashboards keep working.
    self.include_list = kwargs.pop('include_list', [])  # deprecated
    self.exclude_list = kwargs.pop('exclude_list', [])  # deprecated
    super(AppList, self).__init__(title, **kwargs)
liberation/django-admin-tools
[ 2, 2, 2, 3, 1371719510 ]
def __init__(self, **kwargs):
    """Dashboard variant exposing only the django.contrib.auth models."""
    Dashboard.__init__(self, **kwargs)
    # will only list the django.contrib.auth models
    auth_list = modules.ModelList('Authentication', ['django.contrib.auth.*',])
    self.children.append(auth_list)
liberation/django-admin-tools
[ 2, 2, 2, 3, 1371719510 ]
def __init__(self, title=None, models=None, exclude=None, **kwargs):
    """Module listing a filtered set of models.

    :param title: optional module title
    :param models: iterable of "app.Model" path strings to include
    :param exclude: iterable of "app.Model" path strings to exclude
    Remaining kwargs (after popping the deprecated/extra options) are
    forwarded to the parent module class.
    """
    self.models = list(models or [])
    self.exclude = list(exclude or [])
    self.include_list = kwargs.pop('include_list', [])  # deprecated
    self.exclude_list = kwargs.pop('exclude_list', [])  # deprecated
    # Simplified: dict.pop with a default replaces the if/else dance.
    self.extra = kwargs.pop('extra', [])
    super(ModelList, self).__init__(title, **kwargs)
liberation/django-admin-tools
[ 2, 2, 2, 3, 1371719510 ]
def __init__(self, **kwargs):
    """Dashboard variant showing recent actions for CMS pages and plugins only."""
    Dashboard.__init__(self, **kwargs)
    # will only list the django.contrib apps
    recent = modules.RecentActions(
        title='Django CMS recent actions',
        include_list=('cms.page', 'cms.cmsplugin',)
    )
    self.children.append(recent)
liberation/django-admin-tools
[ 2, 2, 2, 3, 1371719510 ]
def __init__(self, title=None, limit=10, include_list=None, exclude_list=None, **kwargs):
    """Recent-actions module: store content-type filters and forward `limit`."""
    self.include_list = include_list or []
    self.exclude_list = exclude_list or []
    kwargs['limit'] = limit
    super(RecentActions, self).__init__(title, **kwargs)
liberation/django-admin-tools
[ 2, 2, 2, 3, 1371719510 ]
def get_qset(list):  # parameter name shadows the builtin; kept for API compatibility
    """Build a Q filter matching any of the given content types.

    Each entry may be a ContentType instance or an "app_label.model" string.
    Returns a Q object OR-ing all entries, or None for an empty input.

    :raises ValueError: if an entry is not a ContentType and not of the
        form "app_label.model".
    """
    qset = None
    for contenttype in list:
        if isinstance(contenttype, ContentType):
            current_qset = Q(content_type__id=contenttype.id)
        else:
            try:
                app_label, model = contenttype.split('.')
            except (ValueError, AttributeError):
                # Fix: the original bare `except:` also swallowed
                # KeyboardInterrupt/SystemExit. Catch only the unpacking
                # failure (ValueError) and non-string entries (AttributeError).
                raise ValueError('Invalid contenttype: "%s"' % contenttype)
            current_qset = Q(
                content_type__app_label=app_label,
                content_type__model=model
            )
        qset = current_qset if qset is None else qset | current_qset
    return qset
liberation/django-admin-tools
[ 2, 2, 2, 3, 1371719510 ]
def __init__(self, **kwargs):
    """Dashboard variant with a Django-news RSS feed module."""
    Dashboard.__init__(self, **kwargs)
    # will only list the django.contrib apps
    news_feed = modules.Feed(
        title=_('Latest Django News'),
        feed_url='http://www.djangoproject.com/rss/weblog/',
        limit=5
    )
    self.children.append(news_feed)
liberation/django-admin-tools
[ 2, 2, 2, 3, 1371719510 ]
def __init__(self, title=None, feed_url=None, limit=None, **kwargs):
    """RSS feed module; feed_url and limit are forwarded through kwargs."""
    kwargs['feed_url'] = feed_url
    kwargs['limit'] = limit
    super(Feed, self).__init__(title, **kwargs)
liberation/django-admin-tools
[ 2, 2, 2, 3, 1371719510 ]
def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, referer: str, base_url: Optional[str] = None, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, n_particles, dimensions, options, bounds=None, velocity_clamp=None, center=1.0, ftol=-np.inf, ftol_iter=1, init_pos=None,
ljvmiranda921/pyswarms
[ 1044, 314, 1044, 21, 1499861085 ]
def _populate_history(self, hist): """Populate all history lists The :code:`cost_history`, :code:`mean_pbest_history`, and :code:`neighborhood_best` is expected to have a shape of :code:`(iters,)`,on the other hand, the :code:`pos_history` and :code:`velocity_history` are expected to have a shape of :code:`(iters, n_particles, dimensions)` Parameters ---------- hist : collections.namedtuple Must be of the same type as self.ToHistory """ self.cost_history.append(hist.best_cost) self.mean_pbest_history.append(hist.mean_pbest_cost) self.mean_neighbor_history.append(hist.mean_neighbor_cost) self.pos_history.append(hist.position) self.velocity_history.append(hist.velocity)
ljvmiranda921/pyswarms
[ 1044, 314, 1044, 21, 1499861085 ]
def optimize(self, objective_func, iters, n_processes=None, **kwargs):
    """Optimize the swarm for a number of iterations.

    Evaluates :code:`objective_func` for :code:`iters` iterations; concrete
    optimizers must override this.

    Parameters
    ----------
    objective_func : function
        objective function to be evaluated
    iters : int
        number of iterations
    n_processes : int
        number of processes to use for parallel particle evaluation.
        Default is None, meaning no parallelization
    kwargs : dict
        arguments for the objective function

    Raises
    ------
    NotImplementedError
        Always; this is an abstract method.
    """
    raise NotImplementedError("SwarmOptimizer::optimize()")
ljvmiranda921/pyswarms
[ 1044, 314, 1044, 21, 1499861085 ]
def __init__(self, parent_module, node_id, func_type, cg, em, sym_table, type_info, live_at_end, vars_to_raise, closure_results, my_closure_results, globals_, is_module):
    """Code-generation context for one function or module body.

    NOTE(review): parameter semantics below are inferred from names and the
    methods that consume them — confirm against callers.
    """
    # Guard: per-scope closure info must be a ClosureResults instance.
    assert isinstance(my_closure_results, closure_analyzer.ClosureResults), my_closure_results
    self._parent_module = parent_module
    self._nid = node_id
    self._func_type = func_type
    self.cg = cg                              # code generator (public attribute)
    self.em = em                              # emitter (public attribute)
    self._st = sym_table                      # symbol table: name -> Variable
    self._type_info = type_info
    self._live_at_end = live_at_end           # names that must survive past the block
    self._vars_to_raise = vars_to_raise
    self._closure_results = closure_results   # whole-program closure analysis
    self._cr = my_closure_results             # closure results for this scope only
    self._globals = globals_
    self._is_module = is_module               # True when compiling a module body
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def _get(self, node):
    """Evaluate *node*, returning its Variable converted to the inferred type."""
    # Emit a source-location comment into the generated IR for debuggability.
    self.em.pl("; %s:" % getattr(node, "lineno", "??") + " " + ast_utils.format_node(node))
    self.em.indent(2)
    r = self._evaluate(node)
    self.em.pl("; end" + " " + ast_utils.format_node(node))
    self.em.indent(-2)
    # Skip generated nodes since they're not in the type inference (TODO are there cases that that could break?)
    # and skip several classes of things that can't always be converted
    if not hasattr(node, "not_real") and not isinstance(r.t, (UnboxedFunctionMT, _SpecialFuncMT, PolymorphicFunctionMT, UnboxedInstanceMethod, CallableMT, ModuleMT, ListMT.ListIteratorMT, DictMT.DictIteratorMT, ClassMT)):
        expected_type = self._type_info.get_expr_type(self.em, node)
        # Only convert when the evaluated type differs from the inferred one
        # (instantiated form counts as a match).
        if not (expected_type is r.t or expected_type is r.t.get_instantiated()):
            assert r.t.can_convert_to(expected_type), (expected_type, r.t)
            r = r.convert_to(self.em, expected_type)
    return r
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def _set(self, t, val):
    """Assign *val* to assignment target *t* (name, subscript, attribute, or unpacking)."""
    # v is a Variable with one vref that this _set should consume
    # (can't actually check it because it might have added other refs
    # ex by adding it to the symbol table)
    if isinstance(t, _ast.Name):
        # Plain name: delegate to the string case below.
        self._set(t.id, val)
    elif isinstance(t, _ast.Subscript):
        # target[slice] = val  ->  target.__setitem__(slice, val)
        v = self._get(t.value)
        s = self._get(t.slice)
        f = v.getattr(self.em, "__setitem__", clsonly=True)
        f.call(self.em, [s, val])
    elif isinstance(t, _ast.Attribute):
        # target.attr = val
        v = self._get(t.value)
        v.setattr(self.em, t.attr, val)
    elif isinstance(t, str):
        self._set_name(t, val)
    elif isinstance(t, (_ast.Tuple, _ast.List)):
        # Unpacking assignment: (a, b) = val  or  [a, b] = val
        if isinstance(val.t, UnboxedTupleMT):
            # Element count is statically known; unpack directly.
            assert len(t.elts) == len(val.v)
            for i in xrange(len(val.v)):
                e = val.v[i]
                e.incvref(self.em)
                self._set(t.elts[i], e)
            val.decvref(self.em)
        elif isinstance(val.t, (TupleMT, ListMT)):
            if isinstance(val.t, TupleMT):
                assert len(t.elts) == len(val.t.elt_types)
            else:
                # Lists need a runtime length check before unpacking.
                val.incvref(self.em)
                r = val.getattr(self.em, "__len__", clsonly=True).call(self.em, [])
                self.em.pl("call void @check_unpacking_length(i64 %d, i64 %s)" % (len(t.elts), r.v))
            # Assign each element via __getitem__.
            for i in xrange(len(t.elts)):
                val.incvref(self.em)
                r = val.getattr(self.em, "__getitem__", clsonly=True).call(self.em, [Variable(Int, i, 1, True)])
                self._set(t.elts[i], r)
            val.decvref(self.em)
        else:
            raise Exception(val.t)
    else:
        raise Exception(t)
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def _close_block(self):
    """Release dead variables and normalize live ones at the end of a block."""
    done = []
    for n, v in self._st.iteritems():
        if n not in self._live_at_end:
            # self.em.pl("; %s not live" % (n))
            # Name is dead after this block: drop our reference.
            v.decvref(self.em)
            done.append(n)
        else:
            # self.em.pl("; %s live" % (n))
            if n in self._vars_to_raise:
                # Have to assume that all variables are owned at the end of a block; could get around this with a more complicated analysis phase (or maybe just remove them in a post-processing phase)
                # similarly for raising
                v2 = v.convert_to(self.em, self._vars_to_raise[n])
                if v2.nrefs > 1 or not v2.marked:
                    v3 = v2.split(self.em)
                    v2.decvref(self.em)
                    v2 = v3
                self._st[n] = v2
    # Remove dead names only after iteration — mutating during iteritems is unsafe.
    for n in done:
        self._st.pop(n)
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def pre_pass(self, node):
    """A `pass` statement generates no code; return an empty successor tuple."""
    return tuple()
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def pre_branch(self, node):
    """Emit a conditional branch on node.test to the true/false blocks."""
    v = self._get(node.test)
    # Truthiness via __nonzero__ (Python 2 protocol).
    v2 = v.getattr(self.em, "__nonzero__", clsonly=True).call(self.em, [])
    assert node.true_block
    assert node.false_block
    self._close_block()
    # Constant-folded branches: emit an unconditional jump.
    if str(v2.v) == "0" or node.true_block == node.false_block:
        assert 0, "untested"
        self.em.pl("br label %%block%d" % (node.false_block,))
    elif str(v2.v) == "1":
        assert 0, "untested"
        self.em.pl("br label %%block%d" % (node.true_block,))
    else:
        self.em.pl("br i1 %s, label %%block%d, label %%block%d" % (v2.v, node.true_block, node.false_block))
    return ()
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def pre_import(self, node):
    """Handle `import X` statements: load each module and bind it to its name.

    Only plain top-level imports are supported — no `as` aliases and no
    dotted module paths (enforced by the asserts).
    """
    for n in node.names:
        assert not n.asname
        assert '.' not in n.name
        m = self.cg.import_module(self.em, n.name)
        self._set(n.name, m)
    return ()
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def pre_augassign(self, node):
    """Handle augmented assignment (e.g. x += y) by trying __iop__, __op__, then __rop__."""
    target_var = self._get(node.target)
    value_var = self._get(node.value)
    # Derive the in-place and reflected method names from the plain one,
    # e.g. "__add__" -> "__iadd__" / "__radd__".
    base_name = BINOP_MAP[type(node.op)]
    suffix = base_name[2:]
    result = self._find_and_apply_binop(
        target_var,
        value_var,
        ("__i" + suffix, False),
        (base_name, False),
        ("__r" + suffix, True),
    )
    self._set(node.target, result)
    return ()
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def pre_print(self, node):
    """Emit code for a Python 2 `print` statement writing to sys.stdout."""
    for i, elt in enumerate(node.values):
        v = self._get(elt)
        assert isinstance(v, Variable), elt
        # Stringify via __str__ before writing.
        v = v.getattr(self.em, "__str__", clsonly=True).call(self.em, [])
        assert v.t is Str
        self.em.pl("call void @file_write(%%file* @sys_stdout, %%string* %s)" % (v.v,))
        # A space separates items; also needed after the last item of a
        # trailing-comma print (node.nl false).
        if i < len(node.values) - 1 or not node.nl:
            self.em.pl("call void @print_space_if_necessary(%%string* %s)" % (v.v,))
        v.decvref(self.em)
    if node.nl:
        self.em.pl("call void @file_write(%%file* @sys_stdout, %%string* @str_newline)" % ())
    return ()
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def pre_functiondef(self, node):
    """Generate code for a function definition and bind it under its name."""
    func_var = self._handle_function(node)
    if func_var is not None:
        self._set(node.name, func_var)
    return ()
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def pre_return(self, node):
    """Emit a `ret` instruction, converting the value to the function's return type."""
    rtn_type = self._func_type.rtn_type
    # A bare `return` returns None (the "null" constant).
    v = Variable(None_, "null", 1, False) if node.value is None else self._get(node.value)
    v = v.convert_to(self.em, rtn_type)
    # Ensure we hand back an owned ("marked") reference.
    if v.marked:
        r = v
    else:
        r = v.split(self.em)
        v.decvref(self.em)
    self._close_block()
    if rtn_type is None_:
        self.em.pl("ret void")
    else:
        # This is the ref contract:
        assert r.nrefs == 1
        assert r.marked
        self.em.pl("ret %s %s" % (r.t.llvm_type(), r.v))
    return ()
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def __init__(self, typepath, pythonpath):
    """Compiler driver state; most caches start as None and are filled later."""
    self._compile_queue = None
    self._typepath = typepath      # search path for type stubs
    self._pythonpath = pythonpath  # search path for importable .py modules
    self.modules = None # maps ast node -> usermodulemt object
    self._loaded_modules = None # maps fn -> usermodulemt object
    self._module_filenames = None # maps ast module -> fn
    self._closure_results = None
    self.type_info = None
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def import_module(self, em, name=None, fn=None):
    """Import a module by *name* (searched on the python path) or by filename *fn*.

    Exactly one of name/fn must be given. Builtin modules are served from
    BUILTIN_MODULES; user modules are parsed and cached by filename in
    self._loaded_modules. Returns a fresh dup of the module variable.

    :raises Exception: if no candidate file exists.
    """
    assert not (name and fn)
    assert name or fn
    if name and name in BUILTIN_MODULES:
        return BUILTIN_MODULES[name].dup({})
    if name:
        assert '.' not in name
        fns = [os.path.join(dirname, name + ".py") for dirname in self._pythonpath]
    else:
        fns = [fn]
        assert fn.endswith(".py")
        name = os.path.basename(fn)[:-3]
        assert name
    for fn in fns:
        if os.path.exists(fn):
            if fn not in self._loaded_modules:
                # Fix: close the source file instead of leaking the handle.
                with open(fn) as source_file:
                    source = source_file.read()
                node = ast_utils.parse(source, fn)
                self._load_module(em, node, name, fn)
            rtn = self._loaded_modules[fn]
            rtn.t.load(em)
            return rtn.dup({})
    raise Exception("don't know how to import '%s'" % name)
kmod/icbd
[ 12, 5, 12, 1, 1397488176 ]
def get(self, type: Type[T], query: MutableMapping[str, Any], context: PipelineContext = None) -> T:
    """Return a single object of ``type`` matching ``query``.

    Intentional no-op stub here; note the ``type`` parameter shadows the builtin.
    """
    pass
meraki-analytics/cassiopeia-datastores
[ 3, 5, 3, 5, 1504385373 ]
def get_many(self, type: Type[T], query: MutableMapping[str, Any], context: PipelineContext = None) -> Iterable[T]:
    """Return an iterable of objects of ``type`` matching ``query``.

    Intentional no-op stub here.
    """
    pass
meraki-analytics/cassiopeia-datastores
[ 3, 5, 3, 5, 1504385373 ]
def put(self, type: Type[T], item: T, context: PipelineContext = None) -> None:
    """Store a single ``item`` of ``type``.

    Intentional no-op stub here.
    """
    pass
meraki-analytics/cassiopeia-datastores
[ 3, 5, 3, 5, 1504385373 ]
def put_many(self, type: Type[T], items: Iterable[T], context: PipelineContext = None) -> None:
    """Store every item in ``items``.

    Intentional no-op stub here.
    """
    pass
meraki-analytics/cassiopeia-datastores
[ 3, 5, 3, 5, 1504385373 ]
def get_status(self, query: MutableMapping[str, Any], context: PipelineContext = None) -> ShardStatusDto:
    """Look up the cached shard status for the platform named in *query*."""
    platform = query["platform"].value
    key = f"{ShardStatusDto.__name__}.{platform}"
    return ShardStatusDto(self._get(key))
meraki-analytics/cassiopeia-datastores
[ 3, 5, 3, 5, 1504385373 ]
def test_identity_rotation(w):
    """Rotating by the unit quaternion must leave every waveform field unchanged."""
    # Rotation by 1 should be identity operation
    W_in = w()
    W_out = w()
    assert W_in.ensure_validity(alter=False)
    assert W_out.ensure_validity(alter=False)
    W_out.rotate_decomposition_basis(quaternion.one)
    assert W_out.ensure_validity(alter=False)
    assert np.array_equal(W_out.t, W_in.t)
    assert np.array_equal(W_out.frame, W_in.frame)
    assert np.array_equal(W_out.data, W_in.data)
    assert np.array_equal(W_out.LM, W_in.LM)
    assert W_out.ell_min == W_in.ell_min
    assert W_out.ell_max == W_in.ell_max
    # History entries should match after normalizing the per-object id suffix;
    # commented lines (starting "# ") are exempt.
    for h_in, h_out in zip(W_in.history, W_out.history[:-1]):
        assert h_in == h_out.replace(
            f"{type(W_out).__name__}_{str(W_out.num)}", f"{type(W_in).__name__}_{str(W_in.num)}"
        ) or (h_in.startswith("# ") and h_out.startswith("# "))
    assert W_out.frameType == W_in.frameType
    assert W_out.dataType == W_in.dataType
    assert W_out.r_is_scaled_out == W_in.r_is_scaled_out
    assert W_out.m_is_scaled_out == W_in.m_is_scaled_out
    assert isinstance(W_out.num, int)
    assert W_out.num != W_in.num
moble/scri
[ 16, 19, 16, 7, 1435000407 ]
def test_rotation_invariants(w):
    """A random rotation must change only data and frame (and num)."""
    # A random rotation should leave everything but data and frame the
    # same (except num, of course)
    W_in = w()
    W_out = w()
    np.random.seed(hash("test_rotation_invariants") % 4294967294)  # Use mod to get in an acceptable range
    W_out.rotate_decomposition_basis(np.quaternion(*np.random.uniform(-1, 1, 4)).normalized())
    assert W_in.ensure_validity(alter=False)
    assert W_out.ensure_validity(alter=False)
    assert np.array_equal(W_out.t, W_in.t)
    assert not np.array_equal(W_out.frame, W_in.frame)  # This SHOULD change
    assert not np.array_equal(W_out.data, W_in.data)  # This SHOULD change
    assert W_out.ell_min == W_in.ell_min
    assert W_out.ell_max == W_in.ell_max
    assert np.array_equal(W_out.LM, W_in.LM)
    # Compare histories up to the rotation-related trailing entries.
    for h_in, h_out in zip(W_in.history[:-3], W_out.history[:-5]):
        assert h_in == h_out.replace(
            f"{type(W_out).__name__}_{str(W_out.num)}", f"{type(W_in).__name__}_{str(W_in.num)}"
        ) or (h_in.startswith("# ") and h_out.startswith("# "))
    assert W_out.frameType == W_in.frameType
    assert W_out.dataType == W_in.dataType
    assert W_out.r_is_scaled_out == W_in.r_is_scaled_out
    assert W_out.m_is_scaled_out == W_in.m_is_scaled_out
    assert W_out.num != W_in.num
moble/scri
[ 16, 19, 16, 7, 1435000407 ]
def test_constant_versus_series(w):
    """A constant rotation and a matching time-series rotation agree on all invariants."""
    # A random rotation should leave everything but data and frame the
    # same (except num, of course)
    W_const = w()
    W_series = w()
    np.random.seed(hash("test_constant_versus_series") % 4294967294)  # Use mod to get in an acceptable range
    W_const.rotate_decomposition_basis(np.quaternion(*np.random.uniform(-1, 1, 4)).normalized())
    # Same class of rotation applied as an array with one entry per time step.
    W_series.rotate_decomposition_basis(
        np.array([np.quaternion(*np.random.uniform(-1, 1, 4)).normalized()] * W_series.n_times)
    )
    assert W_const.ensure_validity(alter=False)
    assert W_series.ensure_validity(alter=False)
    assert np.array_equal(W_series.t, W_const.t)
    assert not np.array_equal(W_series.frame, W_const.frame)  # This SHOULD change
    assert not np.array_equal(W_series.data, W_const.data)  # This SHOULD change
    assert W_series.ell_min == W_const.ell_min
    assert W_series.ell_max == W_const.ell_max
    assert np.array_equal(W_series.LM, W_const.LM)
    # Compare histories up to the rotation-related trailing entries.
    for h_const, h_series in zip(W_const.history[:-5], W_series.history[:-11]):
        assert h_const == h_series.replace(
            f"{type(W_series).__name__}_{str(W_series.num)}", f"{type(W_const).__name__}_{str(W_const.num)}"
        ) or (h_const.startswith("# ") and h_series.startswith("# "))
    assert W_series.frameType == W_const.frameType
    assert W_series.dataType == W_const.dataType
    assert W_series.r_is_scaled_out == W_const.r_is_scaled_out
    assert W_series.m_is_scaled_out == W_const.m_is_scaled_out
    assert W_series.num != W_const.num
moble/scri
[ 16, 19, 16, 7, 1435000407 ]
def test_rotation_inversion(w):
    """A rotation followed by its inverse must restore the original waveform."""
    # Rotation followed by the inverse rotation should leave
    # everything the same (except that the frame data will be either a
    # 1 or a series of 1s)
    np.random.seed(hash("test_rotation_inversion") % 4294967294)  # Use mod to get in an acceptable range
    W_in = w()
    assert W_in.ensure_validity(alter=False)
    # We loop over (1) a single constant rotation, and (2) an array of random rotations
    for R_basis in [
        np.quaternion(*np.random.uniform(-1, 1, 4)).normalized(),
        np.array([np.quaternion(*np.random.uniform(-1, 1, 4)).normalized()] * W_in.n_times),
    ]:
        W_out = w()
        W_out.rotate_decomposition_basis(R_basis)
        W_out.rotate_decomposition_basis(~R_basis)
        assert W_out.ensure_validity(alter=False)
        assert np.array_equal(W_out.t, W_in.t)
        assert np.max(np.abs(W_out.frame - W_in.frame)) < 1e-15
        # BUG FIX: the tolerance was written `ell_max ** 4 ** 4e-14`, which
        # right-associates as ell_max ** (4 ** 4e-14) ~= ell_max — a huge
        # absolute tolerance that defeats the comparison. Match the rtol
        # expression instead: ell_max**4 * 4e-14.
        assert np.allclose(W_out.data, W_in.data, atol=W_in.ell_max ** 4 * 4e-14, rtol=W_in.ell_max ** 4 * 4e-14)
        assert W_out.ell_min == W_in.ell_min
        assert W_out.ell_max == W_in.ell_max
        assert np.array_equal(W_out.LM, W_in.LM)
        for h_in, h_out in zip(W_in.history[:-3], W_out.history[:-5]):
            assert h_in == h_out.replace(
                f"{type(W_out).__name__}_{str(W_out.num)}", f"{type(W_in).__name__}_{str(W_in.num)}"
            ) or (h_in.startswith("# datetime") and h_out.startswith("# datetime"))
        assert W_out.frameType == W_in.frameType
        assert W_out.dataType == W_in.dataType
        assert W_out.r_is_scaled_out == W_in.r_is_scaled_out
        assert W_out.m_is_scaled_out == W_in.m_is_scaled_out
        assert W_out.num != W_in.num
moble/scri
[ 16, 19, 16, 7, 1435000407 ]
def createImgGOME_L2(fileAbsPath, pixelSize=0.25): hdf = h5py.File(fileAbsPath, 'r')
SISTEMAsw/TAMP
[ 1, 2, 1, 1, 1479208321 ]
def BlackScholes():
    """Price a European call and put plus greeks via Black-Scholes.

    Reads price/strike/time/rate/vol from the request query string and
    returns a JSON object with call (c*) and put (p*) price and greeks.
    Theta is per calendar day; vega and rho are per 1% move.
    """
    spot = float(request.args.get('price'))
    strike = float(request.args.get('strike'))
    tau = float(request.args.get('time'))
    rate = float(request.args.get('rate'))
    vol = float(request.args.get('vol'))
    # Shared subexpressions, computed once.
    sqrt_tau = sqrt(tau)
    d1 = (log(float(spot)/strike)+(rate+vol*vol/2.)*tau)/(vol*sqrt_tau)
    d2 = d1-vol*sqrt_tau
    disc = exp(-rate*tau)      # discount factor e^{-rT}
    nd1 = norm.cdf(d1)
    nd2 = norm.cdf(d2)
    pdf_d1 = norm.pdf(d1)
    out = {}
    out['cPrice'] = spot*nd1-strike*disc*nd2
    out['pPrice'] = strike*disc-spot+out['cPrice']  # put via put-call parity
    out['cDelta'] = nd1
    out['cGamma'] = pdf_d1/(spot*vol*sqrt_tau)
    out['cTheta'] = (-(spot*vol*pdf_d1)/(2*sqrt_tau)-rate*strike*disc*nd2)/365
    out['cVega'] = spot*sqrt_tau*pdf_d1/100
    out['cRho'] = strike*tau*disc*nd2/100
    out['pDelta'] = out['cDelta']-1
    out['pGamma'] = out['cGamma']
    out['pTheta'] = (-(spot*vol*pdf_d1)/(2*sqrt_tau)+rate*strike*disc*norm.cdf(-d2))/365
    out['pVega'] = out['cVega']
    out['pRho'] = -strike*tau*disc*norm.cdf(-d2)/100
    return json.dumps(out)
davemc84/bitcoin-payable-black-scholes
[ 2, 2, 2, 1, 1451697019 ]
def process_data(data):
    """Process the product: parse it in a DB transaction, then cache the result."""
    # real_parser runs inside a database interaction; its return value feeds
    # the memcache writer. Errors are emailed (with the raw product attached)
    # and logged.
    defer = DBPOOL.runInteraction(real_parser, data)
    defer.addCallback(write_memcache)
    defer.addErrback(common.email_error, data)
    defer.addErrback(LOG.error)
akrherz/pyWWA
[ 12, 4, 12, 10, 1336488468 ]
def real_parser(txn, buf):
    """Parse a raw text product and insert it into the products table.

    Returns the parsed TextProduct (for downstream memcache writing), or
    None when the buffer is empty, the product is unroutable, or its AFOS
    prefix is excluded from memcache.
    """
    if buf.strip() == "":
        return None
    utcnow = common.utcnow()
    nws = product.TextProduct(buf, utcnow=utcnow, parse_segments=False)
    # When we are in realtime processing, do not consider old data, typically
    # when a WFO fails to update the date in their MND
    if not common.replace_enabled() and (
        (utcnow - nws.valid).days > 180 or (utcnow - nws.valid).days < -180
    ):
        raise Exception(f"Very Latent Product! {nws.valid}")
    if nws.warnings:
        common.email_error("\n".join(nws.warnings), buf)
    if nws.afos is None:
        # Products from K/P sources without an AFOS id are silently dropped;
        # anything else is an error worth surfacing.
        if nws.source[0] not in ["K", "P"]:
            return None
        raise Exception("TextProduct.afos is null")
    if common.replace_enabled():
        # Replace mode: delete any previous copy of this product first.
        args = [nws.afos.strip(), nws.source, nws.valid]
        bbb = ""
        if nws.bbb:
            bbb = " and bbb = %s "
            args.append(nws.bbb)
        txn.execute(
            "DELETE from products where pil = %s and source = %s and "
            f"entered = %s {bbb}",
            args,
        )
        LOG.info("Removed %s rows for %s", txn.rowcount, nws.get_product_id())
    txn.execute(
        "INSERT into products (pil, data, entered, "
        "source, wmo, bbb) VALUES(%s, %s, %s, %s, %s, %s)",
        (nws.afos.strip(), nws.text, nws.valid, nws.source, nws.wmo, nws.bbb),
    )
    if nws.afos[:3] in MEMCACHE_EXCLUDE:
        return None
    return nws
akrherz/pyWWA
[ 12, 4, 12, 10, 1336488468 ]
def read(fname):
    """Return the text contents of *fname*, resolved relative to this module.

    Fix: the original leaked the file handle; use a context manager so the
    file is always closed.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as handle:
        return handle.read()
conversis/varstack
[ 24, 9, 24, 8, 1413753154 ]
def main(argv):
    # Python 2 code (print statement, integer division). Reads traffic XML
    # from stdin and prints, per road link: id, average travel time, car count.
    root = ET.parse(sys.stdin).getroot()
    period_start = root.attrib.get('periodstart')
    for road_link in root.iter('{http://FTT.arstraffic.com/schemas/IndividualTT/}link'):
        road_link_id = road_link.attrib.get('id')
        # Each child element is one car observation with a 'tt' travel time.
        road_link_times = [int(car.attrib.get('tt')) for car in road_link]
        number_of_cars = len(road_link_times)
        # NOTE(review): Python 2 integer division — the average is floored.
        average_travel_time = sum(road_link_times)/number_of_cars
        print "{0}\t{1} {2}".format(road_link_id, average_travel_time, number_of_cars)
gofore/aws-emr
[ 2, 5, 2, 1, 1416403169 ]
def __init__(self, client, config, serializer, deserializer) -> None:
    """Store the pipeline client, configuration and (de)serializers used by this operations class."""
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_long_running_output(pipeline_response):
    # Deserialize the final LRO response into an ExpressRouteGateway model.
    # `self` and `cls` are free variables from the enclosing method; a custom
    # `cls` callback, when supplied, shapes the return value instead.
    deserialized = self._deserialize('ExpressRouteGateway', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_long_running_output(pipeline_response):
    # No response body to deserialize for this operation. Returns None unless
    # a custom `cls` callback (free variable from the enclosing method) exists.
    if cls:
        return cls(pipeline_response, None, {})
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def create_app(config_name='development', p_db=db, p_bcrypt=bcrypt, p_login_manager=login_manager):
    """Application factory: build and configure a Flask app instance.

    The extension objects are injectable for testing; by default the shared
    module-level db/bcrypt/login_manager instances are bound.
    """
    app = Flask(__name__)
    config_app(config_name, app)
    app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    # Bind each shared extension to this app instance.
    for extension in (p_db, p_bcrypt, p_login_manager):
        extension.init_app(app)
    p_login_manager.login_view = 'register'
    return app
TwilioDevEd/airtng-flask
[ 16, 16, 16, 18, 1449671813 ]
def __init__(self, client, config, serializer, deserializer):
    """Store the pipeline client, configuration and (de)serializers used by this operations class."""
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def begin_delete( self, resource_group_name, # type: str route_table_name, # type: str **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_long_running_output(pipeline_response):
    # Delete-style LRO: nothing to deserialize. Returns None unless a custom
    # `cls` callback (free variable from the enclosing method) is provided.
    if cls:
        return cls(pipeline_response, None, {})
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get( self, resource_group_name, # type: str route_table_name, # type: str expand=None, # type: Optional[str] **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def _create_or_update_initial( self, resource_group_name, # type: str route_table_name, # type: str parameters, # type: "_models.RouteTable" **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def begin_create_or_update( self, resource_group_name, # type: str route_table_name, # type: str parameters, # type: "_models.RouteTable" **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_long_running_output(pipeline_response):
    # Deserialize the final LRO response into a RouteTable model. `self` and
    # `cls` are free variables from the enclosing method; a custom `cls`
    # callback, when supplied, shapes the return value instead.
    deserialized = self._deserialize('RouteTable', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def _update_tags_initial( self, resource_group_name, # type: str route_table_name, # type: str parameters, # type: "_models.TagsObject" **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def begin_update_tags( self, resource_group_name, # type: str route_table_name, # type: str parameters, # type: "_models.TagsObject" **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_long_running_output(pipeline_response):
    # Deserialize the final update-tags LRO response into a RouteTable model;
    # a custom `cls` callback (free variable) takes precedence when supplied.
    deserialized = self._deserialize('RouteTable', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def list( self, resource_group_name, # type: str **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def prepare_request(next_link=None):
    """Build the GET request for one page of the resource-group list operation."""
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        # Paging: follow the server-provided next link verbatim (it already
        # embeds the query string), so no extra query parameters are added.
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_next(next_link=None):
    # Fetch one page: build the request, run it through the pipeline, and
    # fail fast on any non-200 status using the ARM error format.
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    return pipeline_response
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def list_all( self, **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def prepare_request(next_link=None):
    """Build the GET request for one page of the subscription-wide list operation."""
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list_all.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        # Paging: follow the server-provided next link verbatim.
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_next(next_link=None):
    # Fetch one page: build the request, run it through the pipeline, and
    # fail fast on any non-200 status using the ARM error format.
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    return pipeline_response
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def setup(self):
    """Create the two component potentials (solar-system units) used by the tests."""
    self.units = solarsystem
    self.p1 = KeplerPotential(m=1.*u.Msun, units=self.units)
    self.p2 = HernquistPotential(m=0.5*u.Msun, c=0.1*u.au, units=self.units)
adrn/gala
[ 104, 51, 104, 37, 1394412978 ]
def test_composite_create(self): potential = self.Cls() # Add a point mass with same unit system potential["one"] = KeplerPotential(units=self.units, m=1.) with pytest.raises(TypeError): potential["two"] = "derp" assert "one" in potential.parameters assert "m" in potential.parameters["one"] with pytest.raises(TypeError): potential.parameters["m"] = "derp"
adrn/gala
[ 104, 51, 104, 37, 1394412978 ]
def test_integrate(self):
    """Cython and pure-Python orbit integration must agree for both integrators."""
    potential = self.Cls()
    potential["one"] = self.p1
    potential["two"] = self.p2
    for Integrator in [DOPRI853Integrator, LeapfrogIntegrator]:
        H = Hamiltonian(potential)
        w_cy = H.integrate_orbit([1., 0, 0, 0, 2*np.pi, 0], dt=0.01, n_steps=1000, Integrator=Integrator, cython_if_possible=True)
        w_py = H.integrate_orbit([1., 0, 0, 0, 2*np.pi, 0], dt=0.01, n_steps=1000, Integrator=Integrator, cython_if_possible=False)
        assert np.allclose(w_cy.xyz.value, w_py.xyz.value)
        assert np.allclose(w_cy.v_xyz.value, w_py.v_xyz.value)
adrn/gala
[ 104, 51, 104, 37, 1394412978 ]
def test_failures():
    """Adding an incompatible potential to a C composite must raise ValueError."""
    p = CCompositePotential()
    p['derp'] = KeplerPotential(m=1.*u.Msun, units=solarsystem)
    with pytest.raises(ValueError):
        p['jnsdfn'] = HenonHeilesPotential(units=solarsystem)
adrn/gala
[ 104, 51, 104, 37, 1394412978 ]
def _energy(self, x, t): m = self.parameters['m'] x0 = self.parameters['x0'] r = np.sqrt(np.sum((x-x0[None])**2, axis=1)) return -m/r
adrn/gala
[ 104, 51, 104, 37, 1394412978 ]
def list_collections(self):
    """Return the authenticated user's saved-media collections."""
    endpoint = 'collections/list/'
    return self._call_api(endpoint)
ping/instagram_private_api
[ 2658, 604, 2658, 134, 1484222059 ]
def create_collection(self, name, added_media_ids=None):
    """Create a new saved-media collection.

    :param name: display name for the collection
    :param added_media_ids: a single media id (str) or a list of media ids
        to seed the collection with
    :return: the API response, e.g. a dict with ``status``,
        ``collection_id``, ``collection_name`` and ``cover_media``
    """
    params = {'name': name}
    # A bare string id is promoted to a one-element list.
    if added_media_ids and isinstance(added_media_ids, str):
        added_media_ids = [added_media_ids]
    if added_media_ids:
        # Compact JSON encoding (no whitespace), as the API expects.
        params['added_media_ids'] = json.dumps(added_media_ids, separators=(',', ':'))
    params.update(self.authenticated_params)
    return self._call_api('collections/create/', params=params)
ping/instagram_private_api
[ 2658, 604, 2658, 134, 1484222059 ]
def __init__( self, *, certificate: Optional[str] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, properties: Optional["CertificateProperties"] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, value: Optional[List["CertificateDescription"]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, certificate: Optional[str] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, certificate: Optional[str] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, properties: Optional["CertificatePropertiesWithNonce"] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, max_delivery_count: Optional[int] = None, default_ttl_as_iso8601: Optional[datetime.timedelta] = None, feedback: Optional["FeedbackProperties"] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, endpoint_id: Optional[str] = None, health_status: Optional[Union[str, "EndpointHealthStatus"]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, value: Optional[List["EndpointHealthData"]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, key: str, value: str, endpoint_names: List[str], **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, properties: Optional[Dict[str, str]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, value: Optional[List["EventHubConsumerGroupInfo"]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, retention_time_in_days: Optional[int] = None, partition_count: Optional[int] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, export_blob_container_uri: str, exclude_keys: bool, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, failover_region: str, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, source: Union[str, "RoutingSource"], endpoint_names: List[str], is_enabled: bool, name: Optional[str] = None, condition: Optional[str] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, lock_duration_as_iso8601: Optional[datetime.timedelta] = None, ttl_as_iso8601: Optional[datetime.timedelta] = None, max_delivery_count: Optional[int] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, input_blob_container_uri: str, output_blob_container_uri: str, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, location: str, sku: "IotHubSkuInfo", tags: Optional[Dict[str, str]] = None, etag: Optional[str] = None, properties: Optional["IotHubProperties"] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, value: Optional[List["IotHubDescription"]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, location: Optional[str] = None, role: Optional[Union[str, "IotHubReplicaRoleType"]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, message: Optional[str] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, authorization_policies: Optional[List["SharedAccessSignatureAuthorizationRule"]] = None, ip_filter_rules: Optional[List["IpFilterRule"]] = None, event_hub_endpoints: Optional[Dict[str, "EventHubProperties"]] = None, routing: Optional["RoutingProperties"] = None, storage_endpoints: Optional[Dict[str, "StorageEndpointProperties"]] = None, messaging_endpoints: Optional[Dict[str, "MessagingEndpointProperties"]] = None, enable_file_upload_notifications: Optional[bool] = None, cloud_to_device: Optional["CloudToDeviceProperties"] = None, comments: Optional[str] = None, features: Optional[Union[str, "Capabilities"]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, value: Optional[List["IotHubQuotaMetricInfo"]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, sku: "IotHubSkuInfo", capacity: "IotHubCapacity", **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, value: Optional[List["IotHubSkuDescription"]] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, name: Union[str, "IotHubSku"], capacity: Optional[int] = None, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, *, filter_name: str, action: Union[str, "IpFilterActionType"], ip_mask: str, **kwargs
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]