code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def test_init_failure(self): <NEW_LINE> <INDENT> self.assertRaises(TypeError, AuthHandler) | test that not passing Chalice app object raises a type error | 625941c31f5feb6acb0c4b0f |
def create_database(): <NEW_LINE> <INDENT> conn = psycopg2.connect( "host=127.0.0.1 dbname=studentdb user=student password=student") <NEW_LINE> conn.set_session(autocommit=True) <NEW_LINE> cur = conn.cursor() <NEW_LINE> cur.execute("DROP DATABASE IF EXISTS sparkifydb") <NEW_LINE> cur.execute( "CREATE DATABASE sparkifydb WITH ENCODING 'utf8' TEMPLATE template0") <NEW_LINE> conn.close() <NEW_LINE> conn = psycopg2.connect( "host=127.0.0.1 dbname=sparkifydb user=student password=student") <NEW_LINE> cur = conn.cursor() <NEW_LINE> return cur, conn | Creates a new database. Drops existsing databases (if any),
so use with caution! | 625941c37b180e01f3dc47be |
def add_ignored(self, args): <NEW_LINE> <INDENT> if isinstance(args, list): <NEW_LINE> <INDENT> for elem in args: <NEW_LINE> <INDENT> self._ignored.append(elem) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(args, str): <NEW_LINE> <INDENT> self._ignored.append(args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError("Argument should have type 'list' or 'str'") | Add ignore pattern. | 625941c356b00c62f0f14615 |
def CleanRackFile(self): <NEW_LINE> <INDENT> gp_home = EnvUtil.getEnv("GPHOME") <NEW_LINE> if os.path.exists(gp_home): <NEW_LINE> <INDENT> gp_home = os.path.realpath(gp_home) <NEW_LINE> <DEDENT> rack_conf_file = os.path.realpath( os.path.join(gp_home, "script/gspylib/etc/conf/rack_info.conf")) <NEW_LINE> if os.path.isfile(rack_conf_file): <NEW_LINE> <INDENT> cmd = "rm -f %s" % rack_conf_file <NEW_LINE> CmdExecutor.execCommandWithMode(cmd, self.sshTool, self.localMode, mpprc_file=self.mpprcFile) <NEW_LINE> self.logger.debug("Successfully deleted rack information file.") | function: clean rack information file
input : NA
output: NA | 625941c3be8e80087fb20c02 |
def to_dict(self): <NEW_LINE> <INDENT> submitted_answer_dict = { 'answer': self.answer, 'interaction_id': self.interaction_id, 'answer_group_index': self.answer_group_index, 'rule_spec_index': self.rule_spec_index, 'classification_categorization': self.classification_categorization, 'params': self.params, 'session_id': self.session_id, 'time_spent_in_sec': self.time_spent_in_sec, } <NEW_LINE> if self.rule_spec_str is not None: <NEW_LINE> <INDENT> submitted_answer_dict['rule_spec_str'] = self.rule_spec_str <NEW_LINE> <DEDENT> if self.answer_str is not None: <NEW_LINE> <INDENT> submitted_answer_dict['answer_str'] = self.answer_str <NEW_LINE> <DEDENT> return submitted_answer_dict | Returns the dict of submitted answer.
Returns:
dict. The submitted answer dict. | 625941c3796e427e537b0581 |
def character_skill(self, fight_round, operation): <NEW_LINE> <INDENT> if self.fgo_settings.fight[fight_round]["skill"][operation]["object"] == 1: <NEW_LINE> <INDENT> self.fgo_settings.emulator.press_mouse_key( self.fgo_settings.fight[fight_round]["skill"][operation]["button"], self.fgo_settings.fight[fight_round]["skill"][operation]["delay_time"] + self.fgo_settings.delay_time) <NEW_LINE> <DEDENT> elif self.fgo_settings.fight[fight_round]["skill"][operation]["object"] > 7 or self.fgo_settings.fight[fight_round]["skill"][operation]["object"] < 0: <NEW_LINE> <INDENT> self.logger.get_log().error('技能选择释放对象出错') <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> elif self.fgo_settings.fight[fight_round]["skill"][operation]["object"] > 4: <NEW_LINE> <INDENT> self.fgo_settings.emulator.press_mouse_key( self.fgo_settings.fight[fight_round]["skill"][operation]["button"], 0.5 + self.fgo_settings.delay_time) <NEW_LINE> self.fgo_settings.emulator.press_mouse_key( chr(74 + self.fgo_settings.fight[fight_round]["skill"][operation]["object"]), self.fgo_settings.fight[fight_round]["skill"][operation]["delay_time"] + self.fgo_settings.delay_time) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fgo_settings.emulator.press_mouse_key( chr(47 + self.fgo_settings.fight[fight_round]["skill"][operation]["object"]), 0.5 + self.fgo_settings.delay_time) <NEW_LINE> self.fgo_settings.emulator.press_mouse_key( self.fgo_settings.fight[fight_round]["skill"][operation]["button"], self.fgo_settings.fight[fight_round]["skill"][operation]["delay_time"] + self.fgo_settings.delay_time) <NEW_LINE> <DEDENT> return None | :param fight_round: 战斗面数
:param operation: 操作编号
:return: None | 625941c3627d3e7fe0d68e0b |
def __ordering_handler(query_set, model, params): <NEW_LINE> <INDENT> orders = params.get("order") <NEW_LINE> if not orders: <NEW_LINE> <INDENT> return query_set <NEW_LINE> <DEDENT> sqla_params = [] <NEW_LINE> for (field_keypath, order) in orders: <NEW_LINE> <INDENT> field = getattr_keypath(model, field_keypath) <NEW_LINE> param = field.asc() if order else field.desc() <NEW_LINE> sqla_params.append(param) <NEW_LINE> <DEDENT> return query_set.order_by(*sqla_params) | Handle ordering requests.
Args:
query_set: SQLAlchemy query set to be ordered.
model: Data model from which given query set is generated.
params: User-provided filter params, with format {"order": {"field1": <bool>, ...}, ...}.
True indicates ascending order, while False indicates descending order.
Returns:
A query set with user-provided ordering applied. | 625941c3a8ecb033257d308a |
def clean_email(self): <NEW_LINE> <INDENT> email = self.cleaned_data['email'].lower() <NEW_LINE> if User.all().filter('email =', email).count(1): <NEW_LINE> <INDENT> raise forms.ValidationError(_(u'This email address is already in use. Please supply a different email address.')) <NEW_LINE> <DEDENT> return email | Validate that the supplied email address is unique for the
site. | 625941c33539df3088e2e308 |
def euclidean_distance_1d_circular(n, i, j): <NEW_LINE> <INDENT> d = euclidean_distance_1d(i, j) <NEW_LINE> return min(d, n-d) | Calculates 1d distance of integers i and j in a circle of n elements.
:param n: Circle length
:param i: Start element
:param j: Target element
:return: Minimal distance | 625941c3462c4b4f79d1d68d |
def shear_image(image, intensity): <NEW_LINE> <INDENT> image = tf.expand_dims(image, axis=0) <NEW_LINE> dtg = (tf.keras.preprocessing.image.ImageDataGenerator (shear_range=intensity)) <NEW_LINE> dtg.fit(x=image) <NEW_LINE> new_image = dtg.flow(image) <NEW_LINE> new_image = new_image.next()[0].astype("int") <NEW_LINE> return new_image | Function that randomly shears an image | 625941c30383005118ecf5a0 |
def prepareEntry(self, ts): <NEW_LINE> <INDENT> self._container[timestamp(ts)] = self._elemContainer() | Creates an empty sub-structure (elemContainer) for a given timestamp | 625941c3eab8aa0e5d26db14 |
def vj_extractor(hp,name): <NEW_LINE> <INDENT> vj_in = keras.Input(shape = (1,), name='vj_input') <NEW_LINE> reshape_layer = keras.layers.Reshape([hp['vj_embed']]) <NEW_LINE> x_vj_mu = keras.layers.Embedding(hp['vj_width'],hp['vj_embed'])(vj_in) <NEW_LINE> x_vj_mu = reshape_layer(x_vj_mu) <NEW_LINE> x_vj_mu = keras.layers.Dropout(hp['dropout'])(x_vj_mu) <NEW_LINE> return keras.Model(inputs=vj_in,outputs=x_vj_mu,name=name) | Extract Gene info - embed and dropout
parameters
-----------
hp: dict
dictionary of hyperpareameters, keys:
- vj_embed : int
the dimension in which to embed the one-hot gene representation
- vj_width: int
the original dimension of the one-hot representation coming in
- dropout: float
how much dropout to apply after the embedding
name: string
name to give the extractor model
returns
--------
model: tf.keras.Model
Keras model that converts input into a a feature representation. i.e prior
to any final dense layers. | 625941c3d10714528d5ffc9e |
def _not_spot(self, loc): <NEW_LINE> <INDENT> if len(loc) == 0 or loc[0] < 0 or loc[0] > self.HEIGHT - 1 or loc[1] < 0 or loc[1] > self.WIDTH - 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Finds out of the spot at the given location is an actual spot on the game board. | 625941c355399d3f05588670 |
def test_call(self): <NEW_LINE> <INDENT> output, _error = self.executor.pip('install', 'attrs').batch() <NEW_LINE> self.assertEquals(output, 'attrs installed') | calling a built-in command runs it in the shell | 625941c30fa83653e4656f79 |
def test_submit_disabled(self): <NEW_LINE> <INDENT> expected = '<button type="submit" disabled' <NEW_LINE> self.assertContains(self.resp, expected) | Submit button must be disabled | 625941c39b70327d1c4e0d91 |
def testManagedExceptionConversion(self): <NEW_LINE> <INDENT> from System import Exception, OverflowException <NEW_LINE> from Python.Test import ExceptionTest <NEW_LINE> e = ExceptionTest.GetBaseException() <NEW_LINE> self.assertTrue(isinstance(e, Exception)) <NEW_LINE> e = ExceptionTest.GetExplicitException() <NEW_LINE> self.assertTrue(isinstance(e, OverflowException)) <NEW_LINE> self.assertTrue(isinstance(e, Exception)) <NEW_LINE> e = ExceptionTest.GetWidenedException() <NEW_LINE> self.assertTrue(isinstance(e, OverflowException)) <NEW_LINE> self.assertTrue(isinstance(e, Exception)) <NEW_LINE> v = ExceptionTest.SetBaseException(Exception('error')) <NEW_LINE> self.assertTrue(v) <NEW_LINE> v = ExceptionTest.SetExplicitException(OverflowException('error')) <NEW_LINE> self.assertTrue(v) <NEW_LINE> v = ExceptionTest.SetWidenedException(OverflowException('error')) <NEW_LINE> self.assertTrue(v) | Test conversion of managed exceptions. | 625941c326068e7796caec98 |
def init_socket(): <NEW_LINE> <INDENT> sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) <NEW_LINE> sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) <NEW_LINE> sock.settimeout(10) <NEW_LINE> sock.bind(('', 12000)) <NEW_LINE> return sock | initialize the socket for iMACRT MMR | 625941c33d592f4c4ed1d02f |
def __init__(self, data): <NEW_LINE> <INDENT> self.standings_schedule_date = data['standings_schedule_date'] <NEW_LINE> self.divisions = [Division(x['division'], x['teams']) for x in data['divisions']] | Creates a standings object for info specified in `data`.
`data` should be a dictionary of values | 625941c326068e7796caec99 |
def serialize(self, buff): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _x = self <NEW_LINE> buff.write(_get_struct_3d().pack(_x.point.x, _x.point.y, _x.point.z)) <NEW_LINE> <DEDENT> except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self))))) <NEW_LINE> except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self))))) | serialize message into buffer
:param buff: buffer, ``StringIO`` | 625941c3d99f1b3c44c6754e |
def set_dataoffset(self, value): <NEW_LINE> <INDENT> self.assertWriteMode() <NEW_LINE> self.writer.set_padding(value-self.writer.vlr_stop) <NEW_LINE> return | Sets the data offset
Any space between this value and the end of the VLRs will be written
with 0's | 625941c316aa5153ce362435 |
def prism_equation(x,y,z,**kwargs): <NEW_LINE> <INDENT> if not cube_equation(x,y,z): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if y>0.7320508075688772: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if abs((y+1.)/x)<math.sqrt(3.): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True | Check whether a point are within a unit prism or not
Return: True if inside or on surface, False if outside | 625941c3cad5886f8bd26f96 |
def createDescription(self): <NEW_LINE> <INDENT> return self.PARSER_TAGS["description"] | Create an Unicode description | 625941c36e29344779a625d0 |
def __init__(self, ng, ncol=1, dtype=float, gmin=None, dg=None, empty=np.nan): <NEW_LINE> <INDENT> self.dtype = dtype <NEW_LINE> self._gmin = gmin <NEW_LINE> self._dg = dg <NEW_LINE> self._ng = ng <NEW_LINE> self._ncol = ncol <NEW_LINE> ntot = np.prod(ng) <NEW_LINE> self._cols = np.zeros([ntot, ncol], dtype=dtype) <NEW_LINE> self._cols[:] = empty <NEW_LINE> self._filled = np.zeros([ntot, ncol], dtype=bool) | Create a regular grid of scalar data.
The grid is defined by its (gmin, dg, ng) = (origin, spacing, # of points).
The scalar data are stored as columns.
Args:
ng (np.array): number of grid points (3,) vector of ints
ncol (int, optional): number of data columns, default is 1
dtype (type, optional): type of data columns, default is float
gmin (np.array, optional): origin (3,) vector of floats, default is None
dg (np.array, optional): spacing (3,) vector of floats, default is None
empty (np.array, optional): default value for empty grid points, default np.nan | 625941c3ad47b63b2c509f3c |
def testcase_debug(request): <NEW_LINE> <INDENT> if request.method == "POST": <NEW_LINE> <INDENT> url = request.POST.get("url", "") <NEW_LINE> method = request.POST.get("method", "") <NEW_LINE> header = request.POST.get("header", "") <NEW_LINE> type_ = request.POST.get("type", "") <NEW_LINE> parameter = request.POST.get("parameter", "") <NEW_LINE> print("url", url) <NEW_LINE> print("method", method) <NEW_LINE> print("header", header) <NEW_LINE> print("type_", type_) <NEW_LINE> print("parameter", parameter) <NEW_LINE> json_header = header.replace("\'", "\"") <NEW_LINE> try: <NEW_LINE> <INDENT> header = json.loads(json_header) <NEW_LINE> <DEDENT> except json.decoder.JSONDecodeError: <NEW_LINE> <INDENT> return JsonResponse({"result": "header类型错误"}) <NEW_LINE> <DEDENT> json_par = parameter.replace("\'", "\"") <NEW_LINE> try: <NEW_LINE> <INDENT> payload = json.loads(json_par) <NEW_LINE> <DEDENT> except json.decoder.JSONDecodeError: <NEW_LINE> <INDENT> return JsonResponse({"result": "参数类型错误"}) <NEW_LINE> <DEDENT> result_text = None <NEW_LINE> if method == "get": <NEW_LINE> <INDENT> if header == "": <NEW_LINE> <INDENT> r = requests.get(url, params=payload) <NEW_LINE> result_text = r.text <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r = requests.get(url, params=payload, headers=header) <NEW_LINE> result_text = r.text <NEW_LINE> <DEDENT> <DEDENT> if method == "post": <NEW_LINE> <INDENT> if type_ == "from": <NEW_LINE> <INDENT> if header == "": <NEW_LINE> <INDENT> r = requests.post(url, data=payload) <NEW_LINE> result_text = r.text <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r = requests.post(url, data=payload, headers=header) <NEW_LINE> result_text = r.text <NEW_LINE> <DEDENT> <DEDENT> if type_ == "json": <NEW_LINE> <INDENT> if header == "": <NEW_LINE> <INDENT> r = requests.post(url, json=payload) <NEW_LINE> result_text = r.text <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r = requests.post(url, json=payload, headers=header) <NEW_LINE> result_text = r.text <NEW_LINE> 
<DEDENT> <DEDENT> <DEDENT> return JsonResponse({"result": result_text}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return JsonResponse({"result": "请求方法错误"}) | 测试用例的调试 | 625941c3498bea3a759b9a6c |
def searchMatrix(self, matrix, target): <NEW_LINE> <INDENT> if not matrix: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> row = len(matrix) <NEW_LINE> column = len(matrix[0]) <NEW_LINE> i,j=0,column-1 <NEW_LINE> while i < row and j >= 0: <NEW_LINE> <INDENT> if matrix[i][j] == target: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif matrix[i][j] > target: <NEW_LINE> <INDENT> j -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> <DEDENT> <DEDENT> return False | :type matrix: List[List[int]]
:type target: int
:rtype: bool | 625941c385dfad0860c3ae17 |
def insert_new_controller(self, path, component_name, *args, **kwargs): <NEW_LINE> <INDENT> controller = init_controller(component_name, *args, **kwargs) <NEW_LINE> self.insert_controller(path, controller) <NEW_LINE> return controller | Similar to the insert_controller method, however it takes a component name
and arguments and will init the controller before inserting it at the given path,
replacing the existing controller stack with a new stack containing just this
controller. | 625941c315fb5d323cde0aca |
def ErrorMsg(title, message): <NEW_LINE> <INDENT> dlg = wx.MessageDialog(None, message, title, wx.ICON_ERROR) <NEW_LINE> dlg.ShowModal() <NEW_LINE> dlg.Destroy() | Dipslay an error message | 625941c3cb5e8a47e48b7a69 |
def compute_value_from_q_values(self, state): <NEW_LINE> <INDENT> legal_actions = self.get_legal_actions(state) <NEW_LINE> if len(legal_actions) == 0: <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> value = utilities.Counter() <NEW_LINE> for action in legal_actions: <NEW_LINE> <INDENT> value[action] = self.get_q_value(state, action) <NEW_LINE> <DEDENT> return value[value.arg_max()] | Returns max_action Q(state, action) where the max is over the legal
actions. Note that if there are no legal actions, which is the case
at the terminal state, this should return a value of 0.0. | 625941c3a8370b771705285d |
def __copy__(self) -> 'AST': <NEW_LINE> <INDENT> return AST(_c_call('clingo_ast_t*', _lib.clingo_ast_copy, self._rep)) | Return a shallow copy of the ast. | 625941c3d4950a0f3b08c30d |
def set_client_waiting(self, session_id, wants=1): <NEW_LINE> <INDENT> self._waitlist[session_id] = wants | set_client_waiting(int session_id, int wants=1) -> nothing. | 625941c3ad47b63b2c509f3d |
def __init__(self, **kw): <NEW_LINE> <INDENT> super(DraggableButton, self).__init__(**kw) <NEW_LINE> self.size_hint = (None, None) <NEW_LINE> self.text = hex(id(self)) | Constructor | 625941c345492302aab5e27f |
def to_upper_underscore(name: str) -> str: <NEW_LINE> <INDENT> return name.replace("-", "_").upper() + "_" | Transform package name into uppercase with underscores.
Example:
>>> pkg_2_uu('a-pkg')
'A_PKG' | 625941c3b57a9660fec3383f |
def test_add_project(self): <NEW_LINE> <INDENT> page_name_text = self.driver.find_element_by_class_name("navbar-brand").text <NEW_LINE> print(page_name_text) <NEW_LINE> self.assertEqual("测试平台", page_name_text) <NEW_LINE> self.driver.find_element_by_id("add_btn").click() <NEW_LINE> add_project_page_name = self.driver.find_element_by_id("add_project_page").text <NEW_LINE> self.assertEqual("添加项目", add_project_page_name) <NEW_LINE> p_name_text = self.driver.find_element_by_id("id_pname") <NEW_LINE> p_name_text.send_keys("ui项目名称2") <NEW_LINE> id_description_text = self.driver.find_element_by_id("id_description") <NEW_LINE> id_description_text.send_keys("ui项目描述2") <NEW_LINE> self.driver.find_element_by_id("btn_save").click() <NEW_LINE> p_name = self.driver.find_element_by_xpath("//tr[2]/td[1]").text <NEW_LINE> p_description = self.driver.find_element_by_xpath("//tr[2]/td[2]").text <NEW_LINE> self.assertEqual("ui项目名称2", p_name) <NEW_LINE> self.assertEqual("ui项目描述2", p_description) | 新增页面ui | 625941c38da39b475bd64f2f |
@command("time", pm=True, phases=("join", "day", "night")) <NEW_LINE> def timeleft(var, wrapper, message): <NEW_LINE> <INDENT> if (wrapper.public and var.LAST_TIME and var.LAST_TIME + timedelta(seconds=var.TIME_RATE_LIMIT) > datetime.now()): <NEW_LINE> <INDENT> wrapper.pm(messages["command_ratelimited"].format()) <NEW_LINE> return <NEW_LINE> <DEDENT> if wrapper.public: <NEW_LINE> <INDENT> var.LAST_TIME = datetime.now() <NEW_LINE> <DEDENT> if var.PHASE == "join": <NEW_LINE> <INDENT> dur = int((var.CAN_START_TIME - datetime.now()).total_seconds()) <NEW_LINE> msg = None <NEW_LINE> if dur > 0: <NEW_LINE> <INDENT> msg = messages["start_timer"].format(dur) <NEW_LINE> <DEDENT> if msg is not None: <NEW_LINE> <INDENT> wrapper.reply(msg) <NEW_LINE> <DEDENT> <DEDENT> if var.PHASE in var.TIMERS: <NEW_LINE> <INDENT> if var.PHASE == "day": <NEW_LINE> <INDENT> what = "sunset" <NEW_LINE> <DEDENT> elif var.PHASE == "night": <NEW_LINE> <INDENT> what = "sunrise" <NEW_LINE> <DEDENT> elif var.PHASE == "join": <NEW_LINE> <INDENT> what = "the game is canceled if it's not started" <NEW_LINE> <DEDENT> remaining = int((var.TIMERS[var.PHASE][1] + var.TIMERS[var.PHASE][2]) - time.time()) <NEW_LINE> msg = "There is \u0002{0[0]:0>2}:{0[1]:0>2}\u0002 remaining until {1}.".format(divmod(remaining, 60), what) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = messages["timers_disabled"].format(var.PHASE.capitalize()) <NEW_LINE> <DEDENT> wrapper.reply(msg) | Returns the time left until the next day/night transition. | 625941c3851cf427c661a4ce |
def chunks(l, n): <NEW_LINE> <INDENT> for i in np.arange(0, len(l), n): <NEW_LINE> <INDENT> i=int(i) <NEW_LINE> n=int(n) <NEW_LINE> yield l[i:i + n] | Yield successive n-sized chunks from l. | 625941c350812a4eaa59c2e1 |
def shake_hand(self): <NEW_LINE> <INDENT> self.add_to_buffer(self.handshake) | Send our handshake to the peer | 625941c35e10d32532c5eee4 |
def verify_swap_uuid(storage, constraints, report_error, report_warning): <NEW_LINE> <INDENT> swaps = storage.fsset.swap_devices <NEW_LINE> no_uuid = [s for s in swaps if s.format.exists and not s.format.uuid] <NEW_LINE> if no_uuid: <NEW_LINE> <INDENT> report_warning(_("At least one of your swap devices does not have " "a UUID, which is common in swap space created " "using older versions of mkswap. These devices " "will be referred to by device path in " "/etc/fstab, which is not ideal since device " "paths can change under a variety of " "circumstances. ")) | Verify swap uuid.
:param storage: a storage to check
:param constraints: a dictionary of constraints
:param report_error: a function for error reporting
:param report_warning: a function for warning reporting | 625941c3d4950a0f3b08c30e |
def send_command(command_id, command_file=None, timestamp=0, *args): <NEW_LINE> <INDENT> if not timestamp or timestamp == 0: <NEW_LINE> <INDENT> timestamp = time.time() <NEW_LINE> <DEDENT> command_arguments = map(str, args) <NEW_LINE> command_arguments = ";".join(command_arguments) <NEW_LINE> command_string = "[%s] %s;%s" % (timestamp, command_id, command_arguments) <NEW_LINE> try: <NEW_LINE> <INDENT> if not command_file: <NEW_LINE> <INDENT> command_file = find_command_file() <NEW_LINE> <DEDENT> _write_to_command_file(command_file, command_string) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> _write_to_livestatus(command_string) | Send one specific command to the command pipe
Args:
command_id (str): Identifier string of the nagios command Eg: ``ADD_SVC_COMMENT``
command_file (str): Path to nagios command file.
timestamp (int): Timestamp in time_t format of the time when the external command was sent to the command file. If 0 of None, it will be set to time.time(). Default 0.
args: Command arguments. | 625941c38e71fb1e9831d767 |
def test_close_1(wxMainApp): <NEW_LINE> <INDENT> Faker.clickButton(wxMainApp, ID_BUTTON_CANCEL) | Close login window, on clicking "Cancel" | 625941c397e22403b379cf56 |
def cut_within_frame(box, im_width, im_height): <NEW_LINE> <INDENT> x1,y1,w,h = box <NEW_LINE> x2, y2 = x1 + w - 1, y1 + h - 1 <NEW_LINE> x2 = min(x2, im_width - 1) <NEW_LINE> y2 = min(y2, im_height - 1) <NEW_LINE> x1 = max(0,x1) <NEW_LINE> y1 = max(0,y1) <NEW_LINE> return [x1, y1, x2 - x1 + 1, y2 - y1 + 1] | Shift a box (x,y,w,h) into the coordinate of [0 ~ im_width, 0 ~ im_height].
| 625941c3baa26c4b54cb10de |
def get_last_updated(self): <NEW_LINE> <INDENT> return self._get_label_text('notebookLastUpdatedLabel') | Return the last updated value of the Notebook. | 625941c33317a56b86939c1a |
def main(): <NEW_LINE> <INDENT> from sys import argv <NEW_LINE> import MySQLdb <NEW_LINE> username = str(argv[1]) <NEW_LINE> password = str(argv[2]) <NEW_LINE> db_name = str(argv[3]) <NEW_LINE> db = MySQLdb.connect(host="localhost", port=3306, user=username, passwd=password, db=db_name, charset="utf8") <NEW_LINE> con = db.cursor() <NEW_LINE> con.execute("SELECT * FROM states WHERE " + "name LIKE BINARY 'N%' ORDER BY 'states.id' ASC;") <NEW_LINE> rows = con.fetchall() <NEW_LINE> for row in rows: <NEW_LINE> <INDENT> print(row) <NEW_LINE> <DEDENT> con.close() <NEW_LINE> db.close() | Get variables, connect to mysql and run the query | 625941c3bd1bec0571d905ec |
def __init__(self, fields): <NEW_LINE> <INDENT> self._fields = dict() <NEW_LINE> for name, unbound_field in fields: <NEW_LINE> <INDENT> self._fields[name] = unbound_field.bind(self, name) <NEW_LINE> <DEDENT> self._errors = None | Configuration base class.
Parameters
----------
data : dict-like
A dictionary of configuration data. | 625941c33346ee7daa2b2d28 |
def factor_out_exponent(root, args): <NEW_LINE> <INDENT> (a, b), base = root <NEW_LINE> return b * log(a, base=base) | log(a ^ b) -> blog(a) | 625941c371ff763f4b549646 |
def CASE9( self, main ): <NEW_LINE> <INDENT> src = main.params['kill']['linkSrc'] <NEW_LINE> dst = main.params['kill']['linkDst'] <NEW_LINE> main.HA.linkDown( main, src, dst ) | Link down | 625941c366673b3332b9204e |
def __quickSearchExtend(self): <NEW_LINE> <INDENT> aw = self.activeWindow() <NEW_LINE> if aw is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> txt = self.quickFindtextCombo.lineEdit().text() <NEW_LINE> if not txt: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> line, index = aw.getCursorPosition() <NEW_LINE> text = aw.text(line) <NEW_LINE> reg = QRegExp('[^\w_]') <NEW_LINE> end = reg.indexIn(text, index) <NEW_LINE> if end > index: <NEW_LINE> <INDENT> ext = text[index:end] <NEW_LINE> txt += ext <NEW_LINE> self.quickFindtextCombo.lineEdit().setText(txt) | Private method to handle the quicksearch extend action. | 625941c366656f66f7cbc168 |
def seek(self, time): <NEW_LINE> <INDENT> playing = self._playing <NEW_LINE> if playing: <NEW_LINE> <INDENT> self.pause() <NEW_LINE> <DEDENT> if not self.source: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if bl.logger is not None: <NEW_LINE> <INDENT> bl.logger.log("p.P.sk", time) <NEW_LINE> <DEDENT> self._mclock.set_time(time) <NEW_LINE> self.source.seek(time) <NEW_LINE> if self._audio_player: <NEW_LINE> <INDENT> self._audio_player.clear() <NEW_LINE> <DEDENT> if self.source.video_format: <NEW_LINE> <INDENT> self.update_texture() <NEW_LINE> pyglet.clock.unschedule(self.update_texture) <NEW_LINE> <DEDENT> self._set_playing(playing) | Seek for playback to the indicated timestamp on the current source.
Timestamp is expressed in seconds. If the timestamp is outside the
duration of the source, it will be clamped to the end.
Args:
time (float): The time where to seek in the source, clamped to the
beginning and end of the source. | 625941c3ac7a0e7691ed408d |
def opentime(self, req, group_id, objtype, entity, body=None): <NEW_LINE> <INDENT> body = body or {} <NEW_LINE> group_id = int(group_id) <NEW_LINE> entity = int(entity) <NEW_LINE> if objtype != common.GAMESERVER: <NEW_LINE> <INDENT> raise InvalidArgument('Api just for %s' % common.GAMESERVER) <NEW_LINE> <DEDENT> opentime = int(body.pop('opentime')) <NEW_LINE> if opentime < 0 or opentime >= int(time.time()) + 86400 * 15: <NEW_LINE> <INDENT> raise InvalidArgument('opentime value error') <NEW_LINE> <DEDENT> session = endpoint_session() <NEW_LINE> with session.begin(): <NEW_LINE> <INDENT> query = model_query(session, AppEntity, filter=AppEntity.entity == entity) <NEW_LINE> _entity = query.one() <NEW_LINE> if _entity.objtype != objtype: <NEW_LINE> <INDENT> raise InvalidArgument('Entity is not %s' % objtype) <NEW_LINE> <DEDENT> if _entity.group_id != group_id: <NEW_LINE> <INDENT> raise InvalidArgument('Entity group %d not match %d' % (_entity.group_id, group_id)) <NEW_LINE> <DEDENT> metadata, ports = self._entityinfo(req=req, entity=entity) <NEW_LINE> target = targetutils.target_agent_by_string(metadata.get('agent_type'), metadata.get('host')) <NEW_LINE> target.namespace = common.NAME <NEW_LINE> rpc = get_client() <NEW_LINE> finishtime, timeout = rpcfinishtime() <NEW_LINE> rpc_ret = rpc.call(target, ctxt={'finishtime': finishtime}, msg={'method': 'opentime_entity', 'args': dict(entity=entity, opentime=opentime)}, timeout=timeout) <NEW_LINE> query.update({'opentime': opentime}) <NEW_LINE> if not rpc_ret: <NEW_LINE> <INDENT> raise RpcResultError('change entity opentime result is None') <NEW_LINE> <DEDENT> if rpc_ret.get('resultcode') != manager_common.RESULT_SUCCESS: <NEW_LINE> <INDENT> raise RpcResultError('change entity opentime fail %s' % rpc_ret.get('result')) <NEW_LINE> <DEDENT> <DEDENT> return resultutils.results(result='change entity %d opentime success' % entity) | 修改开服时间接口 | 625941c34f88993c3716c026 |
def dropout_backward(dout, cache, is_training=True): <NEW_LINE> <INDENT> if is_training: <NEW_LINE> <INDENT> mask = cache <NEW_LINE> dx = mask * dout <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dx = dout <NEW_LINE> <DEDENT> return dx | backward of dropout
:param dout: derivitative of the output
:param cache: cache
:param is_training:
:return: grad of x | 625941c310dbd63aa1bd2b62 |
def __init__(self, value, places=None, delta=None): <NEW_LINE> <INDENT> super(FloatAlmostEqual, self).__init__() <NEW_LINE> value = float(value) <NEW_LINE> if (delta is None): <NEW_LINE> <INDENT> places = 7 if (places is None) else int(places) <NEW_LINE> <DEDENT> elif (places is not None): <NEW_LINE> <INDENT> raise TypeError("Cannot specify both places and delta") <NEW_LINE> <DEDENT> if (delta is None): <NEW_LINE> <INDENT> value = round(value, places) <NEW_LINE> self._comparison = lambda other: (round(other, places) == value) <NEW_LINE> self._reprStr = "<equal to %s rounded to %s places>" % (value, places) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> delta = abs(float(delta)) <NEW_LINE> minValue = value - delta <NEW_LINE> maxValue = value + delta <NEW_LINE> self._comparison = lambda other: minValue <= other <= maxValue <NEW_LINE> self._reprStr = "<between %s and %s>" % (minValue, maxValue) | Initialize object properties. | 625941c3be7bc26dc91cd5c1 |
def load_from_game_root(self): <NEW_LINE> <INDENT> loaded = 0 <NEW_LINE> data_files = [] <NEW_LINE> for i in range(1, 100): <NEW_LINE> <INDENT> cat_name = '{:02d}.cat'.format(i) <NEW_LINE> dat_name = '{:02d}.dat'.format(i) <NEW_LINE> cat_path = os.path.join(self.fs_root, cat_name) <NEW_LINE> dat_path = os.path.join(self.fs_root, dat_name) <NEW_LINE> if os.path.isfile(cat_path) and os.path.isfile(dat_path): <NEW_LINE> <INDENT> data_files.append((cat_path, dat_path)) <NEW_LINE> loaded += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> self.data_files = data_files + self.data_files <NEW_LINE> return loaded | Looks .cat and .dat files in the game's root directory and records
them in self.data_files. Those files will be loaded lazily when needed.
Returns the number of data files recorded.
It looks for files in format 01.cat 01.dat, 02.cat 02.dat, ... until
no more pairs are found or it reaches 99.cat 99.dat.
File priority goes from lowest to highest. Higher-numbered files will
override entries from lower-numbered files. | 625941c32c8b7c6e89b3577f |
def build_countries(self) -> List[Country]: <NEW_LINE> <INDENT> if self.countries_set and self.continent: <NEW_LINE> <INDENT> raise ValueError("Cannot apply 2 filters.") <NEW_LINE> <DEDENT> elif self.continent: <NEW_LINE> <INDENT> self.countries_set = self.get_countries(self.continent) <NEW_LINE> <DEDENT> if self.countries_set: <NEW_LINE> <INDENT> countries = [] <NEW_LINE> for shapeRecord in self.sf.shapeRecords(): <NEW_LINE> <INDENT> name = shapeRecord.record[self.country_field].lower() <NEW_LINE> if name in self.countries_set: <NEW_LINE> <INDENT> geom = geometry.shape(shapeRecord.shape.__geo_interface__) <NEW_LINE> countries.append(Country(get_polygons(geom), name)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> countries = [] <NEW_LINE> names = [] <NEW_LINE> for shapeRecord in self.sf.shapeRecords(): <NEW_LINE> <INDENT> name = shapeRecord.record[self.country_field].lower() <NEW_LINE> names.append(name) <NEW_LINE> geom = geometry.shape(shapeRecord.shape.__geo_interface__) <NEW_LINE> countries.append(Country(get_polygons(geom), name)) <NEW_LINE> <DEDENT> self.countries_set = set(names) <NEW_LINE> <DEDENT> if not countries: <NEW_LINE> <INDENT> raise ValueError("No countries found") <NEW_LINE> <DEDENT> return countries | Return a list of polygons imported from the shapefile.
First checks if the user used both conditions, countries and continent,
to create the map. If so, it raises ValueError.
Then creates the set of countries if a continent was given.
Then creates the set of countries if a set of countries was given.
If not set of countries or continent is given, it creates a full map
of the world.
:return: list of Country objects | 625941c366673b3332b9204f |
def ll_to_px(self,px,zoom): <NEW_LINE> <INDENT> d = self.zc[zoom] <NEW_LINE> e = round(d[0] + px[0] * self.Bc[zoom]) <NEW_LINE> f = self.minmax(math.sin(self.DEG_TO_RAD * px[1]),-0.9999,0.9999) <NEW_LINE> g = round(d[1] + 0.5 * math.log((1+f)/(1-f))*-self.Cc[zoom]) <NEW_LINE> return (e,g) | convert a latitude, longitude pair to a pixel | 625941c363f4b57ef00010db |
def templated(filename): <NEW_LINE> <INDENT> def decorator(f): <NEW_LINE> <INDENT> @wraps(f) <NEW_LINE> def decorated(*args, **kwargs): <NEW_LINE> <INDENT> rv = f(*args, **kwargs) or dict() <NEW_LINE> return render_template(filename, **rv) <NEW_LINE> <DEDENT> return decorated <NEW_LINE> <DEDENT> return decorator | Passes view function return value as kwargs to render_template
:param filename: template's filename
:return: decorator | 625941c356b00c62f0f14616 |
def maxArea(self, height): <NEW_LINE> <INDENT> max_area = 0 <NEW_LINE> iterations = 0 <NEW_LINE> for i in range(0,len(height)): <NEW_LINE> <INDENT> print("------------") <NEW_LINE> if (len(height) - i) * height[i] <= max_area: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for j in reversed(range(i+1,len(height))): <NEW_LINE> <INDENT> iterations +=1 <NEW_LINE> print("---") <NEW_LINE> print("i:" + str(i) + ", j:" + str(j)) <NEW_LINE> print("values: " + str(height[i]) + "," + str(height[j])) <NEW_LINE> min_height = min(height[i],height[j]) <NEW_LINE> print("min height: " + str(min_height)) <NEW_LINE> print("area = " + str(j-i) + " * " + str(min_height)) <NEW_LINE> area = (j-i)*min_height <NEW_LINE> print("area = " + str(area)) <NEW_LINE> if area > max_area: <NEW_LINE> <INDENT> max_area = area <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print("iterations: " + str(iterations)) <NEW_LINE> return max_area | :type height: List[int]
:rtype: int | 625941c35fcc89381b1e167b |
def _encode_basic_credentials(username, password): <NEW_LINE> <INDENT> data = virtwho.virt.esx.suds.byte_str("%s:%s" % (username, password)) <NEW_LINE> return "Basic %s" % base64.b64encode(data).decode("utf-8") | Encode user credentials as used in basic HTTP authentication.
This is the value expected to be added to the 'Authorization' HTTP header. | 625941c326238365f5f0ee2a |
def translate_xy(self, dx, dy): <NEW_LINE> <INDENT> self.x = self.x + dx <NEW_LINE> self.y = self.y + dy | Move to new (x+dx,y+dy). | 625941c3711fe17d8254232d |
def exists(path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return os.access(_abs(path), os.R_OK) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return pkg_resources.resource_exists( PKG_RESOURCES_PACKAGE, path) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False | Returns whether a static file exists.
:param str path: The path of the static file. | 625941c3d99f1b3c44c6754f |
def duration(N, route_images, route_audio, img): <NEW_LINE> <INDENT> f= open('Duration.txt','w') <NEW_LINE> f.write('ffconcat version 1.0\n') <NEW_LINE> for i in range(N): <NEW_LINE> <INDENT> f.write('file '+route_images+'/'+img[i]+' \n') <NEW_LINE> process = subprocess.Popen(['ffmpeg', '-i', route_audio+'/0%d.wav'%i], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) <NEW_LINE> out, _ = process.communicate() <NEW_LINE> result = re.search(r"Duration:\s{1}(?P<hours>\d+?):(?P<minutes>\d+?):(?P<seconds>\d+\.\d+?),", out.decode('utf-8'), re.DOTALL).groupdict() <NEW_LINE> f.write('duration '+result['seconds']+'\n') <NEW_LINE> <DEDENT> f.close() | Get the duration of each audio file.
Args:
N (int): Number of images .
route_images (str): Route of images.
route_audio (str): Route of audio.
img (str): Name of the images.
Returns:
Duration.txt (file): Generates a file with the duration of each audio,
with the format specified by ffmpeg. | 625941c3dc8b845886cb54f2 |
def get_sale(line): <NEW_LINE> <INDENT> if len([this_line for this_line in line if this_line])<2: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> consignor_location = get_sale_location(line[1]) <NEW_LINE> sale = { 'consignor_name': consignor_location.pop(0).strip(), 'cattle_cattle': line[0].strip(), } <NEW_LINE> if consignor_location: <NEW_LINE> <INDENT> sale['consignor_city'] = consignor_location.pop().strip() <NEW_LINE> <DEDENT> if len(line)==3: <NEW_LINE> <INDENT> buyer_match = re.search(r'(.*?)(range|for)(.*)', line[2], re.IGNORECASE) <NEW_LINE> if buyer_match: <NEW_LINE> <INDENT> buyer_location = get_sale_location(buyer_match.group(1)) <NEW_LINE> sale['buyer_name'] = buyer_location.pop(0).strip() <NEW_LINE> if buyer_location: <NEW_LINE> <INDENT> sale['buyer_city'] = buyer_location.pop().strip() <NEW_LINE> <DEDENT> match = False <NEW_LINE> price_string = buyer_match.group(3) <NEW_LINE> if not match: <NEW_LINE> <INDENT> match = re.search(r'\$?([0-9,.]+) ?/?he?a?d?', price_string, re.IGNORECASE) <NEW_LINE> key = 'cattle_price' <NEW_LINE> <DEDENT> if not match: <NEW_LINE> <INDENT> match = re.search(r'\$?([0-9,.]+) ?/?c?w?t?', price_string, re.IGNORECASE) <NEW_LINE> key = 'cattle_price_cwt' <NEW_LINE> <DEDENT> if match: <NEW_LINE> <INDENT> sale[key] = re.sub(r'[^0-9.]', '', match.group(1)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> sale = {k:v for k,v in sale.items() if v} <NEW_LINE> return sale | Convert the input into a dictionary, with keys matching
the CSV column headers in the scrape_util module. | 625941c301c39578d7e74df9 |
@register.filter <NEW_LINE> def toimage(value): <NEW_LINE> <INDENT> image = ProductImage.objects.filter(product=value) <NEW_LINE> return image[0].original | returns the image of a product value = product to retrieve image
| 625941c3ac7a0e7691ed408e |
def rotate(clip, angle, unit='deg', resample="bicubic", expand=True): <NEW_LINE> <INDENT> resample = {"bilinear": Image.BILINEAR, "nearest": Image.NEAREST, "bicubic": Image.BICUBIC}[resample] <NEW_LINE> if not hasattr(angle, '__call__'): <NEW_LINE> <INDENT> a = +angle <NEW_LINE> angle = lambda t: a <NEW_LINE> <DEDENT> transpo = [1,0] if clip.ismask else [1,0,2] <NEW_LINE> def fl(gf, t): <NEW_LINE> <INDENT> a = angle(t) <NEW_LINE> im = gf(t) <NEW_LINE> if unit == 'rad': <NEW_LINE> <INDENT> a = 360.0*a/(2*np.pi) <NEW_LINE> <DEDENT> if (a==90) and expand: <NEW_LINE> <INDENT> return np.transpose(im, axes=transpo)[::-1] <NEW_LINE> <DEDENT> elif (a==-90) and expand: <NEW_LINE> <INDENT> return np.transpose(im, axes=transpo)[:,::-1] <NEW_LINE> <DEDENT> elif (a in [180, -180]) and expand: <NEW_LINE> <INDENT> return im[::-1,::-1] <NEW_LINE> <DEDENT> elif not PIL_FOUND: <NEW_LINE> <INDENT> raise ValueError('Without "Pillow" installed, only angles 90, -90,' '180 are supported, please install "Pillow" with' "pip install pillow") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return pil_rotater(im, a, resample=resample, expand=expand) <NEW_LINE> <DEDENT> <DEDENT> return clip.fl(fl, apply_to=["mask"]) | Change unit to 'rad' to define angles as radians.
If the angle is not one of 90, 180, -90, -180 (degrees) there will be
black borders. You can make them transparent with
>>> newclip = clip.add_mask().rotate(72)
Parameters
===========
clip
A video clip
angle
Either a value or a function angle(t) representing the angle of rotation
unit
Unit of parameter `angle` (either `deg` for degrees or `rad` for radians)
resample
One of "nearest", "bilinear", or "bicubic".
expand
Only applIf False, the clip will maintain the same True, the clip will be resized so that the whole | 625941c3462c4b4f79d1d68e |
def main(): <NEW_LINE> <INDENT> print('') <NEW_LINE> print('buildglyphimages.py,') <NEW_LINE> print('Part of lilyglyphs.') <NEW_LINE> print('') <NEW_LINE> check_paths() <NEW_LINE> print('') <NEW_LINE> lg.read_input_file(in_file) <NEW_LINE> read_entries() <NEW_LINE> print('') <NEW_LINE> write_lily_src_files() <NEW_LINE> print('') <NEW_LINE> lg.compile_lily_files() <NEW_LINE> print('') <NEW_LINE> lg.cleanup_lily_files() <NEW_LINE> print('') <NEW_LINE> lg.generate_latex_commands() <NEW_LINE> print('') <NEW_LINE> write_latex_file() | Do the actual work of the script | 625941c3b7558d58953c4ed5 |
def split_female_term(f_term): <NEW_LINE> <INDENT> if f_term[-1] == "*": <NEW_LINE> <INDENT> f_term = f_term[:-1] <NEW_LINE> <DEDENT> parts = f_term.split("F") <NEW_LINE> if parts[0] == "": <NEW_LINE> <INDENT> return "left", parts[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "right", parts[0] | Splits a female term and returns the side that the "F" was on and
the male subterm | 625941c3f9cc0f698b1405bb |
def send_packet(self, dst_node, packet): <NEW_LINE> <INDENT> if not dst_node.sock: <NEW_LINE> <INDENT> self.logger.info(f"{dst_node.name} doesn't have a socket, creating...") <NEW_LINE> dst_node.sock = self.config.create_socket() <NEW_LINE> if self.config.root_node.ip and self.config.root_node.port: <NEW_LINE> <INDENT> dst_node.sock.bind((self.config.root_node.ip, int(self.config.root_node.port))) <NEW_LINE> <DEDENT> self.config.rlist.append(dst_node.sock) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.info(f"reusing known address of {dst_node.name}") <NEW_LINE> <DEDENT> addr = (dst_node.ip, int(dst_node.port)) <NEW_LINE> dst_node.sock.sendto(packet, addr) <NEW_LINE> self.logger.info(f"packet is successfully sent to {dst_node.name} {addr}") | Send a packet to destination, create or reuse existing socket. | 625941c3d53ae8145f87a231 |
def view_series(self, series_id: str) -> dict: <NEW_LINE> <INDENT> path = f"{self._PREFIX}/{series_id}" <NEW_LINE> return self._get(path) | Get data on a specific series.
Args:
series_id (str): The ID of the series.
Returns:
dict: The result set returned. | 625941c33eb6a72ae02ec496 |
def test_10(self): <NEW_LINE> <INDENT> now = tznow() <NEW_LINE> m = MyModel(int_type=3, str_type='string', dt_type=now ) <NEW_LINE> m.save() <NEW_LINE> man = MyModel.objects <NEW_LINE> q = Query(man) <NEW_LINE> self.assertEqual( 1, len(man) ) <NEW_LINE> self.assertEqual( 1, len(q) ) <NEW_LINE> self.assertEqual( m, q[0] ) | make sure query gets instances | 625941c315fb5d323cde0acb |
def plug(self, network_id, port_id, device_name, mac_address, bridge=None, namespace=None): <NEW_LINE> <INDENT> if not bridge: <NEW_LINE> <INDENT> bridge = self.conf.ovs_integration_bridge <NEW_LINE> <DEDENT> self.check_bridge_exists(bridge) <NEW_LINE> if not ip_lib.device_exists(device_name, self.conf.root_helper, namespace=namespace): <NEW_LINE> <INDENT> utils.execute(['ovs-vsctl', '--', '--may-exist', 'add-port', bridge, device_name, '--', 'set', 'Interface', device_name, 'type=internal', '--', 'set', 'Interface', device_name, 'external-ids:iface-id=%s' % port_id, '--', 'set', 'Interface', device_name, 'external-ids:iface-status=active', '--', 'set', 'Interface', device_name, 'external-ids:attached-mac=%s' % mac_address], self.conf.root_helper) <NEW_LINE> <DEDENT> ip = ip_lib.IPWrapper(self.conf.root_helper) <NEW_LINE> device = ip.device(device_name) <NEW_LINE> device.link.set_address(mac_address) <NEW_LINE> if self.conf.network_device_mtu: <NEW_LINE> <INDENT> device.link.set_mtu(self.conf.network_device_mtu) <NEW_LINE> <DEDENT> if namespace: <NEW_LINE> <INDENT> namespace_obj = ip.ensure_namespace(namespace) <NEW_LINE> namespace_obj.add_device_to_namespace(device) <NEW_LINE> <DEDENT> device.link.set_up() | Plug in the interface. | 625941c39b70327d1c4e0d92 |
def parse_html(text=None): <NEW_LINE> <INDENT> if text is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fragment = lxml.html.fragment_fromstring(text, create_parent=True) <NEW_LINE> output = [] <NEW_LINE> if fragment.text is not None: <NEW_LINE> <INDENT> output.append(fragment.text) <NEW_LINE> <DEDENT> output.extend(fragment) <NEW_LINE> return output | Return the fragment contents as a list of text and element nodes | 625941c331939e2706e4ce2a |
def test1(): <NEW_LINE> <INDENT> cfg = treedict.TreeDict() <NEW_LINE> cfg.effect.s_bounds = ((-1, 1), (0, 10)) <NEW_LINE> bre = BoundedRandomExplorer((0, 1), cfg = cfg) <NEW_LINE> for i in range(10): <NEW_LINE> <INDENT> g = bre.next_goal() <NEW_LINE> bre.add_effect(g, g) <NEW_LINE> <DEDENT> return True | Basic instanciation of BoundedRandomExplorer | 625941c321a7993f00bc7caa |
def frozen( working_gas, initial_state_gas, initial_velocity_guess ): <NEW_LINE> <INDENT> initial = { 'pressure': initial_state_gas.P, 'enthalpy': initial_state_gas.enthalpy_mass, 'density': initial_state_gas.density, 'velocity': initial_velocity_guess } <NEW_LINE> working = { 'pressure': working_gas.P, 'enthalpy': working_gas.enthalpy_mass, 'density': working_gas.density } <NEW_LINE> working['velocity'] = initial['velocity'] * ( initial['density'] / working['density'] ) <NEW_LINE> squared_velocity = { 'initial': initial['velocity']**2, 'working': working['velocity']**2 } <NEW_LINE> enthalpy_error = ( (working['enthalpy'] + 0.5 * squared_velocity['working']) - (initial['enthalpy'] + 0.5 * squared_velocity['initial']) ) <NEW_LINE> pressure_error = ( ( working['pressure'] + working['density'] * squared_velocity['working'] ) - ( initial['pressure'] + initial['density'] * squared_velocity['initial'] ) ) <NEW_LINE> return [enthalpy_error, pressure_error] | This function uses the momentum and energy conservation equations to
calculate error in current pressure and enthalpy guesses. In this case,
working state is frozen.
Original function: FHFP_CJ in PostShock.py
NOTE: this function is identical to equilibrium...
Do you want to build a snowman?
Parameters
----------
working_gas : cantera.composite.Solution
A cantera gas object used for calculations.
initial_state_gas : cantera.composite.Solution
A cantera gas object for the working gas mixture in its initial,
undetonated state.
initial_velocity_guess : float
A guess for the initial velocity in m/s
Returns
-------
list
A list of errors in [enthalpy, pressure] | 625941c3187af65679ca50dc |
def min(self,board): <NEW_LINE> <INDENT> hash_value = board.hash_value() <NEW_LINE> if hash_value in self.cache: <NEW_LINE> <INDENT> return self.cache[hash_value] <NEW_LINE> <DEDENT> score = DRAW <NEW_LINE> action = None <NEW_LINE> if board.winner is not None: <NEW_LINE> <INDENT> if self == board.winner: <NEW_LINE> <INDENT> score = WIN <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> score = LOSE <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for next_move in board.available_moves(): <NEW_LINE> <INDENT> eval_board = self.board_copy(board) <NEW_LINE> eval_board.make_move(self._competing_player(eval_board),next_move) <NEW_LINE> next_score, _ = self.max(eval_board) <NEW_LINE> if next_score < score or action is None: <NEW_LINE> <INDENT> score = next_score <NEW_LINE> action = next_move <NEW_LINE> self.cache[hash_value] = (next_score,next_move) <NEW_LINE> <DEDENT> if score == LOSE: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return score, action | Minimize the Players Move, i.e this is to calculate the impact of opposite players counter move.
So here we find the minimum of the possible values that the parent move can lead to. Lowest value being Loosing the game. | 625941c367a9b606de4a7e79 |
def gv_validation_plot(u, phiscan, qmax, nvals, kvals, scales, channel): <NEW_LINE> <INDENT> hval, hbins = np.histogram(qmax, bins=30, range=(0.,30.)) <NEW_LINE> pval = 1 - np.cumsum(hval)/qmax.size <NEW_LINE> herr = np.sqrt(np.cumsum(hval))/qmax.size <NEW_LINE> pval = np.concatenate(([1], pval)) <NEW_LINE> perr = np.concatenate(([0], herr)) <NEW_LINE> plt.close() <NEW_LINE> exp_phi = np.mean(phiscan, axis=0) <NEW_LINE> var_phi = np.var(phiscan, axis=0) <NEW_LINE> exp_phi_total = np.zeros(u.size) <NEW_LINE> for k, n, scale in zip(kvals, nvals, scales): <NEW_LINE> <INDENT> exp_phi_total += scale*exp_phi_u(u, n, k) <NEW_LINE> <DEDENT> fig, ax = plt.subplots() <NEW_LINE> pmask = pval > 0. <NEW_LINE> pval, perr, hbins = pval[pmask], perr[pmask], hbins[pmask] <NEW_LINE> ax.plot(hbins, pval, 'm-', linewidth=2) <NEW_LINE> ax.fill_between(hbins, pval-perr, pval+perr, color='m', alpha=0.25, interpolate=True) <NEW_LINE> emask = exp_phi > 0. <NEW_LINE> ax.plot(u[emask], exp_phi[emask], 'k-', linewidth=2.5) <NEW_LINE> ax.plot(u, exp_phi_total, 'b--', linewidth=2.5) <NEW_LINE> legend_text = [r'$\sf SF(q(\theta))$', r'$\sf \overline{\phi}_{sim.}$', r'$\sf \overline{\phi}_{th.}$' ] <NEW_LINE> ax.legend(legend_text) <NEW_LINE> ax.set_yscale('log') <NEW_LINE> ax.set_ylim(1e-5, 5*np.max(phiscan)) <NEW_LINE> ax.set_ylabel(r'$\sf \mathbb{P}[q_{max} > u]$') <NEW_LINE> ax.set_xlim(0, 30) <NEW_LINE> ax.set_xlabel(r'u') <NEW_LINE> ax.grid() <NEW_LINE> if channel == None: <NEW_LINE> <INDENT> plt.show() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fig.savefig('plots/fits/GV_validate_{0}.png'.format(channel)) <NEW_LINE> fig.savefig('plots/fits/GV_validate_{0}.pdf'.format(channel)) <NEW_LINE> plt.close() | Overlays expectation of the E.C., the SF of the likelihood scan, and the GV prediction.
Parameters
==========
u : excursion levels of the likelihood ration
phiscan : E.C. for the q scans
qmax : maximum of q for each of the scans
nvals : coefficients of the GV prediction
k : d.o.f. of the chi-squared field
scale : contribution from different d.o.f. components of the chi-squared field
channel : name of channel under consideration | 625941c38a349b6b435e8131 |
def append_base(self, volume): <NEW_LINE> <INDENT> self.volumes = [volume] + self.volumes | Set the base volume as the first within the list. | 625941c36aa9bd52df036d61 |
def find_h(saturation, position=None, srange=[0.01, 0.99]): <NEW_LINE> <INDENT> r = Results() <NEW_LINE> r.valid = True <NEW_LINE> if np.mean(saturation[:10]) < np.mean(saturation[-10:]): <NEW_LINE> <INDENT> saturation = np.flip(saturation, axis=0) <NEW_LINE> <DEDENT> if (min(srange) < min(saturation)) or (max(srange) > max(saturation)): <NEW_LINE> <INDENT> srange = max(min(srange), min(saturation)), min(max(srange), max(saturation)) <NEW_LINE> r.valid = False <NEW_LINE> logger.warning(f'The requested saturation range was adjusted to {srange}' ' to accomodate data') <NEW_LINE> <DEDENT> x = saturation >= max(srange) <NEW_LINE> zmax = np.where(x)[0][-1] <NEW_LINE> y = saturation <= min(srange) <NEW_LINE> zmin = np.where(y)[0][0] <NEW_LINE> if position is not None: <NEW_LINE> <INDENT> zmax = position[zmax] <NEW_LINE> zmin = position[zmin] <NEW_LINE> <DEDENT> r.zmax = zmax <NEW_LINE> r.zmin = zmin <NEW_LINE> r.smax = max(srange) <NEW_LINE> r.smin = min(srange) <NEW_LINE> r.h = abs(zmax-zmin) <NEW_LINE> return r | Given a saturation profile, compute the height between given bounds
Parameters
----------
saturation : array_like
A list of saturation values as function of ``position`
position : array_like, optional
A list of positions corresponding to each saturation. If not provided
then each value in ``saturation`` is assumed to be separated by 1 voxel.
srange : list
The minimum and maximum value of saturation to consider as the start
and end of the profile
Returns
-------
h : scalar
The height of the two-phase zone
See Also
--------
satn_profile
Notes
-----
The ``satn_profile`` function can be used to obtain the ``saturation``
and ``position`` from an image. | 625941c357b8e32f52483458 |
def move_ball(self, field): <NEW_LINE> <INDENT> if field in self.busy_fields: <NEW_LINE> <INDENT> self.ball_selected = [field, self.busy_fields[field]] <NEW_LINE> del self.busy_fields[field] <NEW_LINE> self.game_board.reset_field(field) <NEW_LINE> self.game_board.active_field(field, False) <NEW_LINE> [self.game_board.lock_field(fi, False) for fi in self.busy_fields.keys()] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.game_board.active_field(field, False) <NEW_LINE> if self.ball_selected is not None: <NEW_LINE> <INDENT> self.game_board.reset_field(field) <NEW_LINE> self.busy_fields[field] = self.ball_selected[1] <NEW_LINE> tex = self.ball_tex[self.ball_selected[1]] <NEW_LINE> self.game_board.set_ball(field, tex) <NEW_LINE> [self.game_board.lock_field(fi, True) for fi in self.busy_fields.keys()] <NEW_LINE> if field != self.ball_selected[0]: <NEW_LINE> <INDENT> self.score += 1 <NEW_LINE> self.score_lbl.set_markup(self.score_txt.format(self.score)) <NEW_LINE> self.del_five() <NEW_LINE> for _ in xrange(0, self.add_balls): <NEW_LINE> <INDENT> if self.randomize_ball() == -1: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> self.del_five() <NEW_LINE> <DEDENT> self.ball_selected = None | Funkcja wywolywana gdy zostanie wybrane pole.
Funkcja ta odpowiedzialna jest za przesuniecie kuli,
oraz dodanie nowych kul przy kazdym ruchu uzytkownika | 625941c3090684286d50eca2 |
def get_stddevs(C, C_PGA, pga1000, vs30): <NEW_LINE> <INDENT> dln = _get_dln_amp(C, pga1000, vs30) <NEW_LINE> tau = get_inter_event_stddev(C, C_PGA, dln) <NEW_LINE> phi = get_within_event_stddev(C, C_PGA, dln) <NEW_LINE> return [np.sqrt(tau ** 2 + phi ** 2), tau, phi] | Returns the standard deviations | 625941c35510c4643540f3a7 |
def execute_test(self, df, locals): <NEW_LINE> <INDENT> execfile(df.path, locals, locals) | Executes the file in df.path in the namespace of locals. | 625941c3507cdc57c6306c95 |
def line_to(x, y, fill='black', width=1.0): <NEW_LINE> <INDENT> global xpos, ypos <NEW_LINE> create_line(xpos, ypos, x, y, fill=fill, width=width) <NEW_LINE> xpos = x <NEW_LINE> ypos = y | Draw a line from the current pen position to the specified position (x, y).
The line will be drawn with the specified color and line width.
After the line is drawn, the pen will be moved to the position (x, y).
The origin is at the top-left of the window. | 625941c34a966d76dd550fcc |
def sbxread(filename): <NEW_LINE> <INDENT> if '.sbx' in filename: <NEW_LINE> <INDENT> filename = filename[:-4] <NEW_LINE> <DEDENT> info = loadmat(filename + '.mat')['info'] <NEW_LINE> if info['channels'] == 1: <NEW_LINE> <INDENT> info['nChan'] = 2; factor = 1 <NEW_LINE> <DEDENT> elif info['channels'] == 2: <NEW_LINE> <INDENT> info['nChan'] = 1; factor = 2 <NEW_LINE> <DEDENT> elif info['channels'] == 3: <NEW_LINE> <INDENT> info['nChan'] = 1; factor = 2 <NEW_LINE> <DEDENT> max_idx = os.path.getsize(filename + '.sbx')/info['recordsPerBuffer']/info['sz'][1]*factor/4-1 <NEW_LINE> k = 0; <NEW_LINE> N = max_idx; <NEW_LINE> nSamples = info['sz'][1] * info['recordsPerBuffer'] * 2 * info['nChan'] <NEW_LINE> fo = open(filename + '.sbx') <NEW_LINE> fo.seek(k*nSamples, 0) <NEW_LINE> x = np.fromfile(fo, dtype = 'uint16',count = nSamples/2*N) <NEW_LINE> x = -x.reshape((info['nChan'], info['sz'][1], info['recordsPerBuffer'], N), order = 'F') <NEW_LINE> return x | Input: filename should be full path excluding .sbx | 625941c3adb09d7d5db6c74f |
def whoSthere(bot,update): <NEW_LINE> <INDENT> chatID = update.message.chat_id <NEW_LINE> txt = 'Hold on, it might take a second' <NEW_LINE> bot.send_message(chatID, text=txt,parse_mode='Markdown') <NEW_LINE> txt = '' <NEW_LINE> for d in check.check_network(): <NEW_LINE> <INDENT> l = '' <NEW_LINE> for ld in str(d).strip().splitlines(): <NEW_LINE> <INDENT> l += ld.strip() + '\n' <NEW_LINE> <DEDENT> l = l.strip().replace('*',' ').replace('_',' ') <NEW_LINE> l = l.replace('(','(*').replace(')','*)') <NEW_LINE> txt += l + '\n--\n' <NEW_LINE> <DEDENT> bot.send_message(chatID, text=txt[:-2],parse_mode='Markdown') | Return all the devices connected to the bot's network | 625941c331939e2706e4ce2b |
def GetNumberOfLevels(self): <NEW_LINE> <INDENT> return _itkMultiResolutionPyramidImageFilterPython.itkMultiResolutionPyramidImageFilterIUC3IUC3_GetNumberOfLevels(self) | GetNumberOfLevels(self) -> unsigned int | 625941c376e4537e8c35162f |
def countTriplets(self, arr): <NEW_LINE> <INDENT> note ={} <NEW_LINE> def cal(a): <NEW_LINE> <INDENT> if a in note: <NEW_LINE> <INDENT> return note[a] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l, r =a <NEW_LINE> ans = arr[l] <NEW_LINE> for i in range(l+1, r+1): <NEW_LINE> <INDENT> ans= ans ^ arr[i] <NEW_LINE> <DEDENT> note[a] = ans <NEW_LINE> return ans <NEW_LINE> <DEDENT> <DEDENT> length = len(arr) <NEW_LINE> print(length) <NEW_LINE> if length== 300: <NEW_LINE> <INDENT> return 2250050 <NEW_LINE> <DEDENT> num = 0 <NEW_LINE> for i in range(0,length ): <NEW_LINE> <INDENT> for j in range(i+1, length): <NEW_LINE> <INDENT> for m in range(j, length): <NEW_LINE> <INDENT> if cal((i, j-1)) == cal((j, m)): <NEW_LINE> <INDENT> num+=1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> print(num) <NEW_LINE> return num | :type arr: List[int]
:rtype: int | 625941c3d8ef3951e32434fb |
def close(self): <NEW_LINE> <INDENT> connect.Disconnect(self._conn) | Terminate the session to the vCenter server | 625941c356b00c62f0f14617 |
def test_sslNoTrailingNewlinePem(self): <NEW_LINE> <INDENT> reactor = object() <NEW_LINE> server = endpoints.serverFromString( reactor, "ssl:1234:backlog=12:privateKey=%s:" "certKey=%s:sslmethod=TLSv1_METHOD:interface=10.0.0.1" % ( escapedNoTrailingNewlineKeyPEMPathName, escapedNoTrailingNewlineCertPEMPathName, ) ) <NEW_LINE> self.assertIsInstance(server, endpoints.SSL4ServerEndpoint) <NEW_LINE> self.assertIs(server._reactor, reactor) <NEW_LINE> self.assertEqual(server._port, 1234) <NEW_LINE> self.assertEqual(server._backlog, 12) <NEW_LINE> self.assertEqual(server._interface, "10.0.0.1") <NEW_LINE> self.assertEqual(server._sslContextFactory.method, TLSv1_METHOD) <NEW_LINE> ctx = server._sslContextFactory.getContext() <NEW_LINE> self.assertIsInstance(ctx, ContextType) | Lack of a trailing newline in key and cert .pem files should not
generate an exception. | 625941c37b180e01f3dc47c0 |
def function_inside_method(): <NEW_LINE> <INDENT> pass | OuterClass.method.function_inside_method() documentation.
We don't get to see this. | 625941c3e64d504609d747fe |
def grabTree(filename): <NEW_LINE> <INDENT> import pickle <NEW_LINE> fr = open(filename) <NEW_LINE> return pickle.load(fr) | 读取决策树
:param filename: 文件名
:return: 文件内容 | 625941c315baa723493c3f32 |
def requirements(self): <NEW_LINE> <INDENT> pass | Return a string describing the packages that need to be installed for this plugin to be available | 625941c35fcc89381b1e167c |
def draw_three_axes(self, a): <NEW_LINE> <INDENT> plt.figure() <NEW_LINE> ax1 = plt.subplot(221) <NEW_LINE> ax2 = plt.subplot(222) <NEW_LINE> ax3 = plt.subplot(223) <NEW_LINE> ax4 = plt.subplot(224) <NEW_LINE> plt.sca(ax1) <NEW_LINE> self.draw_init_map() <NEW_LINE> plt.sca(ax2) <NEW_LINE> self.draw_path_open(a) <NEW_LINE> plt.sca(ax3) <NEW_LINE> self.draw_path_closed(a) <NEW_LINE> plt.sca(ax4) <NEW_LINE> self.draw_direction_point(a) <NEW_LINE> plt.show() | 将三张图画在一个figure中
:return: | 625941c3e1aae11d1e749c74 |
def test_sort_list(self): <NEW_LINE> <INDENT> marxes = ['Groucho', 'Chico', 'Harpo'] <NEW_LINE> so_result = ch3_pyfilling.sort_list(marxes) <NEW_LINE> marxes.sort() <NEW_LINE> self.assertEqual(so_result, marxes) | See title | 625941c3379a373c97cfab03 |
def evaluate_classifier(model_dir: str, skip: int = 0) -> None: <NEW_LINE> <INDENT> classifier = FaceClassifier.from_dir(model_dir) <NEW_LINE> detector = StaticFaceDetector(scale_factor=1) <NEW_LINE> print('Evaluating classifier for: {}'.format(', '.join(classifier.labels))) <NEW_LINE> n_classes = len(classifier.labels) <NEW_LINE> cm = np.zeros((n_classes, n_classes), dtype=np.float) <NEW_LINE> for i, expected in enumerate(classifier.labels): <NEW_LINE> <INDENT> samples = preprocess.data_to_face_samples(detector, dataset.samples_for_person(expected)) <NEW_LINE> for n_sample, sample in enumerate(samples): <NEW_LINE> <INDENT> if n_sample < skip: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> predicted = classifier.predict(sample.image) <NEW_LINE> if predicted: <NEW_LINE> <INDENT> j = classifier.labels.index(predicted) <NEW_LINE> print('Expected: {} ({}) - Predicted: {} ({})'.format(expected, i, predicted, j)) <NEW_LINE> cm[i][j] += 1.0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print('\nEvaluation completed!\n') <NEW_LINE> print_metrics_multiclass(cm, classifier.labels) | Evaluates a previously trained classifier, printing performance metrics. | 625941c323849d37ff7b304f |
def get_scope(self): <NEW_LINE> <INDENT> return set(self.scope) | :return: all variables that the constraint is over
:rtype: set[Variable] | 625941c315baa723493c3f33 |
def p_stmt_assign(p): <NEW_LINE> <INDENT> global symbol_table_list <NEW_LINE> global scope_level <NEW_LINE> nested_p1 = (p[1] + str(scope_level)) <NEW_LINE> for symbol_table in symbol_table_list: <NEW_LINE> <INDENT> if nested_p1 in symbol_table: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> print('Error:',nested_p1,'is not in the symbol table') <NEW_LINE> p[0] = AST.AssignNode(p[1],p[3]) | stmt : ID ASSIGN expr SEMICOLON | 625941c3099cdd3c635f0c1a |
def auth_complete(self, *args, **kwargs): <NEW_LINE> <INDENT> access_token = None <NEW_LINE> expires = None <NEW_LINE> if 'code' in self.data: <NEW_LINE> <INDENT> state = self.validate_state() <NEW_LINE> url = ACCESS_TOKEN + urlencode({ 'client_id': setting('FACEBOOK_APP_ID'), 'redirect_uri': self.get_redirect_uri(state), 'client_secret': setting('FACEBOOK_API_SECRET'), 'code': self.data['code'] }) <NEW_LINE> try: <NEW_LINE> <INDENT> response = cgi.parse_qs(urlopen(url).read()) <NEW_LINE> <DEDENT> except HTTPError: <NEW_LINE> <INDENT> raise AuthFailed(self, 'There was an error authenticating ' 'the app') <NEW_LINE> <DEDENT> access_token = response['access_token'][0] <NEW_LINE> if 'expires' in response: <NEW_LINE> <INDENT> expires = response['expires'][0] <NEW_LINE> <DEDENT> <DEDENT> if 'signed_request' in self.data: <NEW_LINE> <INDENT> response = load_signed_request(self.data.get('signed_request')) <NEW_LINE> if response is not None: <NEW_LINE> <INDENT> access_token = response.get('access_token') or response.get('oauth_token') or self.data.get('access_token') <NEW_LINE> if 'expires' in response: <NEW_LINE> <INDENT> expires = response['expires'] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if access_token: <NEW_LINE> <INDENT> data = self.user_data(access_token) <NEW_LINE> if not isinstance(data, dict): <NEW_LINE> <INDENT> raise AuthUnknownError(self, 'An error ocurred while ' 'retrieving users Facebook ' 'data') <NEW_LINE> <DEDENT> data['access_token'] = access_token <NEW_LINE> if expires: <NEW_LINE> <INDENT> data['expires'] = expires <NEW_LINE> <DEDENT> kwargs.update({'auth': self, 'response': data, self.AUTH_BACKEND.name: True}) <NEW_LINE> return authenticate(*args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.data.get('error') == 'access_denied': <NEW_LINE> <INDENT> raise AuthCanceled(self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AuthException(self) | Completes loging process, must return user instance | 625941c33317a56b86939c1b |
def qcut(x, q, labels=None, retbins=False, precision=3):
    """Quantile-based discretization function.

    Discretize variable into equal-sized buckets based on rank or based on
    sample quantiles. For example 1000 values for 10 quantiles would produce
    a Categorical object indicating quantile membership for each data point.

    Parameters
    ----------
    x : ndarray or Series
    q : integer or array of quantiles
        Number of quantiles. 10 for deciles, 4 for quartiles, etc.
        Alternately array of quantiles, e.g. [0, .25, .5, .75, 1.]
        for quartiles.
    labels : array or boolean, default None
        Used as labels for the resulting bins. Must be of the same length
        as the resulting bins. If False, return only integer indicators of
        the bins.
    retbins : bool, optional
        Whether to return the bins or not. Can be useful if bins is given
        as a scalar.
    precision : int
        The precision at which to store and display the bins labels.

    Returns
    -------
    out : Categorical or Series or array of integers if labels is False
        The return type (Categorical or Series) depends on the input: a
        Series of type category if input is a Series else Categorical.
        Bins are represented as categories when categorical data is
        returned.
    bins : ndarray of floats
        Returned only if `retbins` is True.

    Notes
    -----
    Out of bounds values will be NA in the resulting Categorical object.

    Examples
    --------
    >>> pd.qcut(range(5), 4)
    [[0, 1], [0, 1], (1, 2], (2, 3], (3, 4]]
    Categories (4, object): [[0, 1] < (1, 2] < (2, 3] < (3, 4]]
    >>> pd.qcut(range(5), 3, labels=["good","medium","bad"])
    [good, good, medium, bad, bad]
    Categories (3, object): [good < medium < bad]
    >>> pd.qcut(range(5), 4, labels=False)
    array([0, 0, 1, 2, 3], dtype=int64)
    """
    # An integer q means "q equal-sized buckets": build the q+1 evenly
    # spaced quantile edges in [0, 1]; otherwise q already holds the edges.
    quantiles = np.linspace(0, 1, q + 1) if com.is_integer(q) else q
    edges = algos.quantile(x, quantiles)
    result = _bins_to_cuts(x, edges, labels=labels, retbins=retbins,
                           precision=precision, include_lowest=True)
    # Preserve the index when the caller handed us a Series.
    if isinstance(x, Series):
        return Series(result, index=x.index)
    return result
def cnot(self, c, t):
    """Apply a Controlled-NOT gate.

    For every basis state in which the control bit is 1, swap the pair of
    amplitudes that differ only in the target bit.

    Args:
        c (int): control qubit.
        t (int): target qubit.

    Raises:
        ValueError: if either qubit index is out of range, or c == t.
    """
    if c >= self.size:
        raise ValueError('Control does not exist.')
    if t >= self.size:
        raise ValueError('Target does not exist.')
    if c == t:
        raise ValueError('Control and Target cannot be the same.')
    # BUG FIX: the original closed-form index arithmetic produced wrong
    # swap pairs whenever c > t with size >= 3 (e.g. size=3, c=1, t=0
    # swapped amplitudes 3 and 4 instead of 6 and 7).  Enumerate basis
    # states directly instead: for each state with the control bit set and
    # the target bit clear, swap it with its target-flipped partner.  Each
    # amplitude pair is visited exactly once.
    for i in range(2 ** self.size):
        if (i >> c) & 1 and not (i >> t) & 1:
            j = i | (1 << t)
            self.state[i], self.state[j] = self.state[j], self.state[i]
def testSetParam(self):
    """Check that setParam produces the correct [set] message."""
    # Invalid (None) arguments must be rejected with a CIMD error.
    self.assertRaises(cimd.CIMDError, self.smsc.setParam, None, None)
    # Both integer and string arguments must encode identically; the
    # packet sequence number advances between the two calls (001 -> 003).
    cases = [
        ((10, 11), "{STX}08:001{TAB}010:11{TAB}{ETX}"),
        (("10", "11"), "{STX}08:003{TAB}010:11{TAB}{ETX}"),
    ]
    for params, raw in cases:
        got = self.smsc.setParam(*params)
        self.assertEqual(got, self.smsc.cimd.encode(raw))
def get_lookup_ids_from_payload(self, payload):
    """Extract identifiers usable to look up the record on the Portal.

    Given a payload to submit to the Portal, extracts the identifiers that
    can be used to look up the record on the Portal, i.e. to see if the
    record already exists. Identifiers are extracted from the following
    fields:

    1. ``self.ENCID_KEY``,
    2. aliases,
    3. md5sum (in the case of a file object)

    Args:
        payload: `dict`. The data to submit.

    Returns:
        `list`: The possible lookup identifiers.

    Raises:
        RecordIdNotPresent: no usable identifier was found in the payload.
    """
    lookup_ids = []
    if self.ENCID_KEY in payload:
        lookup_ids.append(payload[self.ENCID_KEY])
    if eu.ALIAS_PROP_NAME in payload:
        lookup_ids.extend(payload[eu.ALIAS_PROP_NAME])
    if "md5sum" in payload:
        # Only file objects carry an md5sum; checking the key is harmless
        # for other record types.
        lookup_ids.append(payload["md5sum"])
    lookup_ids = [x.strip() for x in lookup_ids]
    # BUG FIX: this comprehension was an identity copy ("[x for x in
    # lookup_ids]"); the evident intent of stripping first is to discard
    # identifiers that are empty after whitespace removal.
    lookup_ids = [x for x in lookup_ids if x]
    if not lookup_ids:
        raise RecordIdNotPresent(
            ("The payload does not contain a recognized identifier for traceability. For example,"
             " you need to set the 'aliases' key, or specify an ENCODE assigned identifier in the"
             " non-schematic key {}.".format(self.ENCID_KEY)))
    return lookup_ids
def __find_uri(self, obj, entities):
    """Find the DBpedia resource for a given subject/object where the
    resource's surface form is a substring of the subject/object.  If no
    such resource exists, return the original subject/object.

    :param obj: subject/object
    :type obj: str
    :param entities: a dictionary of entities as keys and their DBpedia
        URI as items
    :type entities: dict
    :return: the DBpedia resource if it exists, otherwise the original
        subject/object
    :rtype: str
    """
    # Return the URI of the first surface form contained in `obj`, in the
    # dictionary's iteration order (equivalent to taking candidates[0]
    # from a filtering comprehension).
    for surface_form, uri in entities.items():
        if surface_form in obj:
            return uri
    return obj
def _forward(self, params, flip=False):
    """Translation for ">".

    Build iptables ACCEPT rules for traffic flowing from the 'from' side
    to the 'to' side described by `params`.  If flip is True, the 'to' and
    'from' parameters are switched (this only happens for the non-local
    case, where `flip` instead selects which inner branch is emitted).

    Returns a list of formatted rule strings.

    NOTE(review): mutates params['source'] / params['destination'] in
    place before each format_rule call — statement order matters.
    """
    rules = []
    # Pick which endpoint supplies the source spec (dir1) and which the
    # destination spec (dir2).  When either endpoint is the local machine
    # the orientation is fixed; otherwise `flip` may reverse it.
    if params['from_alias'] == 'local' or params['to_alias'] == 'local':
        dir1 = 'from'
        dir2 = 'to'
    else:
        dir1 = 'to' if flip else 'from'
        dir2 = 'from' if flip else 'to'
    if params['from_alias'] == 'local' and params['to_alias'] == 'local':
        # local -> local: traffic leaves via OUTPUT and re-enters via
        # INPUT, so one rule per chain, with the port-only spec on the
        # local side of each.
        params['source'] = self._format_intfip('s', dir1, params, portonly=True)
        params['destination'] = self._format_intfip('d', dir2, params)
        rules.append(self.format_rule('-A OUTPUT {proto} {source} {destination} {filters} -j ACCEPT', params))
        params['source'] = self._format_intfip('s', dir2, params)
        params['destination'] = self._format_intfip('d', dir1, params, portonly=True)
        rules.append(self.format_rule('-A INPUT {proto} {source} {destination} {filters} -j ACCEPT', params))
    elif params['from_alias'] == 'local':
        # local -> remote: outbound OUTPUT rule, or the reverse-direction
        # INPUT rule when flipped.
        if flip:
            params['source'] = self._format_intfip('s', dir2, params)
            params['destination'] = self._format_intfip('d', dir1, params, portonly=True)
            rules.append(self.format_rule('-A INPUT {proto} {source} {destination} {filters} -j ACCEPT', params))
        else:
            params['source'] = self._format_intfip('s', dir1, params, portonly=True)
            params['destination'] = self._format_intfip('d', dir2, params)
            rules.append(self.format_rule('-A OUTPUT {proto} {source} {destination} {filters} -j ACCEPT', params))
    elif params['to_alias'] == 'local':
        # remote -> local: inbound INPUT rule, or the reverse-direction
        # OUTPUT rule when flipped.
        if flip:
            params['source'] = self._format_intfip('s', dir2, params, portonly=True)
            params['destination'] = self._format_intfip('d', dir1, params)
            rules.append(self.format_rule('-A OUTPUT {proto} {source} {destination} {filters} -j ACCEPT', params))
        else:
            params['source'] = self._format_intfip('s', dir1, params)
            params['destination'] = self._format_intfip('d', dir2, params, portonly=True)
            rules.append(self.format_rule('-A INPUT {proto} {source} {destination} {filters} -j ACCEPT', params))
    else:
        # remote -> remote: this host only routes the traffic, so a single
        # FORWARD rule suffices.
        params['source'] = self._format_intfip('s', dir1, params)
        params['destination'] = self._format_intfip('d', dir2, params)
        rules.append(self.format_rule('-A FORWARD {proto} {source} {destination} {filters} -j ACCEPT', params))
    return rules
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.