Columns: language (string, 2 values: python/java) · func_code_string (string, 63 to 466k characters)
python
def clear_bp(cls, ctx, register):
    """
    Clears a hardware breakpoint.

    @see: find_slot, set_bp

    @type  ctx: dict( str S{->} int )
    @param ctx: Thread context dictionary.

    @type  register: int
    @param register: Slot (debug register) for hardware breakpoint.
    """
    ctx['Dr7'] &= cls.clearMask[register]
    ctx['Dr%d' % register] = 0
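A minimal sketch of the mask logic above. The clearMask table itself is not shown in this sample, so the reconstruction below is an assumption based on the standard x86 DR7 layout (two enable bits, local and global, per slot); it ignores the R/W and LEN fields in the upper DR7 bits:

# Hypothetical reconstruction of cls.clearMask, for illustration only.
# Slot n is enabled by DR7 bits 2n (local) and 2n+1 (global); clearing a
# slot masks both bits out.
clearMask = [~((1 << (2 * n)) | (1 << (2 * n + 1))) & 0xFFFFFFFF for n in range(4)]

ctx = {'Dr7': 0b1111, 'Dr0': 0xDEADBEEF}   # slots 0 and 1 enabled
ctx['Dr7'] &= clearMask[0]                 # disable slot 0
ctx['Dr0'] = 0
print(bin(ctx['Dr7']))                     # 0b1100 -> only slot 1 remains enabled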
python
def load_config_file(configuration_file=None, expand=True):
    """
    Load a configuration file with backup directories and rotation schemes.

    :param configuration_file: Override the pathname of the configuration file
                               to load (a string or :data:`None`).
    :param expand: :data:`True` to expand filename patterns to their matches,
                   :data:`False` otherwise.
    :returns: A generator of tuples with four values each:

              1. An execution context created using :mod:`executor.contexts`.
              2. The pathname of a directory with backups (a string).
              3. A dictionary with the rotation scheme.
              4. A dictionary with additional options.
    :raises: :exc:`~exceptions.ValueError` when `configuration_file` is given
             but doesn't exist or can't be loaded.

    This function is used by :class:`RotateBackups` to discover user defined
    rotation schemes and by :mod:`rotate_backups.cli` to discover directories
    for which backup rotation is configured. When `configuration_file` isn't
    given :class:`~update_dotdee.ConfigLoader` is used to search for
    configuration files in the following locations:

    - ``/etc/rotate-backups.ini`` and ``/etc/rotate-backups.d/*.ini``
    - ``~/.rotate-backups.ini`` and ``~/.rotate-backups.d/*.ini``
    - ``~/.config/rotate-backups.ini`` and ``~/.config/rotate-backups.d/*.ini``

    All of the available configuration files are loaded in the order given
    above, so that sections in user-specific configuration files override
    sections by the same name in system-wide configuration files.
    """
    expand_notice_given = False
    if configuration_file:
        loader = ConfigLoader(available_files=[configuration_file], strict=True)
    else:
        loader = ConfigLoader(program_name='rotate-backups', strict=False)
    for section in loader.section_names:
        items = dict(loader.get_options(section))
        context_options = {}
        if coerce_boolean(items.get('use-sudo')):
            context_options['sudo'] = True
        if items.get('ssh-user'):
            context_options['ssh_user'] = items['ssh-user']
        location = coerce_location(section, **context_options)
        rotation_scheme = dict((name, coerce_retention_period(items[name]))
                               for name in SUPPORTED_FREQUENCIES
                               if name in items)
        options = dict(include_list=split(items.get('include-list', '')),
                       exclude_list=split(items.get('exclude-list', '')),
                       io_scheduling_class=items.get('ionice'),
                       strict=coerce_boolean(items.get('strict', 'yes')),
                       prefer_recent=coerce_boolean(items.get('prefer-recent', 'no')))
        # Don't override the value of the 'removal_command' property unless the
        # 'removal-command' configuration file option has a value set.
        if items.get('removal-command'):
            options['removal_command'] = shlex.split(items['removal-command'])
        # Expand filename patterns?
        if expand and location.have_wildcards:
            logger.verbose("Expanding filename pattern %s on %s ..",
                           location.directory, location.context)
            if location.is_remote and not expand_notice_given:
                logger.notice("Expanding remote filename patterns (may be slow) ..")
                expand_notice_given = True
            for match in sorted(location.context.glob(location.directory)):
                if location.context.is_directory(match):
                    logger.verbose("Matched directory: %s", match)
                    expanded = Location(context=location.context, directory=match)
                    yield expanded, rotation_scheme, options
                else:
                    logger.verbose("Ignoring match (not a directory): %s", match)
        else:
            yield location, rotation_scheme, options
python
def create_constant(self, expression, *, verbose=True):
    """Append a constant to the stored list.

    Parameters
    ----------
    expression : str
        Expression for the new constant.
    verbose : boolean (optional)
        Toggle talkback. Default is True.

    See Also
    --------
    set_constants
        Remove and replace all constants.
    remove_constant
        Remove an individual constant.
    """
    if expression in self.constant_expressions:
        wt_exceptions.ObjectExistsWarning.warn(expression)
        return self.constants[self.constant_expressions.index(expression)]
    constant = Constant(self, expression)
    if constant.units is None:
        constant.convert(constant.variables[0].units)
    self._constants.append(constant)
    self.flush()
    self._on_constants_updated()
    if verbose:
        print("Constant '{}' added".format(constant.expression))
    return constant
java
public static SVGPath drawFakeVoronoi(Projection2D proj, List<double[]> means) {
    CanvasSize viewport = proj.estimateViewport();
    final SVGPath path = new SVGPath();
    // Difference
    final double[] dirv = VMath.minus(means.get(1), means.get(0));
    VMath.rotate90Equals(dirv);
    double[] dir = proj.fastProjectRelativeDataToRenderSpace(dirv);
    // Mean
    final double[] mean = VMath.plus(means.get(0), means.get(1));
    VMath.timesEquals(mean, 0.5);
    double[] projmean = proj.fastProjectDataToRenderSpace(mean);
    double factor = viewport.continueToMargin(projmean, dir);
    path.moveTo(projmean[0] + factor * dir[0], projmean[1] + factor * dir[1]);
    // Inverse direction:
    dir[0] *= -1;
    dir[1] *= -1;
    factor = viewport.continueToMargin(projmean, dir);
    path.drawTo(projmean[0] + factor * dir[0], projmean[1] + factor * dir[1]);
    return path;
}
python
def _construct_key(previous_key, separator, new_key):
    """
    Returns the new_key if no previous key exists, otherwise concatenates
    previous key, separator, and new_key.

    :param previous_key:
    :param separator:
    :param new_key:
    :return: a string if previous_key exists and simply passes through the
             new_key otherwise
    """
    if previous_key:
        return u"{}{}{}".format(previous_key, separator, new_key)
    else:
        return new_key
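A quick demonstration of the two branches (assuming the function above is in scope); this is how nested keys get flattened into dotted paths:

print(_construct_key('', '.', 'name'))              # 'name' -- no previous key
print(_construct_key('person', '.', 'name'))        # 'person.name'
print(_construct_key('person.name', '.', 'first'))  # 'person.name.first'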
java
public Observable<RoleDefinitionInner> getAsync(String scope, String roleDefinitionId) {
    return getWithServiceResponseAsync(scope, roleDefinitionId)
        .map(new Func1<ServiceResponse<RoleDefinitionInner>, RoleDefinitionInner>() {
            @Override
            public RoleDefinitionInner call(ServiceResponse<RoleDefinitionInner> response) {
                return response.body();
            }
        });
}
python
def convert(self):
    """Initiate one-shot conversion.

    The current settings are used, with the exception of continuous mode."""
    c = self.config
    c &= (~MCP342x._continuous_mode_mask & 0x7f)  # Force one-shot
    c |= MCP342x._not_ready_mask  # Convert
    logger.debug('Convert ' + hex(self.address) + ' config: ' + bin(c))
    self.bus.write_byte(self.address, c)
python
def parse_datetime(value):
    """Attempts to parse `value` into an instance of ``datetime.datetime``.

    If `value` is ``None``, this function will return ``None``.

    Args:
        value: A timestamp. This can be a string or datetime.datetime value.
    """
    if not value:
        return None
    elif isinstance(value, datetime.datetime):
        return value
    return dateutil.parser.parse(value)
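A short sketch of the three branches (requires the python-dateutil package; the import lines mirror what the function appears to rely on):

import datetime
import dateutil.parser

print(parse_datetime(None))                    # None -- falsy values pass through
now = datetime.datetime(2020, 1, 2, 3, 4, 5)
print(parse_datetime(now) is now)              # True -- datetimes are returned unchanged
print(parse_datetime('2020-01-02T03:04:05'))   # 2020-01-02 03:04:05 -- parsed from string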
python
def hide_routemap_holder_route_map_content_set_ipv6_interface_ipv6_null0(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    hide_routemap_holder = ET.SubElement(config, "hide-routemap-holder",
                                         xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
    route_map = ET.SubElement(hide_routemap_holder, "route-map")
    name_key = ET.SubElement(route_map, "name")
    name_key.text = kwargs.pop('name')
    action_rm_key = ET.SubElement(route_map, "action-rm")
    action_rm_key.text = kwargs.pop('action_rm')
    instance_key = ET.SubElement(route_map, "instance")
    instance_key.text = kwargs.pop('instance')
    content = ET.SubElement(route_map, "content")
    set_el = ET.SubElement(content, "set")  # renamed from 'set' to avoid shadowing the builtin
    ipv6 = ET.SubElement(set_el, "ipv6")
    interface = ET.SubElement(ipv6, "interface")
    ipv6_null0 = ET.SubElement(interface, "ipv6-null0")

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
java
public static HttpResponse executePost(final String url, final String basicAuthUsername,
                                       final String basicAuthPassword, final String entity,
                                       final Map<String, Object> parameters,
                                       final Map<String, Object> headers) {
    try {
        return execute(url, HttpMethod.POST.name(), basicAuthUsername, basicAuthPassword,
                       parameters, headers, entity);
    } catch (final Exception e) {
        LOGGER.error(e.getMessage(), e);
    }
    return null;
}
java
private SemanticType checkConstant(Expr.Constant expr, Environment env) {
    Value item = expr.getValue();
    switch (item.getOpcode()) {
    case ITEM_null:
        return Type.Null;
    case ITEM_bool:
        return Type.Bool;
    case ITEM_int:
        return Type.Int;
    case ITEM_byte:
        return Type.Byte;
    case ITEM_utf8:
        // FIXME: this is not an optimal solution. The reason being that we
        // have lost nominal information regarding whether it is an instance
        // of std::ascii or std::utf8, for example.
        return new SemanticType.Array(Type.Int);
    default:
        return internalFailure("unknown constant encountered: " + expr, expr);
    }
}
java
protected void createIndexesOnColumns(EntityMetadata m, String tableName, List<Column> columns,
        Class columnType) {
    Object pooledConnection = null;
    try {
        Cassandra.Client api = null;
        pooledConnection = getConnection();
        api = (org.apache.cassandra.thrift.Cassandra.Client) getConnection(pooledConnection);
        KsDef ksDef = api.describe_keyspace(m.getSchema());
        List<CfDef> cfDefs = ksDef.getCf_defs();

        // Column family definition on which secondary index creation is required
        CfDef columnFamilyDefToUpdate = null;
        boolean isUpdatable = false;

        for (CfDef cfDef : cfDefs) {
            if (cfDef.getName().equals(tableName)) {
                columnFamilyDefToUpdate = cfDef;
                break;
            }
        }

        if (columnFamilyDefToUpdate == null) {
            log.error("Join table {} not available.", tableName);
            throw new PersistenceException("table" + tableName + " not found!");
        }

        // create a column family, in case it is not already available.

        // Get list of indexes already created
        List<ColumnDef> columnMetadataList = columnFamilyDefToUpdate.getColumn_metadata();
        List<String> indexList = new ArrayList<String>();

        if (columnMetadataList != null) {
            for (ColumnDef columnDef : columnMetadataList) {
                indexList.add(new StringAccessor().fromBytes(String.class, columnDef.getName()));
            }
            // need to set them to null else it is giving problem on update
            // column family and trying to add again existing indexes.
            // columnFamilyDefToUpdate.column_metadata = null;
        }

        // Iterate over all columns for creating secondary index on them
        for (Column column : columns) {
            ColumnDef columnDef = new ColumnDef();
            columnDef.setName(column.getName());
            columnDef.setValidation_class(CassandraValidationClassMapper.getValidationClass(columnType, false));
            columnDef.setIndex_type(IndexType.KEYS);

            // Add secondary index only if it's not already created
            // (if already created, it would be there in column family definition)
            if (!indexList.contains(new StringAccessor().fromBytes(String.class, column.getName()))) {
                isUpdatable = true;
                columnFamilyDefToUpdate.addToColumn_metadata(columnDef);
            }
        }

        // Finally, update column family with modified column family definition
        if (isUpdatable) {
            columnFamilyDefToUpdate.setKey_validation_class(CassandraValidationClassMapper
                    .getValidationClass(m.getIdAttribute().getJavaType(), isCql3Enabled(m)));
            api.system_update_column_family(columnFamilyDefToUpdate);
        }
    } catch (Exception e) {
        log.warn("Could not create secondary index on column family {}, Caused by: . ", tableName, e);
    } finally {
        releaseConnection(pooledConnection);
    }
}
python
def gpg_verify(path_to_verify, sigdata, sender_key_info, config_dir=None):
    """
    Verify a file on disk was signed by the given sender.
    @sender_key_info should be a dict with
    {
        'key_id': ...
        'key_data': ...
        'app_name': ...
    }
    Return {'status': True} on success
    Return {'error': ...} on error
    """
    if config_dir is None:
        config_dir = get_config_dir()

    # ingest keys
    tmpdir = make_gpg_tmphome(prefix="verify", config_dir=config_dir)
    res = gpg_stash_key("verify", sender_key_info['key_data'],
                        config_dir=config_dir, gpghome=tmpdir)
    if res is None:
        shutil.rmtree(tmpdir)
        return {'error': 'Failed to stash key %s' % sender_key_info['key_id']}

    # stash detached signature
    fd, path = tempfile.mkstemp(prefix=".sig-verify-")
    f = os.fdopen(fd, "w")
    f.write(sigdata)
    f.flush()
    os.fsync(f.fileno())
    f.close()

    # verify
    gpg = gnupg.GPG(homedir=tmpdir)
    with open(path, "r") as fd_in:
        res = gpg.verify_file(fd_in, data_filename=path_to_verify)

    shutil.rmtree(tmpdir)
    try:
        os.unlink(path)
    except:
        pass

    if not res:
        log.debug("verify_file error: %s" % res.__dict__)
        # was "Failed to decrypt data", which misstated the operation
        return {'error': 'Failed to verify data'}

    log.debug("verification succeeded from keys in %s" % config_dir)
    return {'status': True}
java
@Override
public void handlePress(CallbackQuery query) {
    InlineMenu lastMenu = owner.getLastMenu();
    if (lastMenu != null) {
        executeCallback();
        owner.unregister();
        lastMenu.start();
    }
}
java
public ArrayList<String> collections() {
    string_vector v = ti.collections();
    int size = (int) v.size();
    ArrayList<String> l = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
        l.add(v.get(i));
    }
    return l;
}
java
public ConfigParseOptions appendIncluder(ConfigIncluder includer) {
    if (includer == null)
        throw new NullPointerException("null includer passed to appendIncluder");
    if (this.includer == includer)
        return this;
    else if (this.includer != null)
        return setIncluder(this.includer.withFallback(includer));
    else
        return setIncluder(includer);
}
python
def read_gpx(xml, gpxns=None):
    """Parse a GPX file into a GpxModel.

    Args:
        xml: A file-like-object opened in binary mode - that is containing
            bytes rather than characters. The root element of the XML should
            be a <gpx> element containing a version attribute. GPX version
            1.1 is supported.

        gpxns: The XML namespace for GPX in Clarke notation (i.e. delimited
            by curly braces). If None (the default), the namespace used in
            the document will be determined automatically.
    """
    tree = etree.parse(xml)
    gpx_element = tree.getroot()
    return parse_gpx(gpx_element, gpxns=gpxns)
java
@GET
@Path("{guid}/traits")
@Produces(Servlets.JSON_MEDIA_TYPE)
public Response getTraitNames(@PathParam("guid") String guid) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> EntityResource.getTraitNames({})", guid);
    }

    AtlasPerfTracer perf = null;
    try {
        if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
            perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityResource.getTraitNames(" + guid + ")");
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("Fetching trait names for entity={}", guid);
        }

        final List<AtlasClassification> classifications = entitiesStore.getClassifications(guid);
        List<String> traitNames = new ArrayList<>();
        for (AtlasClassification classification : classifications) {
            traitNames.add(classification.getTypeName());
        }

        JSONObject response = new JSONObject();
        response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
        response.put(AtlasClient.RESULTS, new JSONArray(traitNames));
        response.put(AtlasClient.COUNT, traitNames.size());

        return Response.ok(response).build();
    } catch (AtlasBaseException e) {
        LOG.error("Unable to get trait definition for entity {}", guid, e);
        throw toWebApplicationException(e);
    } catch (IllegalArgumentException e) {
        LOG.error("Unable to get trait definition for entity {}", guid, e);
        throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
    } catch (WebApplicationException e) {
        LOG.error("Unable to get trait names for entity {}", guid, e);
        throw e;
    } catch (Throwable e) {
        LOG.error("Unable to get trait names for entity {}", guid, e);
        throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
    } finally {
        AtlasPerfTracer.log(perf);

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== EntityResource.getTraitNames({})", guid);
        }
    }
}
python
def is_line_in_file(filename: str, line: str) -> bool:
    """
    Detects whether a line is present within a file.

    Args:
        filename: file to check
        line: line to search for (as an exact match)
    """
    assert "\n" not in line
    with open(filename, "r") as file:
        for fileline in file:
            # Lines yielded by file iteration keep their trailing newline;
            # strip it so an exact match is actually possible.
            if fileline.rstrip("\n") == line:
                return True
        return False
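A usage sketch (assuming the function above is importable); note the match is line-exact, so partial matches do not count:

import os
import tempfile

with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as f:
    f.write("alpha\nbeta\ngamma\n")
    path = f.name

print(is_line_in_file(path, "beta"))   # True -- exact line match
print(is_line_in_file(path, "bet"))    # False -- substrings don't match
os.remove(path)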
python
def get_vault_query_session(self, proxy):
    """Gets the OsidSession associated with the vault query service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.authorization.VaultQuerySession) - a
            ``VaultQuerySession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_vault_query()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_vault_query()`` is ``true``.*
    """
    if not self.supports_vault_query():
        raise errors.Unimplemented()
    # pylint: disable=no-member
    return sessions.VaultQuerySession(proxy=proxy, runtime=self._runtime)
java
private static Type findGenericInterface(Class<?> sourceClass, Class<?> targetBaseInterface) {
    for (int i = 0; i < sourceClass.getInterfaces().length; i++) {
        Class<?> inter = sourceClass.getInterfaces()[i];
        if (inter == targetBaseInterface) {
            // Use the generic interface at the same position i; the original
            // returned getGenericInterfaces()[0], which is wrong whenever the
            // target is not the first implemented interface.
            return sourceClass.getGenericInterfaces()[i];
        } else {
            Type deeper = findGenericInterface(inter, targetBaseInterface);
            if (deeper != null) {
                return deeper;
            }
        }
    }
    return null;
}
java
protected DomainObjectMatch<DomainObject> createGenMatchForInternal(List<DomainObject> domainObjects,
        String domainObjectTypeName) {
    InternalDomainAccess iAccess = this.queryExecutor.getMappingInfo().getInternalDomainAccess();
    try {
        iAccess.loadDomainInfoIfNeeded();
        List<Object> dobjs = new ArrayList<Object>();
        for (DomainObject dobj : domainObjects) {
            dobjs.add(InternalAccess.getRawObject(dobj));
        }
        @SuppressWarnings("rawtypes")
        Class clazz = iAccess.getClassForName(domainObjectTypeName);
        @SuppressWarnings("unchecked")
        DomainObjectMatch<?> delegate = createMatchForInternal(dobjs, clazz);
        DomainObjectMatch<DomainObject> ret = APIAccess.createDomainObjectMatch(DomainObject.class, delegate);
        return ret;
    } catch (Throwable e) {
        if (e instanceof RuntimeException)
            throw (RuntimeException) e;
        else
            throw new RuntimeException(e);
    }
}
java
public static vrid_nsip_binding[] get(nitro_service service, Long id) throws Exception {
    vrid_nsip_binding obj = new vrid_nsip_binding();
    obj.set_id(id);
    vrid_nsip_binding response[] = (vrid_nsip_binding[]) obj.get_resources(service);
    return response;
}
python
def satisfier(self, term):  # type: (Term) -> Assignment
    """
    Returns the first Assignment in this solution such that the sublist
    of assignments up to and including that entry collectively satisfies
    term.
    """
    assigned_term = None  # type: Term

    for assignment in self._assignments:
        if assignment.dependency.name != term.dependency.name:
            continue

        if (
            not assignment.dependency.is_root
            and not assignment.dependency.name == term.dependency.name
        ):
            if not assignment.is_positive():
                continue

            assert not term.is_positive()

            return assignment

        if assigned_term is None:
            assigned_term = assignment
        else:
            assigned_term = assigned_term.intersect(assignment)

        # As soon as we have enough assignments to satisfy term, return them.
        if assigned_term.satisfies(term):
            return assignment

    raise RuntimeError("[BUG] {} is not satisfied.".format(term))
python
def patch_data(data, L=100, try_diag=True, verbose=False):
    '''Patch ``data`` (for example Markov chain output) into parts of
    length ``L``. Return a Gaussian mixture where each component gets
    the empirical mean and covariance of one patch.

    :param data:

        Matrix-like array; the points to be patched. Expect ``data[i]``
        as the d-dimensional i-th point.

    :param L:

        Integer; the length of one patch. The last patch will be shorter
        if ``L`` is not a divisor of ``len(data)``.

    :param try_diag:

        Bool; If some patch does not define a proper covariance matrix,
        it cannot define a Gaussian component. ``try_diag`` defines how
        to handle that case:
        If ``True`` (default), the off-diagonal elements are set to
        zero and it is tried to form a Gaussian with that matrix again.
        If that fails as well, the patch is skipped.
        If ``False`` the patch is skipped directly.

    :param verbose:

        Bool; If ``True`` print all status information.

    '''
    # patch data into length L patches
    patches = _np.array([data[patch_start:patch_start + L]
                         for patch_start in range(0, len(data), L)])

    # calculate means and covs
    means = _np.array([_np.mean(patch, axis=0) for patch in patches])
    covs = _np.array([_np.cov(patch, rowvar=0) for patch in patches])

    # form gaussian components
    components = []
    skipped = []
    for i, (mean, cov) in enumerate(zip(means, covs)):
        try:
            this_comp = Gauss(mean, cov)
            components.append(this_comp)
        except _np.linalg.LinAlgError as error1:
            if verbose:
                print("Could not form Gauss from patch %i. Reason: %s" % (i, repr(error1)))
            if try_diag:
                cov = _np.diag(_np.diag(cov))
                try:
                    this_comp = Gauss(mean, cov)
                    components.append(this_comp)
                    if verbose:
                        print('Diagonal covariance attempt succeeded.')
                except _np.linalg.LinAlgError as error2:
                    skipped.append(i)
                    if verbose:
                        print("Diagonal covariance attempt failed. Reason: %s" % repr(error2))
            else:  # if not try_diag
                skipped.append(i)

    # print skipped components if any
    if skipped:
        print("WARNING: Could not form Gaussians from: %s" % skipped)

    # create and return mixture
    return MixtureDensity(components)
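The patching step alone can be sketched with plain NumPy; Gauss and MixtureDensity come from the surrounding library (pypmc) and are not reproduced here:

import numpy as np

data = np.random.randn(250, 3)   # 250 points in 3 dimensions
L = 100
patches = [data[i:i + L] for i in range(0, len(data), L)]
print([p.shape for p in patches])           # [(100, 3), (100, 3), (50, 3)] -- last patch is shorter
print(np.mean(patches[0], axis=0).shape)    # (3,)   -- one mean per patch
print(np.cov(patches[0], rowvar=0).shape)   # (3, 3) -- one covariance per patch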
python
def create_time_labels(self):
    """Create the time labels, but don't plot them yet.

    Notes
    -----
    It's necessary to have the height of the time labels, so that we can
    adjust the main scene.

    Not very robust, because it uses seconds as integers.
    """
    min_time = int(floor(min(self.data.axis['time'][0])))
    max_time = int(ceil(max(self.data.axis['time'][0])))
    n_time_labels = self.parent.value('n_time_labels')

    self.idx_time = []
    self.time_pos = []
    for one_time in linspace(min_time, max_time, n_time_labels):
        x_label = (self.data.start_time +
                   timedelta(seconds=one_time)).strftime('%H:%M:%S')
        item = QGraphicsSimpleTextItem(x_label)
        item.setFlag(QGraphicsItem.ItemIgnoresTransformations)
        self.idx_time.append(item)
        self.time_pos.append(QPointF(one_time,
                                     len(self.idx_label) *
                                     self.parent.value('y_distance')))
python
def auth(self, request):
    """
    let's auth the user to the Service
    :param request: request object
    :return: callback url
    :rtype: string that contains the url to redirect after auth
    """
    request_token = super(ServiceTrello, self).auth(request)
    callback_url = self.callback_url(request)

    # URL to redirect user to, to authorize your app
    auth_url_str = '{auth_url}?oauth_token={token}'
    auth_url_str += '&scope={scope}&name={name}'
    auth_url_str += '&expiration={expiry}&oauth_callback={callback_url}'
    auth_url = auth_url_str.format(auth_url=self.AUTH_URL,
                                   token=request_token['oauth_token'],
                                   scope=self.scope,
                                   name=self.app_name,
                                   expiry=self.expiry,
                                   callback_url=callback_url)
    return auth_url
python
def read_index(group, version='1.1'):
    """Return the index stored in a h5features group.

    :param h5py.Group group: The group to read the index from.
    :param str version: The h5features version of the `group`.
    :return: a 1D numpy array of features indices.
    """
    if version == '0.1':
        return np.int64(group['index'][...])
    elif version == '1.0':
        return group['file_index'][...]
    else:
        return group['index'][...]
python
def mock_bable(monkeypatch):
    """ Mock the BaBLEInterface class with some controllers inside. """
    mocked_bable = MockBaBLE()
    mocked_bable.set_controllers([
        Controller(0, '11:22:33:44:55:66', '#0'),
        Controller(1, '22:33:44:55:66:11', '#1',
                   settings={'powered': True, 'low_energy': True}),
        Controller(2, '33:44:55:66:11:22', '#2', settings={'powered': True})
    ])

    monkeypatch.setattr(bable_interface, 'BaBLEInterface', lambda: mocked_bable)

    return mocked_bable
python
def cmd_connection_type(self):
    """Generates statistics on how many requests are made via HTTP and
    how many are made via SSL.

    .. note::
        This only works if the request path contains the default port for
        SSL (443).

    .. warning::
        The ports are hardcoded, they should be configurable.
    """
    https = 0
    non_https = 0
    for line in self._valid_lines:
        if line.is_https():
            https += 1
        else:
            non_https += 1
    return https, non_https
python
def grid_search(script: str, params: typing.Iterable[str], dry_run: bool=False) -> None:
    """
    Build all grid search parameter configurations and optionally run them.

    :param script: String of command prefix, e.g. ``cxflow train -v -o log``.
    :param params: Iterable collection of strings in standard **cxflow** param form,
                   e.g. ``'numerical_param=[1, 2]'`` or ``'text_param=["hello", "cio"]'``.
    :param dry_run: If set to ``True``, the built commands will only be printed instead of executed.
    """
    commands = _build_grid_search_commands(script=script, params=params)
    if dry_run:
        logging.warning('Dry run')
        for command in commands:
            logging.info(command)
    else:
        for command in commands:
            try:
                completed_process = subprocess.run(command)
                logging.info('Command `%s` completed with exit code %d',
                             command, completed_process.returncode)
            except Exception as _:  # pylint: disable=broad-except
                logging.error('Command `%s` failed.', command)
java
public void extractValues(HashMap<String, String> values) {
    for (MultipleSyntaxElements l : childContainers) {
        l.extractValues(values);
    }
}
python
def eqarea_magic(in_file='sites.txt', dir_path=".", input_dir_path="",
                 spec_file="specimens.txt", samp_file="samples.txt",
                 site_file="sites.txt", loc_file="locations.txt",
                 plot_by="all", crd="g", ignore_tilt=False,
                 save_plots=True, fmt="svg", contour=False,
                 color_map="coolwarm", plot_ell="", n_plots=5,
                 interactive=False):
    """
    makes equal area projections from declination/inclination data

    Parameters
    ----------
    in_file : str, default "sites.txt"
    dir_path : str
        output directory, default "."
    input_dir_path : str
        input file directory (if different from dir_path), default ""
    spec_file : str
        input specimen file name, default "specimens.txt"
    samp_file: str
        input sample file name, default "samples.txt"
    site_file : str
        input site file name, default "sites.txt"
    loc_file : str
        input location file name, default "locations.txt"
    plot_by : str
        [spc, sam, sit, loc, all] (specimen, sample, site, location, all), default "all"
    crd : ['s','g','t'], coordinate system for plotting whereby:
        s : specimen coordinates, aniso_tile_correction = -1
        g : geographic coordinates, aniso_tile_correction = 0 (default)
        t : tilt corrected coordinates, aniso_tile_correction = 100
    ignore_tilt : bool
        default False.  If True, data are unoriented (allows plotting of measurement dec/inc)
    save_plots : bool
        plot and save non-interactively, default True
    fmt : str
        ["png", "svg", "pdf", "jpg"], default "svg"
    contour : bool
        plot as color contour
    colormap : str
        color map for contour plotting, default "coolwarm"
        see cartopy documentation for more options
    plot_ell : str
        [F,K,B,Be,Bv] plot Fisher, Kent, Bingham, Bootstrap ellipses or Bootstrap eigenvectors
        default "" plots none
    n_plots : int
        maximum number of plots to make, default 5
        if you want to make all possible plots, specify "all"
    interactive : bool, default False
        interactively plot and display for each specimen
        (this is best used on the command line or in the Python interpreter)

    Returns
    ---------
    type - Tuple : (True or False indicating if conversion was successful, file name(s) written)
    """
    saved = []
    # parse out input/out directories
    input_dir_path, dir_path = pmag.fix_directories(input_dir_path, dir_path)
    # initialize some variables
    verbose = pmagplotlib.verbose
    FIG = {}  # plot dictionary
    FIG['eqarea'] = 1  # eqarea is figure 1
    pmagplotlib.plot_init(FIG['eqarea'], 5, 5)
    # get coordinate system
    if crd == "s":
        coord = "-1"
    elif crd == "t":
        coord = "100"
    else:
        coord = "0"
    # get item to plot by
    if plot_by == 'all':
        plot_key = 'all'
    elif plot_by == 'sit':
        plot_key = 'site'
    elif plot_by == 'sam':
        plot_key = 'sample'
    elif plot_by == 'spc':
        plot_key = 'specimen'
    else:
        plot_by = 'all'
        plot_key = 'all'
    # get distribution to plot ellipses/eigenvectors if desired
    if save_plots:
        verbose = False
    # set keys
    dec_key = 'dir_dec'
    inc_key = 'dir_inc'
    tilt_key = 'dir_tilt_correction'
    # create contribution
    fnames = {"specimens": spec_file, "samples": samp_file,
              'sites': site_file, 'locations': loc_file}
    if not os.path.exists(pmag.resolve_file_name(in_file, input_dir_path)):
        print('-E- Could not find {}'.format(in_file))
        return False, []
    contribution = cb.Contribution(input_dir_path, custom_filenames=fnames,
                                   single_file=in_file)
    table_name = list(contribution.tables.keys())[0]
    contribution.add_magic_table("contribution")
    # get contribution id if available for server plots
    if pmagplotlib.isServer:
        con_id = contribution.get_con_id()
    # try to propagate all names to measurement level
    try:
        contribution.propagate_location_to_samples()
        contribution.propagate_location_to_specimens()
        contribution.propagate_location_to_measurements()
    except KeyError as ex:
        pass
    # the object that contains the DataFrame + useful helper methods:
    data_container = contribution.tables[table_name]
    # the actual DataFrame:
    data = data_container.df
    plot_type = data_container.dtype
    if plot_key != "all" and plot_key not in data.columns:
        print("-E- You can't plot by {} with the data provided".format(plot_key))
        return False, []
    # add tilt key into DataFrame columns if it isn't there already
    if tilt_key not in data.columns:
        data.loc[:, tilt_key] = None
    if verbose:
        print(len(data), ' records read from ', in_file)
    # find desired dec,inc data:
    dir_type_key = ''
    #
    # get plotlist if not plotting all records
    #
    plotlist = []
    if plot_key != "all":
        # return all where plot_key is not blank
        if plot_key not in data.columns:
            print('-E- Can\'t plot by "{}".  That header is not in infile: {}'.format(
                plot_key, in_file))
            return False, []
        plots = data[data[plot_key].notnull()]
        plotlist = plots[plot_key].unique()  # grab unique values
    else:
        plotlist.append('All')
    if n_plots != "all":
        if len(plotlist) > n_plots:
            plotlist = plotlist[:n_plots]
    fignum = 0
    for plot in plotlist:
        fignum += 1
        FIG['eqarea'] = fignum
        pmagplotlib.plot_init(FIG['eqarea'], 5, 5)
        if plot_ell:
            dist = plot_ell.upper()
            # if dist type is unrecognized, use Fisher
            if dist not in ['F', 'K', 'B', 'BE', 'BV']:
                dist = 'F'
            if dist == "BV":
                fignum += 1
                FIG['bdirs'] = fignum
                pmagplotlib.plot_init(FIG['bdirs'], 5, 5)
        if verbose:
            print(plot)
        if plot == 'All':
            # plot everything at once
            plot_data = data
        else:
            # pull out only partial data
            plot_data = data[data[plot_key] == plot]
        # get location names for the data
        locs = []
        if 'location' in plot_data.columns:
            locs = plot_data['location'].dropna().unique()
        DIblock = []
        GCblock = []
        # SLblock, SPblock = [], []
        title = plot
        mode = 1
        if dec_key not in plot_data.columns:
            print("-W- No dec/inc data")
            continue
        # get all records where dec & inc values exist
        plot_data = plot_data[plot_data[dec_key].notnull()
                              & plot_data[inc_key].notnull()]
        if plot_data.empty:
            print("-W- No dec/inc data")
            continue
        # get metadata for naming the plot file
        locations = str(data_container.get_name('location', df_slice=plot_data))
        site = str(data_container.get_name('site', df_slice=plot_data))
        sample = str(data_container.get_name('sample', df_slice=plot_data))
        specimen = str(data_container.get_name('specimen', df_slice=plot_data))
        # make sure method_codes is in plot_data
        if 'method_codes' not in plot_data.columns:
            plot_data['method_codes'] = ''
        # get data blocks
        # would have to ignore tilt to use measurement level data
        DIblock = data_container.get_di_block(df_slice=plot_data,
                                              tilt_corr=coord, excl=['DE-BFP'],
                                              ignore_tilt=ignore_tilt)
        if title == 'All':
            if len(locs):
                title = " ,".join(locs) + " - {} {} plotted".format(str(len(DIblock)), plot_type)
            else:
                title = "{} {} plotted".format(str(len(DIblock)), plot_type)
        #SLblock = [[ind, row['method_codes']] for ind, row in plot_data.iterrows()]
        # get great circles
        great_circle_data = data_container.get_records_for_code('DE-BFP', incl=True,
                                                                use_slice=True, sli=plot_data)
        if len(great_circle_data) > 0:
            gc_cond = great_circle_data[tilt_key] == coord
            GCblock = [[float(row[dec_key]), float(row[inc_key])]
                       for ind, row in great_circle_data[gc_cond].iterrows()]
            #SPblock = [[ind, row['method_codes']] for ind, row in great_circle_data[gc_cond].iterrows()]
        if len(DIblock) > 0:
            if not contour:
                pmagplotlib.plot_eq(FIG['eqarea'], DIblock, title)
            else:
                pmagplotlib.plot_eq_cont(FIG['eqarea'], DIblock, color_map=color_map)
        else:
            pmagplotlib.plot_net(FIG['eqarea'])
        if len(GCblock) > 0:
            for rec in GCblock:
                pmagplotlib.plot_circ(FIG['eqarea'], rec, 90., 'g')
        if len(DIblock) == 0 and len(GCblock) == 0:
            if verbose:
                print("no records for plotting")
            fignum -= 1
            if 'bdirs' in FIG:
                fignum -= 1
            continue
            # sys.exit()
        if plot_ell:
            ppars = pmag.doprinc(DIblock)  # get principal directions
            nDIs, rDIs, npars, rpars = [], [], [], []
            for rec in DIblock:
                angle = pmag.angle([rec[0], rec[1]], [ppars['dec'], ppars['inc']])
                if angle > 90.:
                    rDIs.append(rec)
                else:
                    nDIs.append(rec)
            if dist == 'B':  # do on whole dataset
                etitle = "Bingham confidence ellipse"
                bpars = pmag.dobingham(DIblock)
                for key in list(bpars.keys()):
                    if key != 'n' and verbose:
                        print("    ", key, '%7.1f' % (bpars[key]))
                    if key == 'n' and verbose:
                        print("    ", key, '       %i' % (bpars[key]))
                npars.append(bpars['dec'])
                npars.append(bpars['inc'])
                npars.append(bpars['Zeta'])
                npars.append(bpars['Zdec'])
                npars.append(bpars['Zinc'])
                npars.append(bpars['Eta'])
                npars.append(bpars['Edec'])
                npars.append(bpars['Einc'])
            if dist == 'F':
                etitle = "Fisher confidence cone"
                if len(nDIs) > 2:
                    fpars = pmag.fisher_mean(nDIs)
                    for key in list(fpars.keys()):
                        if key != 'n' and verbose:
                            print("    ", key, '%7.1f' % (fpars[key]))
                        if key == 'n' and verbose:
                            print("    ", key, '       %i' % (fpars[key]))
                    mode += 1
                    npars.append(fpars['dec'])
                    npars.append(fpars['inc'])
                    npars.append(fpars['alpha95'])  # Beta
                    npars.append(fpars['dec'])
                    isign = abs(fpars['inc']) / fpars['inc']
                    npars.append(fpars['inc'] - isign * 90.)  # Beta inc
                    npars.append(fpars['alpha95'])  # gamma
                    npars.append(fpars['dec'] + 90.)  # Beta dec
                    npars.append(0.)  # Beta inc
                if len(rDIs) > 2:
                    fpars = pmag.fisher_mean(rDIs)
                    if verbose:
                        print("mode ", mode)
                    for key in list(fpars.keys()):
                        if key != 'n' and verbose:
                            print("    ", key, '%7.1f' % (fpars[key]))
                        if key == 'n' and verbose:
                            print("    ", key, '       %i' % (fpars[key]))
                    mode += 1
                    rpars.append(fpars['dec'])
                    rpars.append(fpars['inc'])
                    rpars.append(fpars['alpha95'])  # Beta
                    rpars.append(fpars['dec'])
                    isign = abs(fpars['inc']) / fpars['inc']
                    rpars.append(fpars['inc'] - isign * 90.)  # Beta inc
                    rpars.append(fpars['alpha95'])  # gamma
                    rpars.append(fpars['dec'] + 90.)  # Beta dec
                    rpars.append(0.)  # Beta inc
            if dist == 'K':
                etitle = "Kent confidence ellipse"
                if len(nDIs) > 3:
                    kpars = pmag.dokent(nDIs, len(nDIs))
                    if verbose:
                        print("mode ", mode)
                    for key in list(kpars.keys()):
                        if key != 'n' and verbose:
                            print("    ", key, '%7.1f' % (kpars[key]))
                        if key == 'n' and verbose:
                            print("    ", key, '       %i' % (kpars[key]))
                    mode += 1
                    npars.append(kpars['dec'])
                    npars.append(kpars['inc'])
                    npars.append(kpars['Zeta'])
                    npars.append(kpars['Zdec'])
                    npars.append(kpars['Zinc'])
                    npars.append(kpars['Eta'])
                    npars.append(kpars['Edec'])
                    npars.append(kpars['Einc'])
                if len(rDIs) > 3:
                    kpars = pmag.dokent(rDIs, len(rDIs))
                    if verbose:
                        print("mode ", mode)
                    for key in list(kpars.keys()):
                        if key != 'n' and verbose:
                            print("    ", key, '%7.1f' % (kpars[key]))
                        if key == 'n' and verbose:
                            print("    ", key, '       %i' % (kpars[key]))
                    mode += 1
                    rpars.append(kpars['dec'])
                    rpars.append(kpars['inc'])
                    rpars.append(kpars['Zeta'])
                    rpars.append(kpars['Zdec'])
                    rpars.append(kpars['Zinc'])
                    rpars.append(kpars['Eta'])
                    rpars.append(kpars['Edec'])
                    rpars.append(kpars['Einc'])
            else:  # assume bootstrap
                if dist == 'BE':
                    if len(nDIs) > 5:
                        BnDIs = pmag.di_boot(nDIs)
                        Bkpars = pmag.dokent(BnDIs, 1.)
                        if verbose:
                            print("mode ", mode)
                        for key in list(Bkpars.keys()):
                            if key != 'n' and verbose:
                                print("    ", key, '%7.1f' % (Bkpars[key]))
                            if key == 'n' and verbose:
                                print("    ", key, '       %i' % (Bkpars[key]))
                        mode += 1
                        npars.append(Bkpars['dec'])
                        npars.append(Bkpars['inc'])
                        npars.append(Bkpars['Zeta'])
                        npars.append(Bkpars['Zdec'])
                        npars.append(Bkpars['Zinc'])
                        npars.append(Bkpars['Eta'])
                        npars.append(Bkpars['Edec'])
                        npars.append(Bkpars['Einc'])
                    if len(rDIs) > 5:
                        BrDIs = pmag.di_boot(rDIs)
                        Bkpars = pmag.dokent(BrDIs, 1.)
                        if verbose:
                            print("mode ", mode)
                        for key in list(Bkpars.keys()):
                            if key != 'n' and verbose:
                                print("    ", key, '%7.1f' % (Bkpars[key]))
                            if key == 'n' and verbose:
                                print("    ", key, '       %i' % (Bkpars[key]))
                        mode += 1
                        rpars.append(Bkpars['dec'])
                        rpars.append(Bkpars['inc'])
                        rpars.append(Bkpars['Zeta'])
                        rpars.append(Bkpars['Zdec'])
                        rpars.append(Bkpars['Zinc'])
                        rpars.append(Bkpars['Eta'])
                        rpars.append(Bkpars['Edec'])
                        rpars.append(Bkpars['Einc'])
                    etitle = "Bootstrapped confidence ellipse"
                elif dist == 'BV':
                    sym = {'lower': ['o', 'c'], 'upper': ['o', 'g'],
                           'size': 3, 'edgecolor': 'face'}
                    if len(nDIs) > 5:
                        BnDIs = pmag.di_boot(nDIs)
                        pmagplotlib.plot_eq_sym(
                            FIG['bdirs'], BnDIs, 'Bootstrapped Eigenvectors', sym)
                    if len(rDIs) > 5:
                        BrDIs = pmag.di_boot(rDIs)
                        if len(nDIs) > 5:  # plot on existing plots
                            pmagplotlib.plot_di_sym(FIG['bdirs'], BrDIs, sym)
                        else:
                            pmagplotlib.plot_eq(
                                FIG['bdirs'], BrDIs, 'Bootstrapped Eigenvectors')
            if dist == 'B':
                if len(nDIs) > 3 or len(rDIs) > 3:
                    pmagplotlib.plot_conf(FIG['eqarea'], etitle, [], npars, 0)
            elif len(nDIs) > 3 and dist != 'BV':
                pmagplotlib.plot_conf(FIG['eqarea'], etitle, [], npars, 0)
                if len(rDIs) > 3:
                    pmagplotlib.plot_conf(FIG['eqarea'], etitle, [], rpars, 0)
            elif len(rDIs) > 3 and dist != 'BV':
                pmagplotlib.plot_conf(FIG['eqarea'], etitle, [], rpars, 0)
        for key in list(FIG.keys()):
            files = {}
            #if filename:  # use provided filename
            #    filename += '.' + fmt
            if pmagplotlib.isServer:  # use server plot naming convention
                if plot_key == 'all':
                    filename = 'LO:_' + locations + '_SI:__SA:__SP:__CO:_' + crd + '_TY:_' + key + '_.' + fmt
                else:
                    filename = 'LO:_' + locations + '_SI:_' + site + '_SA:_' + sample + \
                               '_SP:_' + str(specimen) + '_CO:_' + crd + '_TY:_' + key + '_.' + fmt
            elif plot_key == 'all':
                filename = 'all'
                if locs:
                    loc_string = "_".join(
                        [str(loc).replace(' ', '_') for loc in locs])
                    filename += "_" + loc_string
                filename += "_" + crd + "_" + key
                filename += ".{}".format(fmt)
            else:
                # use more readable naming convention
                filename = ''
                # fix this if plot_by is location, for example
                use_names = {'location': [locations],
                             'site': [locations, site],
                             'sample': [locations, site, sample],
                             'specimen': [locations, site, sample, specimen]}
                use = use_names[plot_key]
                use.extend([crd, key])
                # [locations, site, sample, specimen, crd, key]:
                for item in use:
                    if item:
                        item = item.replace(' ', '_')
                        filename += item + '_'
                if filename.endswith('_'):
                    filename = filename[:-1]
                filename += ".{}".format(fmt)
            if not pmagplotlib.isServer:
                filename = os.path.join(dir_path, filename)
            files[key] = filename
        if pmagplotlib.isServer:
            titles = {'eqarea': 'Equal Area Plot'}
            FIG = pmagplotlib.add_borders(FIG, titles, con_id=con_id)
            saved_figs = pmagplotlib.save_plots(FIG, files)
            saved.extend(saved_figs)
        elif save_plots:
            saved_figs = pmagplotlib.save_plots(FIG, files, incl_directory=True)
            saved.extend(saved_figs)
            continue
        elif interactive:
            pmagplotlib.draw_figs(FIG)
            ans = input(" S[a]ve to save plot, [q]uit, Return to continue:  ")
            if ans == "q":
                return True, []
            if ans == "a":
                saved_figs = pmagplotlib.save_plots(FIG, files, incl_directory=True)
                # was `saved.extend(saved)`, which re-appended the existing
                # list instead of adding the newly saved figures
                saved.extend(saved_figs)
            continue
    return True, saved
python
def get_total_irradiance(surface_tilt, surface_azimuth,
                         solar_zenith, solar_azimuth,
                         dni, ghi, dhi, dni_extra=None, airmass=None,
                         albedo=.25, surface_type=None,
                         model='isotropic',
                         model_perez='allsitescomposite1990', **kwargs):
    r"""
    Determine total in-plane irradiance and its beam, sky diffuse and ground
    reflected components, using the specified sky diffuse irradiance model.

    .. math::

       I_{tot} = I_{beam} + I_{sky diffuse} + I_{ground}

    Sky diffuse models include:
        * isotropic (default)
        * klucher
        * haydavies
        * reindl
        * king
        * perez

    Parameters
    ----------
    surface_tilt : numeric
        Panel tilt from horizontal.
    surface_azimuth : numeric
        Panel azimuth from north.
    solar_zenith : numeric
        Solar zenith angle.
    solar_azimuth : numeric
        Solar azimuth angle.
    dni : numeric
        Direct Normal Irradiance
    ghi : numeric
        Global horizontal irradiance
    dhi : numeric
        Diffuse horizontal irradiance
    dni_extra : None or numeric, default None
        Extraterrestrial direct normal irradiance
    airmass : None or numeric, default None
        Airmass
    albedo : numeric, default 0.25
        Surface albedo
    surface_type : None or String, default None
        Surface type. See grounddiffuse.
    model : String, default 'isotropic'
        Irradiance model.
    model_perez : String, default 'allsitescomposite1990'
        Used only if model='perez'. See :py:func:`perez`.

    Returns
    -------
    total_irrad : OrderedDict or DataFrame
        Contains keys/columns
        ``'poa_global', 'poa_direct', 'poa_diffuse',
        'poa_sky_diffuse', 'poa_ground_diffuse'``.
    """
    poa_sky_diffuse = get_sky_diffuse(
        surface_tilt, surface_azimuth, solar_zenith, solar_azimuth,
        dni, ghi, dhi, dni_extra=dni_extra, airmass=airmass, model=model,
        model_perez=model_perez)

    poa_ground_diffuse = get_ground_diffuse(surface_tilt, ghi, albedo,
                                            surface_type)
    aoi_ = aoi(surface_tilt, surface_azimuth, solar_zenith, solar_azimuth)
    irrads = poa_components(aoi_, dni, poa_sky_diffuse, poa_ground_diffuse)
    return irrads
java
@SuppressWarnings("unchecked")
public <T extends XPathBuilder> T setContainer(WebLocator container) {
    this.container = container;
    return (T) this;
}
java
public static String getDefaultDesignPath(Component component) {
    String className = component.getClass().getName();
    String designPath = className.replace(".", "/") + ".html";
    return designPath;
}
python
def parse_relationship(document, xmlcontent, rel_type):
    """Parse relationship document.

    Relationships hold information like external or internal references
    for links.

    Relationships are placed in file '_rels/document.xml.rels'.
    """
    doc = etree.fromstring(xmlcontent)

    for elem in doc:
        if elem.tag == _name('{{{pr}}}Relationship'):
            rel = {'target': elem.attrib['Target'],
                   'type': elem.attrib['Type'],
                   'target_mode': elem.attrib.get('TargetMode', 'Internal')}
            document.relationships[rel_type][elem.attrib['Id']] = rel
java
public static double truncate(double value, int places) {
    if (places < 0) {
        throw new IllegalArgumentException();
    }
    long factor = (long) java.lang.Math.pow(10, places);
    value = value * factor;
    long tmp = (long) value;
    return (double) tmp / factor;
}
java
public boolean enlist(BeanO beanO) {
    final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
    if (isTraceOn && tc.isEntryEnabled())
        Tr.entry(tc, "enlist : " + beanO);

    ensureActive();

    BeanO oldBeanO = ivBeanOs.put(beanO.beanId, beanO);
    if (oldBeanO == null) {
        ivBeanOList.add(beanO);
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "enlist : true");
        return true;
    }

    if (isTraceOn && tc.isEntryEnabled())
        Tr.exit(tc, "enlist : false");
    return false;
}
python
def _AddClearFieldMethod(message_descriptor, cls):
    """Helper for _AddMessageMethods()."""
    def ClearField(self, field_name):
        try:
            field = message_descriptor.fields_by_name[field_name]
        except KeyError:
            try:
                field = message_descriptor.oneofs_by_name[field_name]
                if field in self._oneofs:
                    field = self._oneofs[field]
                else:
                    return
            except KeyError:
                raise ValueError('Protocol message %s() has no "%s" field.' %
                                 (message_descriptor.name, field_name))

        if field in self._fields:
            # To match the C++ implementation, we need to invalidate iterators
            # for map fields when ClearField() happens.
            if hasattr(self._fields[field], 'InvalidateIterators'):
                self._fields[field].InvalidateIterators()

            # Note: If the field is a sub-message, its listener will still point
            # at us. That's fine, because the worst that can happen is that it
            # will call _Modified() and invalidate our byte size. Big deal.
            del self._fields[field]

            if self._oneofs.get(field.containing_oneof, None) is field:
                del self._oneofs[field.containing_oneof]

        # Always call _Modified() -- even if nothing was changed, this is
        # a mutating method, and thus calling it should cause the field to
        # become present in the parent message.
        self._Modified()

    cls.ClearField = ClearField
java
public Action build(Class<?> clazz) {
    Action action = new Action();
    String className = clazz.getName();
    Profile profile = profileService.getProfile(className);
    org.beangle.struts2.annotation.Action an = clazz
            .getAnnotation(org.beangle.struts2.annotation.Action.class);
    StringBuilder sb = new StringBuilder();
    // namespace
    sb.append(profile.getUriPath());
    if (null != an) {
        String name = an.value();
        if (!name.startsWith("/")) {
            if (Constants.SEO_URI.equals(profile.getUriPathStyle())) {
                sb.append(unCamel(substringBeforeLast(profile.getInfix(className), "/")) + "/" + name);
            } else {
                sb.append(name);
            }
        } else {
            sb.append(name.substring(1));
        }
    } else {
        if (Constants.SHORT_URI.equals(profile.getUriPathStyle())) {
            String simpleName = className.substring(className.lastIndexOf('.') + 1);
            sb.append(uncapitalize(simpleName.substring(0,
                    simpleName.length() - profile.getActionSuffix().length())));
        } else if (Constants.SIMPLE_URI.equals(profile.getUriPathStyle())) {
            sb.append(profile.getInfix(className));
        } else if (Constants.SEO_URI.equals(profile.getUriPathStyle())) {
            sb.append(unCamel(profile.getInfix(className)));
        } else {
            throw new RuntimeException("unsupported uri style " + profile.getUriPathStyle());
        }
    }
    action.path(sb.toString());
    action.method(profile.getDefaultMethod()).extention(profile.getUriExtension());
    return action;
}
java
public static Statement close(Statement stmt, Logger logExceptionTo) {
    return close(stmt, logExceptionTo, null);
}
java
public static Table tablePercents(Table table, CategoricalColumn<?> column1, CategoricalColumn<?> column2) {
    Table xTabs = counts(table, column1, column2);
    return tablePercents(xTabs);
}
python
def visit_index(self, node, parent):
    """visit an Index node by returning a fresh instance of it"""
    newnode = nodes.Index(parent=parent)
    newnode.postinit(self.visit(node.value, newnode))
    return newnode
python
def hash(value, algorithm='sha512'):
    '''
    .. versionadded:: 2014.7.0

    Encodes a value with the specified encoder.

    value
        The value to be hashed.

    algorithm : sha512
        The algorithm to use. May be any valid algorithm supported by
        hashlib.

    CLI Example:

    .. code-block:: bash

        salt '*' random.hash 'I am a string' md5
    '''
    if six.PY3 and isinstance(value, six.string_types):
        # Under Python 3 we must work with bytes
        value = value.encode(__salt_system_encoding__)

    if hasattr(hashlib, ALGORITHMS_ATTR_NAME) and algorithm in getattr(hashlib, ALGORITHMS_ATTR_NAME):
        hasher = hashlib.new(algorithm)
        hasher.update(value)
        out = hasher.hexdigest()
    elif hasattr(hashlib, algorithm):
        hasher = hashlib.new(algorithm)
        hasher.update(value)
        out = hasher.hexdigest()
    else:
        raise SaltInvocationError('You must specify a valid algorithm.')

    return out
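Stripped of the Salt plumbing, the function reduces to a plain hashlib call; a minimal sketch:

import hashlib

value = 'I am a string'.encode('utf-8')   # hashlib needs bytes on Python 3
print(hashlib.new('md5', value).hexdigest())
print(hashlib.new('sha512', value).hexdigest()[:32] + '...')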
python
def _version_less_than_or_equal_to(self, v1, v2):
    """
    Returns true if v1 <= v2.
    """
    # pylint: disable=no-name-in-module, import-error
    from distutils.version import LooseVersion
    return LooseVersion(v1) <= LooseVersion(v2)
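Why LooseVersion instead of plain string comparison: version components are compared numerically. (distutils is deprecated in recent Pythons; packaging.version.Version is the usual modern replacement.)

from distutils.version import LooseVersion

print(LooseVersion('1.9.1') <= LooseVersion('1.10'))   # True -- 9 < 10 numerically
print('1.9.1' <= '1.10')                               # False -- lexicographic comparison gets this wrong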
java
public static long factorial(int n) {
    if (n < 0 || n > MAX_LONG_FACTORIAL) {
        throw new IllegalArgumentException("Argument must be in the range 0 - 20.");
    }
    long factorial = 1;
    for (int i = n; i > 1; i--) {
        factorial *= i;
    }
    return factorial;
}
java
public static <O> KNNQuery<O> getKNNQuery(Relation<O> relation, DistanceFunction<? super O> distanceFunction, Object... hints) {
    final DistanceQuery<O> distanceQuery = relation.getDistanceQuery(distanceFunction, hints);
    return relation.getKNNQuery(distanceQuery, hints);
}
python
def _NotesSlideShapeFactory(shape_elm, parent):
    """
    Return an instance of the appropriate shape proxy class for *shape_elm*
    on a notes slide.
    """
    tag_name = shape_elm.tag
    if tag_name == qn('p:sp') and shape_elm.has_ph_elm:
        return NotesSlidePlaceholder(shape_elm, parent)
    return BaseShapeFactory(shape_elm, parent)
python
def _step_envs(self, action):
    """Perform step(action) on environments and update initial_frame_stack."""
    self._frame_counter += 1
    real_env_step_tuple = self.real_env.step(action)
    sim_env_step_tuple = self.sim_env.step(action)
    self.sim_env.add_to_initial_stack(real_env_step_tuple[0])
    return self._pack_step_tuples(real_env_step_tuple, sim_env_step_tuple)
python
def _create_thumbnail(self, model_instance, thumbnail, image_name):
    """
    Resizes and saves the thumbnail image
    """
    thumbnail = self._do_resize(thumbnail, self.thumbnail_size)
    full_image_name = self.generate_filename(model_instance, image_name)
    thumbnail_filename = _get_thumbnail_filename(full_image_name)
    thumb = self._get_simple_uploaded_file(thumbnail, thumbnail_filename)
    self.storage.save(thumbnail_filename, thumb)
java
public static Set<SelectorName> nameSetFrom(SelectorName name) {
    if (name == null) return Collections.emptySet();
    return Collections.singleton(name);
}
java
public static void assertTree(int rootType, String preorder, ParseResults parseResults) {
    assertTree(rootType, preorder, parseResults.getTree());
}
python
def getRenderModelThumbnailURL(self, pchRenderModelName, pchThumbnailURL, unThumbnailURLLen):
    """Returns the URL of the thumbnail image for this rendermodel"""
    fn = self.function_table.getRenderModelThumbnailURL
    peError = EVRRenderModelError()
    result = fn(pchRenderModelName, pchThumbnailURL, unThumbnailURLLen, byref(peError))
    return result, peError
java
public int getInt(String key, int default_) {
    Object o = get(key);
    return o instanceof Number ? ((Number) o).intValue() : default_;
}
java
public static <T> T[] noNullElements(final T[] array, final String message) {
    return INSTANCE.noNullElements(array, message);
}
python
def stop_consuming(self):
    """Tell RabbitMQ that you would like to stop consuming by sending the
    Basic.Cancel RPC command.
    """
    if self._channel:
        logger.info('Sending a Basic.Cancel RPC command to RabbitMQ')
        self._channel.basic_cancel(self.on_cancelok, self._consumer_tag)
java
public static <K, V> Stream<Entry<K, V>> getEntryStreamWithFilter(
        Map<K, V> map, Predicate<? super Entry<K, V>> predicate) {
    return buildEntryStream(map).filter(predicate);
}
python
def dicts_equal(d1, d2):
    """
    Perform a deep comparison of two dictionaries

    Handles:
      - Primitives
      - Nested dicts
      - Lists of primitives
    """
    # check for different sizes
    if len(d1) != len(d2):
        return False

    # check for different keys
    for k in d1:
        if k not in d2:
            return False
    for k in d2:
        if k not in d1:
            return False

    # compare each element in dict
    for k in d1:
        if type(d1[k]) != type(d2[k]):
            # different value types
            return False
        # lists
        elif isinstance(d1[k], list):
            if not (sorted(d1[k]) == sorted(d2[k])):
                return False
        # nested dicts
        elif isinstance(d1[k], dict):
            if not dicts_equal(d1[k], d2[k]):
                return False
        # primitives
        else:
            if d1[k] != d2[k]:
                return False

    return True
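A usage sketch showing the comparison rules above (assuming dicts_equal is in scope):

a = {'x': 1, 'tags': [2, 1], 'sub': {'y': 'z'}}
b = {'x': 1, 'tags': [1, 2], 'sub': {'y': 'z'}}
print(dicts_equal(a, b))          # True  -- lists are compared after sorting
print(dicts_equal(a, {'x': 1}))   # False -- different sizes
c = {'x': 1.0, 'tags': [1, 2], 'sub': {'y': 'z'}}
print(dicts_equal(a, c))          # False -- int vs. float fails the type check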
java
protected int tasksToPreempt(JobInProgress job, TaskType type, long curTime) {
    JobInfo info = infos.get(job);
    if (info == null || poolMgr.isMaxTasks(info.poolName, type)) {
        return 0;
    }
    String pool = info.poolName;
    long minShareTimeout = poolMgr.getMinSharePreemptionTimeout(pool);
    long fairShareTimeout = poolMgr.getFairSharePreemptionTimeout();
    int tasksDueToMinShare = 0;
    int tasksDueToFairShare = 0;
    boolean poolBelowMinSlots = poolMgr.getRunningTasks(pool, type) <
                                poolMgr.getMinSlots(pool, type);
    if (type == TaskType.MAP) {
        if (curTime - info.lastTimeAtMapMinShare > minShareTimeout &&
            poolBelowMinSlots) {
            tasksDueToMinShare = info.minMaps - info.runningMaps;
        }
        if (curTime - info.lastTimeAtMapHalfFairShare > fairShareTimeout) {
            double fairShare = Math.min(info.mapFairShare, runnableTasks(info, type));
            tasksDueToFairShare = (int) (fairShare - info.runningMaps);
        }
    } else { // type == TaskType.REDUCE
        if (curTime - info.lastTimeAtReduceMinShare > minShareTimeout &&
            poolBelowMinSlots) {
            tasksDueToMinShare = info.minReduces - info.runningReduces;
        }
        if (curTime - info.lastTimeAtReduceHalfFairShare > fairShareTimeout) {
            double fairShare = Math.min(info.reduceFairShare, runnableTasks(info, type));
            tasksDueToFairShare = (int) (fairShare - info.runningReduces);
        }
    }
    int tasksToPreempt = Math.max(tasksDueToMinShare, tasksDueToFairShare);
    int neededNonSpeculativeTasks = type == TaskType.MAP ?
        info.neededMaps - info.neededSpeculativeMaps :
        info.neededReduces - info.neededSpeculativeReduces;
    // We do not preempt for speculative execution tasks
    tasksToPreempt = Math.min(neededNonSpeculativeTasks, tasksToPreempt);
    if (tasksToPreempt > 0) {
        String message = "Should preempt " + tasksToPreempt + " " + type +
            " tasks for " + job.getJobID() +
            ": tasksDueToMinShare = " + tasksDueToMinShare +
            ", tasksDueToFairShare = " + tasksDueToFairShare +
            ", runningTasks = " + runningTasks(info, type);
        LOG.info(message);
    }
    return tasksToPreempt < 0 ? 0 : tasksToPreempt;
}
python
def krai_to_raw(self, amount):
    """
    Multiply a krai amount by the krai ratio.

    :param amount: Amount in krai to convert to raw
    :type amount: int

    :raises: :py:exc:`nano.rpc.RPCException`

    >>> rpc.krai_to_raw(amount=1)
    1000000000000000000000000000

    """
    amount = self._process_value(amount, 'int')
    payload = {"amount": amount}
    resp = self.call('krai_to_raw', payload)
    return int(resp['amount'])
java
public void getAllTitleID(Callback<List<Integer>> callback) throws NullPointerException {
    gw2API.getAllTitleIDs().enqueue(callback);
}
python
def get_selected_tab(self):
    """Returns the tab specified by the GET request parameter.

    In the event that there is no GET request parameter, the value of
    the query parameter is invalid, or the tab is not allowed/enabled,
    the return value of this function is None.
    """
    selected = self.request.GET.get(self.param_name, None)
    if selected:
        try:
            tab_group, tab_name = selected.split(SEPARATOR)
        except ValueError:
            return None
        if tab_group == self.get_id():
            self._selected = self.get_tab(tab_name)
    return self._selected
java
private void resolveAllUnresolvedBindings(GinjectorBindings collection) throws UnableToCompleteException {
    // Create known/explicit bindings before descending into children.  This ensures that they are
    // available to any children that may need to depend on them.
    createBindingsForFactories(collection);

    // Visit all children and resolve bindings as appropriate.  This visitation may add implicit
    // bindings (and dependencies) to this ginjector
    for (GinjectorBindings child : collection.getChildren()) {
        resolveAllUnresolvedBindings(child);
    }

    // Resolve bindings within this ginjector and validate that everything looks OK.
    collection.resolveBindings();
}
python
def train_phrases(paths, out='data/bigram_model.phrases', tokenizer=word_tokenize, **kwargs):
    """
    Train a bigram phrase model on a list of files.
    """
    n = 0
    for path in paths:
        print('Counting lines for {0}...'.format(path))
        n += sum(1 for line in open(path, 'r'))
    print('Processing {0} lines...'.format(n))

    # Change to use less memory. Default is 40m.
    # Note: the original wrote `kwargs = {...}.update(kwargs)`, but
    # dict.update() returns None, so kwargs silently became None.
    # Merge the defaults first and let caller-supplied kwargs override them.
    defaults = {
        'max_vocab_size': 40000000,
        'threshold': 8.
    }
    defaults.update(kwargs)
    kwargs = defaults

    print('Training bigrams...')
    # Pass the `tokenizer` parameter through instead of hardcoding word_tokenize.
    bigram = Phrases(_phrase_doc_stream(paths, n, tokenizer=tokenizer), **kwargs)

    print('Saving...')
    bigram.save(out)
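The merge fixed above is worth spelling out, since dict.update mutating in place (and returning None) is the root cause of the original bug:

defaults = {'max_vocab_size': 40000000, 'threshold': 8.}
overrides = {'threshold': 10.}

print(defaults.update(overrides))   # None -- update() mutates, it doesn't return
merged = {**{'max_vocab_size': 40000000, 'threshold': 8.}, **overrides}
print(merged['threshold'])          # 10.0 -- caller values win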
python
def get_structure_with_nodes(self, find_min=True, min_dist=0.5, tol=0.2,
                             threshold_frac=None, threshold_abs=None):
    """
    Get the modified structure with the possible interstitial sites added.
    The species is set as a DummySpecie X.

    Args:
        find_min (bool): True to find local minima, False to find local
            maxima.
        min_dist (float): The minimum distance (in Angstrom) that a
            predicted site needs to be from existing atoms. A min_dist
            with value <= 0 returns all sites without distance checking.
        tol (float): A distance tolerance of nodes clustering that sites
            too close to other predicted sites will be merged. PBC is
            taken into account.
        threshold_frac (float): optional fraction of extrema, which
            returns `threshold_frac * tot_num_extrema` extrema fractional
            coordinates based on highest/lowest intensity. E.g. set 0.2 to
            insert DummySpecie atom at the extrema with 20% highest or
            lowest intensity. Value range: 0 <= threshold_frac <= 1
            Note that threshold_abs and threshold_frac should not be set
            at the same time.
        threshold_abs (float): optional filter. When searching for local
            minima, intensity <= threshold_abs returns; when searching
            for local maxima, intensity >= threshold_abs returns.
            Note that threshold_abs and threshold_frac should not be set
            at the same time.

    Returns:
        structure (Structure)
    """
    structure = self.structure.copy()
    self.get_local_extrema(find_min=find_min, threshold_frac=threshold_frac,
                           threshold_abs=threshold_abs)
    self.remove_collisions(min_dist)
    self.cluster_nodes(tol=tol)
    for fc in self.extrema_coords:
        structure.append("X", fc)
    return structure
python
def load_config(cls):
    """ Load global and local configuration files and update if needed."""
    config_file = os.path.expanduser(cls.home_config)
    global_conf = cls.load(config_file, 'global')
    cls.load(cls.local_config, 'local')
    # update global configuration if needed
    cls.update_config(config_file, global_conf)
python
def mapping_ref(self, es_mappings):
    """ Returns a dictionary of mappings and the field names in dot notation

        args:
            es_mappings: es mapping definitions to parse
    """
    new_map = {}
    for key, value in es_mappings.items():
        for sub_key, sub_value in value.items():
            new_map["/".join([key, sub_key])] = \
                mapping_fields(sub_value['properties'])
    return new_map
python
def reduce_formula(sym_amt, iupac_ordering=False):
    """
    Helper method to reduce a sym_amt dict to a reduced formula and factor.

    Args:
        sym_amt (dict): {symbol: amount}.
        iupac_ordering (bool, optional): Whether to order the formula by
            the iupac "electronegativity" series, defined in Table VI of
            "Nomenclature of Inorganic Chemistry (IUPAC Recommendations
            2005)". This ordering effectively follows the groups and rows
            of the periodic table, except the Lanthanides, Actinides and
            hydrogen. Note that polyanions will still be determined based
            on the true electronegativity of the elements.

    Returns:
        (reduced_formula, factor).
    """
    syms = sorted(sym_amt.keys(), key=lambda x: [get_el_sp(x).X, x])

    syms = list(filter(
        lambda x: abs(sym_amt[x]) > Composition.amount_tolerance, syms))

    factor = 1
    # Enforce integers for doing gcd.
    if all((int(i) == i for i in sym_amt.values())):
        factor = abs(gcd(*(int(i) for i in sym_amt.values())))

    polyanion = []
    # if the composition contains a poly anion
    if len(syms) >= 3 and get_el_sp(syms[-1]).X - get_el_sp(syms[-2]).X < 1.65:
        poly_sym_amt = {syms[i]: sym_amt[syms[i]] / factor for i in [-2, -1]}
        (poly_form, poly_factor) = reduce_formula(
            poly_sym_amt, iupac_ordering=iupac_ordering)

        if poly_factor != 1:
            polyanion.append("({}){}".format(poly_form, int(poly_factor)))

    syms = syms[:len(syms) - 2 if polyanion else len(syms)]

    if iupac_ordering:
        syms = sorted(syms, key=lambda x: [get_el_sp(x).iupac_ordering, x])

    reduced_form = []
    for s in syms:
        normamt = sym_amt[s] * 1.0 / factor
        reduced_form.append(s)
        reduced_form.append(formula_double_format(normamt))

    reduced_form = "".join(reduced_form + polyanion)
    return reduced_form, factor
python
def populateFromRow(self, continuousSetRecord):
        """
        Populates the instance variables of this ContinuousSet from the
        specified DB row.
        """
        self._filePath = continuousSetRecord.dataurl
        self.setAttributesJson(continuousSetRecord.attributes)
java
public void setStatusMapping(Map<String, Integer> statusMapping) {
    Assert.notNull(statusMapping, "StatusMapping must not be null");
    this.statusMapping = new HashMap<>(statusMapping);
}
python
def get_slice(self, start=None, end=None):
        """
        Return a new list of text fragments,
        indexed from start (included) to end (excluded).

        :param int start: the start index, included
        :param int end: the end index, excluded
        :rtype: :class:`~aeneas.textfile.TextFile`
        """
        if start is not None:
            start = min(max(0, start), len(self) - 1)
        else:
            start = 0
        if end is not None:
            end = min(max(0, end), len(self))
            end = max(end, start + 1)
        else:
            end = len(self)
        new_text = TextFile()
        for fragment in self.fragments[start:end]:
            new_text.add_fragment(fragment)
        return new_text
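The clamping rules above are subtle: start is clamped to len-1 but end is clamped to len, and end is then forced past start so the slice is never empty. A standalone sketch of just that rule, with length standing in for len(self) (a hypothetical helper, no aeneas types needed):

python
def clamp_slice_indices(length, start=None, end=None):
    # mirrors the index handling in get_slice above
    start = 0 if start is None else min(max(0, start), length - 1)
    if end is None:
        end = length
    else:
        end = min(max(0, end), length)
        end = max(end, start + 1)  # guarantee a non-empty slice
    return start, end

print(clamp_slice_indices(5, -3, 99))  # (0, 5): both ends clamped
print(clamp_slice_indices(5, 2, 1))    # (2, 3): end forced past start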
python
def _resolve(self, spec):
        """Attempt resolving cache URIs when a remote spec is provided."""
        if not spec.remote:
            return spec
        try:
            resolved_urls = self._resolver.resolve(spec.remote)
            if resolved_urls:
                # keep the bar separated list of URLs convention
                return CacheSpec(local=spec.local, remote='|'.join(resolved_urls))
            # no-op
            return spec
        except Resolver.ResolverError as e:
            self._log.warn('Error while resolving from {0}: {1}'.format(spec.remote, str(e)))
        # If for some reason the resolver fails we continue to use the local cache
        if spec.local:
            return CacheSpec(local=spec.local, remote=None)
        # resolver fails but there is no local cache
        return None
java
private Entity createOntologyTerm(OWLClass ontologyTermClass) {
    String ontologyTermIRI = ontologyTermClass.getIRI().toString();
    String ontologyTermName = loader.getLabel(ontologyTermClass);
    OntologyTerm ontologyTerm = ontologyTermFactory.create();
    ontologyTerm.setId(idGenerator.generateId());
    ontologyTerm.setOntologyTermIri(ontologyTermIRI);
    ontologyTerm.setOntologyTermName(ontologyTermName);
    ontologyTerm.setOntologyTermSynonyms(createSynonyms(ontologyTermClass));
    ontologyTerm.setOntologyTermDynamicAnnotations(createDynamicAnnotations(ontologyTermClass));
    ontologyTerm.setOntologyTermNodePaths(nodePathsPerOntologyTerm.get(ontologyTermIRI));
    ontologyTerm.setOntology(ontologyEntity);
    ontologyTermRepository.add(ontologyTerm);
    return ontologyTerm;
}
java
final DAO loadExternalDAO(String classSimpleName) {
    ServiceLoader<DAO> daoLoader = ServiceLoader.load(DAO.class, Para.getParaClassLoader());
    for (DAO dao : daoLoader) {
        if (dao != null && classSimpleName.equalsIgnoreCase(dao.getClass().getSimpleName())) {
            return dao;
        }
    }
    return null;
}
java
public static MomentInterval parse(
    String text,
    ChronoParser<Moment> parser,
    String intervalPattern
) throws ParseException {

    return IntervalParser.parsePattern(text, MomentIntervalFactory.INSTANCE, parser, intervalPattern);
}
java
private static Map<Tag, String> createTags(String ufsName, UfsStatus status) {
    Map<Tag, String> tagMap = new HashMap<>();
    tagMap.put(Tag.UFS, ufsName);
    tagMap.put(Tag.OWNER, status.getOwner());
    tagMap.put(Tag.GROUP, status.getGroup());
    tagMap.put(Tag.MODE, String.valueOf(status.getMode()));
    if (status instanceof UfsFileStatus) {
        tagMap.put(Tag.TYPE, Type.FILE.name());
        tagMap.put(Tag.CONTENT_HASH, ((UfsFileStatus) status).getContentHash());
    } else {
        tagMap.put(Tag.TYPE, Type.DIRECTORY.name());
    }
    return tagMap;
}
java
@SuppressWarnings("unchecked")
public List<Failure> validate(Validate v, ResourceBundle rb) {
    if (v != null &&
        Key.MANAGED_CONNECTION == v.getKey() &&
        ManagedConnection.class.isAssignableFrom(v.getClazz())) {
        boolean error = false;
        ValidateObject vo = null;

        if (v instanceof ValidateObject)
            vo = (ValidateObject) v;

        if (vo == null) {
            error = true;
        } else {
            try {
                Class clz = vo.getClazz();
                Method gmd = SecurityActions.getMethod(clz, "getMetaData", (Class[]) null);
                Object md = gmd.invoke(vo.getObject(), (Object[]) null);

                if (md == null)
                    error = true;
            } catch (Throwable t) {
                // Ignore
            }
        }

        if (error) {
            List<Failure> failures = new ArrayList<Failure>(1);
            // guard against vo being null here (the original dereferenced vo unconditionally)
            Class<?> failedClass = vo != null ? vo.getClazz() : v.getClazz();
            Failure failure = new Failure(Severity.ERROR,
                                          SECTION,
                                          rb.getString("mc.MCGetMetaData"),
                                          failedClass.getName());
            failures.add(failure);
            return failures;
        }
    }

    return null;
}
java
protected boolean areBranchCompatible(PlanNode plan1, PlanNode plan2) {
    if (plan1 == null || plan2 == null) {
        throw new NullPointerException();
    }

    // if there is no open branch, the children are always compatible.
    // in most plans, that will be the dominant case
    if (this.hereJoinedBranches == null || this.hereJoinedBranches.isEmpty()) {
        return true;
    }

    for (OptimizerNode joinedBrancher : hereJoinedBranches) {
        final PlanNode branch1Cand = plan1.getCandidateAtBranchPoint(joinedBrancher);
        final PlanNode branch2Cand = plan2.getCandidateAtBranchPoint(joinedBrancher);

        if (branch1Cand != null && branch2Cand != null && branch1Cand != branch2Cand) {
            return false;
        }
    }
    return true;
}
python
def _execute(self, method_function, method_name, resource, **params):
        """
        Generic TeleSign REST API request handler.

        :param method_function: The Requests HTTP request function to perform the request.
        :param method_name: The HTTP method name, as an upper case string.
        :param resource: The partial resource URI to perform the request against, as a string.
        :param params: Body params to perform the HTTP request with, as a dictionary.
        :return: The RestClient Response object.
        """
        resource_uri = "{api_host}{resource}".format(api_host=self.api_host, resource=resource)

        url_encoded_fields = self._encode_params(params)

        headers = RestClient.generate_telesign_headers(self.customer_id,
                                                       self.api_key,
                                                       method_name,
                                                       resource,
                                                       url_encoded_fields,
                                                       user_agent=self.user_agent)

        if method_name in ['POST', 'PUT']:
            payload = {'data': url_encoded_fields}
        else:
            payload = {'params': url_encoded_fields}

        response = self.Response(method_function(resource_uri,
                                                 headers=headers,
                                                 timeout=self.timeout,
                                                 **payload))

        return response
java
public void addPoi(final Poi BLIP) {
    if (pois.containsKey(BLIP.getName())) {
        updatePoi(BLIP.getName(), BLIP.getLocation());
    } else {
        pois.put(BLIP.getName(), BLIP);
    }
    checkForBlips();
}
java
@Override
public final void prepareNarInfo(final File baseDir, final MavenProject project, final NarInfo narInfo,
                                 final AbstractNarMojo mojo) throws MojoExecutionException {
    if (getNoArchDirectory(baseDir, project.getArtifactId(), project.getVersion()).exists()) {
        narInfo.setNar(null, NarConstants.NAR_NO_ARCH, project.getGroupId() + ":" + project.getArtifactId() + ":"
            + NarConstants.NAR_TYPE + ":" + NarConstants.NAR_NO_ARCH);
    }

    final String artifactIdVersion = project.getArtifactId() + "-" + project.getVersion();

    // list all directories in basedir, scan them for classifiers
    final String[] subDirs = baseDir.list();
    final ArrayList<String> classifiers = new ArrayList<>();
    for (int i = 0; subDirs != null && i < subDirs.length; i++) {
        // skip entries not belonging to this project
        if (!subDirs[i].startsWith(artifactIdVersion)) {
            continue;
        }

        final String classifier = subDirs[i].substring(artifactIdVersion.length() + 1);

        // skip noarch here
        if (classifier.equals(NarConstants.NAR_NO_ARCH)) {
            continue;
        }

        classifiers.add(classifier);
    }

    if (!classifiers.isEmpty()) {
        for (final String classifier : classifiers) {
            final int lastDash = classifier.lastIndexOf('-');
            final String type = classifier.substring(lastDash + 1);
            final AOL aol = new AOL(classifier.substring(0, lastDash));
            if (narInfo.getOutput(aol, null) == null) {
                narInfo.setOutput(aol, mojo.getOutput(!Library.EXECUTABLE.equals(type)));
            }

            if (mojo.getLibsName() != null) {
                narInfo.setLibs(aol, mojo.getLibsName());
            }

            // We prefer shared to jni/executable/static/none,
            if (type.equals(Library.SHARED)) // overwrite whatever we had
            {
                narInfo.setBinding(aol, type);
                narInfo.setBinding(null, type);
            } else {
                // if the binding is already set, then don't write it for
                // jni/executable/none.
                if (narInfo.getBinding(aol, null) == null) {
                    narInfo.setBinding(aol, type);
                } else if (type.equals(Library.STATIC)) {
                    // static lib is preferred over other remaining types; see #231
                    narInfo.setBinding(aol, type);
                    narInfo.setBinding(null, type);
                }
                if (narInfo.getBinding(null, null) == null) {
                    narInfo.setBinding(null, type);
                }
            }

            narInfo.setNar(null, type, project.getGroupId() + ":" + project.getArtifactId() + ":"
                + NarConstants.NAR_TYPE + ":" + "${aol}" + "-" + type);
        }

        // setting this first stops the per type config because getOutput check
        // for aol defaults to this generic one...
        if (mojo != null && narInfo.getOutput(null, null) == null) {
            narInfo.setOutput(null, mojo.getOutput(true));
        }
    }
}
python
def conf_budget(self, budget):
        """
        Set limit on the number of conflicts.
        """
        if self.minisat:
            pysolvers.minisat22_cbudget(self.minisat, budget)
java
@Override
@FFDCIgnore({ RuntimeException.class })
public Object invoke(Exchange exchange, final Object serviceObject, Method m, List<Object> params) {
    //bean customizer....
    final Object realServiceObject;
    final OperationResourceInfo ori = exchange.get(OperationResourceInfo.class);
    final ClassResourceInfo cri = ori.getClassResourceInfo();

    //replace with CDI or EJB for per-request resource classes; at this point InjectionUtil has
    //already put the related context object into a ThreadLocal
    //singleton resources' replacement is done in InjectionUtil.injectContextProxiesAndApplication()
    if (!cri.isSingleton()) {
        Class<?> clazz = serviceObject.getClass();
        JaxRsFactoryBeanCustomizer beanCustomizer = libertyJaxRsServerFactoryBean.findBeanCustomizer(clazz);
        if (beanCustomizer != null) {
            realServiceObject = beanCustomizer.beforeServiceInvoke(serviceObject,
                                                                   cri.isSingleton(),
                                                                   libertyJaxRsServerFactoryBean.getBeanCustomizerContext(beanCustomizer));
            if (realServiceObject == serviceObject
                && !beanCustomizer.getClass().getName().equalsIgnoreCase("com.ibm.ws.jaxrs20.ejb.JaxRsFactoryBeanEJBCustomizer")
                && !cri.contextsAvailable() && !cri.paramsAvailable()) {
                //call the postConstruct method if it has not been replaced with EJB/CDI for per-request resources
                Method postConstructMethod = ResourceUtils.findPostConstructMethod(realServiceObject.getClass());
                InjectionUtils.invokeLifeCycleMethod(realServiceObject, postConstructMethod);
            }
        } else {
            realServiceObject = serviceObject;
            if (!cri.contextsAvailable() && !cri.paramsAvailable()) {
                //if the bean customizer is null this is a POJO, so we need to call postConstruct here
                Method postConstructMethod = ResourceUtils.findPostConstructMethod(serviceObject.getClass());
                InjectionUtils.invokeLifeCycleMethod(serviceObject, postConstructMethod);
            }
        }
    } else {
        realServiceObject = serviceObject;
    }

    Message message = JAXRSUtils.getCurrentMessage();
    Object theProvider = null;
    if (isEnableBeanValidation && cxfBeanValidationProviderClass != null) {
        theProvider = getProvider(message);
        try {
            if (isValidateServiceObject()) {
                //theProvider.validateBean(serviceObject);
                callValidationMethod("validateBean", new Object[] { realServiceObject }, theProvider);
            }
            //theProvider.validateParameters(serviceObject, m, params.toArray());
            callValidationMethod("validateParameters", new Object[] { realServiceObject, m, params.toArray() }, theProvider);
        } catch (RuntimeException e) {
            // Since BeanValidation is enabled, if this exception is a ConstraintViolationException
            // then we want to put a FaultListener on the message so that when this exception
            // bubbles up to PhaseInterceptorChain we do not use the default logging, which would
            // log this exception. BeanValidation is supposed to block logging these messages.
            if (beanValidationFaultListener != null && beanValidationFaultListener.cve.isInstance(e)) {
                Message m2 = exchange.getInMessage();
                m2.put(FaultListener.class.getName(), beanValidationFaultListener);
            }
            //re-throw the exception. If the FaultListener is set then a ConstraintViolation will not
            //be logged in the messages.log.
            throw e;
        }
    }

    Object response = super.invoke(exchange, realServiceObject, m, params);

    if (isEnableBeanValidation && cxfBeanValidationProviderClass != null && theProvider != null) {
        if (response instanceof MessageContentsList) {
            MessageContentsList list = (MessageContentsList) response;
            if (list.size() == 1) {
                Object entity = list.get(0);
                if (entity instanceof Response) {
                    //theProvider.validateReturnValue(serviceObject, m, ((Response) entity).getEntity());
                    callValidationMethod("validateReturnValue", new Object[] { realServiceObject, m, ((Response) entity).getEntity() }, theProvider);
                } else {
                    //theProvider.validateReturnValue(serviceObject, m, entity);
                    callValidationMethod("validateReturnValue", new Object[] { realServiceObject, m, entity }, theProvider);
                }
            }
        }
    }
    return response;
}
python
def _set_bin_view(self, session):
        """Sets the underlying bin view to match current view"""
        if self._bin_view == COMPARATIVE:
            try:
                session.use_comparative_bin_view()
            except AttributeError:
                pass
        else:
            try:
                session.use_plenary_bin_view()
            except AttributeError:
                pass
python
def print_solution(model, solver):
    """Prints the solution associated with solver.

    If solver has already had Solve() called on it, prints the solution. This
    includes each variable and its assignment, along with the objective
    function and its optimal value.
    If solver has not had Solve() called on it, or there is no feasible
    solution, this will probably crash.

    Args:
        model: A pywrapcp.CpModel object.
        solver: A pywrapcp.CpSolver object.

    Returns:
        Nothing, but prints the solution associated with solver.
    """
    model_proto = model.Proto()
    response_proto = solver.ResponseProto()
    variables_in_objective_map = {}
    maximization = False
    if model_proto.HasField('objective'):
        objective = model_proto.objective
        for i in range(len(objective.vars)):
            variables_in_objective_map[objective.vars[i]] = objective.coeffs[i]
        if objective.scaling_factor < 0.0:
            maximization = True
    variable_assignments = []
    variables_in_objective = []
    num_vars = len(model_proto.variables)
    for var_index in range(num_vars):
        if not model_proto.variables[var_index].name:
            continue
        variable_name = model_proto.variables[var_index].name
        if var_index in variables_in_objective_map:
            coefficient = variables_in_objective_map[var_index]
            if coefficient:
                if maximization:
                    coefficient *= -1
                if coefficient < 0:
                    variables_in_objective.append(' - {} * {}'.format(
                        -coefficient, variable_name))
                elif coefficient > 0:
                    variables_in_objective.append(' + {} * {}'.format(
                        coefficient, variable_name))
        variable_assignments.append(' {} = {}\n'.format(
            variable_name, response_proto.solution[var_index]))
    print(''.join(variable_assignments), end='')
    # Strip the leading '+' if it exists.
    if variables_in_objective and variables_in_objective[0][1] == '+':
        variables_in_objective[0] = variables_in_objective[0][2:]
    print('{}:{}'.format('Maximize' if maximization else 'Minimize',
                         ''.join(variables_in_objective)))
    print('Objective value: {}\n'.format(solver.ObjectiveValue()))
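A hedged usage sketch: build and solve a tiny CP-SAT model with the or-tools Python API, then hand the model/solver pair to print_solution above. The model is illustrative; the Proto()/ResponseProto() accessors that print_solution relies on exist in the or-tools versions this snippet targets, though newer releases renamed them.

python
from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.NewIntVar(0, 10, 'x')
y = model.NewIntVar(0, 10, 'y')
model.Add(x + y <= 10)
model.Maximize(2 * x + y)

solver = cp_model.CpSolver()
solver.Solve(model)  # must run before print_solution
print_solution(model, solver)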
java
public V get(SerializationService serializationService) {
    if (!valueExists) {
        // it's ok to deserialize twice in case of race
        assert serializationService != null;
        value = serializationService.toObject(serializedValue);
        valueExists = true;
    }
    return value;
}
java
public static <T> void verify(Vertex<T> vertex) throws CyclicDependencyException {
    // We need a list of vertices that contains the entire graph, so build it.
    List<Vertex<T>> vertices = new ArrayList<Vertex<T>>();
    addDependencies(vertex, vertices);
    verify(vertices);
}
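The list-based verify this overload delegates to is not shown; the underlying check is standard DFS cycle detection. A generic sketch in Python, assuming an adjacency-dict graph rather than Vertex<T> objects:

python
def verify_acyclic(graph):
    # graph: {node: iterable of successor nodes}
    WHITE, GRAY, BLACK = 0, 1, 2
    color = {}

    def visit(v):
        color[v] = GRAY
        for w in graph.get(v, ()):
            state = color.get(w, WHITE)
            if state == GRAY:  # back edge closes a cycle
                raise ValueError('cyclic dependency involving {!r}'.format(w))
            if state == WHITE:
                visit(w)
        color[v] = BLACK

    for v in graph:
        if color.get(v, WHITE) == WHITE:
            visit(v)

verify_acyclic({'a': ['b'], 'b': ['c'], 'c': []})  # passes silently
# verify_acyclic({'a': ['b'], 'b': ['a']})         # raises ValueError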
python
def __get_pid_and_tid(self):
        "Internally used by get_pid() and get_tid()."
        self.dwThreadId, self.dwProcessId = \
                         win32.GetWindowThreadProcessId(self.get_handle())
java
public DoublePoint Multiply(DoublePoint point1, DoublePoint point2) {
    DoublePoint result = new DoublePoint(point1);
    result.Multiply(point2);
    return result;
}
python
def send_caught_exception_stack_proceeded(self, thread):
        """Sends that some thread was resumed and is no longer showing an exception trace.
        """
        thread_id = get_thread_id(thread)
        int_cmd = InternalSendCurrExceptionTraceProceeded(thread_id)
        self.post_internal_command(int_cmd, thread_id)
        self.process_internal_commands()
python
def enter_history(self):
        """
        Display the history.
        """
        app = get_app()
        app.vi_state.input_mode = InputMode.NAVIGATION

        def done(f):
            result = f.result()
            if result is not None:
                self.default_buffer.text = result

            app.vi_state.input_mode = InputMode.INSERT

        history = History(self, self.default_buffer.document)

        future = run_coroutine_in_terminal(history.app.run_async)
        future.add_done_callback(done)
java
public com.squareup.okhttp.Call getAlliancesAllianceIdIconsAsync(Integer allianceId, String datasource,
        String ifNoneMatch, final ApiCallback<AllianceIconsResponse> callback) throws ApiException {
    com.squareup.okhttp.Call call = getAlliancesAllianceIdIconsValidateBeforeCall(allianceId, datasource,
            ifNoneMatch, callback);
    Type localVarReturnType = new TypeToken<AllianceIconsResponse>() {
    }.getType();
    apiClient.executeAsync(call, localVarReturnType, callback);
    return call;
}
java
public void add(String name, String value)
    throws IllegalArgumentException
{
    if (value == null)
        throw new IllegalArgumentException("null value");

    FieldInfo info = getFieldInfo(name);
    Field field = getField(info, false);
    Field last = null;
    if (field != null)
    {
        while (field != null && field._version == _version)
        {
            last = field;
            field = field._next;
        }
    }

    if (field != null)
        field.reset(value, _version);
    else
    {
        // create the field
        field = new Field(info, value, _version);

        // look for a chain to add to
        if (last != null)
        {
            field._prev = last;
            last._next = field;
        }
        else if (info.hashCode() < _index.length)
            _index[info.hashCode()] = _fields.size();

        _fields.add(field);
    }
}
java
synchronized void updateValue(long value) {
    if (value != currentValue) {
        currentValue = value;
        IOEvent evt = new IOEvent(this);
        // the device listeners receive the event first
        getDevice().pinChanged(evt);
        // then pin listeners receive the event
        for (PinEventListener listener : listeners) {
            listener.onValueChange(evt);
        }
    }
}
python
def rarity(brands, exemplars):
    """
    Compute a score for each follower that is sum_i (1/n_i), where n_i is
    the degree of the ith exemplar they follow. The score for a brand is
    then the average of their follower scores.
    """
    rarity = compute_rarity_scores(exemplars)
    scores = {}
    for brand, followers in brands:
        scores[brand] = sum(rarity[f] for f in followers) / len(followers)
    return scores
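compute_rarity_scores is referenced but not defined here; the stand-in below is a hypothetical reading of the docstring, where each follower scores sum(1/n_i) over the exemplars they follow and n_i is that exemplar's follower count:

python
def compute_rarity_scores(exemplars):
    # exemplars: {exemplar_name: set of follower ids}
    scores = {}
    for followers in exemplars.values():
        weight = 1.0 / len(followers)  # 1/n_i for this exemplar
        for f in followers:
            scores[f] = scores.get(f, 0.0) + weight
    return scores

exemplars = {'e1': {'u1', 'u2'}, 'e2': {'u1'}}
brands = [('brand_a', ['u1', 'u2'])]
# u1: 1/2 + 1/1 = 1.5; u2: 1/2 = 0.5; brand_a averages to 1.0
print(rarity(brands, exemplars))  # {'brand_a': 1.0}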
python
def get_encoded_text(container, xpath):
    """Return text for element at xpath in the container xml if it is there.

    Parameters
    ----------
    container : xml.etree.ElementTree.Element
        The element to be searched in.

    xpath : str
        The path to be looked for.

    Returns
    -------
    result : str
    """
    try:
        return "".join(container.find(xpath, ns).itertext())
    except AttributeError:
        return None
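A small usage sketch with the standard library's ElementTree. The original module defines ns (its namespace map) at module level, so the stand-in mapping below is an assumption and must live in the same module as get_encoded_text for the global lookup inside it to resolve:

python
import xml.etree.ElementTree as ET

ns = {'d': 'http://example.com/doc'}  # hypothetical namespace prefix
root = ET.fromstring(
    '<d:entry xmlns:d="http://example.com/doc"><d:title>Hi</d:title></d:entry>')

print(get_encoded_text(root, 'd:title'))    # 'Hi'
print(get_encoded_text(root, 'd:missing'))  # None: find() returned None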
java
public boolean checkAccess(String path, boolean readOnly) {
    final FileServiceMXBean fileService = getFileService();
    if (readOnly) {
        // we can read from both the read and write list
        return (FileServiceUtil.isPathContained(fileService.getReadList(), path)
                || FileServiceUtil.isPathContained(fileService.getWriteList(), path));
    } else {
        // we can write only to the write list
        return FileServiceUtil.isPathContained(fileService.getWriteList(), path);
    }
}
java
protected int StackOpp() {
    // compare string content with equals() rather than reference identity (==)
    if ("ifelse".equals(key))
        return -3;
    if ("roll".equals(key) || "put".equals(key))
        return -2;
    if ("callsubr".equals(key) || "callgsubr".equals(key) || "add".equals(key)
        || "sub".equals(key) || "div".equals(key) || "mul".equals(key)
        || "drop".equals(key) || "and".equals(key) || "or".equals(key)
        || "eq".equals(key))
        return -1;
    if ("abs".equals(key) || "neg".equals(key) || "sqrt".equals(key)
        || "exch".equals(key) || "index".equals(key) || "get".equals(key)
        || "not".equals(key) || "return".equals(key))
        return 0;
    if ("random".equals(key) || "dup".equals(key))
        return 1;
    return 2;
}
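The same operator-to-stack-delta mapping can be expressed as a lookup table, which avoids the long equality chains. A hypothetical Python rendering; the delta values are copied from the method above and the default of 2 matches its fall-through:

python
STACK_DELTA = {
    'ifelse': -3,
    'roll': -2, 'put': -2,
    'callsubr': -1, 'callgsubr': -1, 'add': -1, 'sub': -1, 'div': -1,
    'mul': -1, 'drop': -1, 'and': -1, 'or': -1, 'eq': -1,
    'abs': 0, 'neg': 0, 'sqrt': 0, 'exch': 0, 'index': 0, 'get': 0,
    'not': 0, 'return': 0,
    'random': 1, 'dup': 1,
}

def stack_opp(key):
    return STACK_DELTA.get(key, 2)

assert stack_opp('ifelse') == -3 and stack_opp('hstem') == 2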
java
public DescribeTransitGatewayRouteTablesResult withTransitGatewayRouteTables(
        TransitGatewayRouteTable... transitGatewayRouteTables) {
    if (this.transitGatewayRouteTables == null) {
        setTransitGatewayRouteTables(new com.amazonaws.internal.SdkInternalList<TransitGatewayRouteTable>(
                transitGatewayRouteTables.length));
    }
    for (TransitGatewayRouteTable ele : transitGatewayRouteTables) {
        this.transitGatewayRouteTables.add(ele);
    }
    return this;
}