language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def df_to_tsv(df, tsvfile, suffix):
    """ Serialize the dataframe as a tsv """
    tsvfile += suffix
    # SampleKey comes first; remaining columns are those matching the suffix,
    # in sorted order.
    columns = ["SampleKey"] + sorted(x for x in df.columns if x.endswith(suffix))
    # BUG FIX: reindex_axis() was deprecated and removed from pandas;
    # reindex(columns=...) is the equivalent call.
    tf = df.reindex(columns=columns)
    # BUG FIX: sort_values() returns a new frame; the original discarded the
    # result, so rows were never actually sorted by SampleKey.
    tf = tf.sort_values("SampleKey")
    tf.to_csv(tsvfile, sep='\t', index=False, float_format='%.4g', na_rep="na")
    print("TSV output written to `{}` (# samples={})"\
        .format(tsvfile, tf.shape[0]), file=sys.stderr)
python
def _validate_hue(df, hue): """ The top-level ``hue`` parameter present in most plot types accepts a variety of input types. This method condenses this variety into a single preferred format---an iterable---which is expected by all submethods working with the data downstream of it. Parameters ---------- df : GeoDataFrame The full data input, from which standardized ``hue`` information may need to be extracted. hue : Series, GeoSeries, iterable, str The data column whose entries are being discretely colorized, as (loosely) passed by the top-level ``hue`` variable. required : boolean Whether or not this parameter is required for the plot in question. Returns ------- hue : iterable The ``hue`` parameter input as an iterable. """ if hue is None: return None elif isinstance(hue, str): hue = df[hue] return hue else: return gpd.GeoSeries(hue)
java
/**
 * Begins an asynchronous update of an environment setting and wires the
 * supplied callback to the eventual result.
 *
 * @param resourceGroupName      name of the resource group
 * @param labAccountName         name of the lab account
 * @param labName                name of the lab
 * @param environmentSettingName name of the environment setting
 * @param environmentSetting     fragment describing the fields to update
 * @param serviceCallback        callback invoked when the call completes
 * @return a {@link ServiceFuture} tracking the in-flight request
 */
public ServiceFuture<EnvironmentSettingInner> updateAsync(String resourceGroupName, String labAccountName, String labName, String environmentSettingName, EnvironmentSettingFragment environmentSetting, final ServiceCallback<EnvironmentSettingInner> serviceCallback) {
    // Delegates to the ServiceResponse-producing variant and adapts it to a
    // ServiceFuture bound to the callback.
    return ServiceFuture.fromResponse(updateWithServiceResponseAsync(resourceGroupName, labAccountName, labName, environmentSettingName, environmentSetting), serviceCallback);
}
java
/**
 * Executes the configured behavior, recording the most recent failure.
 *
 * @param dataIn input handed to the behavior
 * @return whatever the behavior produced
 * @throws DevFailed if the behavior fails; the failure is stored in lastError
 */
public Object execute(final Object dataIn) throws DevFailed {
    // Trace entry/exit; exit is only logged on the success path.
    xlogger.entry(name);
    final Object outcome;
    try {
        outcome = behavior.execute(dataIn);
    } catch (final DevFailed failure) {
        // Remember the latest failure before propagating it to the caller.
        lastError = failure;
        throw failure;
    }
    xlogger.exit(name);
    return outcome;
}
python
def create_new(cls, oldvalue, *args):
    "Raise if the old value already exists"
    # Guard clause inverted: the common path (no previous value) creates
    # a fresh instance; any existing value is an error.
    if oldvalue is None:
        return cls.create_instance(*args)
    raise AlreadyExistsException('%r already exists' % (oldvalue,))
java
/**
 * Fetches the reply of an asynchronous write_attribute call and forwards the
 * outcome (including any collected errors) to the registered callback.
 *
 * @param timeout wait time for the reply, or NO_TIMEOUT to poll without waiting
 */
void write_attribute_reply(int timeout) {
    DevError[] errors = null;
    try {
        if (timeout == NO_TIMEOUT)
            dev.write_attribute_reply(id);
        else
            // BUG FIX: the timeout was previously hard-coded to 0, silently
            // ignoring the caller-supplied wait time.
            dev.write_attribute_reply(id, timeout);
    } catch (AsynReplyNotArrived e) {
        errors = e.errors;
    } catch (DevFailed e) {
        errors = e.errors;
    }
    // Always notify the callback, passing along any errors gathered above.
    cb.attr_written(new AttrWrittenEvent(dev, names, errors));
}
java
/**
 * Appends a diff entry for two {@code double} fields when their raw bit
 * patterns differ (NaN-safe, distinguishes signed zeros).
 *
 * @param fieldName name of the field being compared; must not be null
 * @param lhs       left-hand value
 * @param rhs       right-hand value
 * @return this builder, for chaining
 */
public DiffBuilder append(final String fieldName, final double lhs, final double rhs) {
    validateFieldNameNotNull(fieldName);
    if (objectsTriviallyEqual) {
        return this;
    }
    // Guard clause: identical bit patterns mean there is nothing to record.
    if (Double.doubleToLongBits(lhs) == Double.doubleToLongBits(rhs)) {
        return this;
    }
    diffs.add(new Diff<Double>(fieldName) {
        private static final long serialVersionUID = 1L;

        @Override
        public Double getLeft() {
            return Double.valueOf(lhs);
        }

        @Override
        public Double getRight() {
            return Double.valueOf(rhs);
        }
    });
    return this;
}
java
/**
 * Creates a new 2-byte buffer containing the given value written as a
 * 16-bit short.
 *
 * @param value the value to encode (low 16 bits are used)
 * @return a freshly allocated buffer holding the encoded short
 */
public static ByteBuf copyShort(int value) {
    final ByteBuf result = buffer(2);
    result.writeShort(value);
    return result;
}
python
def send(self, data):
    """
    Tries to send data to the client.

    :param data: Data to be sent
    :return: True if the data was sent, False on error
    """
    # Encode text payloads up front; None is passed through unchanged.
    if data is not None:
        data = data.encode("UTF-8")

    try:
        self.wfile.write(data)
        self.wfile.flush()
    except IOError:
        # An error occurred, mask it
        # -> This allows to handle the command even if the client has been
        # disconnect (i.e. "echo stop 0 | nc localhost 9000")
        return False
    return True
python
def convert(from_currency, to_currency, from_currency_price=1):
    """ convert from from_currency to to_currency using cached info """
    # Make sure the local cache exists, normalize the currency codes, and
    # refresh the pair we are about to read.
    get_cache()
    from_currency, to_currency = validate_currency(from_currency, to_currency)
    update_cache(from_currency, to_currency)
    # Scale the cached exchange rate by the supplied source-currency amount.
    rate = ccache[from_currency][to_currency]['value']
    return rate * from_currency_price
java
/**
 * Resolves the vertical motion direction between two motion events by
 * classifying their raw vertical delta against the threshold.
 *
 * @param e1        first motion event
 * @param e2        second motion event
 * @param threshold minimum delta that counts as motion
 * @return the classified direction
 */
public static MotionDirection getVerticalMotionDirection(MotionEvent e1, MotionEvent e2, float threshold) {
    return getVerticalMotionDirection(getVerticalMotionRawDelta(e1, e2), threshold);
}
java
/**
 * Builds the {@link ProxyArtifactStore} used to resolve artifacts, wiring in
 * the repository and resolution collaborators held by this component.
 *
 * @return a new ProxyArtifactStore backed by this component's repositories,
 *         resolvers and logger
 */
protected ProxyArtifactStore createProxyArtifactStore() {
    return new ProxyArtifactStore(
        repositoryMetadataManager,
        remoteArtifactRepositories,
        remotePluginRepositories,
        localRepository,
        artifactFactory,
        artifactResolver,
        archetypeManager,
        getLog()
    );
}
python
def swap_columns(self, column_name_1, column_name_2, inplace=False):
    """
    Returns an SFrame with two column positions swapped.

    If inplace == False (default) this operation does not modify the
    current SFrame, returning a new SFrame.

    If inplace == True, this operation modifies the current SFrame,
    returning self.

    Parameters
    ----------
    column_name_1 : string
        Name of column to swap

    column_name_2 : string
        Name of other column to swap

    inplace : bool, optional. Defaults to False.
        Whether the SFrame is modified in place.

    Returns
    -------
    out : SFrame
        The SFrame with swapped columns.

    Examples
    --------
    >>> sf = turicreate.SFrame({'id': [1, 2, 3], 'val': ['A', 'B', 'C']})
    >>> res = sf.swap_columns('id', 'val')
    >>> res
    +-----+-----+
    | val | id  |
    +-----+-----+
    |  A  |  1  |
    |  B  |  2  |
    |  C  |  3  |
    +-----+-----+
    [3 rows x 2 columns]
    """
    # Translate both column names into positional indices.
    names = self.column_names()
    first = names.index(column_name_1)
    second = names.index(column_name_2)

    # Either mutate this frame directly or work on a copy.
    target = self if inplace else self.copy()

    with cython_context():
        target.__proxy__.swap_columns(first, second)
        target._cache = None
    return target
java
/**
 * Returns all documents of the repository's entity type, ordered by the
 * given sort specification.
 *
 * @param sort the sort order to apply; must not be null
 * @return all entities in the requested order
 */
@Override
public Iterable<T> findAll(@NonNull Sort sort) {
    Assert.notNull(sort, "sort of findAll should not be null");
    // Build an unconditional ("match all") query and attach the sort.
    final DocumentQuery query = new DocumentQuery(Criteria.getInstance(CriteriaType.ALL)).with(sort);
    return operation.find(query, information.getJavaType(), information.getCollectionName());
}
python
def make(id, client, cls, parent_id=None, json=None):
    """
    Makes an api object based on an id and class.

    :param id: The id of the object to create
    :param client: The LinodeClient to give the new object
    :param cls: The class type to instantiate
    :param parent_id: The parent id for derived classes
    :param json: The JSON to use to populate the new class

    :returns: An instance of cls with the given id
    """
    # Imported lazily to avoid a circular import at module load time.
    from .dbase import DerivedBase

    # Derived objects additionally carry their parent's id.
    if issubclass(cls, DerivedBase):
        return cls(client, id, parent_id, json)
    return cls(client, id, json)
python
def read_sphinx_environment(pth):
    """Read the sphinx environment.pickle file at path `pth`."""
    with open(pth, 'rb') as handle:
        return pickle.load(handle)
java
/**
 * Serializes a {@code JsonObject} by writing its underlying map
 * representation, letting the generator handle nested values.
 */
@Override
public void serialize(JsonObject value, JsonGenerator gen, SerializerProvider serializers) throws IOException, JsonProcessingException {
    gen.writeObject(value.getMap());
}
python
def _zm_request(self, method, api_url, data=None,
                timeout=DEFAULT_TIMEOUT) -> dict:
    """Perform a request to the ZoneMinder API.

    Retries with a fresh login when the session has expired, and returns the
    decoded JSON body -- or an empty dict when decoding or the connection
    fails.
    """
    try:
        # Since the API uses sessions that expire, sometimes we need to
        # re-auth if the call fails.
        for _ in range(ZoneMinder.LOGIN_RETRIES):
            req = requests.request(
                method, urljoin(self._server_url, api_url), data=data,
                cookies=self._cookies, timeout=timeout,
                verify=self._verify_ssl)

            if not req.ok:
                self.login()
            else:
                break
        else:
            _LOGGER.error('Unable to get API response from ZoneMinder')

        try:
            return req.json()
        except ValueError:
            # BUG FIX: the two adjacent string fragments previously
            # concatenated to "while" + "attempting" without a space.
            _LOGGER.exception('JSON decode exception caught while '
                              'attempting to decode "%s"', req.text)
            return {}
    except requests.exceptions.ConnectionError:
        _LOGGER.exception('Unable to connect to ZoneMinder')
        return {}
java
public static String getPippoVersion() { // and the key inside the properties file. String pippoVersionPropertyKey = "pippo.version"; String pippoVersion; try { Properties prop = new Properties(); URL url = ClasspathUtils.locateOnClasspath(PippoConstants.LOCATION_OF_PIPPO_BUILTIN_PROPERTIES); InputStream stream = url.openStream(); prop.load(stream); pippoVersion = prop.getProperty(pippoVersionPropertyKey); } catch (Exception e) { //this should not happen. Never. throw new PippoRuntimeException("Something is wrong with your build. Cannot find resource {}", PippoConstants.LOCATION_OF_PIPPO_BUILTIN_PROPERTIES); } return pippoVersion; }
python
def get_b(self):
    """Returns the bottom border of the cell"""
    # Gather the geometry and styling of the bottom edge, then wrap them.
    start, end = self._get_bottom_line_coordinates()
    line_width = self._get_bottom_line_width()
    line_color = self._get_bottom_line_color()
    return CellBorder(start, end, line_width, line_color)
python
def event_source_mapping_present(name,
                                 EventSourceArn,
                                 FunctionName,
                                 StartingPosition,
                                 Enabled=True,
                                 BatchSize=100,
                                 region=None,
                                 key=None,
                                 keyid=None,
                                 profile=None):
    '''
    Ensure event source mapping exists.

    name
        The name of the state definition.

    EventSourceArn
        The Amazon Resource Name (ARN) of the Amazon Kinesis or the Amazon
        DynamoDB stream that is the event source.

    FunctionName
        The Lambda function to invoke when AWS Lambda detects an event on the
        stream. You can specify an unqualified function name (for example,
        "Thumbnail") or you can specify Amazon Resource Name (ARN) of the
        function (for example,
        "arn:aws:lambda:us-west-2:account-id:function:ThumbNail"). AWS Lambda
        also allows you to specify only the account ID qualifier (for example,
        "account-id:Thumbnail"). Note that the length constraint applies only
        to the ARN. If you specify only the function name, it is limited to 64
        character in length.

    StartingPosition
        The position in the stream where AWS Lambda should start reading.
        (TRIM_HORIZON | LATEST)

    Enabled
        Indicates whether AWS Lambda should begin polling the event source.
        By default, Enabled is true.

    BatchSize
        The largest number of records that AWS Lambda will retrieve from your
        event source at the time of invoking your function. Your function
        receives an event with all the retrieved records. The default is 100
        records.

    region
        Region to connect to.

    key
        Secret key to be used.

    keyid
        Access key to be used.

    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.
    '''
    ret = {'name': None,
           'result': True,
           'comment': '',
           'changes': {}
           }

    # Does the mapping already exist?
    r = __salt__['boto_lambda.event_source_mapping_exists'](
        EventSourceArn=EventSourceArn, FunctionName=FunctionName,
        region=region, key=key, keyid=keyid, profile=profile)
    if 'error' in r:
        ret['result'] = False
        ret['comment'] = ('Failed to create event source mapping: '
                          '{0}.'.format(r['error']['message']))
        return ret

    if not r.get('exists'):
        # Mapping is absent: create it (or only report in test mode).
        if __opts__['test']:
            ret['comment'] = ('Event source mapping {0} is set '
                              'to be created.'.format(FunctionName))
            ret['result'] = None
            return ret
        r = __salt__['boto_lambda.create_event_source_mapping'](
            EventSourceArn=EventSourceArn, FunctionName=FunctionName,
            StartingPosition=StartingPosition, Enabled=Enabled,
            BatchSize=BatchSize,
            region=region, key=key, keyid=keyid, profile=profile)
        if not r.get('created'):
            ret['result'] = False
            ret['comment'] = ('Failed to create event source mapping: '
                              '{0}.'.format(r['error']['message']))
            return ret
        # Re-describe so the new UUID and attributes land in the changes dict.
        _describe = __salt__['boto_lambda.describe_event_source_mapping'](
            EventSourceArn=EventSourceArn, FunctionName=FunctionName,
            region=region, key=key, keyid=keyid, profile=profile)
        ret['name'] = _describe['event_source_mapping']['UUID']
        ret['changes']['old'] = {'event_source_mapping': None}
        ret['changes']['new'] = _describe
        ret['comment'] = ('Event source mapping {0} '
                          'created.'.format(ret['name']))
        return ret

    # Mapping exists: diff the mutable attributes and update if needed.
    ret['comment'] = os.linesep.join(
        [ret['comment'], 'Event source mapping is present.'])
    ret['changes'] = {}
    _describe = __salt__['boto_lambda.describe_event_source_mapping'](
        EventSourceArn=EventSourceArn, FunctionName=FunctionName,
        region=region, key=key, keyid=keyid,
        profile=profile)['event_source_mapping']

    need_update = False
    options = {'BatchSize': BatchSize}
    # BUG FIX: the loop previously iterated ``for key, val`` which clobbered
    # the AWS secret ``key`` parameter, so every authenticated call made
    # after the loop passed key='BatchSize'.
    for opt, val in six.iteritems(options):
        if _describe[opt] != val:
            need_update = True
            ret['changes'].setdefault('old', {})[opt] = _describe[opt]
            ret['changes'].setdefault('new', {})[opt] = val
    # verify FunctionName against FunctionArn
    function_arn = _get_function_arn(FunctionName, region=region, key=key,
                                     keyid=keyid, profile=profile)
    if _describe['FunctionArn'] != function_arn:
        need_update = True
        ret['changes'].setdefault('new', {})['FunctionArn'] = function_arn
        ret['changes'].setdefault('old', {})['FunctionArn'] = _describe[
            'FunctionArn']
    # TODO check for 'Enabled', since it doesn't directly map to a specific
    # state
    if need_update:
        ret['comment'] = os.linesep.join(
            [ret['comment'], 'Event source mapping to be modified'])
        if __opts__['test']:
            ret['comment'] = (
                'Event source mapping {0} set to be modified.'.format(
                    _describe['UUID']
                )
            )
            ret['result'] = None
            return ret
        _r = __salt__['boto_lambda.update_event_source_mapping'](
            UUID=_describe['UUID'], FunctionName=FunctionName,
            Enabled=Enabled, BatchSize=BatchSize,
            region=region, key=key, keyid=keyid, profile=profile)
        if not _r.get('updated'):
            ret['result'] = False
            ret['comment'] = ('Failed to update mapping: '
                              '{0}.'.format(_r['error']['message']))
            ret['changes'] = {}
            return ret
    # BUG FIX: the visible original fell off the end (implicit None) after a
    # successful update; state functions must always return the ret dict.
    return ret
java
/**
 * Applies the given window function to each window, producing elements of
 * the given result type.
 *
 * @param function   the user window function to evaluate per window
 * @param resultType explicit type information for the result elements
 * @param <R>        result element type
 * @return the transformed stream
 */
public <R> SingleOutputStreamOperator<R> apply(WindowFunction<T, R, K, W> function, TypeInformation<R> resultType) {
    // clean() runs the closure cleaner on the user function before it is
    // shipped to the runtime.
    function = input.getExecutionEnvironment().clean(function);
    return apply(new InternalIterableWindowFunction<>(function), resultType, function);
}
java
/**
 * Builds the JavaScript call that opens the gallery dialog for this widget.
 *
 * @param cms          the current CMS context
 * @param widgetDialog the dialog the widget is rendered in
 * @param param        the widget parameter (an XML content value when opened
 *                     from the XML content editor)
 * @param hashId       hash id identifying this widget instance
 * @return the JavaScript snippet, ending in "return false;" so the click is
 *         not followed as a link
 */
protected String getOpenGalleryCall(
    CmsObject cms,
    I_CmsWidgetDialog widgetDialog,
    I_CmsWidgetParameter param,
    long hashId) {

    StringBuffer sb = new StringBuffer(128);
    sb.append("javascript:cmsOpenDialog('");
    // the gallery title
    sb.append(widgetDialog.getMessages().key(Messages.getButtonName(getGalleryName()))).append("', '");
    // the gallery path
    sb.append(OpenCms.getSystemInfo().getOpenCmsContext()).append(PATH_GALLERY_JSP);
    // set the content locale
    Locale contentLocale = widgetDialog.getLocale();
    try {
        I_CmsXmlContentValue value = (I_CmsXmlContentValue)param;
        contentLocale = value.getLocale();
    } catch (Exception e) {
        // may fail if widget is not opened from xml content editor, ignore
    }
    sb.append("?__locale=").append(contentLocale.toString());
    // add other open parameters
    for (Entry<String, String> paramEntry : getGalleryOpenParams(
        cms,
        widgetDialog.getMessages(),
        param,
        widgetDialog instanceof CmsDialog ? ((CmsDialog)widgetDialog).getParamResource() : null,
        hashId).entrySet()) {
        sb.append("&").append(paramEntry.getKey()).append("=").append(paramEntry.getValue());
    }
    // Close the JS call with the element id and the dialog dimensions.
    sb.append("', '").append(param.getId()).append("', 488, 650); return false;");
    return sb.toString();
}
java
/**
 * Converts a bulk API response string into an array of version_matrix_status
 * resources.
 *
 * @param service  the nitro service the request was issued on
 * @param response the raw response payload
 * @return one resource per item in the bulk response
 * @throws Exception (nitro_exception) when the response reports a non-zero
 *         error code; an expired session additionally clears local state
 */
protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception {
    version_matrix_status_responses result = (version_matrix_status_responses) service.get_payload_formatter().string_to_resource(version_matrix_status_responses.class, response);
    if (result.errorcode != 0) {
        // Clear the session so the next call re-authenticates.
        if (result.errorcode == SESSION_NOT_EXISTS)
            service.clear_session();
        throw new nitro_exception(result.message, result.errorcode, (base_response [])result.version_matrix_status_response_array);
    }
    // Unwrap the single resource embedded in each per-item response.
    version_matrix_status[] result_version_matrix_status = new version_matrix_status[result.version_matrix_status_response_array.length];
    for (int i = 0; i < result.version_matrix_status_response_array.length; i++) {
        result_version_matrix_status[i] = result.version_matrix_status_response_array[i].version_matrix_status[0];
    }
    return result_version_matrix_status;
}
python
def fave_mentions(api, dry_run=None):
    '''
    Fave (aka like) recent mentions from user authenicated in 'api'.

    :api twitter_bot_utils.api.API
    :dry_run bool don't actually favorite, just report
    '''
    f = api.favorites(include_entities=False, count=150)
    # Use a set: membership is tested once per mention below.
    favs = {m.id_str for m in f}

    # The original wrapped this call in ``try: ... except Exception as e:
    # raise e`` which added nothing but noise; let errors propagate normally.
    mentions = api.mentions_timeline(trim_user=True, include_entities=False,
                                     count=75)

    for mention in mentions:
        # only try to fav if not in recent favs
        if mention.id_str not in favs:
            try:
                api.logger.info('liking %s: %s', mention.id_str, mention.text)
                if not dry_run:
                    api.create_favorite(mention.id_str, include_entities=False)
            except RateLimitError:
                # Back off for the rate-limit window, then retry once.
                api.logger.warning(
                    "reached Twitter's rate limit, sleeping for %d minutes",
                    RATE_LIMIT_RESET_MINUTES)
                sleep(RATE_LIMIT_RESET_MINUTES * 60)
                api.create_favorite(mention.id_str, include_entities=False)
            except TweepError as e:
                api.logger.error('error liking %s', mention.id_str)
                api.logger.error("code %s: %s", e.api_code, e)
python
def on_get(resc, req, resp, rid):
    """ Find the model by id & serialize it back """
    # Broadcast the generic and find-specific pre-request hooks.
    signals.pre_req.send(resc.model)
    signals.pre_req_find.send(resc.model)

    model = find(resc.model, rid)
    # Serialize honoring any ?include=... relationships on the request.
    props = to_rest_model(model, includes=req.includes)

    # Expose the model's modification time for HTTP caching headers.
    resp.last_modified = model.updated
    resp.serialize(props)

    signals.post_req.send(resc.model)
    signals.post_req_find.send(resc.model)
python
def standardize(self):
    """ Standardize data. """
    # Prefer previously preprocessed data; fall back to the raw input.
    source = self.original_data if self.preprocessed_data.empty else self.preprocessed_data

    scaler = preprocessing.StandardScaler()
    scaled = scaler.fit_transform(source)
    # Rebuild a DataFrame so column labels and the index survive scaling.
    self.preprocessed_data = pd.DataFrame(scaled,
                                          columns=source.columns,
                                          index=source.index)
python
def prepareToCalcEndOfPrdvP(self):
    '''
    Prepare to calculate end-of-period marginal value by creating an array
    of market resources that the agent could have next period, considering
    the grid of end-of-period normalized assets, the grid of persistent income
    levels, and the distribution of shocks he might experience next period.

    Parameters
    ----------
    None

    Returns
    -------
    aLvlNow : np.array
        2D array of end-of-period assets; also stored as attribute of self.
    pLvlNow : np.array
        2D array of persistent income levels this period.
    '''
    ShkCount = self.TranShkValsNext.size
    pLvlCount = self.pLvlGrid.size
    aNrmCount = self.aXtraGrid.size
    # 2D grids: rows index persistent income level, columns index assets.
    pLvlNow = np.tile(self.pLvlGrid,(aNrmCount,1)).transpose()
    # Asset levels scale with income and are shifted by the natural
    # borrowing constraint at each income level.
    aLvlNow = np.tile(self.aXtraGrid,(pLvlCount,1))*pLvlNow + self.BoroCnstNat(pLvlNow)
    # Add a leading shock dimension for next-period calculations.
    pLvlNow_tiled = np.tile(pLvlNow,(ShkCount,1,1))
    aLvlNow_tiled = np.tile(aLvlNow,(ShkCount,1,1))  # shape = (ShkCount,pLvlCount,aNrmCount)
    if self.pLvlGrid[0] == 0.0:  # aLvl turns out badly if pLvl is 0 at bottom
        aLvlNow[0,:] = self.aXtraGrid
        aLvlNow_tiled[:,0,:] = np.tile(self.aXtraGrid,(ShkCount,1))

    # Tile arrays of the income shocks and put them into useful shapes
    PermShkVals_tiled = np.transpose(np.tile(self.PermShkValsNext,(aNrmCount,pLvlCount,1)),(2,1,0))
    TranShkVals_tiled = np.transpose(np.tile(self.TranShkValsNext,(aNrmCount,pLvlCount,1)),(2,1,0))
    ShkPrbs_tiled = np.transpose(np.tile(self.ShkPrbsNext,(aNrmCount,pLvlCount,1)),(2,1,0))

    # Get cash on hand next period: persistent income evolves via
    # pLvlNextFunc and is hit by permanent shocks; market resources are
    # gross-return assets plus shocked labor income.
    pLvlNext = self.pLvlNextFunc(pLvlNow_tiled)*PermShkVals_tiled
    mLvlNext = self.Rfree*aLvlNow_tiled + pLvlNext*TranShkVals_tiled

    # Store and report the results
    self.ShkPrbs_temp = ShkPrbs_tiled
    self.pLvlNext = pLvlNext
    self.mLvlNext = mLvlNext
    self.aLvlNow = aLvlNow
    return aLvlNow, pLvlNow
python
def strptime(cls, value, format):
    """
    Parse a datetime string using the provided format.

    This also emulates `%z` support on Python 2.

    :param value: Datetime string
    :type value: str
    :param format: Format to use for parsing
    :type format: str
    :rtype: datetime
    :raises ValueError: Invalid format
    :raises TypeError: Invalid input type
    """
    # Fast path: the interpreter parses %z natively, or %z isn't used.
    if cls.python_supports_z or '%z' not in format:
        return datetime.strptime(value, format)

    # %z emulation: split off the trailing "+HHMM" and parse it separately.
    assert format[-2:] == '%z', 'For performance, %z is only supported at the end of the string'
    naive = datetime.strptime(value[:-5], format[:-2])  # cutoff '%z' and '+0000'
    offset = FixedOffset(value[-5:])  # parse %z into tzinfo
    return naive.replace(tzinfo=offset)
java
/**
 * Marshals the given request into the protocol representation by emitting
 * each bound field.
 *
 * @param updateVPCEConfigurationRequest the request to marshall; must not be null
 * @param protocolMarshaller             the marshaller receiving the fields
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(UpdateVPCEConfigurationRequest updateVPCEConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
    if (updateVPCEConfigurationRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(updateVPCEConfigurationRequest.getArn(), ARN_BINDING);
        protocolMarshaller.marshall(updateVPCEConfigurationRequest.getVpceConfigurationName(), VPCECONFIGURATIONNAME_BINDING);
        protocolMarshaller.marshall(updateVPCEConfigurationRequest.getVpceServiceName(), VPCESERVICENAME_BINDING);
        protocolMarshaller.marshall(updateVPCEConfigurationRequest.getServiceDnsName(), SERVICEDNSNAME_BINDING);
        protocolMarshaller.marshall(updateVPCEConfigurationRequest.getVpceConfigurationDescription(), VPCECONFIGURATIONDESCRIPTION_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def get_all_parents(go_objs):
    """Return a set containing all GO Term parents of multiple GOTerm objects."""
    # Union the parent sets contributed by every term; an empty input
    # yields an empty set.
    return set().union(*(go_obj.get_all_parents() for go_obj in go_objs))
java
/**
 * Looks up the KAM node corresponding to a BEL term string within the given KAM.
 *
 * @param kam           the KAM to search
 * @param belTermString the BEL term expression; must be non-empty
 * @return the matching node, or null when the KAM's info record is unknown
 */
@Override
public KamNode getKamNode(final Kam kam, String belTermString) {
    if (noLength(belTermString)) throw new InvalidArgument("belTermString", belTermString);
    KamInfo ki = kam.getKamInfo();
    if (!exists(ki)) return null;
    KamNode kamNode;
    Integer nodeID;
    try {
        // Resolve the term to its node id in the store, then to the node.
        nodeID = kamStoreDao(ki).getKamNodeId(belTermString);
        kamNode = kam.findNode(nodeID);
    } catch (SQLException e) {
        // Wrap low-level SQL failures in the store's own exception type.
        final String fmt = "error getting KAM node ID for %s";
        final String msg = format(fmt, ki.getName());
        throw new KAMStoreException(msg, e);
    }
    return kamNode;
}
python
def _create_tc_dirs(self): """Create app directories for logs and data files.""" tc_log_path = self.profile.get('args', {}).get('tc_log_path') if tc_log_path is not None and not os.path.isdir(tc_log_path): os.makedirs(tc_log_path) tc_out_path = self.profile.get('args', {}).get('tc_out_path') if tc_out_path is not None and not os.path.isdir(tc_out_path): os.makedirs(tc_out_path) tc_tmp_path = self.profile.get('args', {}).get('tc_tmp_path') if tc_tmp_path is not None and not os.path.isdir(tc_tmp_path): os.makedirs(tc_tmp_path)
python
def main():
    """Read GeoTiff raster data and print statistics.

    The output will be::

        rows: 130, cols: 100
        LLCornerX: 755145.28, LLCornerY: 654294.06
        cell size: 10.0
        mean: 203.92, max: 284.07, min: 139.11
        std: 32.32, sum: 2650967.00
    """
    input_tif = "../tests/data/Jamaica_dem.tif"
    raster = RasterUtilClass.read_raster(input_tif)
    # metadata information
    print("rows: %d, cols: %d" % (raster.nRows, raster.nCols))
    print("LLCornerX: %.2f, LLCornerY: %.2f" % (raster.xMin, raster.yMin))
    print("cell size: %.1f" % raster.dx)
    # basic statistics, nodata is excluded
    print("mean: %.2f, max: %.2f, min: %.2f" % (raster.get_average(),
                                                raster.get_max(),
                                                raster.get_min()))
    print("std: %.2f, sum: %.2f" % (raster.get_std(), raster.get_sum()))
python
def parent_workspace(context):
    """
    Return containing workspace

    Returns None if not found.
    """
    # The context itself may already be a workspace folder.
    if IWorkspaceFolder.providedBy(context):
        return context
    # Otherwise walk up the acquisition chain looking for one.
    for ancestor in aq_chain(context):
        if IWorkspaceFolder.providedBy(ancestor):
            return ancestor
    return None
python
def colorbrewer2_url(self):
    """
    URL that can be used to view the color map at colorbrewer2.org.
    """
    template = 'http://colorbrewer2.org/index.html?type={0}&scheme={1}&n={2}'
    # The site expects a lowercase map type (e.g. "sequential").
    return template.format(self.type.lower(), self.name, self.number)
python
def value_derived_from_wavefunction(self,
                                    state: np.ndarray,
                                    qubit_map: Dict[raw_types.Qid, int]
                                    ) -> Any:
    """The value of the display, derived from the full wavefunction.

    Args:
        state: The wavefunction.
        qubit_map: A dictionary from qubit to qubit index in the
            ordering used to define the wavefunction.
    """
    # NOTE(review): body intentionally empty (returns None) -- this reads
    # like an abstract hook that concrete display types override; confirm
    # the enclosing class declares it abstract.
python
def make_stmt_from_sort_key(key, verb):
    """Make a Statement from the sort key.

    Specifically, the sort key used by `group_and_sort_statements`.
    """
    def _to_agent(agent_name):
        # Both the string 'None' and an actual None mean "no agent".
        if agent_name is None or agent_name == 'None':
            return None
        return Agent(agent_name)

    StmtClass = get_statement_by_name(verb)
    names = list(key[1])
    # Each statement class has its own constructor shape.
    if verb == 'Complex':
        return StmtClass([_to_agent(n) for n in names])
    if verb == 'Conversion':
        return StmtClass(_to_agent(names[0]),
                         [_to_agent(n) for n in names[1]],
                         [_to_agent(n) for n in names[2]])
    if verb in ('ActiveForm', 'HasActivity'):
        return StmtClass(_to_agent(names[0]), names[1], names[2])
    return StmtClass(*[_to_agent(n) for n in names])
java
/**
 * Links every chain in every model to its EntityInfo, creating missing
 * entities for chains whose entity record is absent, and falling back to
 * heuristic entity detection when the file carried no entity records at all.
 */
private void linkEntities() {
    for (int i = 0; i < allModels.size(); i++) {
        for (Chain chain : allModels.get(i)) {
            //logger.info("linking entities for " + chain.getId() + " " + chain.getName());
            String entityId = asymId2entityId.get(chain.getId());

            if (entityId == null) {
                // this can happen for instance if the cif file didn't have _struct_asym category at all
                // and thus we have no asymId2entityId mapping at all
                logger.info("No entity id could be found for chain {}", chain.getId());
                continue;
            }

            int eId = Integer.parseInt(entityId);

            // Entities are not added for non-polymeric entities, if a chain is non-polymeric its entity won't be found.
            // TODO: add all entities and unique compounds and add methods to directly get polymer or non-polymer
            // asyms (chains). Either create a unique StructureImpl or modify existing for a better representation of the
            // mmCIF internal data structures but is compatible with Structure interface.
            // Some examples of PDB entries with this kind of problem:
            //   - 2uub: asym_id X, chainName Z, entity_id 24: fully non-polymeric but still with its own chainName
            //   - 3o6j: asym_id K, chainName Z, entity_id 6 : a single water molecule
            //   - 1dz9: asym_id K, chainName K, entity_id 6 : a potassium ion alone
            EntityInfo entityInfo = structure.getEntityById(eId);

            if (entityInfo == null) {
                // Supports the case where the only chain members were from non-polymeric entity that is missing.
                // Solved by creating a new Compound(entity) to which this chain will belong.
                logger.info("Could not find an Entity for entity_id {}, for chain id {}, creating a new Entity.",
                        eId, chain.getId());
                entityInfo = new EntityInfo();
                entityInfo.setMolId(eId);
                entityInfo.addChain(chain);
                // Classify the freshly created entity from the chain contents.
                if (chain.isWaterOnly()) {
                    entityInfo.setType(EntityType.WATER);
                } else {
                    entityInfo.setType(EntityType.NONPOLYMER);
                }
                chain.setEntityInfo(entityInfo);
                structure.addEntityInfo(entityInfo);
            } else {
                logger.debug("Adding chain with chain id {} (auth id {}) to Entity with entity_id {}",
                        chain.getId(), chain.getName(), eId);
                entityInfo.addChain(chain);
                chain.setEntityInfo(entityInfo);
            }
        }
    }

    // if no entity information was present in file we then go and find the entities heuristically with EntityFinder
    List<EntityInfo> entityInfos = structure.getEntityInfos();
    if (entityInfos == null || entityInfos.isEmpty()) {

        // Partition each model's chains into polymer / non-polymer / water
        // before handing them to the entity finder.
        List<List<Chain>> polyModels = new ArrayList<>();
        List<List<Chain>> nonPolyModels = new ArrayList<>();
        List<List<Chain>> waterModels = new ArrayList<>();

        for (List<Chain> model : allModels) {

            List<Chain> polyChains = new ArrayList<>();
            List<Chain> nonPolyChains = new ArrayList<>();
            List<Chain> waterChains = new ArrayList<>();

            polyModels.add(polyChains);
            nonPolyModels.add(nonPolyChains);
            waterModels.add(waterChains);

            for (Chain c : model) {

                // we only have entities for polymeric chains, all others are ignored for assigning entities
                if (c.isWaterOnly()) {
                    waterChains.add(c);
                } else if (c.isPureNonPolymer()) {
                    nonPolyChains.add(c);
                } else {
                    polyChains.add(c);
                }
            }
        }

        entityInfos = EntityFinder.findPolyEntities(polyModels);
        EntityFinder.createPurelyNonPolyEntities(nonPolyModels, waterModels, entityInfos);

        structure.setEntityInfos(entityInfos);
    }

    // final sanity check: it can happen that from the annotated entities some are not linked to any chains
    // e.g. 3s26: a sugar entity does not have any chains associated to it (it seems to be happening with many sugar compounds)
    // we simply log it, this can sign some other problems if the entities are used down the line
    for (EntityInfo e : entityInfos) {
        if (e.getChains().isEmpty()) {
            logger.info("Entity {} '{}' has no chains associated to it",
                    e.getMolId() < 0 ? "with no entity id" : e.getMolId(), e.getDescription());
        }
    }
}
python
def get(self, name=None):
    """
    Returns the plugin object with the given name.
    Or if a name is not given, the complete plugin dictionary is returned.

    :param name: Name of a plugin
    :return: None, single plugin or dictionary of plugins
    """
    if name is None:
        return self._plugins
    # dict.get() already returns None for unknown names; the original did a
    # redundant membership test against .keys() plus a second lookup.
    return self._plugins.get(name)
python
def is_safe_attribute(self, obj, attr, value):
    """The sandboxed environment will call this method to check if the
    attribute of an object is safe to access.

    Per default all attributes starting with an underscore are considered
    private as well as the special attributes of internal python objects
    as returned by the :func:`is_internal_attribute` function.
    """
    # De Morgan form of the original expression: the attribute is safe only
    # if it is neither underscore-prefixed nor an internal attribute.
    return not attr.startswith('_') and not is_internal_attribute(obj, attr)
python
def collect(self):
    """
    Collect interrupt data
    """
    if not os.access(self.PROC, os.R_OK):
        return False

    # BUG FIX: the original left the file handle open if an exception
    # escaped mid-loop, and shadowed the ``file`` builtin; a context
    # manager guarantees the handle is closed.
    with open(self.PROC, 'r') as proc_file:
        for line in proc_file:
            if not line.startswith('softirq'):
                continue

            data = line.split()

            # Field 1 after the label is the aggregate softirq count.
            metric_name = 'total'
            metric_value = int(data[1])
            metric_value = int(self.derivative(
                metric_name, long(metric_value), counter))
            self.publish(metric_name, metric_value)

            # Remaining fields are per-softirq counters, published by index.
            # NOTE(review): ``counter`` and ``long`` come from module scope
            # (Python 2-era collector) -- confirm before porting to Python 3.
            for i in range(2, len(data)):
                metric_name = str(i - 2)
                metric_value = int(data[i])
                metric_value = int(self.derivative(
                    metric_name, long(metric_value), counter))
                self.publish(metric_name, metric_value)
python
def scan_band(self, band, **kwargs):
    """Run Kalibrate for a band.

    Supported keyword arguments:
        gain -- Gain in dB
        device -- Index of device to be used
        error -- Initial frequency error in ppm
    """
    command = fn.build_kal_scan_band_string(self.kal_bin, band, kwargs)
    # Merge stderr into the captured output so nothing the tool prints
    # is lost before parsing.
    raw_output = subprocess.check_output(command.split(' '),
                                         stderr=subprocess.STDOUT)
    return fn.parse_kal_scan(raw_output)
python
def interpolate(x, y, z, interp_type='linear', hres=50000,
                minimum_neighbors=3, gamma=0.25, kappa_star=5.052,
                search_radius=None, rbf_func='linear', rbf_smooth=0,
                boundary_coords=None):
    """Wrap interpolate_to_grid for deprecated interpolate function.

    Deprecated pass-through kept for backward compatibility: every argument
    is forwarded unchanged to ``interpolate_to_grid``; see that function for
    parameter semantics.
    """
    return interpolate_to_grid(x, y, z, interp_type=interp_type, hres=hres,
                               minimum_neighbors=minimum_neighbors,
                               gamma=gamma, kappa_star=kappa_star,
                               search_radius=search_radius,
                               rbf_func=rbf_func, rbf_smooth=rbf_smooth,
                               boundary_coords=boundary_coords)
python
def put_directory(self, target_path, local_directory, **kwargs):
    """Upload a directory with all its contents

    :param target_path: path of the directory to upload into
    :param local_directory: path to the local directory to upload
    :param \*\*kwargs: optional arguments that ``put_file`` accepts
    :returns: True if the operation succeeded, False otherwise
    :raises: HTTPResponseError in case an HTTP error status was returned
    """
    # Normalize both paths so they end with a single trailing slash.
    target_path = self._normalize_path(target_path)
    if not target_path.endswith('/'):
        target_path += '/'
    gathered_files = []
    if not local_directory.endswith('/'):
        local_directory += '/'

    # The remote tree is rooted at the local directory's basename.
    basedir = os.path.basename(local_directory[0: -1]) + '/'
    # gather files to upload
    for path, _, files in os.walk(local_directory):
        # Remote path = basename root + the walk path relative to the local root.
        gathered_files.append(
            (path, basedir + path[len(local_directory):], files)
        )

    # Recreate each directory remotely, then upload its files; abort on the
    # first failed upload.
    for path, remote_path, files in gathered_files:
        self.mkdir(target_path + remote_path + '/')
        for name in files:
            if not self.put_file(target_path + remote_path + '/',
                                 path + '/' + name, **kwargs):
                return False
    return True
python
def kill(timeout=15):
    '''
    Kill the salt minion.

    timeout
        int seconds to wait for the minion to die.

    If you have a monitor that restarts ``salt-minion`` when it dies then this is a
    great way to restart after a minion upgrade.

    CLI example::

        >$ salt minion[12] minion.kill
        minion1:
            ----------
            killed:
                7874
            retcode:
                0
        minion2:
            ----------
            killed:
                29071
            retcode:
                0

    The result of the salt command shows the process ID of the minions and the
    results of a kill signal to the minion in as the ``retcode`` value: ``0``
    is success, anything else is a failure.
    '''
    # Default to failure; 'killed' holds the pid only on confirmed death.
    ret = {
        'killed': None,
        'retcode': 1,
    }
    comment = []
    # The minion records its own pid in grains at startup.
    pid = __grains__.get('pid')
    if not pid:
        comment.append('Unable to find "pid" in grains')
        ret['retcode'] = salt.defaults.exitcodes.EX_SOFTWARE
    else:
        if 'ps.kill_pid' not in __salt__:
            comment.append('Missing command: ps.kill_pid')
            ret['retcode'] = salt.defaults.exitcodes.EX_SOFTWARE
        else:
            # The retcode status comes from the first kill signal:
            # ps.kill_pid returns truthy on success, so the int(not ...)
            # inversion maps success -> 0 and failure -> 1.
            ret['retcode'] = int(not __salt__['ps.kill_pid'](pid))

            # If the signal was successfully delivered then wait for the
            # process to die - check by sending signals until signal delivery
            # fails.
            if ret['retcode']:
                comment.append('ps.kill_pid failed')
            else:
                # Poll once per second; delivery failure means the
                # process is gone.
                for _ in range(timeout):
                    time.sleep(1)
                    signaled = __salt__['ps.kill_pid'](pid)
                    if not signaled:
                        ret['killed'] = pid
                        break
                else:
                    # The process did not exit before the timeout
                    comment.append('Timed out waiting for minion to exit')
                    ret['retcode'] = salt.defaults.exitcodes.EX_TEMPFAIL
    if comment:
        ret['comment'] = comment
    return ret
java
/**
 * Tests whether this schedule falls on the given date, comparing by
 * day-of-year within the date's own year.
 *
 * <p>Synchronized on the shared calendar because Calendar instances are
 * not thread-safe.
 */
@Override
public boolean isOn(Date date) {
    synchronized (calendar) {
        // Remember the day-of-year of the date under test.
        calendar.setTime(date);
        final int targetDayOfYear = calendar.get(Calendar.DAY_OF_YEAR);
        // Project this schedule into the same year and compare days.
        calendar.setTime(computeInYear(calendar.getTime(), calendar));
        return targetDayOfYear == calendar.get(Calendar.DAY_OF_YEAR);
    }
}
python
def range_min(self, i, k):
    """:returns: min{ t[i], t[i + 1], ..., t[k - 1]}

    :complexity: O(log len(t))
    """
    # Delegate to the recursive helper starting at the root node
    # (index 1), whose segment covers the whole array [0, N).
    return self._range_min(1, 0, self.N, i, k)
python
def as_XYZ100_w(whitepoint):
    """A convenience function for getting whitepoints.

    ``whitepoint`` can be either a string naming a standard illuminant (see
    :func:`standard_illuminant_XYZ100`), or else a whitepoint given
    explicitly as an array-like of XYZ values.

    We internally call this function anywhere you have to specify a
    whitepoint (e.g. for CIECAM02 or CIELAB conversions).

    Always uses the "standard" 2 degree observer.
    """
    # String input: look up the named standard illuminant.
    if isinstance(whitepoint, str):
        return standard_illuminant_XYZ100(whitepoint)
    # Array-like input: coerce to float and validate the trailing axis.
    xyz = np.asarray(whitepoint, dtype=float)
    if xyz.shape[-1] != 3:
        raise ValueError("Bad whitepoint shape")
    return xyz
python
def aes_decrypt(value, secret, block_size=AES.block_size):
    """ AES decrypt @value with @secret using the |CFB| mode of AES
        with a cryptographically secure initialization vector.

        -> (#str) AES decrypted @value
        ..
            from vital.security import aes_encrypt, aes_decrypt
            aes_encrypt("Hello, world",
                        "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW")
            # -> 'zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw='
            aes_decrypt(
                "zYgVYMbeOuiHR50aMFinY9JsfyMQCvpzI+LNqNcmZhw=",
                "aLWEFlwgwlreWELFNWEFWLEgwklgbweLKWEBGW")
            # -> 'Hello, world'
        ..
    """
    # Returns None implicitly when @value is None.
    if value is not None:
        # The IV is the first `block_size` units of @value; the key is the
        # first 32 bytes of @secret (AES-256).
        cipher = AES.new(secret[:32], AES.MODE_CFB, value[:block_size])
        # NOTE(review): the ciphertext is taken from offset block_size * 2
        # while the IV is read from [:block_size] -- presumably because the
        # encrypted payload is hex/base64-widened relative to raw bytes.
        # Confirm against aes_encrypt before changing.
        return cipher.decrypt(uniorbytes(value[block_size * 2:], bytes))
java
/**
 * Totals a non-empty iterable of monies; all entries must share a currency
 * (enforced by {@code Money.plus}).
 *
 * @param monies  the monies to total, not null, not empty, no null entries
 * @return the sum of all entries, never null
 * @throws IllegalArgumentException if the iterable is empty
 */
public static Money total(Iterable<Money> monies) {
    MoneyUtils.checkNotNull(monies, "Money iterator must not be null");
    Iterator<Money> it = monies.iterator();
    // Idiomatic negation instead of comparing against `false`.
    if (!it.hasNext()) {
        throw new IllegalArgumentException("Money iterator must not be empty");
    }
    // Seed the total with the first entry, then fold in the rest.
    Money total = it.next();
    MoneyUtils.checkNotNull(total, "Money iterator must not contain null entries");
    while (it.hasNext()) {
        total = total.plus(it.next());
    }
    return total;
}
python
def _save_token_on_disk(self):
    """Helper function that saves the token on disk"""
    # Work on a copy so the in-memory token dict is not mutated.
    token = self._token.copy()
    # Client secret is needed for token refreshing and isn't returned
    # as a part of the OAuth token by default
    token.update(client_secret=self._client_secret)
    # Write UTF-8 JSON with stable key ordering for readable diffs.
    with codecs.open(config.TOKEN_FILE_PATH, 'w', 'utf8') as f:
        json.dump(
            token,
            f,
            ensure_ascii=False,
            sort_keys=True,
            indent=4,
        )
java
/**
 * Rewrites {@code receiver.method(args)} into {@code newMethodName(receiver, args)},
 * devirtualizing the call. For {@code super.method(...)} the receiver becomes
 * {@code this}.
 */
private void rewriteCall(Node getprop, String newMethodName) {
    checkArgument(getprop.isGetProp(), getprop);
    Node call = getprop.getParent();
    checkArgument(call.isCall(), call);
    Node receiver = getprop.getFirstChild();

    // This rewriting does not exactly preserve order of operations; the newly inserted static
    // method name will be resolved before `receiver` is evaluated. This is known to be safe due
    // to the eligibility checks earlier in the pass.
    //
    // We choose not to do a full-fidelity rewriting (e.g. using `ExpressionDecomposer`) because
    // doing so means extracting `receiver` into a new variable at each call-site. This has a
    // significant code-size impact (circa 2018-11-19).

    // Detach the receiver, make it the call's first argument, and put the
    // static method name in the callee position.
    getprop.removeChild(receiver);
    call.replaceChild(getprop, receiver);
    call.addChildToFront(IR.name(newMethodName).srcref(getprop));
    if (receiver.isSuper()) {
        // Case: `super.foo(a, b)` => `foo(this, a, b)`
        receiver.setToken(Token.THIS);
    }
    // The callee is now a plain name, so `this` must not be bound by the call.
    call.putBooleanProp(Node.FREE_CALL, true);
    compiler.reportChangeToEnclosingScope(call);
}
java
/**
 * Returns the last matching {@code CommerceDiscountRel} for the given
 * discount id and class name id in the supplied order, or {@code null}
 * if none matches. Delegates to the persistence layer.
 */
public static CommerceDiscountRel fetchByCD_CN_Last(
    long commerceDiscountId, long classNameId,
    OrderByComparator<CommerceDiscountRel> orderByComparator) {
    return getPersistence()
               .fetchByCD_CN_Last(commerceDiscountId, classNameId,
        orderByComparator);
}
java
/**
 * Resolves the class of the stack item at the given depth and records an
 * access to it via the string-based {@code countClassAccess} overload.
 *
 * @param classAtStackIndex depth of the item on the opcode stack
 * @return true when a class could be resolved and counted, false otherwise
 */
private boolean countClassAccess(final int classAtStackIndex) {
    String calledClass;

    try {
        if (stack.getStackDepth() > classAtStackIndex) {
            OpcodeStack.Item itm = stack.getStackItem(classAtStackIndex);
            JavaClass cls = itm.getJavaClass();
            if (cls != null) {
                calledClass = cls.getClassName();
                // Delegates to the overload taking a class name
                // (defined elsewhere in this class).
                countClassAccess(calledClass);
                return true;
            }
        }
    } catch (ClassNotFoundException cfne) {
        // Resolution failure is reported, not fatal.
        bugReporter.reportMissingClass(cfne);
    }

    return false;
}
python
def from_bson_voronoi_list(bson_nb_voro_list, structure):
    """
    Returns the voronoi_list needed for the VoronoiContainer object from a
    bson-encoded voronoi_list.

    :param bson_nb_voro_list: List of periodic sites involved in the Voronoi
    :param structure: Structure supplying the sites referenced by index
    :return: The voronoi_list needed for the VoronoiContainer (with
        PeriodicSites as keys of the dictionary - not allowed in the BSON
        format)
    """
    # Pre-size the output so entries land at their original site index.
    voronoi_list = [None] * len(bson_nb_voro_list)
    for isite, voro in enumerate(bson_nb_voro_list):
        # Sites without Voronoi data stay None (both real None and the
        # string 'None' produced by BSON round-tripping).
        if voro is None or voro == 'None':
            continue
        voronoi_list[isite] = []
        for psd, dd in voro:
            # Rebuild the neighbor as a PeriodicSite, shifting the stored
            # fractional coords by the recorded image translation psd[1].
            struct_site = structure[dd['index']]
            periodic_site = PeriodicSite(struct_site._species,
                                         struct_site.frac_coords + psd[1],
                                         struct_site._lattice,
                                         properties=struct_site.properties)
            voronoi_list[isite].append((periodic_site, dd))
    return voronoi_list
java
/**
 * Creates the range {@code (lower, upper]}: the lower endpoint is excluded
 * and the upper endpoint is included.
 */
public Range<T> leftHalfOpen(T lower, T upper) {
    return new DenseRange<T>(sequencer, comparator, Endpoint.Exclude, lower, Optional.of(upper), Endpoint.Include);
}
python
def removeContainer(tag):
    '''Remove the container with the given tag, if one exists.

    The container is force-removed (killed if running). APIError is
    logged and re-raised; a missing container is a silent no-op.
    '''
    container = getContainerByTag(tag)
    if container:
        # Force removal kills a running container before deleting it.
        try:
            container.remove(force=True)
            #print("Removed container", tag, "...")
        except APIError as exc:
            eprint("Unhandled error while removing container", tag)
            raise exc
java
/**
 * Updates the given phrase list of an application version, blocking until
 * the async service call completes.
 *
 * @param appId the LUIS application id
 * @param versionId the application version
 * @param phraselistId the id of the phrase list to update
 * @param updatePhraseListOptionalParameter optional update payload, may be null
 * @return the operation status reported by the service
 */
public OperationStatus updatePhraseList(UUID appId, String versionId, int phraselistId, UpdatePhraseListOptionalParameter updatePhraseListOptionalParameter) {
    return updatePhraseListWithServiceResponseAsync(appId, versionId, phraselistId, updatePhraseListOptionalParameter).toBlocking().single().body();
}
java
/**
 * Builds a BitSet with one set bit per bond of the given container.
 *
 * @param atomContainer the container whose bond count determines the size
 * @return a BitSet with bits {@code [0, bondCount)} set; empty when the
 *         container has no bonds
 */
public static BitSet getBitSet(IAtomContainer atomContainer) {
    int size = atomContainer.getBondCount();
    BitSet bitSet = new BitSet(size);
    // BitSet.set(from, to) sets the whole range in one call and is a
    // no-op when size == 0, so the original loop and zero-size branch
    // are unnecessary.
    bitSet.set(0, size);
    return bitSet;
}
java
/**
 * Lazily wires the table model on first paint for the current user.
 */
@Override
protected void preparePaintComponent(final Request request) {
    super.preparePaintComponent(request);
    if (!isInitialised()) {
        // Needs to be set per user as the model holds the current page index and total row per user.
        ExampleScrollableModel model = new ExampleScrollableModel(table,
                new String[]{"firstName", "lastName", "dateOfBirth"}, "more");
        // Only the first level of rows is iterated/expandable.
        model.setIterateFirstLevel(true);
        table.setTableModel(model);
        setInitialised(true);
    }
}
java
/**
 * Returns a specification that is satisfied exactly when the given
 * proposition is not satisfied.
 *
 * @param proposition the specification to negate
 * @return the logical negation of {@code proposition}
 */
public static <T> Specification<T> not(Specification<T> proposition) {
    final Specification<T> negation = new NotSpecification<T>(proposition);
    return negation;
}
java
/**
 * Reads one checksum-verified chunk into {@code b[off..off+len)}, retrying
 * against an alternate replica when a checksum error is detected.
 *
 * @return the number of bytes read, as reported by {@code readChunk}
 * @throws ChecksumException when retries are exhausted or no alternate
 *         source exists
 */
private int readChecksumChunk(byte b[], int off, int len)
throws IOException {
    // invalidate buffer
    count = pos = 0;

    int read = 0;
    boolean retry = true;
    int retriesLeft = numOfRetries;
    do {
        retriesLeft--;

        try {
            read = readChunk(chunkPos, b, off, len, checksum);
            if( read > 0 ) {
                if( needChecksum() ) {
                    // Verify the freshly read bytes against the checksum
                    // stream before advancing the chunk position.
                    sum.update(b, off, read);
                    verifySum(chunkPos);
                    if (cliData != null) {
                        cliData.recordVerifyChunkCheckSumTime();
                    }
                }

                chunkPos += read;
            }
            retry = false;
        } catch (ChecksumException ce) {
            LOG.info("Found checksum error: b[" + off + ", " + (off+read) + "]="
                     + StringUtils.byteToHexString(b, off, off + read), ce);
            if (retriesLeft == 0) {
                throw ce;
            }

            // try a new replica
            if (seekToNewSource(chunkPos)) {
                // Since at least one of the sources is different,
                // the read might succeed, so we'll retry.
                seek(chunkPos);
            } else {
                // Neither the data stream nor the checksum stream are being read
                // from different sources, meaning we'll still get a checksum error
                // if we try to do the read again.  We throw an exception instead.
                throw ce;
            }
        }
    } while (retry);
    return read;
}
java
/**
 * JDBC driver callback for a fatal statement error: marks the offending
 * statement non-poolable on every in-use connection handle so it is not
 * cached for reuse.
 */
public void statementErrorOccurred(StatementEvent event) {
    final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();

    if (isTraceOn && tc.isEntryEnabled())
        Tr.entry(this, tc, "statementErrorOccurred",
                 "Notification of a fatal statement error received from the JDBC driver",
                 AdapterUtil.toString(event.getSource()),
                 AdapterUtil.toString(event.getStatement()),
                 event.getSQLException()
                 );

    // Propagate the non-poolable flag to every active handle.
    for (int i = 0; i < numHandlesInUse; i++)
        ((WSJdbcConnection) handlesInUse[i]).setPoolableFlag(event.getStatement(), false);

    if (isTraceOn && tc.isEntryEnabled())
        Tr.exit(this, tc, "statementErrorOccurred");
}
java
/**
 * Returns a lazy map whose keys (an integer width or a string type) are
 * transformed on demand into a {@code CmsDynamicFunctionFormatWrapper}
 * for the matching container format.
 */
public Object getFormatFor() {

    Transformer mapFunction = new Transformer() {

        public Object transform(Object param) {

            // Without a function bean every lookup yields an empty wrapper.
            if (m_functionBean == null) {
                return new CmsDynamicFunctionFormatWrapper(m_cms, null);
            }
            int width = -1;
            String type = null;
            boolean isWidth = false;
            // Numeric keys select by container width, everything else by type.
            if (param instanceof Long) {
                width = (int)((Long)param).longValue();
                isWidth = true;
            } else if (param instanceof Integer) {
                width = ((Integer)param).intValue();
                isWidth = true;
            } else {
                type = param.toString();
            }
            Format format;
            if (isWidth) {
                format = m_functionBean.getFormatForContainer(m_cms, "", width);
            } else {
                format = m_functionBean.getFormatForContainer(m_cms, type, -1);
            }
            CmsDynamicFunctionFormatWrapper wrapper = new CmsDynamicFunctionFormatWrapper(m_cms, format);
            return wrapper;
        }
    };
    return CmsCollectionsGenericWrapper.createLazyMap(mapFunction);
}
python
def _request_callback(self, request_id):
    """Construct a request callback for the given request ID."""
    def callback(future):
        # Remove the future from the client requests map
        self._client_request_futures.pop(request_id, None)

        # A cancelled future is given a JsonRpcRequestCancelled
        # exception, which future.result() below will re-raise; it is
        # then reported via the JsonRpcException branch.
        if future.cancelled():
            future.set_exception(JsonRpcRequestCancelled())

        message = {
            'jsonrpc': JSONRPC_VERSION,
            'id': request_id,
        }

        try:
            message['result'] = future.result()
        except JsonRpcException as e:
            log.exception("Failed to handle request %s", request_id)
            message['error'] = e.to_dict()
        except Exception:  # pylint: disable=broad-except
            # Unknown failures are wrapped as a JSON-RPC internal error.
            log.exception("Failed to handle request %s", request_id)
            message['error'] = JsonRpcInternalError.of(sys.exc_info()).to_dict()

        self._consumer(message)

    return callback
java
/**
 * Returns an unmodifiable snapshot of this vertex's child components.
 * The copy is built under the instance lock so concurrent mutation of
 * the child list cannot be observed mid-iteration.
 */
@Override
public List<ComponentType> children() {
    synchronized (this) {
        final List<ComponentType> snapshot = new ArrayList<ComponentType>();
        for (final ComponentVertex vertex : this.children) {
            snapshot.add(vertex.component());
        }
        return Collections.unmodifiableList(snapshot);
    }
}
java
/**
 * Replaces the full set of updatable properties on the given pool.
 * Null collection arguments are sent as empty lists, which clears the
 * corresponding property on the pool.
 *
 * @throws BatchErrorException on a service error
 * @throws IOException on a transport error
 */
public void updatePoolProperties(String poolId, StartTask startTask, Collection<CertificateReference> certificateReferences,
                                 Collection<ApplicationPackageReference> applicationPackageReferences,
                                 Collection<MetadataItem> metadata,
                                 Iterable<BatchClientBehavior> additionalBehaviors) throws BatchErrorException, IOException {
    PoolUpdatePropertiesOptions options = new PoolUpdatePropertiesOptions();
    BehaviorManager bhMgr = new BehaviorManager(this.customBehaviors(), additionalBehaviors);
    bhMgr.applyRequestBehaviors(options);

    // Null inputs become empty lists; non-null inputs are defensively copied.
    PoolUpdatePropertiesParameter param = new PoolUpdatePropertiesParameter()
            .withMetadata(metadata == null ? new LinkedList<MetadataItem>() : new LinkedList<>(metadata))
            .withApplicationPackageReferences(
                    applicationPackageReferences == null ? new LinkedList<ApplicationPackageReference>()
                            : new LinkedList<>(applicationPackageReferences))
            .withCertificateReferences(certificateReferences == null ? new LinkedList<CertificateReference>()
                    : new LinkedList<>(certificateReferences))
            .withStartTask(startTask);

    this.parentBatchClient.protocolLayer().pools().updateProperties(poolId, param, options);
}
python
def ob_is_tty(ob):
    """Check whether an object (like a file-like object) is a tty.

    Returns False when no file descriptor can be obtained from ``ob``.
    """
    fileno = get_fileno(ob)
    # Bug fix: compare explicitly against None. File descriptor 0
    # (stdin) is falsy, so the previous `if fileno:` wrongly reported
    # stdin as not having a descriptor at all.
    if fileno is not None:
        return os.isatty(fileno)
    return False
python
def sendHeartbeat(self):
    """Post the current state of each recording device to the server.

    PUTs each device's marshalled state to
    ``<serverURL>/devices/<name>``. Failures for one device are logged
    and do not stop the heartbeat for the remaining devices.
    """
    for name, md in self.cfg.recordingDevices.items():
        try:
            data = marshal(md, recordingDeviceFields)
            data['serviceURL'] = self.cfg.getServiceURL() + API_PREFIX + '/devices/' + name
            targetURL = self.serverURL + API_PREFIX + '/devices/' + name
            logger.info("Pinging " + targetURL)
            resp = self.httpclient.put(targetURL, json=data)
            if resp.status_code != 200:
                logger.warning("Unable to ping server at " + targetURL + " with "
                               + str(data.keys()) + ", response is " + str(resp.status_code))
            else:
                logger.info("Pinged server at " + targetURL + " with " + str(data.items()))
        except Exception:
            # Bug fix: catch Exception rather than a bare `except:` so
            # KeyboardInterrupt/SystemExit still propagate.
            logger.exception("Unable to ping server")
java
/**
 * Topologically sorts the graph, breaking ties by the elements' natural
 * ordering so the result is deterministic.
 */
public static <T extends Comparable<? super T>> @NonNull List<T> orderedTopologicalSort(final @NonNull Graph<T> graph) {
    return topologicalSort(graph, SortType.comparable());
}
python
def get_invalid_txn_info(self, batch_id):
    """Fetches the id of the Transaction that failed within a particular
    Batch, as well as any error message or other data about the failure.

    Args:
        batch_id (str): The id of the Batch containing an invalid txn

    Returns:
        list of dict: A list of dicts with three possible keys:
            * 'id' - the header_signature of the invalid Transaction
            * 'message' - the error message sent by the TP
            * 'extended_data' - any additional data sent by the TP
    """
    with self._lock:
        # Copy each entry so callers cannot mutate the cached state.
        entries = self._invalid.get(batch_id, [])
        return [entry.copy() for entry in entries]
java
public WebSocketExtension setParameter(String key, String value) { // Check the validity of the key. if (Token.isValid(key) == false) { // The key is not a valid token. throw new IllegalArgumentException("'key' is not a valid token."); } // If the value is not null. if (value != null) { // Check the validity of the value. if (Token.isValid(value) == false) { // The value is not a valid token. throw new IllegalArgumentException("'value' is not a valid token."); } } mParameters.put(key, value); return this; }
java
/**
 * Moves the interval's start to the given date. When {@code keepDuration}
 * is true the end is shifted so the duration is preserved; otherwise only
 * the start date changes, pushing the end forward if the new start would
 * pass it.
 */
public final void changeStartDate(LocalDate date, boolean keepDuration) {
    requireNonNull(date);
    Interval interval = getInterval();

    // Keep the current time-of-day, change only the calendar date.
    LocalDateTime newStartDateTime = getStartAsLocalDateTime().with(date);
    LocalDateTime endDateTime = getEndAsLocalDateTime();

    if (keepDuration) {
        endDateTime = newStartDateTime.plus(getDuration());
        setInterval(newStartDateTime, endDateTime, getZoneId());
    } else {
        /*
         * We might have a problem if the new start time is AFTER the current end time.
         */
        if (newStartDateTime.isAfter(endDateTime)) {
            // Shift the end past the new start by the old duration.
            interval = interval.withEndDateTime(newStartDateTime.plus(interval.getDuration()));
        }

        setInterval(interval.withStartDate(date));
    }
}
java
/**
 * Logs the job-finishing notice, first giving the optional notice-log hook
 * a chance to observe the message and failure cause.
 */
protected void showFinishing(LaJobRuntime runtime, long before, Throwable cause) {
    final String msg = buildFinishingMsg(runtime, before, cause);
    // also no use enabled
    if (noticeLogHook != null) {
        noticeLogHook.hookFinishing(runtime, msg, OptionalThing.ofNullable(cause, () -> {
            throw new IllegalStateException("Not found the cause: " + runtime);
        }));
    }
    // Deferred message supplier: built once above, logged at the
    // runtime's configured notice level.
    JobNoticeLog.log(runtime.getNoticeLogLevel(), () -> msg);
}
python
def get_logging_session_for_log(self, log_id, proxy):
    """Gets the ``OsidSession`` associated with the logging service for the
    given log.

    arg:    log_id (osid.id.Id): the ``Id`` of the ``Log``
    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.logging.LoggingSession) - a ``LoggingSession``
    raise:  NotFound - no ``Log`` found by the given ``Id``
    raise:  NullArgument - ``log_id`` or ``proxy`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  Unimplemented - ``supports_logging()`` or
            ``supports_visible_federation()`` is ``false``
    *compliance: optional -- This method must be implemented if
    ``supports_logging()`` and ``supports_visible_federation()`` are
    ``true``*

    """
    # Guard: the service must advertise logging support.
    if not self.supports_logging():
        raise errors.Unimplemented()
    ##
    # Also include check to see if the catalog Id is found otherwise raise errors.NotFound
    ##
    # pylint: disable=no-member
    return sessions.LoggingSession(log_id, proxy, self._runtime)
java
/**
 * Runs the forward Viterbi pass over the lattice: for each node, picks the
 * predecessor in the previous column maximizing (predecessor score +
 * transition score), then adds the node's own emission score and records
 * the best score and back-pointer.
 */
protected void doForwardViterbi(Node[][] lattice, Instance carrier) {
    // Column 0 is the start column; begin scoring from column 1.
    for (int l = 1; l < lattice.length; l++) {
        for (int c = 0; c < lattice[l].length; c++) {
            if (lattice[l][c] == null)
                continue;

            float bestScore = Float.NEGATIVE_INFINITY;
            int bestPath = -1;

            // Scan all predecessors in the previous column.
            for (int p = 0; p < lattice[l - 1].length; p++) {
                if (lattice[l - 1][p] == null)
                    continue;

                float score = lattice[l - 1][p].score + lattice[l][c].trans[p];
                if (score > bestScore) {
                    bestScore = score;
                    bestPath = p;
                }
            }

            // Fold in this node's own score and store the back-pointer.
            bestScore += lattice[l][c].score;
            lattice[l][c].addScore(bestScore, bestPath);
        }
    }
}
python
def makepath(s, as_file=False):
    """Make a path from a string

    Expand out any variables, home squiggles, and normalise it

    See also http://stackoverflow.com/questions/26403972
    """
    # None propagates, letting callers chain without pre-checks.
    if s is None:
        return None
    # Existing files (or an explicit as_file request) become FilePath;
    # everything else -- including paths that don't exist yet -- becomes
    # DirectPath.
    result = FilePath(s) if (os.path.isfile(s) or as_file) else DirectPath(s)
    return result.expandall()
java
/**
 * Inserts a {@code DevState} value into a {@code DeviceAttribute}, coercing
 * the state's numeric value to the attribute's declared Tango type.
 * Unsupported target types (CHAR, LONG64, and anything unlisted) raise a
 * {@code DevFailed} via {@code Except.throw_exception}.
 */
public static void insertFromDevState(final DevState devStateValue, final DeviceAttribute deviceAttributeWritten)
        throws DevFailed {
    // All numeric conversions go through the state's integer value.
    final Integer integerValue = Integer.valueOf(devStateValue.value());
    switch (deviceAttributeWritten.getType()) {
        case TangoConst.Tango_DEV_SHORT:
            deviceAttributeWritten.insert(integerValue.shortValue());
            break;
        case TangoConst.Tango_DEV_USHORT:
            deviceAttributeWritten.insert_us(integerValue.shortValue());
            break;
        case TangoConst.Tango_DEV_CHAR:
            Except.throw_exception("TANGO_WRONG_DATA_ERROR", "input type Tango_DEV_CHAR not supported",
                    "AttributeHelper.insertFromDevState(DevState value,deviceAttributeWritten)");
            break;
        case TangoConst.Tango_DEV_UCHAR:
            deviceAttributeWritten.insert_uc(integerValue.shortValue());
            break;
        case TangoConst.Tango_DEV_LONG:
            deviceAttributeWritten.insert(integerValue.intValue());
            break;
        case TangoConst.Tango_DEV_ULONG:
            deviceAttributeWritten.insert_ul(integerValue.longValue());
            break;
        case TangoConst.Tango_DEV_LONG64:
            Except.throw_exception("TANGO_WRONG_DATA_ERROR", "input type Tango_DEV_LONG64 not supported",
                    "AttributeHelper.insertFromDevState(DevState value,deviceAttributeWritten)");
            break;
        case TangoConst.Tango_DEV_ULONG64:
            deviceAttributeWritten.insert_u64(integerValue.longValue());
            break;
        case TangoConst.Tango_DEV_INT:
            deviceAttributeWritten.insert(integerValue.intValue());
            break;
        case TangoConst.Tango_DEV_FLOAT:
            deviceAttributeWritten.insert(integerValue.floatValue());
            break;
        case TangoConst.Tango_DEV_DOUBLE:
            deviceAttributeWritten.insert(integerValue.doubleValue());
            break;
        case TangoConst.Tango_DEV_STRING:
            deviceAttributeWritten.insert(integerValue.toString());
            break;
        case TangoConst.Tango_DEV_BOOLEAN:
            // A state value of exactly 1 maps to true, everything else to false.
            if (integerValue.doubleValue() == 1) {
                deviceAttributeWritten.insert(true);
            } else {
                deviceAttributeWritten.insert(false);
            }
            break;
        case TangoConst.Tango_DEV_STATE:
            // Same type: insert the state directly, no conversion.
            deviceAttributeWritten.insert(devStateValue);
            break;
        default:
            Except.throw_exception("TANGO_WRONG_DATA_ERROR", "input type " + deviceAttributeWritten.getType()
                    + " not supported", "AttributeHelper.insertFromDevState(DevState value,deviceAttributeWritten)");
            break;
    }
}
java
/**
 * Stores {@code value} in {@code map} under a {@link TypedIdKey} built from
 * the value's runtime class and the given id.
 *
 * @param map   the map to insert into
 * @param id    the identifier part of the key
 * @param value the value to store; its runtime class becomes the type part of the key
 * @return the stored value, for chaining
 */
@SuppressWarnings({ "unchecked" })
public static <V> V put(Map<? super TypedIdKey<V>, ? super V> map, Serializable id, V value) {
    map.put(new TypedIdKey<V>((Class<V>) value.getClass(), id), value);
    return value;
}
java
/**
 * Adds the interface summary table for this package to the package summary
 * tree. No table is emitted when the package declares no interfaces.
 */
public void buildInterfaceSummary(XMLNode node, Content packageSummaryContentTree) {
    String interfaceTableSummary =
            configuration.getText("doclet.Member_Table_Summary",
            configuration.getText("doclet.Interface_Summary"),
            configuration.getText("doclet.interfaces"));
    String[] interfaceTableHeader = new String[] {
        configuration.getText("doclet.Interface"),
        configuration.getText("doclet.Description")
    };
    ClassDoc[] interfaces = pkg.interfaces();
    if (interfaces.length > 0) {
        profileWriter.addClassesSummary(
                interfaces,
                configuration.getText("doclet.Interface_Summary"),
                interfaceTableSummary, interfaceTableHeader,
                packageSummaryContentTree);
    }
}
python
def build_config(ctx, target, config_path, c, extra_path, ignore, verbose, silent, debug):
    """ Creates a LintConfig object based on a set of commandline parameters.

    Configuration precedence (lowest to highest): default config file,
    explicit config file, ``-c`` string options, convenience flags.
    Exits the click context with CONFIG_ERROR_CODE on invalid config.
    """
    config_builder = LintConfigBuilder()
    try:
        # Config precedence:
        # First, load default config or config from configfile
        if config_path:
            config_builder.set_from_config_file(config_path)
        elif os.path.exists(DEFAULT_CONFIG_FILE):
            config_builder.set_from_config_file(DEFAULT_CONFIG_FILE)

        # Then process any commandline configuration flags
        config_builder.set_config_from_string_list(c)

        # Finally, overwrite with any convenience commandline flags
        if ignore:
            config_builder.set_option('general', 'ignore', ignore)

        # silent wins over verbose when both are given.
        if silent:
            config_builder.set_option('general', 'verbosity', 0)
        elif verbose > 0:
            config_builder.set_option('general', 'verbosity', verbose)

        if extra_path:
            config_builder.set_option('general', 'extra-path', extra_path)

        if target:
            config_builder.set_option('general', 'target', target)

        if debug:
            config_builder.set_option('general', 'debug', debug)

        config = config_builder.build()
        # The builder is returned too so callers can derive new configs.
        return config, config_builder
    except LintConfigError as e:
        click.echo(u"Config Error: {0}".format(ustr(e)))
        ctx.exit(CONFIG_ERROR_CODE)
java
/**
 * Converts the raw JSON payload into this command's declared result type
 * via Jackson.
 */
@Override
@SuppressWarnings("unchecked")
protected T convert(JsonNode data) {
    // The (T) cast prevents the commandline javac from choking "no unique maximal instance"
    return (T)this.mapper.convertValue(data, this.resultType);
}
python
def parse_brome_config_from_browser_config(browser_config):
    """Parse the browser config and look for brome specific config

    Args:
        browser_config (dict)
    """
    parsed = {}

    # Brome-specific entries are keyed "section:option"; everything
    # else in the browser config is ignored.
    for key in browser_config:
        if ':' not in key:
            continue
        # Exactly one colon is expected; extra colons raise ValueError,
        # matching the original strict split.
        section, option = key.split(':')
        parsed.setdefault(section, {})[option] = browser_config[key]

    return parsed
java
/**
 * Looks up an init parameter by name.
 *
 * @param param the parameter name
 * @return the configured value, or {@code null} when no parameters are
 *         configured or the name is unknown
 */
public String getInitParameter(String param) {
    return (_initParams == null) ? null : (String) _initParams.get(param);
}
python
def add_transitions_from_selected_state_to_parent():
    """ Generates the default success transition of a state to its parent success port

    :return:
    """
    task_string = "create transition"
    sub_task_string = "to parent state"
    selected_state_m, msg = get_selected_single_state_model_and_check_for_its_parent()
    if selected_state_m is None:
        logger.warning("Can not {0} {1}: {2}".format(task_string, sub_task_string, msg))
        return
    logger.debug("Check to {0} {1} ...".format(task_string, sub_task_string))
    state = selected_state_m.state
    parent_state = state.parent
    # find all possible from outcomes
    from_outcomes = get_all_outcomes_except_of_abort_and_preempt(state)
    # find lowest valid outcome id
    possible_oc_ids = [oc_id for oc_id in state.parent.outcomes.keys() if oc_id >= 0]
    possible_oc_ids.sort()
    to_outcome = state.parent.outcomes[possible_oc_ids[0]]
    # Partition outcomes: those already wired to the parent, and those
    # with no transition at all.
    oc_connected_to_parent = [oc for oc in from_outcomes if is_outcome_connect_to_state(oc, parent_state.state_id)]
    oc_not_connected = [oc for oc in from_outcomes if not state.parent.get_transition_for_outcome(state, oc)]
    if all(oc in oc_connected_to_parent for oc in from_outcomes):
        # Toggle behavior: everything already points at the parent, so
        # remove those transitions instead of adding new ones.
        logger.info("Remove transition {0} because all outcomes are connected to it.".format(sub_task_string))
        for from_outcome in oc_connected_to_parent:
            transition = parent_state.get_transition_for_outcome(state, from_outcome)
            parent_state.remove(transition)
    elif oc_not_connected:
        logger.debug("Create transition {0} ... ".format(sub_task_string))
        for from_outcome in from_outcomes:
            parent_state.add_transition(state.state_id, from_outcome.outcome_id,
                                        parent_state.state_id, to_outcome.outcome_id)
    else:
        # Mixed situation: some transitions exist but not all target the
        # parent. Only act if they all share a single target; then retry.
        if remove_transitions_if_target_is_the_same(from_outcomes):
            logger.info("Removed transitions origin from outcomes of selected state {0}"
                        "because all point to the same target.".format(sub_task_string))
            return add_transitions_from_selected_state_to_parent()
        logger.info("Will not create transition {0}: Not clear situation of connected transitions."
                    "There will be no transitions to other states be touched.".format(sub_task_string))
        return True
java
/**
 * Returns an iterator over exactly one element.
 *
 * @param t the single element to iterate; must not be null
 * @return an iterator yielding only {@code t}
 */
public static <T> Iterator<T> singleIterator(T t) {
    Require.nonNull(t, "t");
    return Collections.singleton(t).iterator();
}
python
def _add_url_routes(self, app): """Configure a list of URLs to route to their corresponding view method..""" # Because methods contain an extra ``self`` parameter, URL routes are mapped # to stub functions, which simply call the corresponding method. # For testing purposes, we map all available URLs to stubs, but the stubs # contain config checks to return 404 when a feature is disabled. # Define the stubs # ---------------- # def auth0_callback_stub(): # if not self.USER_ENABLE_AUTH0: abort(404) # return self.auth0_callback_view() def change_password_stub(): if not self.USER_ENABLE_CHANGE_PASSWORD: abort(404) return self.change_password_view() def change_username_stub(): if not self.USER_ENABLE_CHANGE_USERNAME: abort(404) return self.change_username_view() def confirm_email_stub(token): if not self.USER_ENABLE_CONFIRM_EMAIL: abort(404) return self.confirm_email_view(token) def edit_user_profile_stub(): return self.edit_user_profile_view() def email_action_stub(id, action): if not self.USER_ENABLE_MULTIPLE_EMAILS or not self.db_manager.UserEmailClass: abort(404) return self.email_action_view(id, action) def forgot_password_stub(): if not self.USER_ENABLE_FORGOT_PASSWORD: abort(404) return self.forgot_password_view() def manage_emails_stub(): if not self.USER_ENABLE_MULTIPLE_EMAILS or not self.db_manager.UserEmailClass: abort(404) return self.manage_emails_view() def invite_user_stub(): if not self.USER_ENABLE_INVITE_USER: abort(404) return self.invite_user_view() def login_stub(): return self.login_view() def logout_stub(): return self.logout_view() def register_stub(): if not self.USER_ENABLE_REGISTER: abort(404) return self.register_view() def resend_email_confirmation_stub(): if not self.USER_ENABLE_CONFIRM_EMAIL: abort(404) return self.resend_email_confirmation_view() def reset_password_stub(token): if not self.USER_ENABLE_FORGOT_PASSWORD: abort(404) return self.reset_password_view(token) # def unconfirmed_email_stub(): # return self.unconfirmed_email_view() 
def unauthorized_stub(): return self.unauthorized_view() # Add the URL routes # ------------------ # app.add_url_rule('/callbacks/auth0', 'user.auth0_callback', auth0_callback_stub) app.add_url_rule(self.USER_CHANGE_PASSWORD_URL, 'user.change_password', change_password_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_CHANGE_USERNAME_URL, 'user.change_username', change_username_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_CONFIRM_EMAIL_URL, 'user.confirm_email', confirm_email_stub) app.add_url_rule(self.USER_EDIT_USER_PROFILE_URL, 'user.edit_user_profile', edit_user_profile_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_EMAIL_ACTION_URL, 'user.email_action', email_action_stub) app.add_url_rule(self.USER_FORGOT_PASSWORD_URL, 'user.forgot_password', forgot_password_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_INVITE_USER_URL, 'user.invite_user', invite_user_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_LOGIN_URL, 'user.login', login_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_LOGOUT_URL, 'user.logout', logout_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_MANAGE_EMAILS_URL, 'user.manage_emails', manage_emails_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_REGISTER_URL, 'user.register', register_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_RESEND_EMAIL_CONFIRMATION_URL, 'user.resend_email_confirmation', resend_email_confirmation_stub, methods=['GET', 'POST']) app.add_url_rule(self.USER_RESET_PASSWORD_URL, 'user.reset_password', reset_password_stub, methods=['GET', 'POST'])
python
def tree_render(request, upy_context, vars_dictionary):
    """
    It renders template defined in upy_context's page passed in arguments
    """
    current_page = upy_context['PAGE']
    return render_to_response(
        current_page.template.file_name,
        vars_dictionary,
        context_instance=RequestContext(request),
    )
java
/**
 * EMF-generated basic setter for the {@code interruptible} parameter:
 * swaps the reference and queues a SET notification on the chain without
 * handling containment (callers do inverse handling).
 */
public NotificationChain basicSetInterruptible(Parameter newInterruptible, NotificationChain msgs) {
    Parameter oldInterruptible = interruptible;
    interruptible = newInterruptible;
    if (eNotificationRequired()) {
        // Accumulate the notification instead of firing immediately.
        ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BpsimPackage.PRIORITY_PARAMETERS__INTERRUPTIBLE, oldInterruptible, newInterruptible);
        if (msgs == null)
            msgs = notification;
        else
            msgs.add(notification);
    }
    return msgs;
}
java
/**
 * Enables I/O for a volume, applying the standard pre-execution request
 * handlers before dispatching.
 */
@Override
public EnableVolumeIOResult enableVolumeIO(EnableVolumeIORequest request) {
    request = beforeClientExecution(request);
    return executeEnableVolumeIO(request);
}
java
/**
 * Marshals this request and flags it as a dry run, so the service only
 * validates permissions without deleting the route table.
 */
@Override
public Request<DeleteRouteTableRequest> getDryRunRequest() {
    Request<DeleteRouteTableRequest> request = new DeleteRouteTableRequestMarshaller().marshall(this);
    request.addParameter("DryRun", Boolean.toString(true));
    return request;
}
java
/**
 * Records a chat message into the history of each named user, stamping the
 * message time if unset and opportunistically pruning long histories.
 */
public void record (ChatChannel channel, String source, UserMessage msg, Name ...usernames)
{
    // fill in the message's time stamp if necessary
    if (msg.timestamp == 0L) {
        msg.timestamp = System.currentTimeMillis();
    }

    // One shared entry is appended to every recipient's history.
    Entry entry = new Entry(channel, source, msg);
    for (Name username : usernames) {
        // add the message to this user's chat history
        List<Entry> history = getList(username);
        if (history == null) {
            continue;
        }
        history.add(entry);

        // if the history is big enough, potentially prune it (we always prune when asked for
        // the history, so this is just to balance memory usage with CPU expense)
        if (history.size() > 15) {
            prune(msg.timestamp, history);
        }
    }
}
python
def build_swagger_spec(user, repo, sha, serverName):
    """Build grlc specification for the given github user / repo
    in swagger format """
    if user and repo:
        # Init provenance recording
        prov_g = grlcPROV(user, repo)
    else:
        prov_g = None

    swag = swagger.get_blank_spec()
    swag['host'] = serverName

    try:
        loader = getLoader(user, repo, sha, prov_g)
    except Exception as e:
        # If repo does not exits, return a spec whose info carries the error
        # instead of raising.
        swag['info'] = {
            'title': 'ERROR!',
            'description': str(e)
        }
        swag['paths'] = {}
        return swag

    prev_commit, next_commit, info, basePath = \
        swagger.get_repo_info(loader, sha, prov_g)
    swag['prev_commit'] = prev_commit
    swag['next_commit'] = next_commit
    swag['info'] = info
    swag['basePath'] = basePath

    # TODO: can we pass loader to build_spec ?
    # One swagger path entry per discovered query.
    spec = swagger.build_spec(user, repo, sha, prov_g)
    for item in spec:
        swag['paths'][item['call_name']] = swagger.get_path_for_item(item)

    if prov_g:
        # Attach the finished provenance graph as turtle.
        prov_g.end_prov_graph()
        swag['prov'] = prov_g.serialize(format='turtle')

    return swag
python
def run(self):
    '''Compile the JS with npm, then run the superclass implementation.

    Raises RuntimeError when npm is unavailable (either not on PATH at
    all, or exiting non-zero for ``npm --version``).
    '''
    try:
        npm_available = subprocess.call(['npm', '--version']) == 0
    except OSError:
        # npm binary not on PATH: subprocess.call raises instead of
        # returning a status code, so translate to the same error path
        # rather than leaking a FileNotFoundError to the user.
        npm_available = False
    if not npm_available:
        raise RuntimeError('npm is required to build the HTML renderer.')
    self.check_call(['npm', 'install'], cwd=HTML_RENDERER_DIR)
    self.check_call(['npm', 'run', 'build'], cwd=HTML_RENDERER_DIR)
    self.copy_file(HTML_RENDERER_DIR + '/dist/js/app.js',
                   'pyinstrument/renderers/html_resources/app.js')
    setuptools.command.build_py.build_py.run(self)
java
public static SipMessage frame(final Buffer buffer) throws IOException {
    // NOTE(review): deliberate short-circuit — all framing is delegated to
    // frame2; everything below this line is unreachable legacy code kept
    // for reference only.
    if (true) return frame2(buffer);
    if (!couldBeSipMessage(buffer)) {
        throw new SipParseException(0, "Cannot be a SIP message because is doesnt start with \"SIP\" "
                + "(for responses) or a method (for requests)");
    }
    // we just assume that the initial line
    // indeed is a correct sip line
    final Buffer rawInitialLine = buffer.readLine();
    // which means that the headers are about
    // to start now.
    final int startHeaders = buffer.getReaderIndex();
    Buffer currentLine = null;
    while ((currentLine = buffer.readLine()) != null && currentLine.hasReadableBytes()) {
        // just moving along, we don't really care why
        // we stop, we have found what we want anyway, which
        // is the boundary between headers and the potential
        // payload (or end of message)
    }
    final Buffer headers = buffer.slice(startHeaders, buffer.getReaderIndex());
    Buffer payload = null;
    if (buffer.hasReadableBytes()) {
        payload = buffer.slice();
    }
    if (SipInitialLine.isResponseLine(rawInitialLine)) {
        throw new RuntimeException("No longer using the old mutable sip messages");
        // return new SipResponseImpl(rawInitialLine, headers, payload);
    } else {
        throw new RuntimeException("No longer using the old mutable sip messages");
        // return new SipRequestImpl(rawInitialLine, headers, payload);
    }
}
java
@Override
public <T> List<T> dynamicQuery(DynamicQuery dynamicQuery) {
    // Straight delegation to the persistence layer.
    final List<T> results = cpdAvailabilityEstimatePersistence.findWithDynamicQuery(dynamicQuery);
    return results;
}
java
@Override
public void addTargetORBInitProperties(Properties initProperties, Map<String, Object> configProps,
        List<IIOPEndpoint> endpoints, Map<String, Object> extraProperties) {
    StringBuilder sb = new StringBuilder();
    Map<String, List<TransportAddress>> addrMap = extractTransportAddresses(configProps, endpoints, sb);
    extraProperties.put(ADDR_KEY, addrMap);
    // extractTransportAddresses appends a trailing separator after each
    // endpoint; strip it — but only when something was actually appended,
    // otherwise setLength(-1) throws StringIndexOutOfBoundsException for
    // an empty endpoint list.
    if (sb.length() > 0) {
        sb.setLength(sb.length() - 1);
    }
    initProperties.put(ENDPOINT_KEY, sb.toString());
}
python
def get_model_args_kwargs(self):
    """Inspect the model (or view in the case of no model) and return its
    positional args and keyword args.

    This function is necessary because the raw argspec comes back in a
    silly format: defaults are a flat tuple aligned with the *tail* of the
    argument list, so we re-pair them here.

    Returns:
        tuple: ``(args, kwargs)`` where ``args`` is a list of required
        positional argument names (with a leading ``cls`` stripped) and
        ``kwargs`` is an OrderedDict of defaulted arguments, in
        declaration order. Returns ``([], {})`` when there is no model.
    """
    source = self.get_model()
    if not source:
        return [], {}
    # inspect.getargspec was removed in Python 3.11; getfullargspec is the
    # drop-in replacement for .args / .defaults.
    argspec = inspect.getfullargspec(source)
    defaults = argspec.defaults or []
    # Pair the trailing args with their defaults (defaults align with the
    # end of the arg list), then restore declaration order.
    pairs = list(zip(reversed(argspec.args), reversed(defaults)))
    pairs.reverse()
    kwargs = OrderedDict(pairs)
    args = [name for name in argspec.args if name not in kwargs]
    # Drop the implicit classmethod receiver.
    if args and args[0] == 'cls':
        args = args[1:]
    return args, kwargs
python
def Beta(alpha: vertex_constructor_param_types, beta: vertex_constructor_param_types, label: Optional[str]=None) -> Vertex:
    """
    One to one constructor for mapping some tensorShape of alpha and beta to a matching tensorShaped Beta.

    :param alpha: the alpha of the Beta with either the same tensorShape as specified for this vertex or a scalar
    :param beta: the beta of the Beta with either the same tensorShape as specified for this vertex or a scalar
    """
    # Coerce both parameters to double vertices before handing them to the JVM.
    alpha_vertex = cast_to_double_vertex(alpha)
    beta_vertex = cast_to_double_vertex(beta)
    return Double(context.jvm_view().BetaVertex, label, alpha_vertex, beta_vertex)