language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def filter_record(self, record):
    """Return *record* unchanged if it meets the configured minimum length.

    Raises:
        FailedFilter: when the record is shorter than ``self.min_length``;
            the exception carries the offending length.
    """
    length = len(record)
    if length < self.min_length:
        raise FailedFilter(length)
    return record
python
async def async_get_current_transfer_rates(self, use_cache=True):
    """Gets current transfer rates calculated in per second in bytes."""
    now = datetime.utcnow()
    # data is (rx_total_bytes, tx_total_bytes) cumulative counters.
    data = await self.async_get_bytes_total(use_cache)
    if self._rx_latest is None or self._tx_latest is None:
        # First sample: prime the baseline counters. No rate can be
        # computed yet, so return the previously cached rates.
        self._latest_transfer_check = now
        self._rx_latest = data[0]
        self._tx_latest = data[1]
        return self._latest_transfer_data
    time_diff = now - self._latest_transfer_check
    if time_diff.total_seconds() < 30:
        # Throttle: for windows shorter than 30 s reuse the cached rates
        # rather than computing a noisy short-interval rate.
        return self._latest_transfer_data
    if data[0] < self._rx_latest:
        # Counter went backwards (device restart or counter rollover);
        # use the absolute value instead of a negative delta.
        rx = data[0]
    else:
        rx = data[0] - self._rx_latest
    if data[1] < self._tx_latest:
        # Same rollover handling for the transmit counter.
        tx = data[1]
    else:
        tx = data[1] - self._tx_latest
    self._latest_transfer_check = now
    self._rx_latest = data[0]
    self._tx_latest = data[1]
    # Rates are rounded up to whole bytes/second; negative deltas never
    # occur here, so only the zero case needs special handling.
    self._latest_transfer_data = (
        math.ceil(rx / time_diff.total_seconds()) if rx > 0 else 0,
        math.ceil(tx / time_diff.total_seconds()) if tx > 0 else 0)
    return self._latest_transfer_data
java
/**
 * Delegates the read straight to the wrapped input stream.
 */
@Override
public int read(byte[] buffer, int offset, int len) throws IOException {
    return this.in.read(buffer, offset, len);
}
java
public Pair<List<Node>, List<Node>> splitIntervalPattern(String raw) { List<Node> pattern = parse(raw); Set<Character> seen = new HashSet<>(); List<Node> fst = new ArrayList<>(); List<Node> snd = new ArrayList<>(); // Indicates we've seen a repeated field. boolean boundary = false; for (Node node : pattern) { if (node instanceof Field) { char ch = ((Field) node).ch(); if (seen.contains(ch)) { boundary = true; } else { seen.add(ch); } } if (boundary) { snd.add(node); } else { fst.add(node); } } return Pair.pair(fst, snd); }
python
def main():
    """Wdb entry point: run a file under the debugger or open a shell."""
    # Make modules next to the caller's cwd importable, mirroring how
    # "python file.py" behaves.
    sys.path.insert(0, os.getcwd())
    args, extrargs = parser.parse_known_args()
    # Rebuild argv so the debugged program sees its own arguments.
    sys.argv = ['wdb'] + args.args + extrargs
    if args.file:
        file = os.path.join(os.getcwd(), args.file)
        # --file and --source are mutually exclusive.
        if args.source:
            print('The source argument cannot be used with file.')
            sys.exit(1)
        if not os.path.exists(file):
            print('Error:', file, 'does not exist')
            sys.exit(1)
        if args.trace:
            # Trace from the start: every line runs under the debugger.
            Wdb.get().run_file(file)
        else:
            # No up-front tracing: run at full speed, but install an
            # excepthook that drops into a post-mortem session on an
            # uncaught exception.
            def wdb_pm(xtype, value, traceback):
                sys.__excepthook__(xtype, value, traceback)
                wdb = Wdb.get()
                wdb.reset()
                wdb.interaction(None, traceback, post_mortem=True)
            sys.excepthook = wdb_pm
            with open(file) as f:
                code = compile(f.read(), file, 'exec')
            execute(code, globals(), globals())
    else:
        # No file: open an interactive shell, optionally seeded from a
        # source script.
        source = None
        if args.source:
            source = os.path.join(os.getcwd(), args.source)
            if not os.path.exists(source):
                print('Error:', source, 'does not exist')
                sys.exit(1)
        Wdb.get().shell(source)
java
/**
 * Looks up the annotation of the given type on the given target.
 *
 * @return the matching annotation reference, or {@code null} when the
 *         target carries no such annotation
 */
public JvmAnnotationReference findAnnotation(/* @NonNull */ JvmAnnotationTarget annotationTarget, /* @NonNull */ Class<? extends Annotation> lookupType) {
    // avoid creating an empty list for all given targets but check for #eIsSet first
    if (annotationTarget.eIsSet(TypesPackage.Literals.JVM_ANNOTATION_TARGET__ANNOTATIONS)) {
        for (JvmAnnotationReference annotation : annotationTarget.getAnnotations()) {
            JvmAnnotationType annotationType = annotation.getAnnotation();
            // Compare by qualified name: the annotation type is a model
            // element, not necessarily the same Class object as lookupType.
            if (annotationType != null && lookupType.getCanonicalName().equals(annotationType.getQualifiedName())) {
                return annotation;
            }
        }
    }
    return null;
}
python
def notequal(x, y):
    """
    Return True if x != y and False otherwise.

    This function returns True whenever x and/or y is a NaN.
    """
    lhs = BigFloat._implicit_convert(x)
    rhs = BigFloat._implicit_convert(y)
    return not mpfr.mpfr_equal_p(lhs, rhs)
java
/**
 * Removes the given handler from the singleton event-handler registry.
 *
 * @param who the handler to remove
 * @return whether the registry actually removed the handler
 */
public static boolean unregister(IEventHandler who) {
    return Handle.getInstance()._unregister(who);
}
python
def build_latex(hyp):
    """Render the LaTeX string for a recognition hypothesis.

    Parameters
    ----------
    hyp : dict
        Hypothesis with a ``'symbols'`` list; each entry is a dict whose
        ``'symbol'`` value has the form ``"<id>;<latex-token>"``.
        (Other keys such as ``'segmentation'``, ``'geometry'`` and
        ``'probability'`` are ignored here.)

    Returns
    -------
    str
        The LaTeX tokens of all symbols, joined by single spaces.
    """
    tokens = [entry['symbol'].split(";")[1] for entry in hyp['symbols']]
    return " ".join(tokens)
java
/**
 * Builds the toolbar screen: a submit box, a reset box, and a
 * "Create account" form box.
 *
 * @return the newly created toolbar screen
 */
public ToolScreen addToolbars() {
    ToolScreen screen = new ToolScreen(null, this, null, ScreenConstants.DONT_DISPLAY_FIELD_DESC, null);
    // The canned boxes attach themselves to the screen on construction,
    // so the return values are intentionally discarded.
    new SCannedBox(screen.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.SET_ANCHOR), screen, null, ScreenConstants.DEFAULT_DISPLAY, MenuConstants.SUBMIT);
    new SCannedBox(screen.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.SET_ANCHOR), screen, null, ScreenConstants.DEFAULT_DISPLAY, MenuConstants.RESET);
    String strDesc = "Create account";
    new SCannedBox(screen.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.SET_ANCHOR), screen, null, ScreenConstants.DEFAULT_DISPLAY, null, strDesc, MenuConstants.FORM, MenuConstants.FORM, MenuConstants.FORM + "Tip");
    return screen;
}
python
def deconvolution(inp, outmaps, kernel, pad=None, stride=None, dilation=None,
                  group=1, w_init=None, b_init=None, base_axis=1,
                  fix_parameters=False, rng=None, with_bias=True,
                  apply_w=None, apply_b=None):
    """
    Deconvolution layer.

    Args:
        inp (~nnabla.Variable): N-D array.
        outmaps (int): Number of deconvolution kernels (which is equal to
            the number of output channels). For example, to apply
            deconvolution on an input with 16 types of filters, specify 16.
        kernel (:obj:`tuple` of :obj:`int`): Convolution kernel size. For
            example, to apply deconvolution on an image with a 3 (height)
            by 5 (width) two-dimensional kernel, specify (3,5).
        pad (:obj:`tuple` of :obj:`int`): Padding sizes for dimensions.
        stride (:obj:`tuple` of :obj:`int`): Stride sizes for dimensions.
        dilation (:obj:`tuple` of :obj:`int`): Dilation sizes for dimensions.
        group (int): Number of groups of channels. This makes connections
            across channels sparser by grouping connections along map
            direction.
        w_init (:obj:`nnabla.initializer.BaseInitializer` or
            :obj:`numpy.ndarray`): Initializer for weight. By default, it is
            initialized with :obj:`nnabla.initializer.UniformInitializer`
            within the range determined by
            :obj:`nnabla.initializer.calc_uniform_lim_glorot`.
        b_init (:obj:`nnabla.initializer.BaseInitializer` or
            :obj:`numpy.ndarray`): Initializer for bias. By default, it is
            initialized with zeros if `with_bias` is `True`.
        base_axis (int): Dimensions up to `base_axis` are treated as the
            sample dimensions.
        fix_parameters (bool): When set to `True`, the weights and biases
            will not be updated.
        rng (numpy.random.RandomState): Random generator for Initializer.
        with_bias (bool): Specify whether to include the bias term.
        apply_w (function): Lambda, function, or callable object applied to
            the weights.
        apply_b (function): Lambda, function, or callable object applied to
            the bias.

    Returns:
        :class:`~nnabla.Variable`: N-D array. See
        :obj:`~nnabla.functions.deconvolution` for the output shape.
    """
    if w_init is None:
        w_init = UniformInitializer(
            calc_uniform_lim_glorot(outmaps, inp.shape[base_axis], tuple(kernel)), rng=rng)
    if with_bias and b_init is None:
        b_init = ConstantInitializer()
    # Weight shape follows the convolution convention (input channels
    # first); output channels are divided across groups.
    w = get_parameter_or_create(
        "W", (inp.shape[base_axis], outmaps // group) + tuple(kernel),
        w_init, True, not fix_parameters)
    if apply_w is not None:
        w = apply_w(w)
    b = None
    if with_bias:
        b = get_parameter_or_create(
            "b", (outmaps,), b_init, True, not fix_parameters)
        if apply_b is not None:
            b = apply_b(b)
    return F.deconvolution(inp, w, b, base_axis, pad, stride, dilation, group)
java
/**
 * Advances the wrapped axis until a node passes every filter.
 * The cursor is restored to the last key before searching; when the
 * axis is exhausted it is reset to the start key.
 *
 * @return true when a node satisfying all filters was found
 */
@Override
public final boolean hasNext() {
    resetToLastKey();
    while (mAxis.hasNext()) {
        mAxis.next();
        // A node is accepted only if every filter accepts it.
        boolean filterResult = true;
        for (final AbsFilter filter : mAxisFilter) {
            filterResult = filterResult && filter.filter();
        }
        if (filterResult) {
            return true;
        }
    }
    resetToStartKey();
    return false;
}
java
/**
 * Sets the value of the first parameter whose key equals {@code name},
 * or appends a new parameter when no such key exists.
 */
public void replaceOrAdd(String name, String value) {
    for (Param param : params) {
        if (param.getKey().equals(name)) {
            // Found an existing entry: update it and stop.
            param.setValue(value);
            return;
        }
    }
    addParam(name, value);
}
python
def _prepare_data_dir(self, data):
    """Prepare destination directory where the data will live.

    :param data: The :class:`~resolwe.flow.models.Data` object for
        which to prepare the private execution directory.

    :return: The prepared data directory path.
    :rtype: str
    """
    logger.debug(__("Preparing data directory for Data with id {}.", data.id))
    with transaction.atomic():
        # Create a temporary random location and then override it with data
        # location id since object has to be created first.
        # TODO Find a better solution, e.g. defer the database constraint.
        temporary_location_string = uuid.uuid4().hex[:10]
        data_location = DataLocation.objects.create(subpath=temporary_location_string)
        # Rename the subpath to the row's own id once it is known.
        data_location.subpath = str(data_location.id)
        data_location.save()
        data_location.data.add(data)
    output_path = self._get_per_data_dir('DATA_DIR', data_location.subpath)
    dir_mode = self.settings_actual.get('FLOW_EXECUTOR', {}).get('DATA_DIR_MODE', 0o755)
    os.mkdir(output_path, mode=dir_mode)
    # os.mkdir is not guaranteed to set the given mode
    os.chmod(output_path, dir_mode)
    return output_path
java
@SuppressWarnings("unchecked") public T withInterval(Duration interval) { Assert.notNull(interval, "interval"); Assert.state(maxInterval == null, "Backoff intervals have already been set"); this.interval = interval; return (T) this; }
java
/**
 * Updates the dragged node's local position from the current pointer
 * event, mapping the drag delta through the transform, letting any drag
 * constraint adjust the result, and then persisting it.
 */
public void dragUpdate(final INodeXYEvent event) {
    m_evtx = event.getX();
    m_evty = event.getY();
    // Delta from the drag start point, in event coordinates.
    m_dstx = m_evtx - m_begx;
    m_dsty = m_evty - m_begy;
    final Point2D p2 = new Point2D(0, 0);
    // Transform the delta into local coordinates (result written into p2).
    m_gtol.transform(new Point2D(m_dstx, m_dsty), p2);
    m_lclp.setX(p2.getX() - m_pref.getX());
    m_lclp.setY(p2.getY() - m_pref.getY());
    // Let the constraints adjust the location if necessary
    if (m_drag != null) {
        m_drag.adjust(m_lclp);
    }
    save();
}
java
/**
 * Enables DTLS on this media stream: switches the RTP channel to SRTP
 * and, unless RTCP is multiplexed onto the same channel, the RTCP
 * channel to SRTCP. Idempotent: does nothing when already enabled.
 */
public void enableDTLS() {
    if (!this.dtls) {
        this.rtpChannel.enableSRTP();
        if (!this.rtcpMux) {
            // Separate RTCP channel needs its own secure transport.
            rtcpChannel.enableSRTCP();
        }
        this.dtls = true;
        if (logger.isDebugEnabled()) {
            logger.debug(this.mediaType + " channel " + this.ssrc + " enabled DTLS");
        }
    }
}
python
def wrap_inference_results(inference_result_proto):
    """Returns packaged inference results from the provided proto.

    Args:
      inference_result_proto: The classification or regression response proto.

    Returns:
      An InferenceResult proto with the result from the response. If the
      input is neither a classification nor a regression response, the
      returned InferenceResult is left empty.
    """
    inference_proto = inference_pb2.InferenceResult()
    # Copy the payload into the oneof field matching the response type.
    if isinstance(inference_result_proto,
                  classification_pb2.ClassificationResponse):
        inference_proto.classification_result.CopyFrom(
            inference_result_proto.result)
    elif isinstance(inference_result_proto, regression_pb2.RegressionResponse):
        inference_proto.regression_result.CopyFrom(inference_result_proto.result)
    return inference_proto
java
/**
 * Converts WGS84 geographic coordinates to NTF Lambert zone 4 projected
 * coordinates.
 *
 * @param lambda longitude, in the units expected by {@code WSG84_NTFLamdaPhi}
 * @param phi latitude, same units
 * @return the projected point in Lambert 4
 */
public static Point2d WSG84_L4(double lambda, double phi) {
    // First change datum from WGS84 to NTF, then apply the Lambert 4
    // conic projection constants.
    final Point2d ntfLambdaPhi = WSG84_NTFLamdaPhi(lambda, phi);
    return NTFLambdaPhi_NTFLambert(ntfLambdaPhi.getX(), ntfLambdaPhi.getY(), LAMBERT_4_N, LAMBERT_4_C, LAMBERT_4_XS, LAMBERT_4_YS);
}
java
/**
 * Creates the default base calendar: Monday through Friday are working
 * days, the weekend is not, with the default working hours applied.
 *
 * @return the newly added calendar
 */
public ProjectCalendar addDefaultBaseCalendar() {
    ProjectCalendar calendar = add();
    calendar.setName(ProjectCalendar.DEFAULT_BASE_CALENDAR_NAME);
    calendar.setWorkingDay(Day.SATURDAY, false);
    calendar.setWorkingDay(Day.SUNDAY, false);
    for (Day weekday : new Day[] {Day.MONDAY, Day.TUESDAY, Day.WEDNESDAY, Day.THURSDAY, Day.FRIDAY}) {
        calendar.setWorkingDay(weekday, true);
    }
    calendar.addDefaultCalendarHours();
    return calendar;
}
java
/**
 * Marshalls the given data-source config into the protocol stream.
 *
 * @throws SdkClientException when the argument is null or marshalling fails
 */
public void marshall(RelationalDatabaseDataSourceConfig relationalDatabaseDataSourceConfig, ProtocolMarshaller protocolMarshaller) {
    if (relationalDatabaseDataSourceConfig == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(relationalDatabaseDataSourceConfig.getRelationalDatabaseSourceType(), RELATIONALDATABASESOURCETYPE_BINDING);
        protocolMarshaller.marshall(relationalDatabaseDataSourceConfig.getRdsHttpEndpointConfig(), RDSHTTPENDPOINTCONFIG_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
public static String partiallyUnqualify(String name, String qualifierBase) { if (name == null || !name.startsWith(qualifierBase)) { return name; } return name.substring(qualifierBase.length() + 1); // +1 to start after the following '.' }
java
/**
 * Factory shortcut for a {@code TypedObject} with the given type and ID.
 *
 * @param aObjectType the object type; must not be null
 * @param aID the identifier; must not be null
 * @return a new, non-null TypedObject instance
 */
@Nonnull
public static <IDTYPE extends Serializable> TypedObject <IDTYPE> create (@Nonnull final ObjectType aObjectType, @Nonnull final IDTYPE aID) {
    return new TypedObject <> (aObjectType, aID);
}
java
/**
 * Applies any registered {@code PropertyResourceConfigurer}s to this
 * scanner's own bean definition so that placeholders in its configuration
 * properties are resolved before scanning starts.
 */
private void processPropertyPlaceHolders() {
    Map<String, PropertyResourceConfigurer> prcs = applicationContext.getBeansOfType(PropertyResourceConfigurer.class);
    if (!prcs.isEmpty() && applicationContext instanceof ConfigurableApplicationContext) {
        BeanDefinition mapperScannerBean = ((ConfigurableApplicationContext) applicationContext)
                .getBeanFactory().getBeanDefinition(beanName);
        // PropertyResourceConfigurer does not expose any methods to explicitly perform
        // property placeholder substitution. Instead, create a BeanFactory that just
        // contains this mapper scanner and post process the factory.
        DefaultListableBeanFactory factory = new DefaultListableBeanFactory();
        factory.registerBeanDefinition(beanName, mapperScannerBean);
        for (PropertyResourceConfigurer prc : prcs.values()) {
            prc.postProcessBeanFactory(factory);
        }
        // Re-read the (now substituted) property values back into fields.
        PropertyValues values = mapperScannerBean.getPropertyValues();
        this.basePackage = updatePropertyValue("basePackage", values);
        this.sqlSessionFactoryBeanName = updatePropertyValue("sqlSessionFactoryBeanName", values);
        this.sqlSessionTemplateBeanName = updatePropertyValue("sqlSessionTemplateBeanName", values);
    }
}
java
/**
 * Logs the message built from {@code elements} at ERROR level together
 * with the exception's stack trace.
 */
@Override
public void log(Exception e, Object... elements) {
    StringBuilder sb = buildMsg(Level.ERROR, elements);
    context.log(sb.toString(), e);
}
python
def sample(self, bqm, beta_range=None, num_reads=10, num_sweeps=1000):
    """Sample from low-energy spin states using simulated annealing.

    Args:
        bqm (:obj:`.BinaryQuadraticModel`):
            Binary quadratic model to be sampled from.

        beta_range (tuple, optional):
            Beginning and end of the beta schedule (beta is the
            inverse temperature) as a 2-tuple. The schedule is applied
            linearly in beta. Default is chosen based on the total bias
            associated with each node.

        num_reads (int, optional, default=10):
            Number of reads. Each sample is the result of a single run
            of the simulated annealing algorithm.

        num_sweeps (int, optional, default=1000):
            Number of sweeps or steps.

    Returns:
        :obj:`.SampleSet`

    Raises:
        TypeError: If `num_reads` is not an integer.
        ValueError: If `num_reads` is less than 1.

    Note:
        This is a reference implementation, not optimized for speed
        and therefore not an appropriate sampler for benchmarking.

    """
    # input checking
    # h, J are handled by the @ising decorator
    # beta_range, sweeps are handled by ising_simulated_annealing
    # Error messages name the actual parameter (`num_reads`); the original
    # referred to a nonexistent 'samples' argument.
    if not isinstance(num_reads, int):
        raise TypeError("'num_reads' should be a positive integer")
    if num_reads < 1:
        raise ValueError("'num_reads' should be a positive integer")

    h, J, offset = bqm.to_ising()

    # run the simulated annealing algorithm
    samples = []
    energies = []
    for __ in range(num_reads):
        sample, energy = ising_simulated_annealing(h, J, beta_range, num_sweeps)
        samples.append(sample)
        energies.append(energy)

    response = SampleSet.from_samples(samples, Vartype.SPIN, energies)
    # Convert back to the vartype of the given bqm, re-applying the offset
    # removed by to_ising().
    response.change_vartype(bqm.vartype, offset, inplace=True)
    return response
java
/**
 * Binds every persistent property of the domain class to a Hibernate
 * {@code Value} and adds the resulting {@code Property} instances to the
 * persistent class. Inherited, version, identity and composite-id
 * properties are skipped; embedded components are collected during the
 * main pass and bound afterwards.
 */
protected void createClassProperties(HibernatePersistentEntity domainClass, PersistentClass persistentClass, InFlightMetadataCollector mappings, String sessionFactoryBeanName) {
    final List<PersistentProperty> persistentProperties = domainClass.getPersistentProperties();
    Table table = persistentClass.getTable();
    Mapping gormMapping = domainClass.getMapping().getMappedForm();
    if (gormMapping != null) {
        table.setComment(gormMapping.getComment());
    }
    // Embedded components are bound after all plain properties (below).
    List<Embedded> embedded = new ArrayList<>();
    for (PersistentProperty currentGrailsProp : persistentProperties) {
        // if its inherited skip
        if (currentGrailsProp.isInherited()) {
            continue;
        }
        if (currentGrailsProp.getName().equals(GormProperties.VERSION)) continue;
        if (isCompositeIdProperty(gormMapping, currentGrailsProp)) continue;
        if (isIdentityProperty(gormMapping, currentGrailsProp)) continue;
        if (LOG.isDebugEnabled()) {
            LOG.debug("[GrailsDomainBinder] Binding persistent property [" + currentGrailsProp.getName() + "]");
        }
        Value value = null;
        // see if it's a collection type
        CollectionType collectionType = CT.collectionTypeForClass(currentGrailsProp.getType());
        Class<?> userType = getUserType(currentGrailsProp);
        if (userType != null && !UserCollectionType.class.isAssignableFrom(userType)) {
            // Custom (non-collection) Hibernate user type: bind as a simple value.
            if (LOG.isDebugEnabled()) {
                LOG.debug("[GrailsDomainBinder] Binding property [" + currentGrailsProp.getName() + "] as SimpleValue");
            }
            value = new SimpleValue(metadataBuildingContext, table);
            bindSimpleValue(currentGrailsProp, null, (SimpleValue) value, EMPTY_PATH, mappings, sessionFactoryBeanName);
        }
        else if (collectionType != null) {
            String typeName = getTypeName(currentGrailsProp, getPropertyConfig(currentGrailsProp), gormMapping);
            if ("serializable".equals(typeName)) {
                // Collection explicitly mapped as serializable: stored in a
                // single column rather than as a Hibernate collection.
                value = new SimpleValue(metadataBuildingContext, table);
                bindSimpleValue(typeName, (SimpleValue) value, currentGrailsProp.isNullable(), getColumnNameForPropertyAndPath(currentGrailsProp, EMPTY_PATH, null, sessionFactoryBeanName), mappings);
            }
            else {
                // create collection
                Collection collection = collectionType.create((ToMany) currentGrailsProp, persistentClass, EMPTY_PATH, mappings, sessionFactoryBeanName);
                mappings.addCollectionBinding(collection);
                value = collection;
            }
        }
        else if (currentGrailsProp.getType().isEnum()) {
            value = new SimpleValue(metadataBuildingContext, table);
            bindEnumType(currentGrailsProp, (SimpleValue) value, EMPTY_PATH, sessionFactoryBeanName);
        }
        else if (currentGrailsProp instanceof Association) {
            Association association = (Association) currentGrailsProp;
            if (currentGrailsProp instanceof org.grails.datastore.mapping.model.types.ManyToOne) {
                if (LOG.isDebugEnabled()) LOG.debug("[GrailsDomainBinder] Binding property [" + currentGrailsProp.getName() + "] as ManyToOne");
                value = new ManyToOne(metadataBuildingContext, table);
                bindManyToOne((Association) currentGrailsProp, (ManyToOne) value, EMPTY_PATH, mappings, sessionFactoryBeanName);
            }
            else if (currentGrailsProp instanceof org.grails.datastore.mapping.model.types.OneToOne && userType == null) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("[GrailsDomainBinder] Binding property [" + currentGrailsProp.getName() + "] as OneToOne");
                }
                final boolean isHasOne = isHasOne(association);
                if (isHasOne && !association.isBidirectional()) {
                    throw new MappingException("hasOne property [" + currentGrailsProp.getOwner().getName() + "." + currentGrailsProp.getName() + "] is not bidirectional. Specify the other side of the relationship!");
                }
                else if (canBindOneToOneWithSingleColumnAndForeignKey((Association) currentGrailsProp)) {
                    value = new OneToOne(metadataBuildingContext, table, persistentClass);
                    bindOneToOne((org.grails.datastore.mapping.model.types.OneToOne) currentGrailsProp, (OneToOne) value, EMPTY_PATH, sessionFactoryBeanName);
                }
                else {
                    if (isHasOne && association.isBidirectional()) {
                        value = new OneToOne(metadataBuildingContext, table, persistentClass);
                        bindOneToOne((org.grails.datastore.mapping.model.types.OneToOne) currentGrailsProp, (OneToOne) value, EMPTY_PATH, sessionFactoryBeanName);
                    }
                    else {
                        // Cannot bind as a true one-to-one: fall back to a
                        // many-to-one mapping for the owning side.
                        value = new ManyToOne(metadataBuildingContext, table);
                        bindManyToOne((Association) currentGrailsProp, (ManyToOne) value, EMPTY_PATH, mappings, sessionFactoryBeanName);
                    }
                }
            }
            else if (currentGrailsProp instanceof Embedded) {
                // Defer embedded components to the second pass below.
                embedded.add((Embedded) currentGrailsProp);
                continue;
            }
        }
        // work out what type of relationship it is and bind value
        else {
            if (LOG.isDebugEnabled()) {
                LOG.debug("[GrailsDomainBinder] Binding property [" + currentGrailsProp.getName() + "] as SimpleValue");
            }
            value = new SimpleValue(metadataBuildingContext, table);
            bindSimpleValue(currentGrailsProp, null, (SimpleValue) value, EMPTY_PATH, mappings, sessionFactoryBeanName);
        }
        if (value != null) {
            Property property = createProperty(value, persistentClass, currentGrailsProp, mappings);
            persistentClass.addProperty(property);
        }
    }
    // Second pass: bind the embedded components collected above.
    for (Embedded association : embedded) {
        Value value = new Component(metadataBuildingContext, persistentClass);
        bindComponent((Component) value, association, true, mappings, sessionFactoryBeanName);
        Property property = createProperty(value, persistentClass, association, mappings);
        persistentClass.addProperty(property);
    }
    bindNaturalIdentifier(table, gormMapping, persistentClass);
}
java
protected static MethodType replaceWithMoreSpecificType(Object[] args, MethodType callSiteType) { for (int i=0; i<args.length; i++) { // if argument null, take the static type if (args[i]==null) continue; if (callSiteType.parameterType(i).isPrimitive()) continue; Class argClass = args[i].getClass(); callSiteType = callSiteType.changeParameterType(i, argClass); } return callSiteType; }
python
def _init_worker(X, X_shape, X_dtype):
    """Initializer for pool for _mprotate.

    Stashes the shared array and its metadata in the module-level
    ``mprotate_dict`` so worker processes can rebuild their view of it.
    (Globals would also work; the dictionary just keeps them grouped.)
    """
    mprotate_dict.update(X=X, X_shape=X_shape, X_dtype=X_dtype)
java
/**
 * Returns the value of the "for" tag attribute, or null when the
 * attribute is absent.
 */
@JSFFaceletAttribute
public String getFor() {
    TagAttribute attr = getAttribute("for");
    return attr == null ? null : attr.getValue();
}
java
/**
 * Splits {@code source} on every occurrence of {@code separator},
 * dropping empty tokens.
 *
 * @param source the string to split; null or empty yields {@code null}
 * @param separator the delimiter; null or empty yields a single-element
 *        list containing {@code source}
 * @param removeEmpty unused — empty tokens are always dropped (see note)
 * @return the list of non-empty tokens, or {@code null} for empty input
 */
public static ArrayList<String> split(String source, String separator, boolean removeEmpty) {
    if (source == null || source.isEmpty()) {
        return null;
    }
    ArrayList<String> values = new ArrayList<String>();
    if (separator == null || separator.isEmpty()) {
        values.add(source);
        return values;
    }
    // NOTE(review): the original ignored the removeEmpty flag and always
    // skipped empty tokens; that behavior is preserved here — confirm
    // callers before honoring the flag.
    int start = 0;
    while (true) {
        int hit = source.indexOf(separator, start);
        // Compute each token exactly once (the original called substring twice).
        String token = (hit == -1) ? source.substring(start) : source.substring(start, hit);
        if (!token.isEmpty()) {
            values.add(token);
        }
        if (hit == -1) {
            break;
        }
        start = hit + separator.length();
    }
    return values;
}
java
/**
 * Creates replicated session data for the requested Rx app-session type.
 *
 * @throws IllegalArgumentException when the class is neither a client
 *         nor a server Rx session
 */
@Override
public IRxSessionData getAppSessionData(Class<? extends AppSession> clazz, String sessionId) {
    if (clazz.equals(ClientRxSession.class)) {
        // Client sessions additionally need the replicated data container.
        ClientRxSessionDataReplicatedImpl data = new ClientRxSessionDataReplicatedImpl(sessionId, this.mobicentsCluster, this.replicatedSessionDataSource.getContainer());
        return data;
    }
    else if (clazz.equals(ServerRxSession.class)) {
        ServerRxSessionDataReplicatedImpl data = new ServerRxSessionDataReplicatedImpl(sessionId, this.mobicentsCluster);
        return data;
    }
    throw new IllegalArgumentException();
}
python
def patch_namespaced_service(self, name, namespace, body, **kwargs):  # noqa: E501
    """patch_namespaced_service  # noqa: E501

    partially update the specified Service  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.patch_namespaced_service(name, namespace, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the Service (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param UNKNOWN_BASE_TYPE body: (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1Service
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the (data, status, headers) tuple to just the data.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous: return the request thread immediately.
        return self.patch_namespaced_service_with_http_info(name, namespace, body, **kwargs)  # noqa: E501
    else:
        (data) = self.patch_namespaced_service_with_http_info(name, namespace, body, **kwargs)  # noqa: E501
        return data
java
/**
 * Evaluates the given boolean expression against this object.
 *
 * @param expr the expression source to parse
 * @return the result of interpreting the parsed expression with this
 *         object as its context
 */
public boolean satisfies(String expr) {
    Parser<Expression> parser = ExpressionParser.newInstance();
    return parser.parse(expr).interpret(this);
}
java
/**
 * Collects the names of all bundles this one depends on.
 *
 * @return bundle names in natural (sorted) order, courtesy of TreeSet
 */
public final Set<String> getBundleDependencies() {
    final Set<String> bundles = new TreeSet<>();
    updateBundleList(bundles);
    return bundles;
}
python
def boot(cls, *args, **kwargs):
    """Create the accessor singleton on first use.

    Args
    ----
    args (positional arguments):
        forwarded to the accessor class constructor.
    kwargs (keyword arguments):
        forwarded to the accessor class constructor.
    """
    # Nothing to do when no accessor class has been configured.
    if cls.accessor is None:
        return
    # Instantiate only once; later calls keep the existing instance.
    if cls.instance is None:
        cls.instance = cls.accessor(*args, **kwargs)
java
/**
 * Fluent setter for the response parameters map.
 *
 * @return this result object, for call chaining
 */
public GetIntegrationResponseResult withResponseParameters(java.util.Map<String, String> responseParameters) {
    setResponseParameters(responseParameters);
    return this;
}
java
/**
 * Returns the element at {@code index} as an {@code Array}, or null when
 * the native value at that index is not an array. Access to the backing
 * storage is synchronized on the shared lock.
 */
@Override
public Array getArray(int index) {
    synchronized (lock) {
        final Object obj = getMValue(internalArray, index).asNative(internalArray);
        return obj instanceof Array ? (Array) obj : null;
    }
}
python
def table_lookup(image, table, border_value, iterations = None):
    '''Perform a morphological transform on an image, directed by its neighbors

    image - a binary image
    table - a 512-element table giving the transform of each pixel given
            the values of that pixel and its 8-connected neighbors.
    border_value - the value of pixels beyond the border of the image.
                   This should test as True or False.
    iterations - maximum number of passes; None iterates until convergence.

    The pixels are numbered like this:

    0 1 2
    3 4 5
    6 7 8

    The index at a pixel is the sum of 2**<pixel-number> for pixels
    that evaluate to true.
    '''
    #
    # Test for a table that never transforms a zero into a one:
    #
    center_is_zero = np.array([(x & 2**4) == 0 for x in range(2**9)])
    use_index_trick = False
    if (not np.any(table[center_is_zero]) and
            (np.issubdtype(image.dtype, bool) or np.issubdtype(image.dtype, int))):
        # Use the index trick: only set pixels can change, so it suffices
        # to track the coordinates of the set pixels.
        use_index_trick = True
        invert = False
    elif (np.all(table[~center_is_zero]) and np.issubdtype(image.dtype, bool)):
        # All ones stay ones, invert the table and the image and do the trick
        use_index_trick = True
        invert = True
        image = ~ image
        # table index 0 -> 511 and the output is reversed
        table = ~ table[511 - np.arange(512)]
        border_value = not border_value
    if use_index_trick:
        orig_image = image
        index_i, index_j, image = prepare_for_index_lookup(image, border_value)
        index_i, index_j = index_lookup(index_i, index_j, image, table, iterations)
        image = extract_from_image_lookup(orig_image, index_i, index_j)
        if invert:
            # Undo the inversion applied above.
            image = ~ image
        return image
    counter = 0
    while counter != iterations:
        counter += 1
        #
        # We accumulate into the indexer to get the index into the table
        # at each point in the image
        #
        if image.shape[0] < 3 or image.shape[1] < 3:
            # Image too small for the compiled helper: build the 9-bit
            # neighborhood index with shifted numpy slices instead.
            image = image.astype(bool)
            indexer = np.zeros(image.shape, int)
            indexer[1:, 1:] += image[:-1, :-1] * 2**0
            indexer[1:, :] += image[:-1, :] * 2**1
            indexer[1:, :-1] += image[:-1, 1:] * 2**2
            indexer[:, 1:] += image[:, :-1] * 2**3
            indexer[:, :] += image[:, :] * 2**4
            indexer[:, :-1] += image[:, 1:] * 2**5
            indexer[:-1, 1:] += image[1:, :-1] * 2**6
            indexer[:-1, :] += image[1:, :] * 2**7
            indexer[:-1, :-1] += image[1:, 1:] * 2**8
        else:
            indexer = table_lookup_index(np.ascontiguousarray(image, np.uint8))
        if border_value:
            # Pixels beyond the border read as "on": force the bits of the
            # off-image neighbors along each edge.
            indexer[0, :] |= 2**0 + 2**1 + 2**2
            indexer[-1, :] |= 2**6 + 2**7 + 2**8
            indexer[:, 0] |= 2**0 + 2**3 + 2**6
            indexer[:, -1] |= 2**2 + 2**5 + 2**8
        new_image = table[indexer]
        if np.all(new_image == image):
            # Converged: further passes would not change the image.
            break
        image = new_image
    return image
python
def on_tape(*files):
    """Determine whether any of the given files are on tape

    Parameters
    ----------
    *files : `str`
        one or more paths to GWF files

    Returns
    -------
    True/False : `bool`
        `True` if any of the files are determined to be on tape,
        otherwise `False`
    """
    for path in files:
        try:
            allocated_blocks = os.stat(path).st_blocks
        except AttributeError:  # windows doesn't have st_blocks
            return False
        # Zero allocated blocks means the data is not resident on disk.
        if allocated_blocks == 0:
            return True
    return False
java
/**
 * Begins redeploying the given virtual machine.
 *
 * @param resourceGroupName the resource group of the VM
 * @param vmName the VM name
 * @return an observable emitting the operation status body once the
 *         request has been accepted
 */
public Observable<OperationStatusResponseInner> beginRedeployAsync(String resourceGroupName, String vmName) {
    // Unwrap the ServiceResponse envelope, keeping only the body.
    return beginRedeployWithServiceResponseAsync(resourceGroupName, vmName).map(new Func1<ServiceResponse<OperationStatusResponseInner>, OperationStatusResponseInner>() {
        @Override
        public OperationStatusResponseInner call(ServiceResponse<OperationStatusResponseInner> response) {
            return response.body();
        }
    });
}
java
/**
 * Returns a range of commerce price lists for the given currency.
 *
 * @param commerceCurrencyId the currency primary key
 * @param start lower bound of the result range (inclusive)
 * @param end upper bound of the result range (exclusive)
 * @param orderByComparator the ordering comparator; may be null
 * @return the matching price lists, delegated to the persistence layer
 */
public static List<CommercePriceList> findByCommerceCurrencyId(long commerceCurrencyId, int start, int end, OrderByComparator<CommercePriceList> orderByComparator) {
    return getPersistence().findByCommerceCurrencyId(commerceCurrencyId, start, end, orderByComparator);
}
java
/**
 * Replaces this handler in the pipeline with an SslHandler built from the
 * negotiated context, releasing the SSLEngine if the swap fails.
 */
protected void replaceHandler(ChannelHandlerContext ctx, String hostname, SslContext sslContext) throws Exception {
    SslHandler sslHandler = null;
    try {
        sslHandler = newSslHandler(sslContext, ctx.alloc());
        ctx.pipeline().replace(this, SslHandler.class.getName(), sslHandler);
        // Success: the pipeline now owns the handler, so null the local
        // reference to keep the finally block from releasing the engine.
        sslHandler = null;
    } finally {
        // Since the SslHandler was not inserted into the pipeline the ownership of the SSLEngine was not
        // transferred to the SslHandler.
        // See https://github.com/netty/netty/issues/5678
        if (sslHandler != null) {
            ReferenceCountUtil.safeRelease(sslHandler.engine());
        }
    }
}
java
/**
 * Convenience overload: resolves the managed object's MOR and delegates
 * to the MOR-based property lookup.
 */
public Object get(ManagedObject mo, String propName) {
    return get(mo.getMOR(), propName);
}
python
def clean(self): """ Make sure there is at least a translation has been filled in. If a default language has been specified, make sure that it exists amongst translations. """ # First make sure the super's clean method is called upon. super(TranslationFormSet, self).clean() if settings.HIDE_LANGUAGE: return if len(self.forms) > 0: # If a default language has been provided, make sure a translation # is available if settings.DEFAULT_LANGUAGE and not any(self.errors): # Don't bother validating the formset unless each form is # valid on its own. Reference: # http://docs.djangoproject.com/en/dev/topics/forms/formsets/#custom-formset-validation for form in self.forms: language_code = form.cleaned_data.get( 'language_code', None ) if language_code == settings.DEFAULT_LANGUAGE: # All is good, don't bother checking any further return raise forms.ValidationError(_( 'No translation provided for default language \'%s\'.' ) % settings.DEFAULT_LANGUAGE) else: raise forms.ValidationError( _('At least one translation should be provided.') )
java
/**
 * Reads an RTMP chunk basic header from the buffer. The high 2 bits of
 * the first byte carry the chunk format; the low 6 bits select a 1-, 2-
 * or 3-byte header form encoding the channel id.
 *
 * @throws ProtocolException when too few bytes remain for the selected
 *         header form or the decoded channel id is invalid
 */
public static ChunkHeader read(IoBuffer in) {
    int remaining = in.remaining();
    if (remaining > 0) {
        byte headerByte = in.get();
        ChunkHeader h = new ChunkHeader();
        // going to check highest 2 bits
        h.format = (byte) ((0b11000000 & headerByte) >> 6);
        int fmt = headerByte & 0x3f;
        switch (fmt) {
            case 0:
                // two byte header: channel id = 64 + second byte
                h.size = 2;
                if (remaining < 2) {
                    throw new ProtocolException("Bad chunk header, at least 2 bytes are expected");
                }
                h.channelId = 64 + (in.get() & 0xff);
                break;
            case 1:
                // three byte header: channel id = 64 + second byte + third byte * 256
                h.size = 3;
                if (remaining < 3) {
                    throw new ProtocolException("Bad chunk header, at least 3 bytes are expected");
                }
                byte b1 = in.get();
                byte b2 = in.get();
                h.channelId = 64 + ((b2 & 0xff) << 8 | (b1 & 0xff));
                break;
            default:
                // single byte header: channel id encoded in the low 6 bits
                h.size = 1;
                h.channelId = 0x3f & headerByte;
                break;
        }
        // check channel id is valid
        if (h.channelId < 0) {
            throw new ProtocolException("Bad channel id: " + h.channelId);
        }
        log.trace("CHUNK header byte {}, count {}, header {}, channel {}", String.format("%02x", headerByte), h.size, 0, h.channelId);
        return h;
    } else {
        // at least one byte for valid decode
        throw new ProtocolException("Bad chunk header, at least 1 byte is expected");
    }
}
java
/**
 * Verifies the manifest, optionally restricting the check to the given
 * filters.
 *
 * @param filters optional filter expressions; when non-null they replace
 *        the current filter set before verification
 * @throws ManifestVerifyException when verification fails
 */
public void verify(String... filters) throws ManifestVerifyException {
    if (filters != null) {
        this.filters = Arrays.asList(filters);
        logFilters();
    }
    verify();
}
python
def solve(self, value, filter_):
    """Apply ``filter_`` to ``value`` and return the result.

    Depending on the registered source, the filter resolves to either an
    attribute of ``value`` or the result of calling a standalone function
    with ``value`` as its first argument.

    Arguments
    ---------
    value : ?
        The value the filter is applied to.
    filter_ : dataql.resource.Filter
        The filter describing the attribute/function to resolve.

    Returns
    -------
    The attribute value or the function call result.

    Example
    -------

    >>> from dataql.solvers.registry import Registry
    >>> registry = Registry()
    >>> from datetime import date
    >>> registry.register(date, ['day', 'strftime'])
    >>> solver = FilterSolver(registry)
    >>> solver.solve(date(2015, 6, 1), Filter(name='day'))
    1
    >>> from dataql.resources import PosArg
    >>> solver.solve(date(2015, 6, 1), Filter(name='strftime', args=[PosArg('%F')]))
    '2015-06-01'

    """
    # Resolve the source registered for this value's type first, then
    # delegate the actual attribute/function resolution to it.
    source = self.registry[value]
    args, kwargs = filter_.get_args_and_kwargs()
    return source.solve(value, filter_.name, args, kwargs)
java
/**
 * Returns the span's id with the client-span suffix stripped, so both
 * halves of a client/server pair share the same original id.
 */
public static String toOriginal(Span span) {
    String spanId = span.getId();
    if (!span.clientSpan()) {
        return spanId;
    }
    int suffixAt = spanId.lastIndexOf(CLIENT_ID_SUFFIX);
    // A suffix at position 0 would leave an empty id, so it is kept.
    return suffixAt > 0 ? spanId.substring(0, suffixAt) : spanId;
}
python
def encodeCodon(seq_vec, ignore_stop_codons=True, maxlen=None, seq_align="start", encode_type="one_hot"):
    """One-hot (or token) encode a list of codon sequences.

    # Arguments
        seq_vec: List of strings/DNA sequences.
        ignore_stop_codons: boolean; if True, STOP_CODONS are omitted from
            one-hot encoding.
        maxlen: Maximum sequence length. See `pad_sequences` for more detail.
        seq_align: How to align the sequences of variable lengths. See
            `pad_sequences` for more detail.
        encode_type: can be `"one_hot"` or `token` for token encoding of
            codons (incremental integer).

    # Returns
        numpy.ndarray of shape
        `(len(seq_vec), maxlen / 3, 61 if ignore_stop_codons else 64)`
    """
    if ignore_stop_codons:
        # Stop codons are treated as neutral (like padding).
        vocab = CODONS
        neutral_vocab = STOP_CODONS + ["NNN"]
    else:
        vocab = CODONS + STOP_CODONS
        neutral_vocab = ["NNN"]

    # RNA -> DNA alphabet: replace every U with T before encoding.
    dna_seqs = [str(seq).replace("U", "T") for seq in seq_vec]
    return encodeSequence(dna_seqs,
                          vocab=vocab,
                          neutral_vocab=neutral_vocab,
                          maxlen=maxlen,
                          seq_align=seq_align,
                          pad_value="NNN",
                          encode_type=encode_type)
python
def get_instance_for_uuid(self, uuid, project_id):
    """Return instance name for given uuid of an instance and project.

    :uuid: Instance's UUID (dashes stripped)
    :project_id: UUID of project (tenant)
    """
    cache_key = (uuid, project_id)
    # Fast path: answer from the local cache when possible.
    instance_name = self._inst_info_cache.get(cache_key)
    if instance_name:
        return instance_name

    # Cache miss: scan the project's instances for a matching id.
    for inst in self._get_instances_for_project(project_id):
        if inst.id.replace('-', '') == uuid:
            LOG.debug('get_instance_for_uuid: name=%s', inst.name)
            instance_name = inst.name
            self._inst_info_cache[cache_key] = instance_name
            break
    return instance_name
python
def validateNodeMsg(self, wrappedMsg):
    """
    Validate another node's message sent to this node.

    :param wrappedMsg: Tuple of message and the name of the node that sent
        the message
    :return: Tuple of message from node and name of the node, or None when
        the sender is blacklisted
    :raises InvalidNodeMsg: when the message cannot be instantiated
    :raises SuspiciousNode: when the signature check fails
    """
    msg, frm = wrappedMsg
    # Discard anything from a blacklisted sender; the message is truncated
    # to 256 chars so log lines stay bounded.
    if self.isNodeBlacklisted(frm):
        self.discard(str(msg)[:256], "received from blacklisted node {}".format(frm), logger.display)
        return None
    with self.metrics.measure_time(MetricsName.INT_VALIDATE_NODE_MSG_TIME):
        try:
            message = node_message_factory.get_instance(**msg)
        except (MissingNodeOp, InvalidNodeOp) as ex:
            # Known op errors propagate unchanged so callers can tell them
            # apart from generic construction failures.
            raise ex
        except Exception as ex:
            raise InvalidNodeMsg(str(ex))
        try:
            self.verifySignature(message)
        except BaseExc as ex:
            # A failed signature check marks the sender as suspicious,
            # chaining the original exception as the cause.
            raise SuspiciousNode(frm, ex, message) from ex
        logger.debug("{} received node message from {}: {}".format(self, frm, message), extra={"cli": False})
        return message, frm
python
def add_view_info(self, view_info: ViewInfo):
    '''Adds view information to error message'''
    # Skip views that were already recorded to avoid duplicate entries.
    if any(info.view == view_info.view for info in self._view_infos):
        return
    # Indent one tab level deeper for every already-recorded view.
    indent = len(self._view_infos) * '\t'
    self._view_infos.append(view_info)
    description = 'Line {0} in "{1}"'.format(view_info.line, view_info.view)
    self.add_info(indent + 'View info', description)
java
/**
 * Writes this command to the buffer in RESP wire format: an array header
 * counting the command type plus all arguments, followed by the type and
 * the encoded arguments.
 */
public void encode(ByteBuf buf) {
    int elementCount = 1 + (args == null ? 0 : args.count());
    buf.writeByte('*');
    CommandArgs.IntegerArgument.writeInteger(buf, elementCount);
    buf.writeBytes(CommandArgs.CRLF);
    CommandArgs.BytesArgument.writeBytes(buf, type.getBytes());
    if (args != null) {
        args.encode(buf);
    }
}
python
def logging_syslog_server_syslogip(self, **kwargs):
    """Build and dispatch the brocade-ras syslog-server config XML.

    Expects ``use_vrf`` and ``syslogip`` in ``kwargs``; an optional
    ``callback`` overrides the default dispatcher.
    """
    config = ET.Element("config")
    logging_el = ET.SubElement(config, "logging",
                               xmlns="urn:brocade.com:mgmt:brocade-ras")
    syslog_server = ET.SubElement(logging_el, "syslog-server")
    # The (use-vrf, syslogip) pair keys the syslog-server list entry.
    ET.SubElement(syslog_server, "use-vrf").text = kwargs.pop('use_vrf')
    ET.SubElement(syslog_server, "syslogip").text = kwargs.pop('syslogip')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
python
def size(self, fileToCheck, connId='default'):
    """
    Checks size of a file on FTP server.
    Returns size of a file in bytes (integer).

    Parameters:
    - fileToCheck - file name or path to a file on FTP server
    - connId(optional) - connection identifier. By default equals 'default'

    Example:
    | ${file1size} = | size | /home/myname/tmp/uu.txt | connId=ftp1 |
    | Should Be Equal As Numbers | ${file1size} | 31 |

    Note that the SIZE command is not standardized, but is supported by
    many common server implementations.
    """
    thisConn = self.__getConnection(connId)
    try:
        tmpSize = thisConn.size(fileToCheck)
    except ftplib.all_errors as e:
        # Wrap any ftplib error in the library's own exception type.
        raise FtpLibraryError(str(e))
    if self.printOutput:
        logger.info(str(tmpSize))
    # Fix: return the integer size as documented. Previously the
    # stringified size was returned, contradicting the docstring.
    return tmpSize
java
/**
 * Creates a JAXBElement wrapping the given calendar value, bound to the
 * {@code gml:origin} qualified name.
 */
@XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "origin")
public JAXBElement<XMLGregorianCalendar> createOrigin(XMLGregorianCalendar value) {
    // No declared scope (null): origin is a global element.
    return new JAXBElement<XMLGregorianCalendar>(_Origin_QNAME,
            XMLGregorianCalendar.class, null, value);
}
java
/**
 * Parses command-line arguments of the form {@code --name=value} and
 * applies them: {@code --input}/{@code --output} register file refs,
 * {@code --libdefined} adds a function-name predicate. Anything else is
 * rejected.
 *
 * @throws IllegalArgumentException on an unrecognized argument
 */
protected void configure(String[] args) throws IOException {
    for (String arg : args) {
        if (arg.startsWith("--input=")) {
            FileRef input = createInput();
            input.setPath(arg.substring(arg.indexOf('=') + 1));
        } else if (arg.startsWith("--output=")) {
            FileRef output = createOutput();
            output.setPath(arg.substring(arg.indexOf('=') + 1));
        } else if (arg.startsWith("--libdefined=")) {
            FunctionNamePredicate predicate = new FunctionNamePredicate();
            predicate.setPattern(arg.substring(arg.indexOf('=') + 1));
            addConfiguredLibdefined(predicate);
        } else {
            throw new IllegalArgumentException(arg);
        }
    }
}
java
/**
 * Appends an element, growing the backing array first when it is full.
 */
public void add(final int[] e) {
    if (size == list.length) {
        list = Array.copyOf(list, newSize());
    }
    list[size] = e;
    size++;
}
java
/**
 * Replaces the callbacks of every known proxy task with the given
 * handlers. A null argument is a no-op.
 */
public void linkCallbacks(Object... callbackHandlers) {
    if (callbackHandlers == null) {
        return;
    }
    // Iterate over a snapshot so concurrent changes to proxyTasks are safe.
    for (TaskHandler task : new ArrayList<TaskHandler>(proxyTasks)) {
        task.clearCallbacks();
        task.appendCallbacks(callbackHandlers);
    }
}
python
def _load(self, titles=(), descriptions=(), images=(), urls=(), **kwargs):
    """
    Loads extracted data into Summary.

    Performs validation and filtering on-the-fly, and sets the non-plural
    fields to the best specific item so far.
    If GET_ALL_DATA is False, it gets only the first valid item.

    Fix: the default arguments are now immutable tuples. The previous
    mutable ``[]`` defaults are a classic Python pitfall; the change is
    backward-compatible since the arguments are only iterated.
    """
    # Placeholder for a future MAX_ITEMS cutoff; currently any non-empty
    # collection counts as "enough".
    enough = lambda items: items  # len(items) >= MAX_ITEMS

    if config.GET_ALL_DATA or not enough(self.titles):
        self.titles.extend(
            t for t in map(self._clean_text, titles) if t)

    if config.GET_ALL_DATA or not enough(self.descriptions):
        self.descriptions.extend(
            d for d in map(self._clean_text, descriptions) if d)

    ## Never mind the urls, they can be bad not worth it
    # if config.GET_ALL_DATA or not enough(self.urls):
    #     self.urls.extend(u for u in map(self._clean_url, urls) if u)

    if config.GET_ALL_DATA:
        self.images.extend(
            i for i in map(self._filter_image, images) if i)
    elif not enough(self.images):
        # Stop as soon as enough valid images were collected.
        for candidate in images:
            image = self._filter_image(candidate)
            if image:
                self.images.append(image)
                if enough(self.images):
                    break
java
/**
 * Returns the JavaBean accessor name for this property: "is" prefix for
 * boolean properties, "get" otherwise.
 */
public String getReadMethod() {
    String prefix = getType().equals("boolean") ? "is" : "get";
    return prefix + getAccessorName();
}
java
/**
 * Creates a variable drawing from an exponential distribution with the
 * given rate, shaped by the (integer-typed) shape variable.
 */
public SDVariable exponential(String name, double lambda, SDVariable shape) {
    // The shape descriptor must be an integer-typed variable.
    validateInteger("exponential random", shape);
    SDVariable result = f().randomExponential(lambda, shape);
    return updateVariableNameAndReference(result, name);
}
java
/**
 * Looks up the closure registered under the given key, or null when the
 * registry is absent, empty, or has no such entry.
 */
public Closure getClosure(String key) {
    if (closures == null || closures.isEmpty()) {
        return null;
    }
    return closures.get(key);
}
python
def cnst_A0T(self, Y0): r"""Compute :math:`A_0^T \mathbf{y}_0` component of :math:`A^T \mathbf{y}` (see :meth:`.ADMMTwoBlockCnstrnt.cnst_AT`). """ # This calculation involves non-negligible computational cost. It # should be possible to disable relevant diagnostic information # (dual residual) to avoid this cost. Y0f = sl.rfftn(Y0, None, self.cri.axisN) return sl.irfftn(sl.inner(np.conj(self.Zf), Y0f, axis=self.cri.axisK), self.cri.Nv, self.cri.axisN)
java
/**
 * Validates the {@code X_VOLD_REQUEST_ORIGIN_TIME_MS} header on the
 * incoming request, writing an error response when it is missing or
 * invalid.
 *
 * Note: as a deliberate quick fix (see inline TODO), the parsed value is
 * replaced with the coordinator's current system time rather than the
 * client-supplied value.
 *
 * @return true when an origin time header is present and accepted
 */
protected boolean hasTimeStampHeader() {
    String originTime = request.getHeader(RestMessageHeaders.X_VOLD_REQUEST_ORIGIN_TIME_MS);
    boolean result = false;
    if(originTime != null) {
        try {
            // TODO: remove the originTime field from request header,
            // because coordinator should not accept the request origin time
            // from the client.. In this commit, we only changed
            // "this.parsedRequestOriginTimeInMs" from
            // "Long.parseLong(originTime)" to current system time,
            // The reason that we did not remove the field from request
            // header right now, is because this commit is a quick fix for
            // internal performance test to be available as soon as
            // possible.
            this.parsedRequestOriginTimeInMs = System.currentTimeMillis();
            if(this.parsedRequestOriginTimeInMs < 0) {
                // Effectively unreachable now that the system clock is used;
                // kept from the original parse-based validation.
                RestErrorHandler.writeErrorResponse(messageEvent,
                                                    HttpResponseStatus.BAD_REQUEST,
                                                    "Origin time cannot be negative ");
            } else {
                result = true;
            }
        } catch(NumberFormatException nfe) {
            // Also effectively unreachable since parsing was removed; see TODO.
            logger.error("Exception when validating request. Incorrect origin time parameter. Cannot parse this to long: "
                         + originTime, nfe);
            RestErrorHandler.writeErrorResponse(this.messageEvent,
                                                HttpResponseStatus.BAD_REQUEST,
                                                "Incorrect origin time parameter. Cannot parse this to long: "
                                                        + originTime);
        }
    } else {
        logger.error("Error when validating request. Missing origin time parameter.");
        RestErrorHandler.writeErrorResponse(this.messageEvent,
                                            HttpResponseStatus.BAD_REQUEST,
                                            "Missing origin time parameter.");
    }
    return result;
}
java
/**
 * Detects a client-aborted connection by exception class name: Jetty
 * raises {@code EofException}, Tomcat raises {@code ClientAbortException}.
 * Matching by name avoids a hard dependency on either container.
 */
public static boolean isClientAbortException(IOException e) {
    String className = e.getClass().getName();
    return className.endsWith(".EofException")
            || className.endsWith(".ClientAbortException");
}
java
/**
 * Lists instances under the given project, building the request from the
 * typed name. A null parent is passed through as a null parent string.
 */
public final ListInstancesPagedResponse listInstances(ProjectName parent) {
    String parentName = (parent == null) ? null : parent.toString();
    ListInstancesRequest request =
        ListInstancesRequest.newBuilder().setParent(parentName).build();
    return listInstances(request);
}
python
def clip_action(action, space):
    """Called to clip actions to the specified range of this policy.

    Arguments:
        action: Single action.
        space: Action space the actions should be present in.

    Returns:
        Clipped batch of actions.
    """
    if isinstance(space, gym.spaces.Box):
        # Element-wise clamp to the box bounds.
        return np.clip(action, space.low, space.high)
    if isinstance(space, gym.spaces.Tuple):
        if type(action) not in (tuple, list):
            raise ValueError("Expected tuple space for actions {}: {}".format(
                action, space))
        # Recursively clip each sub-action against its sub-space.
        return [clip_action(a, s) for a, s in zip(action, space.spaces)]
    # Unknown space types are passed through unchanged.
    return action
java
/**
 * Removes the given members from the backing Redis set via SREM.
 *
 * @return the number of members that were actually removed
 */
public long removeAll(final String... members) {
    return doWithJedis(new JedisCallable<Long>() {
        @Override
        public Long call(Jedis jedis) {
            // SREM returns the count of members removed from the set.
            return jedis.srem(getKey(), members);
        }
    });
}
java
/**
 * Consumes the child {@link ConnectionObserver} stored in the bootstrap's
 * child options: returns it and clears the option so it is handed out
 * exactly once, or returns an empty listener when none was set.
 *
 * @param b the server bootstrap to read from (must not be null)
 * @return the configured child observer, or an empty (no-op) listener
 */
@SuppressWarnings("unchecked")
public static ConnectionObserver childConnectionObserver(ServerBootstrap b) {
    Objects.requireNonNull(b, "bootstrap");
    ConnectionObserver obs = (ConnectionObserver) b.config()
            .childOptions()
            .get(OBSERVER_OPTION);
    if (obs == null) {
        // No observer configured; the empty listener is a no-op and will
        // never be triggered with a meaningful state change.
        return ConnectionObserver.emptyListener();
    }
    // Clear the option so the observer is consumed exactly once.
    b.childOption(OBSERVER_OPTION, null);
    return obs;
}
java
/**
 * Checks whether the given address is a usable, non-local IPv4 address.
 * Every non-local address whose reverse lookup yields a real host name is
 * recorded in {@code addresses}; the first IPv4 match additionally sets
 * {@code name} and {@code address}.
 *
 * @return true when an IPv4 address with a proper canonical name was found
 */
private boolean checkInetAddress(InetAddress inetAddress) {
    // Check if not local host
    if (! inetAddress.getCanonicalHostName().startsWith("local")) {
        // Reverse DNS worked if the canonical name differs from the raw
        // address string (getCanonicalHostName() falls back to the IP
        // text when lookup fails — presumably what this test detects).
        if (!inetAddress.getCanonicalHostName().equalsIgnoreCase(inetAddress.getHostAddress())) {
            addresses.add(inetAddress.getHostAddress());
            // Check if IPV 4 address
            if (isIPV4address(inetAddress.getHostAddress())) {
                name = inetAddress.getCanonicalHostName();
                address = inetAddress.getHostAddress();
                //System.out.println(name+": " + address);
                return true;
            }
        } else if (trace)
            // Reverse lookup failed; hint at the usual resolver config culprits.
            System.err.println(
                "Warning: at least one getCanonicalHostName() returns "
                + inetAddress.getCanonicalHostName()
                + "\n Check files /etc/resolv.conf and /etc/nsswitch.conf ");
    }
    return false;
}
java
/**
 * Creates a notification: runs the pre-execution hooks, then delegates to
 * the generated executor.
 */
@Override
public CreateNotificationResult createNotification(CreateNotificationRequest request) {
    final CreateNotificationRequest prepared = beforeClientExecution(request);
    return executeCreateNotification(prepared);
}
java
/**
 * Returns the primitive type corresponding to the given boxed type, or
 * {@code Type.noType} when {@code t} is not a boxed type.
 */
public Type unboxedType(Type t) {
    // Scan the boxed-name table; a matching slot's index selects the
    // corresponding primitive in typeOfTag.
    for (int idx = 0; idx < syms.boxedName.length; idx++) {
        Name boxName = syms.boxedName[idx];
        if (boxName == null) {
            continue;
        }
        if (asSuper(t, syms.enterClass(syms.java_base, boxName)) != null) {
            return syms.typeOfTag[idx];
        }
    }
    return Type.noType;
}
python
def setitem(self, indexer, value):
    """Set the value inplace, returning a a maybe different typed block.

    Parameters
    ----------
    indexer : tuple, list-like, array-like, slice
        The subset of self.values to set
    value : object
        The value being set

    Returns
    -------
    Block

    Notes
    -----
    `indexer` is a direct slice/positional indexer. `value` must
    be a compatible shape.
    """
    # coerce None values, if appropriate
    if value is None:
        if self.is_numeric:
            value = np.nan

    # coerce if block dtype can store value
    values = self.values
    try:
        values, value = self._try_coerce_args(values, value)
        # can keep its own dtype
        if hasattr(value, 'dtype') and is_dtype_equal(values.dtype, value.dtype):
            dtype = self.dtype
        else:
            dtype = 'infer'
    except (TypeError, ValueError):
        # current dtype cannot store value, coerce to common dtype
        find_dtype = False
        if hasattr(value, 'dtype'):
            dtype = value.dtype
            find_dtype = True
        elif lib.is_scalar(value):
            if isna(value):
                # NaN promotion is handled in latter path
                dtype = False
            else:
                dtype, _ = infer_dtype_from_scalar(value, pandas_dtype=True)
                find_dtype = True
        else:
            dtype = 'infer'
        if find_dtype:
            dtype = find_common_type([values.dtype, dtype])
            if not is_dtype_equal(self.dtype, dtype):
                # Delegate to a re-typed block that can hold the value.
                b = self.astype(dtype)
                return b.setitem(indexer, value)

    # value must be storeable at this moment
    arr_value = np.array(value)

    # cast the values to a type that can hold nan (if necessary)
    if not self._can_hold_element(value):
        dtype, _ = maybe_promote(arr_value.dtype)
        values = values.astype(dtype)

    # 2-D blocks are set in transposed orientation; transf maps both ways.
    transf = (lambda x: x.T) if self.ndim == 2 else (lambda x: x)
    values = transf(values)

    # length checking
    check_setitem_lengths(indexer, value, values)

    def _is_scalar_indexer(indexer):
        # return True if we are all scalar indexers
        if arr_value.ndim == 1:
            if not isinstance(indexer, tuple):
                indexer = tuple([indexer])
            return any(isinstance(idx, np.ndarray) and len(idx) == 0
                       for idx in indexer)
        return False

    def _is_empty_indexer(indexer):
        # return a boolean if we have an empty indexer
        if is_list_like(indexer) and not len(indexer):
            return True
        if arr_value.ndim == 1:
            if not isinstance(indexer, tuple):
                indexer = tuple([indexer])
            return any(isinstance(idx, np.ndarray) and len(idx) == 0
                       for idx in indexer)
        return False

    # empty indexers
    # 8669 (empty)
    if _is_empty_indexer(indexer):
        pass

    # setting a single element for each dim and with a rhs that could
    # be say a list
    # GH 6043
    elif _is_scalar_indexer(indexer):
        values[indexer] = value

    # if we are an exact match (ex-broadcasting),
    # then use the resultant dtype
    elif (len(arr_value.shape) and
          arr_value.shape[0] == values.shape[0] and
          np.prod(arr_value.shape) == np.prod(values.shape)):
        values[indexer] = value
        try:
            values = values.astype(arr_value.dtype)
        except ValueError:
            pass

    # set
    else:
        values[indexer] = value

    # coerce and try to infer the dtypes of the result
    values = self._try_coerce_and_cast_result(values, dtype)
    block = self.make_block(transf(values))
    return block
python
def get_sonos_favorites(self, start=0, max_items=100):
    """Get Sonos favorites.

    See :meth:`get_favorite_radio_shows` for return type and remarks.
    """
    # Warn at the caller's level that the return type is unstable.
    warnings.warn(
        'The output type of this method will probably change in '
        'the future to use SoCo data structures',
        stacklevel=2)
    return self.__get_favorites(SONOS_FAVORITES, start, max_items)
python
def annotate(self, fname, tables, feature_strand=False, in_memory=False,
             header=None, out=sys.stdout, parallel=False):
    """Annotate a file with a number of tables.

    Parameters
    ----------
    fname : str or file
        file name or file-handle
    tables : list
        list of tables with which to annotate `fname`
    feature_strand : bool
        if this is True, then the up/downstream designations are based on
        the features in `tables` rather than the features in `fname`
    in_memory : bool
        if True, then tables are read into memory. This usually makes the
        annotation much faster if there are more than 500 features in
        `fname` and the number of features in the table is less than 100K.
    header : str
        header to print out (if True, use existing header)
    out : file
        where to print output
    parallel : bool
        if True, use multiprocessing library to execute the annotation of
        each chromosome in parallel. Uses more memory.
    """
    # Import lazily to avoid a circular import at module load time.
    from .annotate import annotate as _annotate
    return _annotate(self, fname, tables, feature_strand, in_memory,
                     header=header, out=out, parallel=parallel)
java
/**
 * Chooses and schedules deletion of excess replicas of the given block.
 *
 * Candidate replicas are computed first (without the write lock held —
 * TODO confirm: the state-change re-check below implies the lock was
 * released in between); then, under the write lock, the replica state is
 * re-validated before each candidate is recorded in excessReplicateMap
 * and queued for invalidation on its datanode.
 *
 * @param block       the over-replicated block
 * @param replication the target replication factor
 * @param addedNode   datanode on which a replica was just added, if any
 * @param delNodeHint preferred datanode to delete from, if any
 */
private void processOverReplicatedBlock(Block block, short replication,
    DatanodeDescriptor addedNode, DatanodeDescriptor delNodeHint) {
  List<DatanodeID> excessReplicateTmp = new ArrayList<DatanodeID>();
  List<DatanodeID> originalDatanodes = new ArrayList<DatanodeID>();
  // find all replicas that can possibly be deleted.
  // The results are returned in excessReplicateTmp.
  findOverReplicatedReplicas(block, replication, addedNode, delNodeHint,
      excessReplicateTmp, originalDatanodes);
  if (excessReplicateTmp.size() <= 0) {
    return;
  }
  writeLock(); // acquire write lock,
  try {
    BlockInfo storedBlock = blocksMap.getBlockInfo(block);
    INodeFile inode = (storedBlock == null) ? null : storedBlock.getINode();
    if (inode == null) {
      return; // file has been deleted already, nothing to do.
    }
    //
    // if the state of replicas of this block has changed since the time
    // when we released and reacquired the lock, then all the decisions
    // that we have made so far might not be correct. Do not delete excess
    // replicas in this case.
    int live = 0;
    Collection<DatanodeDescriptor> nodesCorrupt = corruptReplicas.getNodes(block);
    for (Iterator<DatanodeDescriptor> it = blocksMap.nodeIterator(block); it.hasNext();) {
      DatanodeDescriptor node = it.next();
      if (((nodesCorrupt != null) && (nodesCorrupt.contains(node)))
          || node.isDecommissionInProgress() || node.isDecommissioned()) {
        // do nothing: corrupt and (de)commissioning nodes don't count as live
      } else {
        live++; // number of live nodes
        originalDatanodes.remove(node);
      }
    }
    if (originalDatanodes.size() > 0) {
      // Some originally-live replica disappeared: state changed, retry later.
      NameNode.stateChangeLog.info("Unable to delete excess replicas for block "
          + block + " because the state of the original replicas have changed."
          + " Will retry later.");
      overReplicatedBlocks.add(block);
      return;
    }
    short blockReplication = inode.getBlockReplication(storedBlock);
    // loop through datanodes that have excess-replicas of this block
    for (ListIterator<DatanodeID> iter = excessReplicateTmp.listIterator(); iter.hasNext();) {
      DatanodeID datanodeId = iter.next();
      // re-check that block still has excess replicas.
      // If not, then there is nothing more to do.
      if (live <= blockReplication) {
        break;
      }
      // find the DatanodeDescriptor for this datanode
      DatanodeDescriptor datanode = null;
      try {
        datanode = getDatanode(datanodeId);
      } catch (IOException e) {
      }
      if (datanode == null) {
        NameNode.stateChangeLog.info("No datanode found while processing "
            + "overreplicated block " + block);
        continue; // dead datanode?
      }
      // insert into excessReplicateMap
      LightWeightHashSet<Block> excessBlocks = excessReplicateMap.get(datanodeId.getStorageID());
      if (excessBlocks == null) {
        excessBlocks = new LightWeightHashSet<Block>();
        excessReplicateMap.put(datanodeId.getStorageID(), excessBlocks);
      }
      if (excessBlocks.add(block)) {
        excessBlocksCount++;
        if (NameNode.stateChangeLog.isDebugEnabled()) {
          NameNode.stateChangeLog.debug("BLOCK* NameSystem.chooseExcessReplicates: "
              + "(" + datanodeId.getName() + ", " + block
              + ") is added to excessReplicateMap");
        }
      }
      //
      // The 'excessblocks' tracks blocks until we get confirmation
      // that the datanode has deleted them; the only way we remove them
      // is when we get a "removeBlock" message.
      //
      // The 'invalidate' list is used to inform the datanode the block
      // should be deleted. Items are removed from the invalidate list
      // upon giving instructions to the namenode.
      //
      addToInvalidatesNoLog(block, datanode, true);
      live--;
      if (NameNode.stateChangeLog.isDebugEnabled()) {
        NameNode.stateChangeLog.debug("BLOCK* NameSystem.chooseExcessReplicates: "
            + "(" + datanode.getName() + ", " + block
            + ") is added to recentInvalidateSets");
      }
    }
  } finally {
    writeUnlock();
  }
}
java
/**
 * Runs the hook's finally-phase callback, guaranteeing the framework
 * epilogue executes afterwards even if the callback throws.
 */
protected void processHookFinally(ActionHook hook) {
    if (hook == null) {
        return;
    }
    showFinally(runtime);
    try {
        // User-supplied finally hook runs first...
        hook.hookFinally(runtime);
    } finally {
        // ...and the framework epilogue always runs afterwards.
        hook.godHandEpilogue(runtime);
    }
}
python
def retrieveVals(self):
    """Retrieve values for graphs.

    Queries APC statistics once and feeds each enabled graph with the
    relevant counters.
    """
    apcinfo = APCinfo(self._host, self._port, self._user, self._password, self._monpath, self._ssl, self._extras)
    stats = apcinfo.getAllStats()

    if self.hasGraph('php_apc_memory') and stats:
        # Memory breakdown: "other" is whatever is neither free nor
        # attributed to the file/user caches.
        filecache = stats['cache_sys']['mem_size']
        usercache = stats['cache_user']['mem_size']
        total = stats['memory']['seg_size'] * stats['memory']['num_seg']
        free = stats['memory']['avail_mem']
        other = total - free - filecache - usercache
        self.setGraphVal('php_apc_memory', 'filecache', filecache)
        self.setGraphVal('php_apc_memory', 'usercache', usercache)
        self.setGraphVal('php_apc_memory', 'other', other)
        self.setGraphVal('php_apc_memory', 'free', free)
    if self.hasGraph('php_apc_items') and stats:
        # Number of cached entries per cache.
        self.setGraphVal('php_apc_items', 'filecache',
                         stats['cache_sys']['num_entries'])
        self.setGraphVal('php_apc_items', 'usercache',
                         stats['cache_user']['num_entries'])
    if self.hasGraph('php_apc_reqs_filecache') and stats:
        self.setGraphVal('php_apc_reqs_filecache', 'hits',
                         stats['cache_sys']['num_hits'])
        self.setGraphVal('php_apc_reqs_filecache', 'misses',
                         stats['cache_sys']['num_misses'])
        self.setGraphVal('php_apc_reqs_filecache', 'inserts',
                         stats['cache_sys']['num_inserts'])
    if self.hasGraph('php_apc_reqs_usercache') and stats:
        self.setGraphVal('php_apc_reqs_usercache', 'hits',
                         stats['cache_user']['num_hits'])
        self.setGraphVal('php_apc_reqs_usercache', 'misses',
                         stats['cache_user']['num_misses'])
        self.setGraphVal('php_apc_reqs_usercache', 'inserts',
                         stats['cache_user']['num_inserts'])
    if self.hasGraph('php_apc_expunge') and stats:
        self.setGraphVal('php_apc_expunge', 'filecache',
                         stats['cache_sys']['expunges'])
        self.setGraphVal('php_apc_expunge', 'usercache',
                         stats['cache_user']['expunges'])
    if self.hasGraph('php_apc_mem_util_frag'):
        # Ratios are reported as percentages.
        self.setGraphVal('php_apc_mem_util_frag', 'util',
                         stats['memory']['utilization_ratio'] * 100)
        self.setGraphVal('php_apc_mem_util_frag', 'frag',
                         stats['memory']['fragmentation_ratio'] * 100)
    if self.hasGraph('php_apc_mem_frag_count'):
        self.setGraphVal('php_apc_mem_frag_count', 'num',
                         stats['memory']['fragment_count'])
    if self.hasGraph('php_apc_mem_frag_avgsize'):
        self.setGraphVal('php_apc_mem_frag_avgsize', 'size',
                         stats['memory']['fragment_avg_size'])
python
def get_string_from_view(self, request, view_name, url_kwargs,
                         render_type='string'):
    """
    Returns a string that is a rendering of the view given
    a request, view_name, and the original url_kwargs.
    Makes the following changes the view before rendering:

    * Sets can_submit to False.
    * Adds action_url to the context. This is the url where \
    this view actually lives.
    * Sets the default base_template to be 'cms/partial.html'

    This will always call GET and never POST as any
    actions that modify data should take place on the original
    url and not like this.

    :param request: The request object.
    :param view_name: The name of the view that you want.
    :param url_kwargs: The url keyword arguments that came \
    with the request object. The view itself is responsible \
    to remove arguments that would not be part of a normal match \
    for that view. This is done by calling the `get_url_kwargs` \
    method on the view.
    :param render_type: The render type to use. Defaults to \
    'string'.
    """
    response = ""
    try:
        view, name = self.get_initialized_view_and_name(view_name,
                                    render_type=render_type,
                                    can_submit=False,
                                    base_template='cms/partial.html',
                                    request=request, kwargs=url_kwargs)
        # An alias delegates rendering to another bundle's view.
        if isinstance(view, URLAlias):
            view_name = view.get_view_name(view_name)
            bundle = view.get_bundle(self, url_kwargs, {})
            if bundle and isinstance(bundle, Bundle):
                return bundle.get_string_from_view(request, view_name,
                                         url_kwargs,
                                         render_type=render_type)
        elif view:
            # Only render when the requesting user may view this view.
            if view and name and view.can_view(request.user):
                response = self._render_view_as_string(view, name,
                                                request, url_kwargs)
    except http.Http404:
        # A missing view renders as an empty string rather than erroring.
        pass
    return response
java
/**
 * Serializes this process definition to JSON: description, attributes,
 * activities (with their outbound transitions), subprocesses, text notes
 * and variables. Absent or empty collections are omitted from the output.
 *
 * @return the JSON representation of this process
 * @throws JSONException on serialization failure
 */
public JSONObject getJson() throws JSONException {
    JSONObject json = create();
    if (getDescription() != null && !getDescription().isEmpty())
        json.put("description", getDescription());
    if (attributes != null && !attributes.isEmpty()) {
        json.put("attributes", Attribute.getAttributesJson(attributes));
    }
    if (activities != null && !activities.isEmpty()) {
        JSONArray activitiesJson = new JSONArray();
        for (Activity activity : activities) {
            JSONObject activityJson = activity.getJson();
            // Each activity carries its outbound transitions inline.
            List<Transition> transitions = getAllTransitions(activity.getId());
            if (transitions != null && !transitions.isEmpty()) {
                JSONArray transitionsJson = new JSONArray();
                for (Transition transition : transitions) {
                    JSONObject transitionJson = transition.getJson();
                    if (transition.getToId() < 0) // newly created
                        transitionJson.put("to", getActivityVO(transition.getToId()).getLogicalId());
                    transitionsJson.put(transitionJson);
                }
                activityJson.put("transitions", transitionsJson);
            }
            activitiesJson.put(activityJson);
        }
        json.put("activities", activitiesJson);
    }
    if (subprocesses != null && !subprocesses.isEmpty()) {
        JSONArray subprocsJson = new JSONArray();
        for (Process subproc : subprocesses) {
            JSONObject subprocJson = subproc.getJson();
            // Subprocesses are identified by logical id; the version key is
            // stripped since it does not apply in this embedded context.
            String logicalId = subproc.getAttribute(WorkAttributeConstant.LOGICAL_ID);
            subprocJson.put("id", logicalId);
            subprocJson.put("name", subproc.getName());
            if (subprocJson.has("version"))
                subprocJson.remove("version");
            subprocsJson.put(subprocJson);
        }
        json.put("subprocesses", subprocsJson);
    }
    if (textNotes != null && !textNotes.isEmpty()) {
        JSONArray textNotesJson = new JSONArray();
        for (TextNote textNote : textNotes)
            textNotesJson.put(textNote.getJson());
        json.put("textNotes", textNotesJson);
    }
    if (variables != null && !variables.isEmpty()) {
        JSONObject variablesJson = create();
        for (Variable variable : variables)
            variablesJson.put(variable.getJsonName(), variable.getJson());
        json.put("variables", variablesJson);
    }
    return json;
}
python
def render(self, element):
    """Renders the given element to string.

    :param element: a element to be rendered.
    :returns: the output string or any values.
    """
    # Remember the first element rendered so render functions can consult
    # the document root for context.
    if not self.root_node:
        self.root_node = element

    func_name = self._cls_to_func_name(element.__class__)
    render_func = getattr(self, func_name, None)
    # Fall back to rendering the children when no specific renderer exists.
    return (render_func or self.render_children)(element)
python
def evaluate_course(self, kcdm, jxbh,
                    r101=1, r102=1, r103=1, r104=1, r105=1,
                    r106=1, r107=1, r108=1, r109=1,
                    r201=3, r202=3, advice=''):
    """Submit a course evaluation.

    For the ``r1xx`` items the scale runs from 1 (best) to 5 (worst); for
    the ``r2xx`` items the scale runs by degree, with 3 being the best.
    The defaults are all the best choices.

    :param kcdm: course code
    :param jxbh: teaching class number
    :param r101: serious teaching attitude, well prepared before class
    :param r102: substantial content, key points highlighted
    :param r103: links theory with practice, reflects latest achievements
    :param r104: flexible teaching methods, good teacher-student interaction
    :param r105: uses modern technology, diverse teaching means
    :param r106: teaches according to aptitude, strengthens ability training
    :param r107: strict management, cares for students
    :param r108: sets a good example, emphasises moral education in teaching
    :param r109: overall teaching effect
    :param r201: course content
    :param r202: course workload
    :param advice: other advice; at most 120 characters and must not
        contain semicolons, single quotes or commas
    :return: query result
    """
    evaluation = EvaluateCourse(
        kcdm, jxbh,
        r101, r102, r103, r104, r105,
        r106, r107, r108, r109,
        r201, r202, advice)
    return self.query(evaluation)
python
def get_infos_with_id(self, uid):
    """Get info about a user based on his id.

    :param uid: the user id used as ``logid`` in the info URL.
    :return: JSON
    """
    info_url = USER_INFO_URL.format(logid=uid)
    return self._request_api(url=info_url).json()
java
/**
 * Adds the list of packages contained in the given module to the frame
 * body: a heading linking to the module followed by a UL of package
 * links. Deprecated packages are skipped when -nodeprecated is in effect.
 *
 * @param modules      map of modules to the packages they contain
 * @param text         caption text (unused here — TODO confirm)
 * @param tableSummary summary text (unused here — TODO confirm)
 * @param body         content tree to add the package list to
 * @param mdle         the module whose packages are listed
 */
protected void addModulePackagesList(Map<ModuleElement, Set<PackageElement>> modules, String text,
        String tableSummary, Content body, ModuleElement mdle) {
    Content profNameContent = new StringContent(mdle.getQualifiedName().toString());
    // Heading: a link to the module followed by the "Packages" label.
    Content heading = HtmlTree.HEADING(HtmlConstants.PACKAGE_HEADING, true,
            getTargetModuleLink("classFrame", profNameContent, mdle));
    heading.addContent(Contents.SPACE);
    heading.addContent(contents.packagesLabel);
    HtmlTree htmlTree = (configuration.allowTag(HtmlTag.MAIN))
            ? HtmlTree.MAIN(HtmlStyle.indexContainer, heading)
            : HtmlTree.DIV(HtmlStyle.indexContainer, heading);
    HtmlTree ul = new HtmlTree(HtmlTag.UL);
    ul.setTitle(contents.packagesLabel);
    List<PackageElement> packages = new ArrayList<>(modules.get(mdle));
    for (PackageElement pkg : packages) {
        if ((!(configuration.nodeprecated && utils.isDeprecated(pkg)))) {
            ul.addContent(getPackage(pkg, mdle));
        }
    }
    htmlTree.addContent(ul);
    body.addContent(htmlTree);
}
python
def connectDropzone(self,
                    rect,
                    slot,
                    color=None,
                    style=None,
                    name='',
                    toolTip=''):
    """
    Connects the inputed dropzone to the given slot at the defined rect.

    :param      rect     | <QRectF>
                slot     | <method> || <function>

    :return     <XNodeHotspot>
    """
    hotspot = XNodeHotspot(rect, slot, name, toolTip)
    # Fall back to the node's default hotspot color/style when none given.
    hotspot.setColor(color or self.hotspotColor())
    hotspot.setStyle(style or self.hotspotStyle())
    self._dropzones.append(hotspot)
    return hotspot
java
/**
 * Emits the body of the new-instance method for a builder-based bean:
 * deserializes inline via the builder and wraps the built bean together
 * with any buffered properties into a new Instance.
 */
private void buildNewInstanceMethodForBuilder( MethodSpec.Builder newInstanceMethodBuilder ) {
    newInstanceMethodBuilder.addStatement(
            "return new $T(builderDeserializer.deserializeInline(reader, ctx, params, null, null, null, bufferedProperties).build(), bufferedProperties)",
            parameterizedName( Instance.class, beanInfo.getType() ) );
}
java
/**
 * Sorts the candidate list by score, highest first.
 *
 * Fix: the original comparator tested {@code scoreU > scoreT} twice, so
 * it could never return -1 and produced an incorrect ordering. The
 * comparison now delegates to {@link Double#compare} in descending order.
 */
private void sortList(ArrayList candidateList) {
    java.util.Collections.sort(candidateList, new java.util.Comparator() {
        public int compare(Object o1, Object o2) {
            double scoreT = ((TagLink.Candidates) o1).getScore();
            double scoreU = ((TagLink.Candidates) o2).getScore();
            // Descending: higher scores sort before lower ones.
            return Double.compare(scoreU, scoreT);
        }
    });
}
python
def display_output(arguments):
    '''Display the ASCII art from the image.

    arguments is a docopt-style dict of CLI flags ('--width', '--height',
    '--colors', '--bg', '--bold', '--invert', '--contrast', '--alt-chars')
    plus the positional 'FILE'. Prints the rendered art to stdout.
    '''
    # The character ramp is module-global so the alt/invert options can
    # swap it once for the whole run.
    global _ASCII
    if arguments['--alt-chars']:
        _ASCII=_ASCII_2
    try:
        im = Image.open(arguments['FILE'])
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt etc.
        raise IOError('Unable to open the file.')
    im = im.convert("RGBA")
    # Fit the image to the terminal; height is halved because a character
    # cell is roughly twice as tall as it is wide.
    aspect_ratio = float(im.size[0])/im.size[1]
    scaled_height = _WIDTH / aspect_ratio
    scaled_width = _HEIGHT * aspect_ratio*2
    if scaled_width > _WIDTH:
        width = int(_WIDTH)
        height = int(scaled_height/2)
    elif scaled_height > _HEIGHT:
        width = int(scaled_width)
        height = int(_HEIGHT)
    # NOTE(review): if neither branch above fires and no --width/--height is
    # given, width/height are unbound here — confirm whether that case can
    # occur for valid terminal sizes.
    # Explicit size flags override the auto-fit; giving both fixes both axes.
    if arguments['--width']:
        width = int(arguments['--width'])
        height = int(width / aspect_ratio / 2)
    elif arguments['--height']:
        height = int(arguments['--height'])
        width = int(height * aspect_ratio * 2)
    if arguments['--width'] and arguments['--height']:
        height = int(arguments['--height'])
        width = int(arguments['--width'])
    im = im.resize((width,height),resample=Image.ANTIALIAS)
    enhancer = ImageEnhance.Contrast(im)
    im = enhancer.enhance(float(arguments['--contrast']))
    # Keep the RGBA pixel data for color lookups before collapsing to
    # grayscale for brightness-to-character mapping.
    img = im.getdata()
    im = im.convert('L') #Grayscale
    if not arguments['--invert']:
        _ASCII = _ASCII[::-1]
    # rgb() values appear to be 0-5 terminal color-cube coordinates —
    # TODO confirm against print_color's implementation.
    bg=None;
    if arguments['--bg']=='BLACK':
        bg=rgb(0,0,0)
        fg=rgb(5,5,5)
    elif arguments['--bg']=='WHITE':
        bg=rgb(5,5,5)
        fg=rgb(0,0,0)
    row_len=0
    if arguments['--bold']:
        bold=True
    else:
        bold=False
    for count,i in enumerate(im.getdata()):
        # Map grayscale intensity (0-255) onto an index into the ramp.
        ascii_char = _ASCII[int(((i/255.0))*(len((_ASCII))-1))]
        try:
            # Exception used as control flow: falls through to the
            # monochrome branch when --colors is off or color printing fails.
            if not arguments['--colors']:
                raise Exception
            color = rgb(int((img[count][0]/255.0)*5),int((img[count][1]/255.0)*5),int((img[count][2]/255.0)*5))
            print_color(ascii_char, end='', fg=color, bg=bg, bold=bold)
        except:
            if bg and bold:
                print_color(ascii_char, end='', fg=fg, bg=bg, bold=bold)
            elif bold:
                print_color(ascii_char, end='', bold=bold)
            elif bg:
                print_color(ascii_char, end='', fg=fg, bg=bg)
            else:
                print(ascii_char, end='')
        # Newline after every full row of the resized image.
        row_len+=1
        if row_len==width:
            row_len=0
            print('')
python
def is_valid(self):
    """Keep only bi-allelic SNPs or single-base indels.

    True iff REF is one base and there is exactly one ALT allele,
    itself one base long.
    """
    if len(self.ref) != 1:
        return False
    if len(self.alt) != 1:
        return False
    return len(self.alt[0]) == 1
java
public static void setMultiSelectEditorValue( CmsObject cms, Map<String, String[]> formParameters, I_CmsWidgetDialog widgetDialog, I_CmsWidgetParameter param) { String[] values = formParameters.get(param.getId()); if ((values != null) && (values.length > 0)) { StringBuffer value = new StringBuffer(128); for (int i = 0; i < values.length; i++) { if (i > 0) { value.append(','); } value.append(values[i]); } // set the value param.setStringValue(cms, value.toString()); } else { // erase: param.setStringValue(cms, ""); } }
java
/**
 * Wraps the physical JDBC connection in a {@code ConnectionDecorator} that
 * carries the given callback.
 * NOTE(review): which lifecycle events the callback observes depends on
 * ConnectionDecorator — confirm in that class.
 */
@Override
protected Connection proxyConnection(Connection target, ConnectionCallback callback) {
    return new ConnectionDecorator(target, callback);
}
java
/**
 * Polls the endpoint's inbound handler for a received web socket message,
 * sleeping between attempts until a message arrives or the timeout elapses.
 *
 * @param config  endpoint configuration holding the message handler to poll
 * @param timeout maximum time in milliseconds to wait for a message
 * @return the received message, never {@code null}
 * @throws ActionTimeoutException if no message arrived within the timeout
 */
private WebSocketMessage<?> receive(WebSocketEndpointConfiguration config, long timeout) {
    long timeLeft = timeout;
    WebSocketMessage<?> message = config.getHandler().getMessage();
    String path = endpointConfiguration.getEndpointUri();
    while (message == null && timeLeft > 0) {
        timeLeft -= endpointConfiguration.getPollingInterval();
        // On the final iteration timeLeft goes negative, so shorten the sleep
        // to exactly the remaining time instead of a full polling interval.
        long sleep = timeLeft > 0 ? endpointConfiguration.getPollingInterval() : endpointConfiguration.getPollingInterval() + timeLeft;
        if (LOG.isDebugEnabled()) {
            String msg = "Waiting for message on '%s' - retrying in %s ms";
            LOG.debug(String.format(msg, path, (sleep)));
        }
        try {
            Thread.sleep(sleep);
        } catch (InterruptedException e) {
            // Interruption is logged but the wait continues until timeout.
            LOG.warn(String.format("Thread interrupted while waiting for message on '%s'", path), e);
        }
        message = config.getHandler().getMessage();
    }
    if (message == null) {
        throw new ActionTimeoutException(String.format("Action timed out while receiving message on '%s'", path));
    }
    return message;
}
java
/**
 * Writes {@code value} into the named property of {@code bean} via
 * PropertyUtils. A null bean is ignored; the special "." property (meaning
 * "the whole bean") is rejected with an error log; reflection failures are
 * logged rather than propagated.
 */
protected void setBeanPropertyValue(final String property, final Object bean, final Serializable value) {
    if (bean == null) {
        return;
    }
    if (!".".equals(property)) {
        try {
            PropertyUtils.setProperty(bean, property, value);
        } catch (Exception ex) {
            LOG.error("Failed to set bean property " + property + " on " + bean, ex);
        }
    } else {
        LOG.error("Set of entire bean is not supported by this model");
    }
}
python
def update(self, arr, mask=None):
    """
    update moving average (and variance) with new ndarray
    (of the same shape as the init array) and an optional mask

    arr  : new observation, same shape as self.avg
    mask : optional boolean array selecting which pixels this
           observation contributes to (None = all pixels)
    """
    # Pixels that have never contributed yet (n == 0), restricted to the
    # mask when one is given.
    if mask is not None:
        refI = np.logical_and(mask, self.n == 0)
    else:
        refI = self.n == 0
    if refI.any():
        # fill areas of the reference array that were empty before
        # create initial average value:
        self.avg[refI] = arr[refI]
    # the density of the marked array increases by one:
    # NOTE(review): when mask is None this indexes with None (np.newaxis),
    # which increments every element — apparently the intended "no mask"
    # behaviour, but worth confirming.
    self.n[mask] += 1
    # only consider filled areas:
    if mask is not None:
        i = mask  # np.logical_and(mask,self.n>0)
    else:
        i = self.n > 0
    # current value:
    xn = arr[i]
    # initial value:
    x0 = self.avg[i]
    n = self.n[i]
    # calculate the new average (incremental mean update):
    new_Avg = self.avg[i] + ((xn - x0) / n)
    if self.var is not None:
        # Incremental (Welford-like — verify) variance update; NaNs from the
        # division (e.g. n == 1) are zeroed before accumulating.
        t = (xn - new_Avg + x0 - self.avg[i]) * (xn - x0) / (n - 1)
        t = np.nan_to_num(t)
        self.var[i] += t
    # assign the new average now to remain the old average
    # for calculating variance above:
    self.avg[i] = new_Avg
java
/**
 * Unwraps a per-message token produced by the peer's wrap(). The token is
 * either a GSI MIC-framed packet (header 0x0D 0x03 0x00 — currently
 * rejected as UNAVAILABLE; the legacy handling is kept commented out below)
 * or a straight SSL packet, which is delegated to the low-level unwrap.
 *
 * @param inBuf buffer containing the token
 * @param off   offset of the token in inBuf
 * @param len   length of the token
 * @param prop  out-parameter for privacy/QOP properties, may be null
 * @return the unwrapped application data
 * @throws GSSException on malformed or unsupported tokens
 */
public byte[] unwrap(byte []inBuf, int off, int len, MessageProp prop)
    throws GSSException {
    checkContext();
    logger.debug("enter unwrap");
    byte [] token = null;
    /*
     * see if the token is a straight SSL packet or
     * one of ours made by wrap using get_mic
     */
    if (inBuf[off] == GSI_WRAP &&
        inBuf[off+1] == 3 &&
        inBuf[off+2] == 0) {
        // GSI "big" framing is no longer supported on unwrap.
        throw new GSSException(GSSException.UNAVAILABLE);
        /*DEL
        int micLen = SSLUtil.toShort(inBuf[off+3], inBuf[off+4]);
        int msgLen = len - 5 - micLen;
        if (micLen > len-5 || msgLen < 0) {
            throw new GSSException(GSSException.DEFECTIVE_TOKEN);
        }
        verifyMIC(inBuf, off+5, micLen,
                  inBuf, off+5+micLen, msgLen,
                  null);
        if (prop != null) {
            prop.setPrivacy(false);
            prop.setQOP(GSSConstants.GSI_BIG);
        }
        // extract the data
        token = new byte[msgLen];
        System.arraycopy(inBuf, off+5+micLen, token, 0, msgLen);
        */
    } else {
        // Plain SSL record: privacy reflects whether encryption is active.
        token = unwrap(inBuf, off, len);
        if (prop != null) {
            prop.setPrivacy(this.encryption);
            prop.setQOP(0);
        }
    }
    logger.debug("exit unwrap");
    return token;
}
java
/**
 * Returns a mutable copy of all viewport rules contained in this rule set,
 * in declaration order.
 */
@Nonnull
@ReturnsMutableCopy
public ICommonsList <CSSViewportRule> getAllViewportRules ()
{
  // Filter by runtime type, then cast each match.
  return m_aRules.getAllMapped (r -> r instanceof CSSViewportRule, r -> (CSSViewportRule) r);
}
java
/**
 * Adds (or replaces) a custom session parameter.
 *
 * @param sKey   parameter name, must not be empty
 * @param sValue parameter value, must not be null
 * @return this for chaining
 */
@Nonnull
public FineUploader5Session addParam (@Nonnull @Nonempty final String sKey, @Nonnull final String sValue)
{
  ValueEnforcer.notEmpty (sKey, "Key");
  ValueEnforcer.notNull (sValue, "Value");
  // Last value wins when the same key is added twice.
  m_aSessionParams.put (sKey, sValue);
  return this;
}
python
def update(self, new_games):
    """ new_games is a list of .tfrecord.zz new game records.

    Appends examples from every game newer than the newest example already
    buffered; games at or before that timestamp are skipped.
    """
    # Sort by basename so games are processed in filename (chronological) order.
    new_games.sort(key=os.path.basename)
    first_new_game = None
    for idx, game in enumerate(new_games):
        timestamp = file_timestamp(game)
        # self.examples holds (timestamp, ...) pairs; skip games that are
        # not newer than the most recent buffered example.
        if timestamp <= self.examples[-1][0]:
            continue
        elif first_new_game is None:
            # First genuinely new game in this (sorted) batch: everything
            # from idx onward is new, so report and count once.
            first_new_game = idx
            num_new_games = len(new_games) - idx
            print("Found {}/{} new games".format(
                num_new_games, len(new_games)))
            self.total_updates += num_new_games
        self.examples.extend(self.func(game))
    if first_new_game is None:
        # Nothing was new; log the newest candidate vs. the buffer's newest.
        print("No new games", file_timestamp(
            new_games[-1]), self.examples[-1][0])