language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
/**
 * Removes the named resource from the current project by delegating to the
 * resource-type specific handler.
 *
 * @param resourcename name of the resource to remove from the project
 * @throws CmsException if reading the resource or removing it fails
 */
public void removeResourceFromProject(String resourcename) throws CmsException {
    // TODO: this should be also possible if the resource has been deleted
    CmsResource resource = readResource(resourcename, CmsResourceFilter.ALL);
    getResourceType(resource).removeResourceFromProject(this, m_securityManager, resource);
}
python
def handle_lut(self, pkt):
    """Handle an IRAF "lut" packet, used to set the display frame number.

    Reads ``pkt.nbytes`` bytes of 16-bit values from ``pkt.datain``; the
    first value encodes the frame number (1-based on the wire, stored
    0-based in ``self.frame``) and the corresponding frame buffer is
    initialized on first use.

    :param pkt: packet object with ``subunit``, ``nbytes`` and ``datain``
        attributes — assumed to follow the IRAF/ximtool protocol; confirm
        against the caller.
    """
    self.logger.debug("handle lut")
    if pkt.subunit & COMMAND:
        # nbytes bytes of 16-bit values: use integer division so the
        # struct format string is e.g. "7h" and not "7.0h" — plain "/"
        # returns a float on Python 3 and struct.unpack would raise.
        data_type = str(pkt.nbytes // 2) + 'h'
        line = pkt.datain.read(pkt.nbytes)
        if len(line) < pkt.nbytes:
            # Short read: give up silently, as before.
            return
        try:
            x = struct.unpack(data_type, line)
        except Exception as e:
            self.logger.error("Error unpacking struct: %s" % (str(e)))
            return
        if len(x) < 14:
            # Pad short packets with zeroes up to the expected 14 entries.
            x = list(x) + [0] * (14 - len(x))
        if len(x) == 14:
            z = int(x[0])
            # frames start from 1, we start from 0
            self.frame = self.decode_frameno(z) - 1
            if self.frame > MAX_FRAMES:
                self.logger.error("attempt to select non existing frame.")
                return
            # Initialize the framebuffer lazily: only when the frame has
            # not been created yet.
            try:
                self.server.controller.get_frame(self.frame)
            except KeyError:
                self.server.controller.init_frame(self.frame)
            return
        self.logger.error("unable to select a frame.")
        return
    self.logger.error("what shall I do?")
python
def bake(self):
    """
    Bake a ``gilt`` command so it's ready to execute and returns None.

    :return: None
    """
    self._sh_command = getattr(sh, self.command)
    self._sh_command = self._sh_command.bake(
        self.options,
        'overlay',
        _env=self.env,
        _out=LOG.out,
        _err=LOG.error,
    )
java
/**
 * Dispatches the given DOM element to the {@code ElementHandler} registered
 * for its namespace/tag combination and returns the handler's result.
 *
 * @param <T> expected result type (unchecked cast from the handler's result)
 * @param element the element to process
 * @return the handler's result, cast to {@code T}
 * @throws ConfigurationException if no handler is registered for the element
 */
@SuppressWarnings("unchecked")
public <T> T processElement(Element element) throws ConfigurationException {
    String namespace = $(element).namespaceURI();
    String tagName = $(element).tag();
    ElementHandler<?> handler = handlers.get(new HandlerId(namespace, tagName));
    if (handler != null) {
        Object o = handler.processElement(this, element);
        return (T) o;
    }
    throw new ConfigurationException("No Handler registered for element named [" + tagName
            + "] in namespace: [" + namespace + "]");
}
python
def discriminator1(ndf, no_bias=True, fix_gamma=True, eps=1e-5 + 1e-12):
    '''First part of the discriminator which takes a 32x32 image as input
    and output a convolutional feature map, this is required to calculate
    the layer loss'''
    batch_norm = mx.sym.BatchNorm
    data = mx.sym.Variable('data')

    # Layer 1: strided conv + leaky ReLU (no batch norm on the input layer).
    conv1 = mx.sym.Convolution(data, name='d1', kernel=(5, 5), stride=(2, 2),
                               pad=(2, 2), num_filter=ndf, no_bias=no_bias)
    act1 = mx.sym.LeakyReLU(conv1, name='dact1', act_type='leaky', slope=0.2)

    # Layer 2: conv + BN + leaky ReLU, doubling the filter count.
    conv2 = mx.sym.Convolution(act1, name='d2', kernel=(5, 5), stride=(2, 2),
                               pad=(2, 2), num_filter=ndf * 2, no_bias=no_bias)
    bn2 = batch_norm(conv2, name='dbn2', fix_gamma=fix_gamma, eps=eps)
    act2 = mx.sym.LeakyReLU(bn2, name='dact2', act_type='leaky', slope=0.2)

    # Layer 3: conv + BN + leaky ReLU, quadrupling the filter count.
    conv3 = mx.sym.Convolution(act2, name='d3', kernel=(5, 5), stride=(2, 2),
                               pad=(2, 2), num_filter=ndf * 4, no_bias=no_bias)
    bn3 = batch_norm(conv3, name='dbn3', fix_gamma=fix_gamma, eps=eps)
    return mx.sym.LeakyReLU(bn3, name='dact3', act_type='leaky', slope=0.2)
python
def _ppf(self, q, left, right, cache):
    """
    Point percentile function.

    Example:
        >>> print(chaospy.Uniform().inv([0.1, 0.2, 0.9]))
        [0.1 0.2 0.9]
        >>> print(chaospy.Pow(chaospy.Uniform(), 2).inv([0.1, 0.2, 0.9]))
        [0.01 0.04 0.81]
        >>> print(chaospy.Pow(chaospy.Uniform(1, 2), -1).inv([0.1, 0.2, 0.9]))
        [0.52631579 0.55555556 0.90909091]
        >>> print(chaospy.Pow(2, chaospy.Uniform()).inv([0.1, 0.2, 0.9]))
        [1.07177346 1.14869835 1.86606598]
        >>> print(chaospy.Pow(2, chaospy.Uniform(-1, 0)).inv([0.1, 0.2, 0.9]))
        [0.53588673 0.57434918 0.93303299]
        >>> print(chaospy.Pow(2, 3).inv([0.1, 0.2, 0.9]))
        [8. 8. 8.]
    """
    # Resolve any cached inverses first; left/right may come back as plain
    # values or as Dist instances.
    left = evaluation.get_inverse_cache(left, cache)
    right = evaluation.get_inverse_cache(right, cache)
    if isinstance(left, Dist):
        if isinstance(right, Dist):
            # Both sides random: the distribution is under-determined.
            raise StochasticallyDependentError(
                "under-defined distribution {} or {}".format(left, right))
    elif not isinstance(right, Dist):
        # Both sides constant: deterministic power.
        return left**right
    else:
        # Constant base, random exponent. A negative base reverses the
        # monotonicity of x -> left**x, hence the quantile flip.
        out = evaluation.evaluate_inverse(right, q, cache=cache)
        out = numpy.where(left < 0, 1-out, out)
        out = left**out
        return out
    # Random base, constant exponent. A negative exponent reverses
    # monotonicity, hence the quantile flip before evaluating the base.
    right = right + numpy.zeros(q.shape)
    q = numpy.where(right < 0, 1-q, q)
    out = evaluation.evaluate_inverse(left, q, cache=cache)**right
    return out
java
/**
 * Convenience overload of {@code initInstance} that falls back to the
 * default Meteor port ({@code sMeteorPort}).
 *
 * @param context application context
 * @param meteorServerHostname hostname of the Meteor server
 * @param useSsl whether to connect via SSL
 */
public static void initInstance(Context context, String meteorServerHostname, boolean useSsl) {
    initInstance(context, meteorServerHostname, sMeteorPort, useSsl);
}
python
def PopState(self, **unused_kwargs):
    """Pop the previous state from the stack.

    Returns:
        The state popped from ``self.state_stack``, which also becomes the
        new ``self.state``.
    """
    try:
        self.state = self.state_stack.pop()
        # Lazy %-style args: the previous '{0:s}'.format(...) raised
        # ValueError whenever the popped state was not a string.
        logging.debug('Returned state to %s', self.state)
        return self.state
    except IndexError:
        # Empty stack: report via the parser's error channel rather
        # than crashing.
        self.Error(
            'Tried to pop the state but failed - possible recursion error')
python
def jump_statement(self):
    """
    jump_statement: 'return' expression_statement
    """
    # Consume the 'return' token, then wrap the following expression
    # statement in a ReturnStatement node.
    self._process(Nature.RETURN)
    expression = self.expression_statement()
    return ReturnStatement(expression=expression)
python
def get(self, page=0, size=10):
    """Get dashboard meta info from in page `page` and page size is `size`.

    Args:
        page: page number.
        size: size number.

    Returns:
        list of dict containing the dash_id and accordingly meta info.
        maybe empty list [] when page * size > total dashes in db.
        that's reasonable.
    """
    start = page * size
    # Dash ids ranked newest-first; each entry is an (id, score) pair.
    ranked = r_db.zrevrange(config.DASH_ID_KEY, 0, -1, True)
    page_ids = ranked[start:start + size]
    data = []
    if page_ids:
        metas = r_db.hmget(config.DASH_META_KEY, [pair[0] for pair in page_ids])
        data = [json.loads(meta) for meta in metas]
    return build_response(dict(data=data, code=200))
java
/**
 * Marks the given response as non-cacheable by setting the last-modified
 * time to now and, when a servlet response is available, adding
 * {@code Cache-Control: max-age=0} and an already-expired {@code Expires}
 * header.
 *
 * @param response the web response to configure
 */
public static void disableCaching(final WebResponse response) {
    response.setLastModifiedTime(Time.now());
    final HttpServletResponse httpServletResponse = getHttpServletResponse(response);
    if (httpServletResponse != null) {
        httpServletResponse.addHeader("Cache-Control", "max-age=0");
        httpServletResponse.setDateHeader("Expires", 0);
    }
}
python
def _get(self, ndef_message, timeout=1.0):
    """Get an NDEF message from the server. Temporarily connects to the
    default SNEP server if the client is not yet connected.
    """
    if not self.socket:
        # Not yet connected: connect to the well-known SNEP service and
        # remember to tear the connection down afterwards.
        try:
            self.connect('urn:nfc:sn:snep')
        except nfc.llcp.ConnectRefused:
            return None
        else:
            self.release_connection = True
    else:
        self.release_connection = False
    try:
        # SNEP GET request: version/opcode 0x10 0x01, 4-byte big-endian
        # request length, 4-byte acceptable response length, NDEF payload.
        snep_request = b'\x10\x01'
        snep_request += struct.pack('>L', 4 + len(str(ndef_message)))
        snep_request += struct.pack('>L', self.acceptable_length)
        # NOTE(review): concatenating str(ndef_message) onto bytes only
        # works on Python 2; on Python 3 this raises TypeError — confirm
        # the supported Python versions for this module.
        snep_request += str(ndef_message)
        if send_request(self.socket, snep_request, self.send_miu):
            response = recv_response(
                self.socket, self.acceptable_length, timeout)
            if response is not None:
                # Byte 1 is the SNEP response code; 0x81 means Success.
                if response[1] != 0x81:
                    raise SnepError(response[1])
                # The payload starts after the 6-byte SNEP response header.
                return response[6:]
    finally:
        if self.release_connection:
            self.close()
java
/**
 * Gets a streaming job asynchronously, unwrapping the service response to
 * its body.
 *
 * @param resourceGroupName name of the resource group containing the job
 * @param jobName name of the streaming job
 * @return an observable emitting the streaming job
 */
public Observable<StreamingJobInner> getByResourceGroupAsync(String resourceGroupName, String jobName) {
    return getByResourceGroupWithServiceResponseAsync(resourceGroupName, jobName)
            .map(new Func1<ServiceResponseWithHeaders<StreamingJobInner, StreamingJobsGetHeaders>, StreamingJobInner>() {
                @Override
                public StreamingJobInner call(ServiceResponseWithHeaders<StreamingJobInner, StreamingJobsGetHeaders> response) {
                    // Discard headers/status; callers only need the job itself.
                    return response.body();
                }
            });
}
python
def autoconfig_url_from_preferences():
    """
    Get the PAC ``AutoConfigURL`` value from the macOS System Preferences.

    This setting is visible as the "URL" field in
    System Preferences > Network > Advanced... > Proxies >
    Automatic Proxy Configuration.

    :return: The value from the registry,
        or None if the value isn't configured or available.
        Note that it may be local filesystem path instead of a URL.
    :rtype: str|None
    :raises NotDarwinError: If called on a non-macOS/OSX platform.
    """
    if not ON_DARWIN:
        raise NotDarwinError()

    try:
        config = SystemConfiguration.SCDynamicStoreCopyProxies(None)
    except AttributeError:
        return  # Key or value not found.

    # Only return a value if PAC is enabled, a URL is present, and auto
    # discovery is disabled.
    enabled = 'ProxyAutoConfigEnable' in config
    has_url = 'ProxyAutoConfigURLString' in config
    discovery_off = not config.get('ProxyAutoDiscoveryEnable', 0)
    if enabled and has_url and discovery_off:
        return str(config['ProxyAutoConfigURLString'])
python
def neg_loglikelihood(y, mean, scale, shape, skewness):
    """ Negative loglikelihood function for this distribution

    Parameters
    ----------
    y : np.ndarray
        univariate time series

    mean : np.ndarray
        array of location parameters for the Cauchy distribution

    scale : float
        scale parameter for the Cauchy distribution

    shape : float
        tail thickness parameter for the Cauchy distribution (unused here;
        kept for a uniform family interface)

    skewness : float
        skewness parameter for the Cauchy distribution (unused here;
        kept for a uniform family interface)

    Returns
    ----------
    - Negative loglikelihood of the Cauchy family
    """
    log_densities = ss.cauchy.logpdf(y, loc=mean, scale=scale)
    return -np.sum(log_densities)
python
def set_primary_contact(self, email):
    """assigns the primary contact for this client"""
    response = self._put(
        self.uri_for('primarycontact'),
        params={"email": email},
    )
    return json_to_py(response)
python
def iquant(val, u=Ellipsis):
    '''
    iquant(...) is equivalent to quant(...) except that the magnitude of the
    return value is always a read-only numpy array object.
    '''
    # Normalize the requested unit; Ellipsis means "keep the value's unit".
    if u is not Ellipsis and u is not None:
        u = unit(u)
    if is_quantity(val):
        uu = unit(val)
        if u is Ellipsis or u == uu:
            # no conversion necessary; might be able to reuse old array
            m = mag(val)
            mm = imm_array(m)
            # Rebuild the quantity only if the magnitude changed, the value
            # is a bare (mag, unit) tuple, or it belongs to a foreign
            # pint registry.
            if m is not mm or isinstance(val, tuple) or val._REGISTRY is not units:
                val = units.Quantity(mm, uu)
            return val
        else:
            # we convert to another type first, then make an imm array
            if isinstance(val, tuple) or val._REGISTRY is not units:
                val = units.Quantity(mag(val), uu)
            v = val.to(u)
            return units.Quantity(imm_array(v.m), v.u)
    else:
        # Plain (non-quantity) input: wrap an immutable copy in the
        # requested unit, defaulting to dimensionless.
        return units.Quantity(imm_array(val), units.dimensionless if u is Ellipsis else unit(u))
python
def to_representation(self, value):
    """Convert to natural key."""
    # Look the content type up by primary key, then serialize its natural
    # key tuple as a single underscore-joined string.
    natural_key = ContentType.objects.get_for_id(value).natural_key()
    return "_".join(natural_key)
java
/**
 * Convenience overload of {@code getDate} that uses the English locale.
 *
 * @param days day offset
 * @param months month offset
 * @param years year offset
 * @param format date format pattern
 * @return the formatted date string
 */
public static String getDate(int days, int months, int years, String format) {
    return getDate(days, months, years, format, Locale.ENGLISH);
}
python
def download():
    """ Download all files from an FTP share """
    ftp = ftplib.FTP(SITE)
    ftp.set_debuglevel(DEBUG)
    ftp.login(USER, PASSWD)
    ftp.cwd(DIR)

    names = ftp.nlst()
    progress = MANAGER.counter(total=len(names), desc='Downloading', unit='files')

    for name in names:
        # Writer handles the local file; retrbinary streams chunks into it.
        with Writer(name, ftp.size(name), DEST) as writer:
            ftp.retrbinary('RETR %s' % name, writer.write)
        print(name)
        progress.update()

    ftp.close()
java
/**
 * Converts a wall-clock time into UTC milliseconds, honouring any explicitly
 * set ZONE_OFFSET / DST_OFFSET calendar fields and falling back to the
 * supplied {@code TimeZone} for the offsets that are not set.
 *
 * @param tzMask mask of which ZONE_OFFSET / DST_OFFSET fields are set
 * @param utcTimeInMillis the time in milliseconds before offset adjustment
 * @param zone the time zone used to derive any missing offsets
 * @return the offset-adjusted UTC time in milliseconds
 */
private long adjustForZoneAndDaylightSavingsTime(
        int tzMask, long utcTimeInMillis, TimeZone zone) {

    // The following don't actually need to be initialized because they are always set before
    // they are used but the compiler cannot detect that.
    int zoneOffset = 0;
    int dstOffset = 0;

    // If either of the ZONE_OFFSET or DST_OFFSET fields are not set then get the information
    // from the TimeZone.
    if (tzMask != (ZONE_OFFSET_MASK|DST_OFFSET_MASK)) {
        if (zoneOffsets == null) {
            zoneOffsets = new int[2];
        }
        int gmtOffset = isFieldSet(tzMask, ZONE_OFFSET)
                ? internalGet(ZONE_OFFSET)
                : zone.getRawOffset();

        // Calculate the standard time (no DST) in the supplied zone. This is a ballpark figure
        // and not used in the final calculation as the offset used here may not be the same as
        // the actual offset the time zone requires be used for this time. This is to handle
        // situations like Honolulu, where its raw offset changed from GMT-10:30 to GMT-10:00
        // in 1947. The TimeZone always uses a raw offset of -10:00 but will return -10:30
        // for dates before the change over.
        long standardTimeInZone = utcTimeInMillis - gmtOffset;

        // Retrieve the correct zone and DST offsets from the time zone.
        // J2ObjC modified: Use NativeTimeZone instead of ZoneInfo.
        if (zone instanceof NativeTimeZone) {
            ((NativeTimeZone) zone).getOffsetsByUtcTime(standardTimeInZone, zoneOffsets);
        } else {
            zone.getOffsets(standardTimeInZone, zoneOffsets);
        }
        zoneOffset = zoneOffsets[0];
        dstOffset = zoneOffsets[1];

        // If necessary adjust the DST offset to handle an invalid wall clock sensibly.
        dstOffset = adjustDstOffsetForInvalidWallClock(standardTimeInZone, zone, dstOffset);
    }

    // If either ZONE_OFFSET of DST_OFFSET fields are set then get the information from the
    // fields, potentially overriding information from the TimeZone.
    if (tzMask != 0) {
        if (isFieldSet(tzMask, ZONE_OFFSET)) {
            zoneOffset = internalGet(ZONE_OFFSET);
        }
        if (isFieldSet(tzMask, DST_OFFSET)) {
            dstOffset = internalGet(DST_OFFSET);
        }
    }

    // Adjust the time zone offset values to get the UTC time.
    long standardTimeInZone = utcTimeInMillis - zoneOffset;
    return standardTimeInZone - dstOffset;
}
python
def p12d_local(vertices, lame, mu):
    """Local stiffness matrix for P1 elements in 2d.

    Builds the 6x6 element stiffness matrix for linear elasticity on a
    triangle with Lame parameters ``lame`` and ``mu``.
    """
    assert vertices.shape == (3, 2)
    # Barycentric coordinate matrix; its inverse yields the gradients of
    # the three linear basis functions.
    A = np.vstack((np.ones((1, 3)), vertices.T))
    grad_phi = inv(A)[:, 1:]

    # Strain-displacement matrix: 3 strain components x 6 nodal dofs.
    R = np.zeros((3, 6))
    R[[[0], [2]], [0, 2, 4]] = grad_phi.T
    R[[[2], [1]], [1, 3, 5]] = grad_phi.T

    # Isotropic elasticity tensor in Voigt notation.
    C = mu * np.array([[2, 0, 0], [0, 2, 0], [0, 0, 1]]) \
        + lame * np.array([[1, 1, 0], [1, 1, 0], [0, 0, 0]])

    # det(A)/2 is the triangle area.
    return det(A) / 2.0 * R.T.dot(C).dot(R)
java
/**
 * Runs the given {@code ReturningRunnable}, then sleeps for the specified
 * number of milliseconds before returning the runnable's result.
 *
 * @param <T> type of the runnable's return value
 * @param milliseconds how long to sleep after running; must be positive
 * @param runnable the operation to run
 * @return the value produced by {@code runnable}
 */
public static <T> T runWithSleepThenReturnValue(long milliseconds, ReturningRunnable<T> runnable) {
    Assert.isTrue(milliseconds > 0, "Milliseconds [%d] must be greater than 0", milliseconds);
    T returnValue = runnable.run();
    ThreadUtils.sleep(milliseconds, 0);
    return returnValue;
}
python
def get_user(self, user_id, expand=False):
    """Returns Hacker News `User` object.

    Fetches data from the url:
    https://hacker-news.firebaseio.com/v0/user/<user_id>.json
    e.g. https://hacker-news.firebaseio.com/v0/user/pg.json

    Args:
        user_id (string): unique user id of a Hacker News user.
        expand (bool): Flag to indicate whether to
            transform all IDs into objects.

    Returns:
        `User` object representing a user on Hacker News.

    Raises:
        InvalidUserID: If no such user exists on Hacker News.
    """
    url = urljoin(self.user_url, F"{user_id}.json")
    response = self._get_sync(url)
    if not response:
        raise InvalidUserID

    user = User(response)
    if expand and user.submitted:
        items = self.get_items_by_ids(user.submitted)
        # Bucket the user's submissions by item type onto attributes.
        buckets = {
            'stories': 'story',
            'comments': 'comment',
            'jobs': 'job',
            'polls': 'poll',
            'pollopts': 'pollopt'
        }
        for attr, item_type in buckets.items():
            matching = [item for item in items if item.item_type == item_type]
            setattr(user, attr, matching)
    return user
python
def opt_pairwise(n_items, data, alpha=1e-6, method="Newton-CG",
        initial_params=None, max_iter=None, tol=1e-5):
    """Compute the ML estimate of model parameters using ``scipy.optimize``.

    This function computes the maximum-likelihood estimate of model
    parameters given pairwise-comparison data (see :ref:`data-pairwise`),
    using optimizers provided by the ``scipy.optimize`` module.

    If ``alpha > 0``, the function returns the maximum a-posteriori (MAP)
    estimate under an isotropic Gaussian prior with variance ``1 / alpha``.
    See :ref:`regularization` for details.

    Parameters
    ----------
    n_items : int
        Number of distinct items.
    data : list of lists
        Pairwise-comparison data.
    alpha : float, optional
        Regularization strength.
    method : str, optional
        Optimization method. Either "BFGS" or "Newton-CG".
    initial_params : array_like, optional
        Parameters used to initialize the iterative procedure.
    max_iter : int, optional
        Maximum number of iterations allowed.
    tol : float, optional
        Tolerance for termination (method-specific).

    Returns
    -------
    params : numpy.ndarray
        The (penalized) ML estimate of model parameters.

    Raises
    ------
    ValueError
        If the method is not "BFGS" or "Newton-CG".
    """
    # Wrap the data in the pairwise objective and delegate to the shared
    # optimization driver.
    return _opt(n_items, PairwiseFcts(data, alpha), method, initial_params,
                max_iter, tol)
java
/**
 * Creates a {@code THttpService} for the given service implementations with
 * the specified default serialization format; all known Thrift serialization
 * formats are allowed.
 *
 * @param implementations map of service name to implementation
 * @param defaultSerializationFormat format used when the client does not specify one
 * @return the new {@code THttpService}
 */
public static THttpService of(Map<String, ?> implementations,
        SerializationFormat defaultSerializationFormat) {
    return new THttpService(ThriftCallService.of(implementations),
            newAllowedSerializationFormats(defaultSerializationFormat,
                    ThriftSerializationFormats.values()));
}
java
/**
 * Returns a function that converts an arbitrary object to the given type
 * via {@code Convert.convert}.
 *
 * @param <T> target type
 * @param clazz target class
 * @return a converting function
 */
public static <T> Function<Object, T> getConverter(final Class<T> clazz) {
    return object -> Convert.convert(object, clazz);
}
python
def render(self, request, **kwargs):
    """
    Renders this view. Adds cancel_url to the context.

    If the request get parameters contains 'popup' then the
    `render_type` is set to 'popup'.
    """
    if request.GET.get('popup'):
        self.render_type = 'popup'
        kwargs['popup'] = 1

    kwargs['cancel_url'] = self.get_cancel_url()
    if not self.object:
        # No bound object yet: render with the generic title.
        kwargs['single_title'] = True
    return super(FormView, self).render(request, **kwargs)
python
def oldest_peer(peers):
    """Determines who the oldest peer is by comparing unit numbers."""
    # The local unit is oldest iff no peer has a smaller unit number.
    local_unit_no = int(os.getenv('JUJU_UNIT_NAME').split('/')[1])
    return all(
        int(peer.split('/')[1]) >= local_unit_no for peer in peers)
java
/**
 * Rearranges image blocks into columns (im2col) for convolution.
 *
 * @param img input of shape [batch, channels, height, width]
 * @param kh kernel height
 * @param kw kernel width
 * @param sy stride along height
 * @param sx stride along width
 * @param ph padding along height
 * @param pw padding along width
 * @param pval padding value (currently unused by this overload)
 * @param isSameMode whether to use "same" output-size semantics
 * @return the im2col output of shape [batch, channels, kh, kw, oH, oW]
 */
public static INDArray im2col(INDArray img, int kh, int kw, int sy, int sx, int ph, int pw,
        int pval, boolean isSameMode) {
    INDArray output = null;

    if (isSameMode) {
        // "Same" mode: output spatial size is ceil(input / stride).
        int oH = (int) Math.ceil(img.size(2) * 1.f / sy);
        int oW = (int) Math.ceil(img.size(3) * 1.f / sx);

        output = Nd4j.createUninitialized(img.dataType(),
                new long[]{img.size(0), img.size(1), kh, kw, oH, oW}, 'c');
    } else {
        // FIXME: int cast
        // "Valid" mode with dilation fixed at 1 (the (kh-1)*(1-1) terms vanish).
        int oH = ((int) img.size(2) - (kh + (kh - 1) * (1 - 1)) + 2 * ph) / sy + 1;
        int oW = ((int) img.size(3) - (kw + (kw - 1) * (1 - 1)) + 2 * pw) / sx + 1;

        output = Nd4j.createUninitialized(img.dataType(),
                new long[]{img.size(0), img.size(1), kh, kw, oH, oW}, 'c');
    }

    Im2col im2col = Im2col.builder()
            .inputArrays(new INDArray[]{img})
            .outputs(new INDArray[]{output})
            .conv2DConfig(Conv2DConfig.builder()
                    .pW(pw)
                    .pH(ph)
                    .sH(sy)
                    .sW(sx)
                    .kW(kw)
                    .kH(kh)
                    .dW(1)
                    .dH(1)
                    .isSameMode(isSameMode)
                    .build()).build();

    Nd4j.getExecutioner().execAndReturn(im2col);

    return im2col.outputArguments()[0];
}
java
/**
 * Reformats every navigation-drawer item, marking as selected the one whose
 * id matches {@code itemId}.
 *
 * @param itemId id of the item to mark selected
 */
private void setSelectedNavDrawerItem(int itemId) {
    for (DrawerItem item : mDrawerItems) {
        formatNavDrawerItem(item, itemId == item.getId());
    }
}
java
/**
 * Returns the runtime field as a {@code ByteString}, lazily converting and
 * caching the UTF-8 bytes when the field is currently stored as a String
 * (standard protobuf accessor pattern).
 *
 * @return the runtime value as bytes
 */
public com.google.protobuf.ByteString getRuntimeBytes() {
    java.lang.Object ref = runtime_;
    if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
                com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the byte form so subsequent calls skip the conversion.
        runtime_ = b;
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}
java
/**
 * Computes the union of the given bitmaps using repeated lazy ORs, then
 * repairs the result's internal state.
 *
 * @param bitmaps bitmaps to union
 * @return a new bitmap containing the union
 */
public static RoaringBitmap naive_or(RoaringBitmap... bitmaps) {
    RoaringBitmap answer = new RoaringBitmap();
    for (int k = 0; k < bitmaps.length; ++k) {
        // Lazy OR defers cardinality bookkeeping until repairAfterLazy().
        answer.naivelazyor(bitmaps[k]);
    }
    answer.repairAfterLazy();
    return answer;
}
python
def _raise_error_if_disconnected(self) -> None:
    """
    See if we're still connected, and if not, raise
    ``SMTPServerDisconnected``.
    """
    disconnected = (
        self.transport is None
        or self.protocol is None
        or self.transport.is_closing()
    )
    if disconnected:
        self.close()
        raise SMTPServerDisconnected("Disconnected from SMTP server")
java
/**
 * Appends the given layers to this result's layer list, creating the list
 * first if necessary (fluent setter).
 *
 * @param layers layers to add
 * @return this result, for chaining
 */
public ListLayersResult withLayers(LayersListItem... layers) {
    if (this.layers == null) {
        setLayers(new com.amazonaws.internal.SdkInternalList<LayersListItem>(layers.length));
    }
    for (LayersListItem ele : layers) {
        this.layers.add(ele);
    }
    return this;
}
python
def _get_comparison_spec(pkgver):
    '''
    Return a tuple containing the comparison operator and the version. If no
    comparison operator was passed, the comparison is assumed to be an
    "equals" comparison, and "==" will be the operator returned.
    '''
    oper, verstr = salt.utils.pkg.split_comparison(pkgver.strip())
    # Normalize '=' and the empty operator to '=='.
    return ('==' if oper in ('=', '') else oper), verstr
python
def info(self, callback=None, **kwargs):
    """ Get the basic info from the current cluster. """
    # An empty path hits the cluster root endpoint.
    request = self.mk_req('', method='GET', **kwargs)
    self.client.fetch(request, callback=callback)
python
def _makeIndentAsColumn(self, block, column, offset=0):
    """ Make indent equal to column indent. Shiftted by offset """
    text_before = block.text()[:column]
    tab_count = text_before.count('\t')
    # Each tab occupies the indenter width on screen, so add
    # (width - 1) extra columns per tab to get the visible column.
    visible_column = column + tab_count * (self._indenter.width - 1)
    return self._makeIndentFromWidth(visible_column + offset)
java
/**
 * Loads a YAML configuration from the given resource, reading it as UTF-8.
 *
 * @param resource the resource to read
 * @return the parsed YAML configuration
 * @throws DeployerConfigurationException if reading or parsing fails
 */
public static YamlConfiguration loadYamlConfiguration(Resource resource)
        throws DeployerConfigurationException {
    try {
        try (Reader reader = new BufferedReader(
                new InputStreamReader(resource.getInputStream(), "UTF-8"))) {
            return doLoadYamlConfiguration(reader);
        }
    } catch (Exception e) {
        throw new DeployerConfigurationException("Failed to load YAML configuration at " + resource, e);
    }
}
java
/**
 * Asks the provisioning server whether the given JID is a friend, caching
 * negative responses per provisioning server to avoid repeated queries.
 *
 * @param provisioningServer the provisioning server to ask
 * @param friendInQuestion the JID whose friendship status is checked
 * @return {@code true} if the JID is a friend
 * @throws NoResponseException if the server does not answer in time
 * @throws XMPPErrorException if the server returns an error
 * @throws NotConnectedException if the connection is not established
 * @throws InterruptedException if the calling thread is interrupted
 */
public boolean isFriend(Jid provisioningServer, BareJid friendInQuestion)
        throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    LruCache<BareJid, Void> cache = negativeFriendshipRequestCache.lookup(provisioningServer);
    if (cache != null && cache.containsKey(friendInQuestion)) {
        // We hit a cached negative isFriend response for this provisioning server.
        return false;
    }

    IoTIsFriend iotIsFriend = new IoTIsFriend(friendInQuestion);
    iotIsFriend.setTo(provisioningServer);
    IoTIsFriendResponse response =
            connection().createStanzaCollectorAndSend(iotIsFriend).nextResultOrThrow();
    // The server must answer about the JID we asked for.
    assert (response.getJid().equals(friendInQuestion));
    boolean isFriend = response.getIsFriendResult();
    if (!isFriend) {
        // Cache the negative is friend response.
        if (cache == null) {
            cache = new LruCache<>(1024);
            negativeFriendshipRequestCache.put(provisioningServer, cache);
        }
        cache.put(friendInQuestion, null);
    }
    return isFriend;
}
java
/**
 * Builds the tracking endpoint URL for the given carrier and tracking
 * number, URL-encoding both path segments.
 *
 * @param carrier carrier identifier
 * @param trackingNumber tracking number
 * @return the encoded tracking URL
 * @throws InvalidRequestException if URL-encoding of a parameter fails
 */
private static String trackingNumberURL(String carrier, String trackingNumber)
        throws AuthenticationException, InvalidRequestException, APIConnectionException, APIException {
    try {
        return String.format("%s/%s/%s", classURL(Track.class), urlEncode(carrier),
                urlEncode(trackingNumber));
    } catch (UnsupportedEncodingException e) {
        throw new InvalidRequestException("Unable to encode parameters to " + CHARSET
                + ". Please contact support@goshippo.com for assistance.", null, e);
    }
}
python
def __convert_booleans(self, eitem):
    """ Convert True/False to 1/0 for better kibana processing """
    for field, value in eitem.items():
        if isinstance(value, bool):
            # Replace the boolean in place with its integer equivalent.
            eitem[field] = 1 if value else 0
    return eitem
python
def link(self):
    """Resolve and link all types in the scope."""
    linked_specs = {}
    surfaces = []
    for name, spec in self.scope.type_specs.items():
        spec = spec.link(self.scope)
        linked_specs[name] = spec
        # Expose each linked type's surface in the scope as it appears.
        if spec.surface is not None:
            self.scope.add_surface(name, spec.surface)
            surfaces.append(spec.surface)
    self.scope.type_specs = linked_specs
    # Publish the complete collection of surfaces under '__types__'.
    self.scope.add_surface('__types__', tuple(surfaces))
python
def _run_command(self, command_constructor, args):
    """
    Run command_constructor and call run(args) on the resulting object

    :param command_constructor: class of an object that implements run(args)
    :param args: object arguments for specific command created by CommandParser
    """
    verify_terminal_encoding(sys.stdout.encoding)
    self._check_pypi_version()

    config = create_config(
        allow_insecure_config_file=args.allow_insecure_config_file)
    self.show_error_stack_trace = config.debug_mode

    command_constructor(config).run(args)
python
def encodeGsm7(plaintext, discardInvalid=False):
    """ GSM-7 text encoding algorithm

    Encodes the specified text string into GSM-7 octets (characters). This
    method does not pack the characters into septets.

    :param text: the text string to encode
    :param discardInvalid: if True, characters that cannot be encoded will
        be silently discarded

    :raise ValueError: if the text string cannot be encoded using GSM-7
        encoding (unless discardInvalid == True)

    :return: A bytearray containing the string encoded in GSM-7 encoding
    :rtype: bytearray
    """
    result = bytearray()
    if PYTHON_VERSION >= 3:
        plaintext = str(plaintext)
    for char in plaintext:
        index = GSM7_BASIC.find(char)
        if index != -1:
            # Character exists in the basic table; its index is its code.
            result.append(index)
        elif char in GSM7_EXTENDED:
            # ESC (0x1B) switches to the extended character table.
            result.append(0x1B)
            result.append(ord(GSM7_EXTENDED[char]))
        elif not discardInvalid:
            raise ValueError('Cannot encode char "{0}" using GSM-7 encoding'.format(char))
    return result
java
/**
 * Captures request/entry/user context from the thread-local cache (when
 * available) before delegating to the parent check-out history logic.
 */
@Override
public void saveCheckOutHistory() {
    if (ThreadCacheContext.exists()) { // e.g. in action
        checkingOutRequestPath = ThreadCacheContext.findRequestPath();
        checkingOutEntryExp = convertMethodToMethodExp(ThreadCacheContext.findEntryMethod());
        checkingOutUserExp = convertUserBeanToUserExp(ThreadCacheContext.findUserBean());
        checkingOutMillis = currentTimeMillis();
    }
    super.saveCheckOutHistory();
}
java
/**
 * Sets the XPG size, firing an EMF change notification with the old and
 * new values when notification is required (generated EMF setter pattern).
 *
 * @param newXpgSize the new XPG size
 */
public void setXpgSize(Integer newXpgSize) {
    Integer oldXpgSize = xpgSize;
    xpgSize = newXpgSize;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.PGD__XPG_SIZE,
                oldXpgSize, xpgSize));
}
java
/**
 * Converts this instant to a {@code MutableDateTime} using the same
 * chronology but in the specified time zone.
 *
 * @param zone the zone for the new datetime
 * @return a new mutable datetime at the same millisecond instant
 */
public MutableDateTime toMutableDateTime(DateTimeZone zone) {
    Chronology chrono = DateTimeUtils.getChronology(getChronology());
    chrono = chrono.withZone(zone);
    return new MutableDateTime(getMillis(), chrono);
}
java
/**
 * Attaches to the task every activity-code value assigned to it, resolving
 * each assignment id through the activity code map and skipping any ids
 * without a known value.
 *
 * @param task the task to populate
 */
private void populateActivityCodes(Task task) {
    List<Integer> list = m_activityCodeAssignments.get(task.getUniqueID());
    if (list != null) {
        for (Integer id : list) {
            ActivityCodeValue value = m_activityCodeMap.get(id);
            if (value != null) {
                task.addActivityCode(value);
            }
        }
    }
}
java
/**
 * Fetches a page of web-worker usage records and recursively concatenates
 * all subsequent pages by following each page's next-page link.
 *
 * @param nextPageLink link to the page to fetch
 * @return an observable emitting every remaining page
 */
public Observable<ServiceResponse<Page<UsageInner>>> listWebWorkerUsagesNextWithServiceResponseAsync(final String nextPageLink) {
    return listWebWorkerUsagesNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<UsageInner>>, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(ServiceResponse<Page<UsageInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page: stop the recursion.
                        return Observable.just(page);
                    }
                    return Observable.just(page)
                            .concatWith(listWebWorkerUsagesNextWithServiceResponseAsync(nextPageLink));
                }
            });
}
java
/**
 * Computes the sum of the absolute values of every element in the matrix.
 *
 * @param mat the input matrix (not modified)
 * @return the sum of |a_i| over all elements
 */
public static double elementSumAbs( DMatrixD1 mat ) {
    double total = 0;

    int size = mat.getNumElements();
    for( int i = 0; i < size; i++ ) {
        total += Math.abs(mat.get(i));
    }

    return total;
}
java
/**
 * Marshals every field of the given {@code CommandPlugin} using the
 * supplied protocol marshaller.
 *
 * @param commandPlugin the object to marshal; must not be null
 * @param protocolMarshaller the marshaller to write fields to
 * @throws SdkClientException if the input is null or marshalling fails
 */
public void marshall(CommandPlugin commandPlugin, ProtocolMarshaller protocolMarshaller) {
    if (commandPlugin == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(commandPlugin.getName(), NAME_BINDING);
        protocolMarshaller.marshall(commandPlugin.getStatus(), STATUS_BINDING);
        protocolMarshaller.marshall(commandPlugin.getStatusDetails(), STATUSDETAILS_BINDING);
        protocolMarshaller.marshall(commandPlugin.getResponseCode(), RESPONSECODE_BINDING);
        protocolMarshaller.marshall(commandPlugin.getResponseStartDateTime(), RESPONSESTARTDATETIME_BINDING);
        protocolMarshaller.marshall(commandPlugin.getResponseFinishDateTime(), RESPONSEFINISHDATETIME_BINDING);
        protocolMarshaller.marshall(commandPlugin.getOutput(), OUTPUT_BINDING);
        protocolMarshaller.marshall(commandPlugin.getStandardOutputUrl(), STANDARDOUTPUTURL_BINDING);
        protocolMarshaller.marshall(commandPlugin.getStandardErrorUrl(), STANDARDERRORURL_BINDING);
        protocolMarshaller.marshall(commandPlugin.getOutputS3Region(), OUTPUTS3REGION_BINDING);
        protocolMarshaller.marshall(commandPlugin.getOutputS3BucketName(), OUTPUTS3BUCKETNAME_BINDING);
        protocolMarshaller.marshall(commandPlugin.getOutputS3KeyPrefix(), OUTPUTS3KEYPREFIX_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * Substitutes the supplied details into the {@code {}} placeholders of the
 * message, SLF4J-style. The number of placeholders must match the number of
 * details exactly.
 *
 * <p>Fix: the previous implementation rebuilt the message from
 * {@code split("\\{\\}")}, which returns zero parts for messages consisting
 * only of placeholders (e.g. {@code "{}{}"}), so the raw template was
 * returned unchanged. This version substitutes by scanning placeholder
 * positions directly, which handles every placement correctly.</p>
 *
 * @param aMessage message template containing {@code {}} placeholders
 * @param aDetails values to substitute, one per placeholder
 * @return the message with all placeholders replaced
 * @throws IndexOutOfBoundsException if the placeholder and detail counts differ
 */
public static String format(final String aMessage, final String... aDetails) {
    // Count placeholders first so a mismatch fails before any substitution.
    int count = 0;

    for (int position = aMessage.indexOf("{}"); position != -1;
            position = aMessage.indexOf("{}", position + 2)) {
        count += 1;
    }

    if (count != aDetails.length) {
        throw new IndexOutOfBoundsException(LOGGER.getI18n(MessageCodes.UTIL_043, count, aDetails.length));
    }

    final StringBuilder builder = new StringBuilder();

    int start = 0;
    int index = 0;
    int position;

    // Copy literal text up to each placeholder, then the matching detail.
    while ((position = aMessage.indexOf("{}", start)) != -1) {
        builder.append(aMessage, start, position).append(aDetails[index]);
        index += 1;
        start = position + 2;
    }

    // Append any trailing literal text after the final placeholder.
    builder.append(aMessage, start, aMessage.length());

    return builder.toString();
}
java
/**
 * Returns the trigger configurations, lazily creating an empty list on
 * first access so callers never see {@code null}.
 *
 * @return the (possibly empty) list of trigger configurations
 */
public java.util.List<TriggerConfig> getTriggerConfigurations() {
    if (triggerConfigurations == null) {
        triggerConfigurations = new com.amazonaws.internal.SdkInternalList<TriggerConfig>();
    }
    return triggerConfigurations;
}
python
def register_hook(self, hook, event_type=None):
    """
    If ``event_type`` is provided, then ``hook`` will be called whenever
    that event is fired.

    If no ``event_type`` is specifid, but ``hook`` implements any methods
    with names matching an event hook, then those will be registered with
    their corresponding events. This allows for more stateful, class-based
    event handlers.
    """
    if event_type is not None:
        assert type(event_type) is int, "register hooks with int values"
        return self.hooks[event_type].append(hook)

    # No explicit event type: scan the hook object for callables named
    # after known events and register each one individually.
    for name in EventType._fields:
        method = getattr(hook, name, None)
        if callable(method):
            self.register_hook(method, getattr(EventType, name))
java
/**
 * Sets this field's value from a boolean state, storing 1 for {@code true}
 * and 0 for {@code false}.
 *
 * @param state the boolean state to store
 * @param bDisplayOption display option passed through to {@code setValue}
 * @param moveMode move mode passed through to {@code setValue}
 * @return the result of {@code setValue}
 */
public int setState(boolean state, boolean bDisplayOption, int moveMode) {
    double value = 0;
    if (state)
        value = 1;
    return this.setValue(value, bDisplayOption, moveMode); // Move value to this field
}
python
def find_eq_stress(strains, stresses, tol=1e-10):
    """
    Finds stress corresponding to zero strain state in stress-strain list

    Args:
        strains (Nx3x3 array-like): array corresponding to strains
        stresses (Nx3x3 array-like): array corresponding to stresses
        tol (float): tolerance to find zero strain state
    """
    stress_array = np.array(stresses)
    strain_array = np.array(strains)

    # Select stresses whose strain tensor is numerically zero everywhere.
    is_zero_strain = np.all(abs(strain_array) < tol, axis=(1, 2))
    eq_stress = stress_array[is_zero_strain]

    if eq_stress.size == 0:
        warnings.warn("No eq state found, returning zero voigt stress")
        return Stress(np.zeros((3, 3)))

    if len(eq_stress) > 1 and not (abs(eq_stress - eq_stress[0]) < 1e-8).all():
        raise ValueError("Multiple stresses found for equilibrium strain"
                         " state, please specify equilibrium stress or "
                         " remove extraneous stresses.")
    return eq_stress[0]
java
/**
 * Creates a {@code VarBinding} for the given variable definition and value,
 * producing a "not applicable" binding when the value is NA.
 *
 * @param varDef the variable definition being bound
 * @param valueDef the value to bind
 * @return the new binding, with validity and source definition populated
 */
public static VarBinding create( IVarDef varDef, VarValueDef valueDef) {
    VarBinding binding =
        valueDef.isNA()
        ? new VarNaBinding( varDef.getPathName(), varDef.getType())
        : new VarBinding( varDef.getPathName(), varDef.getType(), valueDef.getName());

    binding.setValueValid( valueDef.isValid());
    binding.setVarDef( varDef);
    return binding;
}
java
/**
 * Writes the wrapped archive to the given output stream using the supplied
 * formatter.
 *
 * @param outputStream destination stream
 * @param formatter formatter controlling the output
 * @throws IllegalArgumentException if the underlying archive rejects the arguments
 */
@Override
public void writeTo(final OutputStream outputStream, final Formatter formatter)
        throws IllegalArgumentException {
    this.getArchive().writeTo(outputStream, formatter);
}
java
/**
 * Lists stream processors, applying the standard pre-execution hook before
 * dispatching the request.
 *
 * @param request the list request
 * @return the list result
 */
@Override
public ListStreamProcessorsResult listStreamProcessors(ListStreamProcessorsRequest request) {
    request = beforeClientExecution(request);
    return executeListStreamProcessors(request);
}
java
/**
 * Marshals the address-book ARN of the given request using the supplied
 * protocol marshaller.
 *
 * @param getAddressBookRequest the request to marshal; must not be null
 * @param protocolMarshaller the marshaller to write fields to
 * @throws SdkClientException if the input is null or marshalling fails
 */
public void marshall(GetAddressBookRequest getAddressBookRequest, ProtocolMarshaller protocolMarshaller) {
    if (getAddressBookRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(getAddressBookRequest.getAddressBookArn(), ADDRESSBOOKARN_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def call_only_once(func):
    '''
    To be used as a decorator

    @call_only_once
    def func():
        print('Calling func only this time')

    Actually, in PyDev it must be called as:
    func = call_only_once(func)
    to support older versions of Python.

    The first call runs ``func`` and returns its result; every subsequent
    call is a no-op returning None.
    '''
    import functools

    # functools.wraps preserves the wrapped function's name/docstring,
    # which the previous version silently discarded.
    @functools.wraps(func)
    def new_func(*args, **kwargs):
        if not new_func._called:
            new_func._called = True
            return func(*args, **kwargs)

    new_func._called = False
    return new_func
python
def stop(self):
    """Stop the stream if it exists and is not already stopped."""
    stream = self.stream
    if stream and stream.session.state != STATE_STOPPED:
        stream.stop()
java
/**
 * Varargs convenience for the allowed OAuth flows. Each enum value is
 * stored by its string form. If no list exists yet the converted values
 * become the list; otherwise they are APPENDED to the existing list.
 *
 * @return this request, for call chaining
 */
public UpdateUserPoolClientRequest withAllowedOAuthFlows(OAuthFlowType... allowedOAuthFlows) {
    java.util.ArrayList<String> allowedOAuthFlowsCopy = new java.util.ArrayList<String>(allowedOAuthFlows.length);
    for (OAuthFlowType value : allowedOAuthFlows) {
        allowedOAuthFlowsCopy.add(value.toString());
    }
    if (getAllowedOAuthFlows() == null) {
        setAllowedOAuthFlows(allowedOAuthFlowsCopy);
    } else {
        getAllowedOAuthFlows().addAll(allowedOAuthFlowsCopy);
    }
    return this;
}
python
def evaluate_inline(self, groups): """Evaluate inline comments on their own lines.""" # Consecutive lines with only comments with same leading whitespace # will be captured as a single block. if self.lines: if ( self.group_comments and self.line_num == self.prev_line + 1 and groups['leading_space'] == self.leading ): self.line_comments[-1][0] += '\n' + groups['line'][2:].replace('\\\n', '') else: self.line_comments.append( [groups['line'][2:].replace('\\\n', ''), self.line_num, self.current_encoding] ) self.leading = groups['leading_space'] self.prev_line = self.line_num
python
def smooth(x0, rho, gamma, axis=0):
    """
    Proximal operator for a smoothing function enforced via the discrete
    laplacian operator

    Notes
    -----
    Currently only works with matrices (2-D arrays) as input

    Parameters
    ----------
    x0 : array_like
        The starting or initial point used in the proximal update step

    rho : float
        Momentum parameter for the proximal step (larger value -> stays
        closer to x0)

    gamma : float
        A constant that weights how strongly to enforce the constraint

    Returns
    -------
    theta : array_like
        The parameter vector found after running the proximal update step
    """
    n = x0.shape[axis]

    # Tridiagonal (Laplacian-based) operator: (2 + rho/gamma) on the main
    # diagonal, -1 on the first sub- and super-diagonals.
    main_diag = (2 + rho / gamma) * np.ones(n)
    off_diag = -1 * np.ones(n)
    lap_op = spdiags([main_diag, off_diag, off_diag], [0, -1, 1], n, n, format='csc')

    # Bring the smoothing axis to the front, solve the sparse linear system,
    # then apply the same roll to the solution.
    rolled = np.rollaxis(x0, axis, 0)
    solution = spsolve(gamma * lap_op, rho * rolled)
    return np.rollaxis(solution, axis, 0)
java
/**
 * Filters the given script classes down to those that can actually be
 * instantiated (as decided by {@code isInstanceAble}).
 */
protected Set<Class<? extends S>> findInstanceAbleScript(Set<Class<? extends S>> scriptClazzs) throws InstantiationException, IllegalAccessException {
    Set<Class<? extends S>> instanceable = new HashSet<>();
    for (Class<? extends S> candidate : scriptClazzs) {
        if (!isInstanceAble(candidate)) {
            continue;
        }
        instanceable.add(candidate);
    }
    return instanceable;
}
python
def serve(args):
    """Start a server which will watch .md and .rst files for changes.
    If a md file changes, the Home Documentation is rebuilt. If a .rst
    file changes, the updated sphinx project is rebuilt

    Args:
        args (ArgumentParser): flags from the CLI
    """
    # Sever's parameters
    port = args.serve_port or PORT
    host = "0.0.0.0"

    # Current working directory
    dir_path = Path().absolute()
    web_dir = dir_path / "site"

    # Update routes
    utils.set_routes()

    # Offline mode: build the static site once and rewrite it for offline use.
    if args.offline:
        os.environ["MKINX_OFFLINE"] = "true"
        _ = subprocess.check_output("mkdocs build > /dev/null", shell=True)
        utils.make_offline()

    class MkinxHTTPHandler(SimpleHTTPRequestHandler):
        """Class routing urls (paths) to projects (resources)
        """

        def translate_path(self, path):
            # default root -> cwd
            location = str(web_dir)
            route = location

            # First registered route whose key prefixes the path wins; the
            # matched prefix is stripped from the path.
            if len(path) != 0 and path != "/":
                for key, loc in utils.get_routes():
                    if path.startswith(key):
                        location = loc
                        path = path[len(key) :]
                        break

            # Join location and path with exactly one separator.
            if location[-1] == "/" or not path or path[0] == "/":
                route = location + path
            else:
                route = location + "/" + path

            # Drop any query string before resolving on disk.
            return route.split("?")[0]

    # Serve as deamon thread
    success = False
    count = 0
    print("Waiting for server port...")
    try:
        # Retry binding until it succeeds; after ~20 failed attempts ask the
        # user whether to try the next port instead.
        while not success:
            try:
                httpd = socketserver.TCPServer((host, port), MkinxHTTPHandler)
                success = True
            except OSError:
                count += 1
            finally:
                if not success and count > 20:
                    s = "port {} seems occupied. Try with {} ? (y/n)"
                    if "y" in input(s.format(port, port + 1)):
                        port += 1
                        count = 0
                    else:
                        print("You can specify a custom port with mkinx serve -s")
                        return
                time.sleep(0.5)
    except KeyboardInterrupt:
        print("Aborting.")
        return

    httpd.allow_reuse_address = True
    print("\nServing at http://{}:{}\n".format(host, port))

    # Run the HTTP server on a daemon thread so it dies with the process.
    thread = threading.Thread(target=httpd.serve_forever)
    thread.daemon = True
    thread.start()

    # Watch for changes
    event_handler = utils.MkinxFileHandler(
        patterns=["*.rst", "*.md", "*.yml", "*.yaml"]
    )
    observer = Observer()
    observer.schedule(event_handler, path=str(dir_path), recursive=True)
    observer.start()

    # Keep the main thread alive until Ctrl-C, then shut everything down.
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
        httpd.server_close()
    observer.join()
python
async def parse_get_revoc_reg_def_response(get_revoc_ref_def_response: str) -> (str, str):
    """
    Parse a GET_REVOC_REG_DEF response to get Revocation Registry Definition in the format
    compatible with Anoncreds API.

    :param get_revoc_ref_def_response: response of GET_REVOC_REG_DEF request.
    :return: Revocation Registry Definition Id and Revocation Registry Definition json.
      {
          "id": string - ID of the Revocation Registry,
          "revocDefType": string - Revocation Registry type (only CL_ACCUM is supported for now),
          "tag": string - Unique descriptive ID of the Registry,
          "credDefId": string - ID of the corresponding CredentialDefinition,
          "value": Registry-specific data {
              "issuanceType": string - Type of Issuance(ISSUANCE_BY_DEFAULT or ISSUANCE_ON_DEMAND),
              "maxCredNum": number - Maximum number of credentials the Registry can serve.
              "tailsHash": string - Hash of tails.
              "tailsLocation": string - Location of tails file.
              "publicKeys": <public_keys> - Registry's public key.
          },
          "ver": string - version of revocation registry definition json.
      }
    """
    logger = logging.getLogger(__name__)
    logger.debug("parse_get_revoc_reg_def_response: >>> get_revoc_ref_def_response: %r", get_revoc_ref_def_response)

    # Create the native callback once and cache it on the function object so
    # every call reuses the same CFUNCTYPE instance (and it stays referenced,
    # preventing it from being garbage collected while the C layer holds it).
    if not hasattr(parse_get_revoc_reg_def_response, "cb"):
        logger.debug("parse_get_revoc_reg_def_response: Creating callback")
        parse_get_revoc_reg_def_response.cb = create_cb(CFUNCTYPE(None, c_int32, c_int32, c_char_p, c_char_p))

    # The native API expects a UTF-8 encoded C string.
    c_get_revoc_ref_def_response = c_char_p(get_revoc_ref_def_response.encode('utf-8'))

    (revoc_reg_def_id, revoc_reg_def_json) = await do_call('indy_parse_get_revoc_reg_def_response',
                                                           c_get_revoc_ref_def_response,
                                                           parse_get_revoc_reg_def_response.cb)

    # The callback delivers C byte strings; decode to Python str for callers.
    res = (revoc_reg_def_id.decode(), revoc_reg_def_json.decode())

    logger.debug("parse_get_revoc_reg_def_response: <<< res: %r", res)
    return res
java
/**
 * Load-time entry point: given the raw bytes of a class about to be defined,
 * decide whether (and how) to transform them.  The result is either the
 * (possibly plugin-modified) original bytes, a reflection-rewritten system
 * class, or a reloadable variant registered with the classloader's
 * {@code TypeRegistry}.
 *
 * @param classLoader      the defining classloader (null for system classes)
 * @param slashedClassName the class name in internal slashed form
 * @param protectionDomain used to locate the on-disk resource to watch
 * @param bytes            the candidate class bytes
 * @return the bytes to actually define
 */
public byte[] preProcess(ClassLoader classLoader, String slashedClassName, ProtectionDomain protectionDomain, byte[] bytes) {
    if (disabled) {
        return bytes;
    }
    // TODO need configurable debug here, ability to dump any code before/after
    // Give every load-time instrumentation plugin a chance to modify the bytes first.
    for (Plugin plugin : getGlobalPlugins()) {
        if (plugin instanceof LoadtimeInstrumentationPlugin) {
            LoadtimeInstrumentationPlugin loadtimeInstrumentationPlugin = (LoadtimeInstrumentationPlugin) plugin;
            if (loadtimeInstrumentationPlugin.accept(slashedClassName, classLoader, protectionDomain, bytes)) {
                bytes = loadtimeInstrumentationPlugin.modify(slashedClassName, classLoader, bytes);
            }
        }
    }
    tryToEnsureSystemClassesInitialized(slashedClassName);
    TypeRegistry typeRegistry = TypeRegistry.getTypeRegistryFor(classLoader);
    if (GlobalConfiguration.verboseMode && log.isLoggable(Level.INFO)) {
        logPreProcess(classLoader, slashedClassName, typeRegistry);
    }
    if (typeRegistry == null) {
        // A null type registry indicates nothing is being made reloadable for the classloader
        if (classLoader == null && slashedClassName != null) {
            // Indicates loading of a system class
            if (systemClassesContainingReflection.contains(slashedClassName)) {
                try {
                    // TODO [perf] why are we not using the cache here, is it because the list is so short?
                    RewriteResult rr = SystemClassReflectionRewriter.rewrite(slashedClassName, bytes);
                    if (GlobalConfiguration.verboseMode && log.isLoggable(Level.INFO)) {
                        log.info("System class rewritten: name=" + slashedClassName + " rewrite summary=" + rr.summarize());
                    }
                    systemClassesRequiringInitialization.put(slashedClassName, rr.bits);
                    return rr.bytes;
                } catch (Exception re) {
                    re.printStackTrace();
                }
            }
            else if (slashedClassName.equals("java/lang/invoke/InnerClassLambdaMetafactory")) {
                bytes = Java8.enhanceInnerClassLambdaMetaFactory(bytes);
                return bytes;
            }
            else if ((GlobalConfiguration.investigateSystemClassReflection || GlobalConfiguration.rewriteAllSystemClasses)
                    && SystemClassReflectionInvestigator.investigate(slashedClassName, bytes, GlobalConfiguration.investigateSystemClassReflection) > 0) {
                // This block can help when you suspect there is a system class using reflection and that
                // class isn't on the 'shortlist' (in systemClassesContainingReflection). Basically turn on the
                // options to trigger this investigation then add them to the shortlist if it looks like they need rewriting.
                RewriteResult rr = SystemClassReflectionRewriter.rewrite(slashedClassName, bytes);
                if (GlobalConfiguration.rewriteAllSystemClasses) {
                    systemClassesRequiringInitialization.put(slashedClassName, rr.bits);
                    return rr.bytes;
                }
                else {
                    System.err.println("Type " + slashedClassName + " rewrite summary: " + rr.summarize());
                    return bytes;
                }
            }
        }
        return bytes;
    }

    // What happens here? The aim is to determine if the type should be made reloadable.
    // 1. If NO, but something in this classloader might be, then rewrite the call sites.
    // 2. If NO, and nothing in this classloader might be, return the original bytes.
    // 3. If YES, make the type reloadable (including rewriting call sites)
    ReloadableTypeNameDecision isReloadableTypeName = typeRegistry.isReloadableTypeName(slashedClassName, protectionDomain, bytes);
    if (isReloadableTypeName.isReloadable && GlobalConfiguration.explainMode && log.isLoggable(Level.INFO)) {
        log.info("[explanation] Based on the name, type " + slashedClassName + " is considered to be reloadable");
    }
    // NOTE: logging here causes a ClassCircularity problem when reporting on e.g.
    // org/codehaus/groovy/grails/cli/logging/GrailsConsolePrintStream - do not add it back.
    if (isReloadableTypeName.isReloadable) {
        if (!firstReloadableTypeHit) {
            firstReloadableTypeHit = true;
            // TODO move into the ctor for ReloadableType so that it can't block loading
            tryToEnsureSystemClassesInitialized(slashedClassName);
        }
        if (GlobalConfiguration.isRuntimeLogging && log.isLoggable(Level.INFO)) {
            log.info("processing " + slashedClassName + " as a reloadable type");
        }
        try {
            // TODO decide one way or the other on slashed/dotted from preprocessor to infrastructure
            String dottedClassName = slashedClassName.replace('/', '.');
            String watchPath = getWatchPathFromProtectionDomain(protectionDomain, slashedClassName);
            if (watchPath == null) {
                // No watchable on-disk resource was found (e.g. the type came from a jar).
                // For a CGLIB generated type, we may still need to make the type reloadable. For example:
                // type: com/vmware/rabbit/ApplicationContext$$EnhancerByCGLIB$$512eb60c
                // codesource determined to be a cglib-nodep jar inside a webapp.
                // But if the type 'com/vmware/rabbit/ApplicationContext' is reloadable, then this should be too.
                boolean makeReloadableAnyway = false;

                // Case 1: CGLIB enhancer subclass - reloadable if the enhanced type is.
                int cglibIndex = slashedClassName.indexOf("$$EnhancerBy");
                if (cglibIndex != -1) {
                    String originalType = slashedClassName.substring(0, cglibIndex);
                    if (GlobalConfiguration.isRuntimeLogging && log.isLoggable(Level.INFO)) {
                        log.info("Appears to be a CGLIB type, checking if type " + originalType + " is reloadable");
                    }
                    // Walk the registry chain (this loader and its parents) looking for the original type.
                    TypeRegistry currentRegistry = typeRegistry;
                    while (currentRegistry != null) {
                        ReloadableType originalReloadable = currentRegistry.getReloadableType(originalType);
                        if (originalReloadable != null) {
                            makeReloadableAnyway = true;
                            break;
                        }
                        currentRegistry = currentRegistry.getParentRegistry();
                    }
                }

                // Case 2: CGLIB FastClass companion - same rule as above.
                int cglibIndex2 = makeReloadableAnyway ? -1 : slashedClassName.indexOf("$$FastClassByCGLIB");
                if (cglibIndex2 != -1) {
                    String originalType = slashedClassName.substring(0, cglibIndex2);
                    if (GlobalConfiguration.isRuntimeLogging && log.isLoggable(Level.INFO)) {
                        log.info("Appears to be a CGLIB FastClass type, checking if type " + originalType + " is reloadable");
                    }
                    TypeRegistry currentRegistry = typeRegistry;
                    while (currentRegistry != null) {
                        ReloadableType originalReloadable = currentRegistry.getReloadableType(originalType);
                        if (originalReloadable != null) {
                            makeReloadableAnyway = true;
                            break;
                        }
                        currentRegistry = currentRegistry.getParentRegistry();
                    }
                }

                // Case 3: JDK dynamic proxy ($Proxy at start of name or after a package separator)
                // - reloadable if any implemented interface is reloadable.
                int proxyIndex = makeReloadableAnyway ? -1 : slashedClassName.indexOf("$Proxy");
                if (proxyIndex == 0 || (proxyIndex > 0 && slashedClassName.charAt(proxyIndex - 1) == '/')) {
                    // Determine if the interfaces being implemented are reloadable
                    String[] interfacesImplemented = Utils.discoverInterfaces(bytes);
                    if (interfacesImplemented != null) {
                        for (int i = 0; i < interfacesImplemented.length; i++) {
                            TypeRegistry currentRegistry = typeRegistry;
                            while (currentRegistry != null) {
                                ReloadableType originalReloadable = currentRegistry.getReloadableType(
                                        interfacesImplemented[i]);
                                if (originalReloadable != null) {
                                    makeReloadableAnyway = true;
                                    break;
                                }
                                currentRegistry = currentRegistry.getParentRegistry();
                            }
                        }
                    }
                }

                // GRAILS-8098
                // The scaffolding loader will load stuff in this innerloader - if we don't make the types in it
                // reloadable then they will clash with the original (ordinary version) controller loaded by
                // URLClassLoader (e.g. in an istcheck for some type we will not find it in the InnerClassLoader,
                // but find it in the super classloader, and it'll be the wrong one).
                // I wonder if the more general rule should be that all classloaders below one loading
                // reloadable stuff should also load reloadable stuff.
                if (!makeReloadableAnyway && classLoader.getClass().getName().endsWith("GroovyClassLoader$InnerLoader")) {
                    makeReloadableAnyway = true;
                }

                if (!makeReloadableAnyway) {
                    // can't watch it for updates (it comes from a jar perhaps) so just rewrite call sites and return.
                    // Not planning to watch this class so ordinarily do not make it reloadable. UNLESS the user
                    // is specifying that it needs to be. This may happen with split packages - some classes in a jar
                    // and some on disk. During type rewriting the top most reloadable types get fields inserted -
                    // when split across jars we get confused by split packages. If we go by name (as the code does
                    // right now) then we think we aren't the top most reloadable type but we don't realize that the
                    // type above us comes from a jar.
                    // (TODO: optimization, make a smarter isTopMostReloadableType test that
                    // allows us to keep the jar loaded types as non reloadable).
                    if (GlobalConfiguration.verboseMode) {
                        Log.log("Cannot watch " + slashedClassName + ": not making it reloadable");
                    }
                    if (needsClientSideRewriting(slashedClassName)) {
                        bytes = typeRegistry.methodCallRewriteUseCacheIfAvailable(slashedClassName, bytes);
                    }
                    return bytes;
                }
            }
            ReloadableType rtype = typeRegistry.addType(dottedClassName, bytes);
            if (rtype == null && GlobalConfiguration.callsideRewritingOn) {
                // it is not a candidate for being made reloadable (maybe it is an annotation type)
                // but we still need to rewrite call sites.
                bytes = typeRegistry.methodCallRewrite(bytes);
            }
            else {
                if (GlobalConfiguration.fileSystemMonitoring && watchPath != null) {
                    typeRegistry.monitorForUpdates(rtype, watchPath);
                }
                return rtype.bytesLoaded;
            }
        } catch (RuntimeException re) {
            log.throwing("SpringLoadedPreProcessor", "preProcess", re);
            throw re;
        }
    }
    else {
        try {
            // TODO what happens across classloader boundaries? (for regular code and reflective calls)
            // Skipping the CallSiteClassLoader here because types from there will already have been dealt
            // with due to GroovyPlugin class that intercepts define in that infrastructure
            if (needsClientSideRewriting(slashedClassName)
                    && (classLoader == null || !classLoader.getClass().getName().equals(
                            "org.codehaus.groovy.runtime.callsite.CallSiteClassLoader"))) {
                bytes = typeRegistry.methodCallRewriteUseCacheIfAvailable(slashedClassName, bytes);
            }
        } catch (Throwable t) {
            log.log(Level.SEVERE, "Unexpected problem transforming call sites", t);
        }
    }
    return bytes;
}
python
def to_string(self, obj):
    """
    Converts the given resource to a string representation and returns it.
    """
    out_stream = NativeIO()
    self.to_stream(obj, out_stream)
    rendered = out_stream.getvalue()
    # Decode the buffered bytes using this representer's configured encoding.
    return text_(rendered, encoding=self.encoding)
java
/**
 * Returns true when {@code a} has the same internal type as this matrix and
 * every element matches within the given tolerance (as judged by the ops
 * implementation). Differently-typed matrices are never identical.
 *
 * @param a   matrix to compare against
 * @param tol element-wise tolerance
 */
public boolean isIdentical(T a, double tol) {
    if( a.getType() != getType() )
        return false;
    return ops.isIdentical(mat,a.mat,tol);
}
java
/**
 * Marshalls the given {@code DisableDomainAutoRenewRequest} through the
 * supplied {@code ProtocolMarshaller}. Only the domain name is carried by
 * this request.
 *
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(DisableDomainAutoRenewRequest disableDomainAutoRenewRequest, ProtocolMarshaller protocolMarshaller) {
    if (disableDomainAutoRenewRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(disableDomainAutoRenewRequest.getDomainName(), DOMAINNAME_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * Coerces an arbitrary runtime value to a primitive int.
 *
 * Conversion rules, checked in this order:
 *  - Number: narrowed via {@code intValue()}
 *  - Numeric: its own {@code asInteger()}
 *  - Boolean: 1 for true, 0 for false
 *  - CharSequence: parsed with {@code Integer.parseInt}
 *  - Date: epoch-millisecond timestamp truncated to whole seconds
 *
 * @throws IncompatibleValueException if a string cannot be parsed, or the
 *         value is of an unsupported type
 */
public static int asInteger(Object value) {
    if (value instanceof Number) {
        return ((Number) value).intValue();
    } else if (value instanceof Numeric) {
        return ((Numeric) value).asInteger();
    } else if (value instanceof Boolean) {
        return ((Boolean) value) ? 1 : 0;
    } else if (value instanceof CharSequence) {
        try {
            return Integer.parseInt(value.toString());
        } catch (NumberFormatException nfe) {
            throw new IncompatibleValueException(
                "Unable to parse string \"" + Strings.escape(value.toString()) + "\" to an int", nfe);
        }
    } else if (value instanceof Date) {
        // Convert date timestamp to seconds since epoch.
        return (int) (((Date) value).getTime() / 1000);
    }
    throw new IncompatibleValueException("Unable to convert " + value.getClass().getSimpleName() + " to an int");
}
java
/**
 * Adds hard-coded location (California, Mexico) and language (English,
 * Spanish) targeting criteria to the given campaign, then prints each
 * criterion the API reports as added.
 *
 * @param campaign        the campaign to target (its ID is attached to each criterion)
 * @param adWordsServices the services factory used to obtain the criterion service
 * @param session         the session for the API calls
 * @throws ApiException    if the API request failed
 * @throws RemoteException if a transport error occurred
 */
private static void setCampaignTargetingCriteria(
    Campaign campaign, AdWordsServicesInterface adWordsServices, AdWordsSession session)
    throws ApiException, RemoteException {
  // Get the CampaignCriterionService.
  CampaignCriterionServiceInterface campaignCriterionService =
      adWordsServices.get(session, CampaignCriterionServiceInterface.class);

  // Create locations. The IDs can be found in the documentation or
  // retrieved with the LocationCriterionService.
  Location california = new Location();
  california.setId(21137L);

  Location mexico = new Location();
  mexico.setId(2484L);

  // Create languages. The IDs can be found in the documentation or
  // retrieved with the ConstantDataService.
  Language english = new Language();
  english.setId(1000L);

  Language spanish = new Language();
  spanish.setId(1003L);

  List<Criterion> criteria = new ArrayList<>(Arrays.asList(california, mexico, english, spanish));

  // Create operations to add each of the criteria above.
  List<CampaignCriterionOperation> operations = new ArrayList<>();
  for (Criterion criterion : criteria) {
    CampaignCriterionOperation operation = new CampaignCriterionOperation();

    CampaignCriterion campaignCriterion = new CampaignCriterion();
    campaignCriterion.setCampaignId(campaign.getId());
    campaignCriterion.setCriterion(criterion);
    operation.setOperand(campaignCriterion);

    operation.setOperator(Operator.ADD);
    operations.add(operation);
  }

  // Set the campaign targets.
  CampaignCriterionReturnValue returnValue =
      campaignCriterionService.mutate(operations.toArray(new CampaignCriterionOperation[operations.size()]));

  if (returnValue != null && returnValue.getValue() != null) {
    // Display added campaign targets.
    for (CampaignCriterion campaignCriterion : returnValue.getValue()) {
      System.out.printf(
          "Campaign criteria of type '%s' and ID %d was added.%n",
          campaignCriterion.getCriterion().getCriterionType(),
          campaignCriterion.getCriterion().getId());
    }
  }
}
python
def getAllChannelsAsPolygons(self, maptype=None):
    """Return one Polygon per channel listed in ``self.origin``.

    Channel ids are taken from the third column of ``self.origin`` and each
    is converted via :meth:`getChannelAsPolygon`.  If a projection
    (``maptype``) is supplied it is forwarded to that conversion.
    """
    channel_ids = self.origin[:, 2]
    return [self.getChannelAsPolygon(channel, maptype) for channel in channel_ids]
python
def COH(self):
    """Coherence.

    .. math:: \mathrm{COH}_{ij}(f) = \\frac{S_{ij}(f)}
                                     {\sqrt{S_{ii}(f) S_{jj}(f)}}

    References
    ----------
    P. L. Nunez, R. Srinivasan, A. F. Westdorp, R. S. Wijesinghe, D. M.
    Tucker, R. B. Silverstein, P. J. Cadusch. EEG coherency I: statistics,
    reference electrode, volume conduction, Laplacians, cortical imaging,
    and interpretation at multiple scales. Electroenceph. Clin.
    Neurophysiol. 103(5): 499-515, 1997.
    """
    S = self.S()
    # TODO: can we do that more efficiently?
    # The einsum pairs the diagonal (auto-spectral) entries of S and
    # conj(S), producing S_ii * conj(S_jj) at position (i, j, ...), which is
    # the denominator of the coherence formula above.
    return S / np.sqrt(np.einsum('ii..., jj... ->ij...', S, S.conj()))
java
/**
 * Copies the 3x3 matrix {@code mat} into the upper-left corner of this 4x4
 * matrix, sets the remaining row/column to identity values, and marks this
 * matrix as affine.
 *
 * @param mat the 3x3 source matrix
 * @return this matrix, for call chaining
 */
public Matrix4d set(Matrix3dc mat) {
    m00 = mat.m00();
    m01 = mat.m01();
    m02 = mat.m02();
    m03 = 0.0;
    m10 = mat.m10();
    m11 = mat.m11();
    m12 = mat.m12();
    m13 = 0.0;
    m20 = mat.m20();
    m21 = mat.m21();
    m22 = mat.m22();
    m23 = 0.0;
    m30 = 0.0;
    m31 = 0.0;
    m32 = 0.0;
    m33 = 1.0;
    properties = PROPERTY_AFFINE;
    return this;
}
python
def AnsiText(text, command_list=None, reset=True):
    """Wrap text in ANSI/SGR escape codes.

    Args:
      text: String to encase in sgr escape sequence.
      command_list: List of strings, each string represents an sgr value.
        e.g. 'fg_blue', 'bg_yellow'. Defaults to ['reset'].
      reset: Boolean, if to add a reset sequence to the suffix of the text.

    Returns:
      String with sgr characters added.
    """
    commands = command_list if command_list else ['reset']
    prefix = _AnsiCmd(commands)
    if reset:
        return '%s%s%s' % (prefix, text, _AnsiCmd(['reset']))
    return '%s%s' % (prefix, text)
python
def add_job(session, command_line, name = 'job', dependencies = None, array = None, exec_dir=None, log_dir = None, stop_on_failure = False, **kwargs):
    """Helper function to create a job, add the dependencies and the array jobs.

    Parameters
    ----------
    session : database session used to persist the job and related rows
    command_line : command line stored with the job
    name : str, display name of the job
    dependencies : list of job ids this job depends on (default: none)
    array : tuple ``(start, stop, step)`` describing array jobs, or None
    exec_dir, log_dir : directories stored with the job
    stop_on_failure : bool, stored with the job
    kwargs : extra keyword arguments forwarded to the ``Job`` constructor

    Returns
    -------
    The newly created and committed ``Job`` instance.
    """
    # Use None instead of a shared mutable [] default (classic Python pitfall).
    if dependencies is None:
        dependencies = []

    job = Job(command_line=command_line, name=name, exec_dir=exec_dir, log_dir=log_dir,
              array_string=array, stop_on_failure=stop_on_failure, kwargs=kwargs)

    session.add(job)
    session.flush()
    session.refresh(job)

    # by default id and unique id are identical, but the id might be overwritten later on
    job.id = job.unique

    for d in dependencies:
        if d == job.unique:
            # logger.warn is a deprecated alias; use warning().
            logger.warning("Adding self-dependency of job %d is not allowed" % d)
            continue
        depending = list(session.query(Job).filter(Job.unique == d))
        if depending:
            session.add(JobDependence(job.unique, depending[0].unique))
        else:
            logger.warning("Could not find dependent job with id %d in database" % d)

    if array:
        (start, stop, step) = array
        # add array jobs (stop is inclusive)
        for i in range(start, stop+1, step):
            session.add(ArrayJob(i, job.unique))

    session.commit()

    return job
java
/**
 * Resolves the backup file for the given CSS resource path, located under
 * the sprite backup directory inside the temp dir. When the working
 * directory lives inside the web app, the path is resolved through the
 * servlet context's real path; otherwise it is used as-is.
 *
 * @param path the CSS resource path
 * @return the backup file (existence is not checked here)
 */
public File getBackupFile(String path) {

    String rootDir = tempDir + JawrConstant.SPRITE_BACKUP_GENERATED_CSS_DIR;
    String fPath = null;
    if (jawrConfig.isWorkingDirectoryInWebApp()) {
        fPath = jawrConfig.getContext().getRealPath(rootDir + getCssPath(path));
    } else {
        fPath = rootDir + getCssPath(path);
    }
    return new File(fPath);
}
java
/**
 * Fetches the commerce price entry matching the given price list ID and CP
 * instance UUID by delegating to the persistence layer;
 * {@code retrieveFromCache} controls whether the finder cache may be used.
 */
public static CommercePriceEntry fetchByC_C(long commercePriceListId,
    String CPInstanceUuid, boolean retrieveFromCache) {
    return getPersistence()
               .fetchByC_C(commercePriceListId, CPInstanceUuid,
        retrieveFromCache);
}
python
def get_api(version: str, ui_version: str=None) -> API_1:
    """Get a versioned interface matching the given version and ui_version.

    version is a string in the form "1.0.2".  When ui_version is omitted or
    falsy, the default spec "~1.0" is used.
    """
    resolved_ui_version = ui_version or "~1.0"
    return _get_api_with_app(version, resolved_ui_version, ApplicationModule.app)
java
/**
 * Returns the part of {@code sStr} after the last occurrence of
 * {@code cSearch}. Delegates to {@code _getFromLast} with {@code false};
 * per the method name this presumably selects the "exclusive" variant
 * (matched character not included) — confirm against {@code _getFromLast}.
 *
 * @param sStr    the string to search, may be {@code null}
 * @param cSearch the character to search for
 */
@Nullable
public static String getFromLastExcl (@Nullable final String sStr, final char cSearch)
{
    return _getFromLast (sStr, cSearch, false);
}
python
def _set_class_(self, v, load=False):
    """
    Setter method for class_, mapped from YANG variable /logging/auditlog/class (list)
    If this variable is read-only (config: false) in the source YANG file, then _set_class_ is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_class_() directly.
    """
    # NOTE(review): this looks like pyangbind-generated code — keep edits to
    # comments only and regenerate from the YANG model for behavioral changes.
    # Values carrying a _utype hook are normalized through it first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the value in the generated YANG list type; a type mismatch
        # surfaces as TypeError/ValueError and is re-raised with context below.
        t = YANGDynClass(v,base=YANGListType("class_",class_.class_, yang_name="class", rest_name="class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='class', extensions={u'tailf-common': {u'info': u'Configure auditlog classes', u'cli-suppress-mode': None, u'callpoint': u'RASAuditCallPoint'}}), is_container='list', yang_name="class", rest_name="class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure auditlog classes', u'cli-suppress-mode': None, u'callpoint': u'RASAuditCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-ras', defining_module='brocade-ras', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """class_ must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("class_",class_.class_, yang_name="class", rest_name="class", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='class', extensions={u'tailf-common': {u'info': u'Configure auditlog classes', u'cli-suppress-mode': None, u'callpoint': u'RASAuditCallPoint'}}), is_container='list', yang_name="class", rest_name="class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure auditlog classes', u'cli-suppress-mode': None, u'callpoint': u'RASAuditCallPoint'}}, namespace='urn:brocade.com:mgmt:brocade-ras', defining_module='brocade-ras', yang_type='list', is_config=True)""",
        })

    self.__class_ = t
    # Propagate the change through the object's change hook, when present.
    if hasattr(self, '_set'):
        self._set()
java
/**
 * Varargs convenience that APPENDS the given IDs to the current list
 * (creating the backing list first when necessary).
 *
 * @return this object, for call chaining
 */
public ReservedInstancesModification withReservedInstancesIds(ReservedInstancesId... reservedInstancesIds) {
    if (this.reservedInstancesIds == null) {
        setReservedInstancesIds(new com.amazonaws.internal.SdkInternalList<ReservedInstancesId>(reservedInstancesIds.length));
    }
    for (ReservedInstancesId ele : reservedInstancesIds) {
        this.reservedInstancesIds.add(ele);
    }
    return this;
}
python
def get_key_pair(self, keyname):
    """
    Convenience method to retrieve a specific keypair (KeyPair).

    :type keyname: string
    :param keyname: the name of the KeyPair to retrieve

    :rtype: :class:`boto.ec2.keypair.KeyPair`
    :return: The KeyPair specified or None if it is not found
    """
    try:
        return self.get_all_key_pairs(keynames=[keyname])[0]
    # Python 3 exception syntax (the previous "except X, e" form is a
    # syntax error on Python 3).
    except self.ResponseError as e:
        if e.code == 'InvalidKeyPair.NotFound':
            # A missing key pair is an expected outcome, not an error.
            return None
        raise
python
def from_dict(cls, d):
    """Instantiate a SemI from a dictionary representation."""
    def _loader(item_cls):
        # Build a (name, spec) -> (name, instance) converter for item_cls.
        def _load(pair):
            name, spec = pair
            return (name, item_cls.from_dict(spec))
        return _load
    # Keep the lazy map() form so the constructor receives iterators, just
    # as before.
    return cls(
        variables=map(_loader(Variable), d.get('variables', {}).items()),
        properties=map(_loader(Property), d.get('properties', {}).items()),
        roles=map(_loader(Role), d.get('roles', {}).items()),
        predicates=map(_loader(Predicate), d.get('predicates', {}).items())
    )
python
def extend(self, other):
    """ extend signal with samples from another signal

    Parameters
    ----------
    other : Signal

    Returns
    -------
    signal : Signal
        new extended *Signal*
    """
    if len(self.timestamps):
        last_stamp = self.timestamps[-1]
    else:
        last_stamp = 0
    if len(other):
        other_first_sample = other.timestamps[0]
        # If the other signal's time base starts at or before our last
        # timestamp, shift it by our last timestamp; otherwise use it as-is.
        if last_stamp >= other_first_sample:
            timestamps = other.timestamps + last_stamp
        else:
            timestamps = other.timestamps

        # Merge invalidation bits, padding the side that has none with
        # False (= valid) so lengths match the concatenated samples.
        if self.invalidation_bits is None and other.invalidation_bits is None:
            invalidation_bits = None
        elif self.invalidation_bits is None and other.invalidation_bits is not None:
            invalidation_bits = np.concatenate(
                (np.zeros(len(self), dtype=bool), other.invalidation_bits)
            )
        elif self.invalidation_bits is not None and other.invalidation_bits is None:
            invalidation_bits = np.concatenate(
                (self.invalidation_bits, np.zeros(len(other), dtype=bool))
            )
        else:
            invalidation_bits = np.append(
                self.invalidation_bits, other.invalidation_bits
            )

        # Build the combined signal; all metadata (unit, name, conversion,
        # source, ...) is carried over from this signal, not from `other`.
        result = Signal(
            np.append(self.samples, other.samples, axis=0),
            np.append(self.timestamps, timestamps),
            self.unit,
            self.name,
            self.conversion,
            self.comment,
            self.raw,
            self.master_metadata,
            self.display_name,
            self.attachment,
            self.source,
            self.bit_count,
            self.stream_sync,
            invalidation_bits=invalidation_bits,
            encoding=self.encoding,
        )
    else:
        # Nothing to append: return self itself (not a copy).
        result = self

    return result
java
/**
 * Builds one element for each annotation on {@code element} for which
 * {@code create} yields a present Optional; annotations whose factory
 * returns empty are skipped.
 */
public List<E> createAll(AnnotatedElement element) {
    List<E> result = new ArrayList<>();
    for (Annotation annotation : element.getAnnotations()) {
        create(annotation).ifPresent(result::add);
    }
    return result;
}
python
def create_job_id(self, data):
    """
    Create a new job id and reference (refs/aetros/job/<id>) by creating a new commit with empty tree. That
    root commit is the actual job id. A reference is then created to the newest (head) commit of
    this commit history. The reference will always be updated once a new commit is added.
    """
    # Record the job definition first so it becomes part of the root tree.
    self.add_file('aetros/job.json', simplejson.dumps(data, indent=4))
    tree_id = self.write_tree()

    # The hash of this root commit IS the job id.
    self.job_id = self.command_exec(['commit-tree', '-m', "JOB_CREATED", tree_id])[0].decode('utf-8').strip()

    out, code, err = self.command_exec(['show-ref', self.ref_head], allowed_to_fail=True)
    if not code:
        # show-ref exited 0 -> the ref already exists, i.e. an identical
        # experiment was created before (same tree => same commit hash).
        self.logger.warning("Generated job id already exists, because exact same experiment values given. Ref " + self.ref_head)

    self.command_exec(['update-ref', self.ref_head, self.job_id])

    # make sure we have checkedout all files we have added until now. Important for simple models, so we have the
    # actual model.py and dataset scripts.
    if not os.path.exists(self.work_tree):
        os.makedirs(self.work_tree)

    # updates index and working tree
    # '--', '.' is important to not update HEAD
    self.command_exec(['--work-tree', self.work_tree, 'checkout', self.ref_head, '--', '.'])

    # every caller needs to make sure to call git.push
    return self.job_id
java
/**
 * Drops all cached results up to and including {@code sequence}, advancing
 * the low-water mark as each entry is removed. A sequence at or below the
 * current low-water mark is a no-op.
 */
public void clearResults(long sequence) {
    while (commandLowWaterMark < sequence) {
        commandLowWaterMark++;
        results.remove(commandLowWaterMark);
    }
}
java
/**
 * Parses the agent argument string into {@code Properties} by turning each
 * separator into a newline and loading the result via
 * {@code Properties.load}.
 *
 * NOTE(review): {@code separator} is passed to {@code String.replaceAll}
 * and is therefore interpreted as a regular expression — confirm callers
 * never pass a separator containing regex metacharacters.
 *
 * @throws RuntimeException if the string cannot be loaded as properties
 */
private static Properties parseArguments(String agentArgument, String separator) {
    Properties p = new Properties();
    try {
        String argumentAsLines = agentArgument.replaceAll(separator, "\n");
        p.load(new ByteArrayInputStream(argumentAsLines.getBytes()));
    } catch (IOException e) {
        String s = "Could not load arguments as properties";
        throw new RuntimeException(s, e);
    }
    return p;
}
python
def availability_set_absent(name, resource_group, connection_auth=None):
    '''
    .. versionadded:: 2019.2.0

    Ensure an availability set does not exist in a resource group.

    :param name:
        Name of the availability set.

    :param resource_group:
        Name of the resource group containing the availability set.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in
        connecting to the Azure Resource Manager API.
    '''
    ret = {'name': name, 'result': False, 'comment': '', 'changes': {}}

    if not isinstance(connection_auth, dict):
        ret['comment'] = 'Connection information must be specified via connection_auth dictionary!'
        return ret

    existing = __salt__['azurearm_compute.availability_set_get'](
        name,
        resource_group,
        azurearm_log_level='info',
        **connection_auth
    )

    # A lookup error means the availability set is already gone.
    if 'error' in existing:
        ret['result'] = True
        ret['comment'] = 'Availability set {0} was not found.'.format(name)
        return ret

    # Test mode: report what would happen without touching the resource.
    if __opts__['test']:
        ret['comment'] = 'Availability set {0} would be deleted.'.format(name)
        ret['result'] = None
        ret['changes'] = {
            'old': existing,
            'new': {},
        }
        return ret

    if __salt__['azurearm_compute.availability_set_delete'](name, resource_group, **connection_auth):
        ret['result'] = True
        ret['comment'] = 'Availability set {0} has been deleted.'.format(name)
        ret['changes'] = {
            'old': existing,
            'new': {}
        }
        return ret

    ret['comment'] = 'Failed to delete availability set {0}!'.format(name)
    return ret
java
/**
 * Returns a dry-run variant of this request: the marshalled request with
 * the {@code DryRun} parameter forced to {@code true}, so the service only
 * validates permissions without performing the operation.
 */
@Override
public Request<DisassociateTransitGatewayRouteTableRequest> getDryRunRequest() {
    final Request<DisassociateTransitGatewayRouteTableRequest> dryRunRequest =
            new DisassociateTransitGatewayRouteTableRequestMarshaller().marshall(this);
    dryRunRequest.addParameter("DryRun", Boolean.toString(true));
    return dryRunRequest;
}
python
def _from_python_type(self, obj, field, pytype):
    """Get schema definition from python type."""
    schema = {'title': field.attribute or field.name}
    schema.update(TYPE_MAP[pytype])

    if field.dump_only:
        schema['readonly'] = True

    if field.default is not missing:
        schema['default'] = field.default

    # NOTE: doubled up to maintain backwards compatibility
    merged = field.metadata.get('metadata', {})
    merged.update(field.metadata)

    for meta_key, meta_val in merged.items():
        if meta_key == 'metadata':
            continue
        schema[meta_key] = meta_val

    if isinstance(field, fields.List):
        schema['items'] = self._get_schema_for_field(obj, field.container)

    return schema
python
def SRLS(anchors, w, r2, rescale=False, z=None, print_out=False): '''Squared range least squares (SRLS) Algorithm written by A.Beck, P.Stoica in "Approximate and Exact solutions of Source Localization Problems". :param anchors: anchor points (Nxd) :param w: weights for the measurements (Nx1) :param r2: squared distances from anchors to point x. (Nx1) :param rescale: Optional parameter. When set to True, the algorithm will also identify if there is a global scaling of the measurements. Such a situation arise for example when the measurement units of the distance is unknown and different from that of the anchors locations (e.g. anchors are in meters, distance in centimeters). :param z: Optional parameter. Use to fix the z-coordinate of localized point. :param print_out: Optional parameter, prints extra information. :return: estimated position of point x. ''' def y_hat(_lambda): lhs = ATA + _lambda * D assert A.shape[0] == b.shape[0] assert A.shape[1] == f.shape[0], 'A {}, f {}'.format(A.shape, f.shape) rhs = (np.dot(A.T, b) - _lambda * f).reshape((-1,)) assert lhs.shape[0] == rhs.shape[0], 'lhs {}, rhs {}'.format( lhs.shape, rhs.shape) try: return np.linalg.solve(lhs, rhs) except: return np.zeros((lhs.shape[1],)) def phi(_lambda): yhat = y_hat(_lambda).reshape((-1, 1)) sol = np.dot(yhat.T, np.dot(D, yhat)) + 2 * np.dot(f.T, yhat) return sol.flatten() def phi_prime(_lambda): # TODO: test this. B = np.linalg.inv(ATA + _lambda * D) C = A.T.dot(b) - _lambda*f y_prime = -B.dot(D.dot(B.dot(C)) - f) y = y_hat(_lambda) return 2*y.T.dot(D).dot(y_prime) + 2*f.T.dot(y_prime) from scipy import optimize from scipy.linalg import sqrtm n, d = anchors.shape assert r2.shape[1] == 1 and r2.shape[0] == n, 'r2 has to be of shape Nx1' assert w.shape[1] == 1 and w.shape[0] == n, 'w has to be of shape Nx1' if z is not None: assert d == 3, 'Dimension of problem has to be 3 for fixing z.' 
if rescale and z is not None: raise NotImplementedError('Cannot run rescale for fixed z.') if rescale and n < d + 2: raise ValueError( 'A minimum of d + 2 ranges are necessary for rescaled ranging.') elif n < d + 1 and z is None: raise ValueError( 'A minimum of d + 1 ranges are necessary for ranging.') elif n < d: raise ValueError( 'A minimum of d ranges are necessary for ranging.') Sigma = np.diagflat(np.power(w, 0.5)) if rescale: A = np.c_[-2 * anchors, np.ones((n, 1)), -r2] else: if z is None: A = np.c_[-2 * anchors, np.ones((n, 1))] else: A = np.c_[-2 * anchors[:, :2], np.ones((n, 1))] A = Sigma.dot(A) if rescale: b = - np.power(np.linalg.norm(anchors, axis=1), 2).reshape(r2.shape) else: b = r2 - np.power(np.linalg.norm(anchors, axis=1), 2).reshape(r2.shape) if z is not None: b = b + 2 * anchors[:, 2].reshape((-1, 1)) * z - z**2 b = Sigma.dot(b) ATA = np.dot(A.T, A) if rescale: D = np.zeros((d + 2, d + 2)) D[:d, :d] = np.eye(d) else: if z is None: D = np.zeros((d + 1, d + 1)) else: D = np.zeros((d, d)) D[:-1, :-1] = np.eye(D.shape[0]-1) if rescale: f = np.c_[np.zeros((1, d)), -0.5, 0.].T elif z is None: f = np.c_[np.zeros((1, d)), -0.5].T else: f = np.c_[np.zeros((1, 2)), -0.5].T eig = np.sort(np.real(eigvalsh(a=D, b=ATA))) if (print_out): print('ATA:', ATA) print('rank:', np.linalg.matrix_rank(A)) print('eigvals:', eigvals(ATA)) print('condition number:', np.linalg.cond(ATA)) print('generalized eigenvalues:', eig) #eps = 0.01 if eig[-1] > 1e-10: lower_bound = - 1.0 / eig[-1] else: print('Warning: biggest eigenvalue is zero!') lower_bound = -1e-5 inf = 1e5 xtol = 1e-12 lambda_opt = 0 # We will look for the zero of phi between lower_bound and inf. # Therefore, the two have to be of different signs. if (phi(lower_bound) > 0) and (phi(inf) < 0): # brentq is considered the best rootfinding routine. 
try: lambda_opt = optimize.brentq(phi, lower_bound, inf, xtol=xtol) except: print('SRLS error: brentq did not converge even though we found an estimate for lower and upper bonud. Setting lambda to 0.') else: try: lambda_opt = optimize.newton(phi, lower_bound, fprime=phi_prime, maxiter=1000, tol=xtol, verbose=True) assert phi(lambda_opt) < xtol, 'did not find solution of phi(lambda)=0:={}'.format(phi(lambda_opt)) except: print('SRLS error: newton did not converge. Setting lambda to 0.') if (print_out): print('phi(lower_bound)', phi(lower_bound)) print('phi(inf)', phi(inf)) print('phi(lambda_opt)', phi(lambda_opt)) pos_definite = ATA + lambda_opt*D eig = np.sort(np.real(eigvals(pos_definite))) print('should be strictly bigger than 0:', eig) # Compute best estimate yhat = y_hat(lambda_opt) if print_out and rescale: print('Scaling factor :', yhat[-1]) if rescale: return yhat[:d], yhat[-1] elif z is None: return yhat[:d] else: return np.r_[yhat[0], yhat[1], z]
python
def all(self, paths, access=None, recursion=False):
    """
    Iterates over `paths` (which may consist of files and/or directories).
    Removes duplicates and returns list of valid paths meeting access
    criteria.

    :param paths: iterable of file and/or directory paths to examine.
    :param access: access criterion to filter by; None applies no access
        check (exact semantics defined by the generator — TODO confirm).
    :param recursion: whether directories are descended into recursively.
    """
    # Calling __init__ again resets this instance's state before the new
    # query parameters are applied. Assumes __init__ takes no required
    # arguments — confirm against the class definition.
    self.__init__()
    self.access = access
    self.filetype = 'all'  # consider both files and directories
    self.paths = paths
    self.recursion = recursion
    # _generator_other yields the matching paths; _sorter presumably
    # de-duplicates/orders them as the docstring promises — verify in module.
    return _sorter(self._generator_other())
java
/**
 * Sets the proxy port from an arbitrary value, coercing it to an int.
 * Empty values are ignored and leave the current port unchanged.
 *
 * @param oProxyport the port value to coerce; may be empty
 * @throws PageException if the value cannot be cast to an int
 */
public void setProxyport(Object oProxyport) throws PageException {
    if (!StringUtil.isEmpty(oProxyport)) {
        this.proxyport = Caster.toIntValue(oProxyport);
    }
}
python
def generate(env):
    """Add Builders and construction variables for Microsoft Visual Studio
    project files to an Environment.

    Registers the MSVSProject/MSVSSolution builders (if not already present),
    wires up the SCons re-invocation commands used from within Visual Studio,
    and picks project/solution file suffixes and encoding based on the
    detected MSVS version.
    """
    # Register the builders only if the environment does not already have them.
    try:
        env['BUILDERS']['MSVSProject']
    except KeyError:
        env['BUILDERS']['MSVSProject'] = projectBuilder

    try:
        env['BUILDERS']['MSVSSolution']
    except KeyError:
        env['BUILDERS']['MSVSSolution'] = solutionBuilder

    env['MSVSPROJECTCOM'] = projectAction
    env['MSVSSOLUTIONCOM'] = solutionAction

    if SCons.Script.call_stack:
        # XXX Need to find a way to abstract this; the build engine
        # shouldn't depend on anything in SCons.Script.
        env['MSVSSCONSCRIPT'] = SCons.Script.call_stack[0].sconscript
    else:
        # Outside a normal SConscript run: fall back to a cached default
        # SConstruct node, created lazily on first use.
        global default_MSVS_SConscript
        if default_MSVS_SConscript is None:
            default_MSVS_SConscript = env.File('SConstruct')
        env['MSVSSCONSCRIPT'] = default_MSVS_SConscript

    # Command lines Visual Studio uses to re-invoke SCons for build/rebuild/clean.
    env['MSVSSCONS'] = '"%s" -c "%s"' % (python_executable, getExecScriptMain(env))
    env['MSVSSCONSFLAGS'] = '-C "${MSVSSCONSCRIPT.dir.get_abspath()}" -f ${MSVSSCONSCRIPT.name}'
    env['MSVSSCONSCOM'] = '$MSVSSCONS $MSVSSCONSFLAGS'
    env['MSVSBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
    env['MSVSREBUILDCOM'] = '$MSVSSCONSCOM "$MSVSBUILDTARGET"'
    env['MSVSCLEANCOM'] = '$MSVSSCONSCOM -c "$MSVSBUILDTARGET"'

    # Set-up ms tools paths for default version
    msvc_setup_env_once(env)

    if 'MSVS_VERSION' in env:
        version_num, suite = msvs_parse_version(env['MSVS_VERSION'])
    else:
        (version_num, suite) = (7.0, None)  # guess at a default

    if 'MSVS' not in env:
        env['MSVS'] = {}

    # File suffixes changed across Visual Studio generations:
    # <7.0 uses .dsp/.dsw, 7.x-9.x uses .vcproj/.sln, 10.0+ uses .vcxproj/.sln.
    if (version_num < 7.0):
        env['MSVS']['PROJECTSUFFIX'] = '.dsp'
        env['MSVS']['SOLUTIONSUFFIX'] = '.dsw'
    elif (version_num < 10.0):
        env['MSVS']['PROJECTSUFFIX'] = '.vcproj'
        env['MSVS']['SOLUTIONSUFFIX'] = '.sln'
    else:
        env['MSVS']['PROJECTSUFFIX'] = '.vcxproj'
        env['MSVS']['SOLUTIONSUFFIX'] = '.sln'

    # VS 2010+ project files are UTF-8; older versions use Windows-1252.
    if (version_num >= 10.0):
        env['MSVSENCODING'] = 'utf-8'
    else:
        env['MSVSENCODING'] = 'Windows-1252'

    env['GET_MSVSPROJECTSUFFIX'] = GetMSVSProjectSuffix
    env['GET_MSVSSOLUTIONSUFFIX'] = GetMSVSSolutionSuffix
    env['MSVSPROJECTSUFFIX'] = '${GET_MSVSPROJECTSUFFIX}'
    env['MSVSSOLUTIONSUFFIX'] = '${GET_MSVSSOLUTIONSUFFIX}'
    env['SCONS_HOME'] = os.environ.get('SCONS_HOME')