language
stringclasses
2 values
func_code_string
stringlengths
63
466k
python
def getVMstats(self):
    """Return stats for the Virtual Memory Subsystem.

    Reads the module-level ``vmstatFile`` and parses each two-column
    "name value" line into a dict; other lines are ignored.

    @return: Dictionary mapping stat names to their (string) values.
    """
    try:
        # Context manager guarantees the file is closed even if read() fails.
        with open(vmstatFile, 'r') as fp:
            data = fp.read()
    except OSError:
        # Narrowed from a bare except: only I/O failures are translated.
        raise IOError('Failed reading stats from file: %s' % vmstatFile)
    info_dict = {}
    for line in data.splitlines():
        cols = line.split()
        if len(cols) == 2:
            info_dict[cols[0]] = cols[1]
    return info_dict
python
def _update_bordercolor(self, bordercolor):
    """Apply *bordercolor* (packed RGB value) to the line-colour picker."""
    # Start from the system's active-border colour, then overwrite its RGB.
    colour = wx.SystemSettings_GetColour(wx.SYS_COLOUR_ACTIVEBORDER)
    colour.SetRGB(bordercolor)
    self.linecolor_choice.SetColour(colour)
java
/**
 * Scans a .class file's constants pool and reports whether any UTF-8
 * entry matches one of the given annotation names. A {@code true} result
 * means the class <em>could</em> carry one of those annotations.
 */
public boolean couldContainAnnotationsOnClassDef(DataInput in, Set<String> byteCodeAnnotationsNames) throws IOException {
    /* According to Java VM Spec, each .class file contains
     * a single class or interface definition. The structure
     * definition is shown below:
     ClassFile {
         u4 magic;
         u2 minor_version;
         u2 major_version;
         u2 constant_pool_count;
         cp_info constant_pool[constant_pool_count-1];
         u2 access_flags;
         u2 this_class;
         u2 super_class;
         u2 interfaces_count;
         u2 interfaces[interfaces_count];
         u2 fields_count;
         field_info fields[fields_count];
         u2 methods_count;
         method_info methods[methods_count];
         u2 attributes_count;
         attribute_info attributes[attributes_count];
     }
     * u1 = readUnsignedByte
     * u2 = readUnsignedShort
     * u4 = readInt
     */
    int magic = in.readInt(); //u4
    if (magic != 0xCAFEBABE) {
        //the file is not recognized as a class file
        return false;
    }
    //u2 but since in java does not exists unsigned,
    //store on a bigger value
    int minorVersion = in.readUnsignedShort();//u2
    int majorVersion = in.readUnsignedShort();//u2
    if (majorVersion < 49) {
        //Compiled with jdk 1.4, so does not have annotations
        return false;
    }
    //constantsPoolCount is the number of entries + 1
    //The index goes from 1 to constantsPoolCount-1
    int constantsPoolCount = in.readUnsignedShort();
    for (int i = 1; i < constantsPoolCount; i++) {
        // Format:
        // cp_info {
        //    u1 tag;
        //    u1 info[];
        // }
        int tag = in.readUnsignedByte();
        switch (tag) {
        case CP_INFO_UTF8:
            //u2 length
            //u1 bytes[length]
            //Check if the string is a annotation reference name
            String name = in.readUTF();
            if (byteCodeAnnotationsNames.contains(name)) {
                return true;
            }
            break;
        case CP_INFO_CLASS: //ignore
            //u2 name_index
            in.readUnsignedShort();
            break;
        case CP_INFO_FIELD_REF: //ignore
        case CP_INFO_METHOD_REF: //ignore
        case CP_INFO_INTERFACE_REF: //ignore
            //u2 class_index
            //u2 name_and_type_index
            in.readUnsignedShort();
            in.readUnsignedShort();
            break;
        case CP_INFO_STRING: //ignore
            //u2 string_index
            in.readUnsignedShort();
            break;
        case CP_INFO_INTEGER: //ignore
        case CP_INFO_FLOAT: //ignore
            //u4 bytes
            in.readInt();
            break;
        case CP_INFO_LONG: //ignore
        case CP_INFO_DOUBLE: //ignore
            //u4 high_bytes
            //u4 low_bytes
            in.readInt();
            in.readInt();
            // this tag takes two entries in the constants pool
            i++;
            break;
        case CP_INFO_NAME_AND_TYPE: //ignore
            //u2 name_index
            //u2 descriptor_index
            in.readUnsignedShort();
            in.readUnsignedShort();
            break;
        case CP_INFO_METHOD_HANDLE: // Ignore
            // u1 reference_kind
            // u2 reference_index
            in.readUnsignedByte();
            in.readUnsignedShort();
            break;
        case CP_INFO_METHOD_TYPE: // Ignore
            // u2 descriptor_index
            in.readUnsignedShort();
            break;
        case CP_INFO_INVOKE_DYNAMIC: // Ignore
            // u2 bootstrap_method_attr_index;
            // u2 name_and_type_index;
            in.readUnsignedShort();
            in.readUnsignedShort();
            break;
        default:
            // THIS SHOULD NOT HAPPEN! Log error info
            // and break for loop, because from this point
            // we are reading corrupt data.
            if (log.isLoggable(Level.WARNING)) {
                log.warning("Unknown tag in constants pool: " + tag);
            }
            // Force loop exit: the stream position is no longer trustworthy.
            i = constantsPoolCount;
            break;
        }
    }
    return false;
}
python
def download(self, source, dest):
    """
    Download an archive file.

    :param str source: URL pointing to an archive file.
    :param str dest: Local path location to download archive file to.
    :raises: URLError, OSError, etc. — all exceptions are propagated.
    """
    proto, netloc, path, params, query, fragment = urlparse(source)
    if proto in ('http', 'https'):
        # Strip inline "user:pass@" credentials and register them with an
        # auth handler instead of leaving them in the URL.
        auth, barehost = splituser(netloc)
        if auth is not None:
            source = urlunparse((proto, barehost, path, params, query, fragment))
            username, password = splitpasswd(auth)
            passman = HTTPPasswordMgrWithDefaultRealm()
            # Realm is set to None in add_password to force the username and
            # password to be used whatever the realm.
            passman.add_password(None, source, username, password)
            authhandler = HTTPBasicAuthHandler(passman)
            opener = build_opener(authhandler)
            install_opener(opener)
    response = urlopen(source)
    try:
        with open(dest, 'wb') as dest_file:
            dest_file.write(response.read())
    except Exception:
        # Remove the partial download, then re-raise with the original
        # traceback (bare `raise` instead of `raise e`).
        if os.path.isfile(dest):
            os.unlink(dest)
        raise
python
def _sqla_postgresql(self, uri, version=None, isolation_level="READ COMMITTED"):
    '''
    Prepare SQLAlchemy engine options for a PostgreSQL backend.

    expected uri form:
    postgresql+psycopg2://%s:%s@%s:%s/%s' % (
        username, password, host, port, db)

    Mutates self.type_map and self.config (batch_size, db_schema) as a
    side effect; returns the uri plus engine kwargs.
    '''
    # Guard against an explicit None being passed in for isolation_level.
    isolation_level = isolation_level or "READ COMMITTED"
    kwargs = dict(isolation_level=isolation_level)
    # FIXME: version of postgresql < 9.2 don't have pg.JSON!
    # check and use JSONTypedLite instead
    # override default dict and list column types
    types = {list: pg.ARRAY, tuple: pg.ARRAY, set: pg.ARRAY,
             dict: JSONDict, datetime: UTCEpoch}
    self.type_map.update(types)
    bs = self.config['batch_size']
    # 999 batch_size is default for sqlite, postgres handles more at once
    self.config['batch_size'] = 5000 if bs == 999 else bs
    # Postgres supports concurrent writers, so no client-side lock needed.
    self._lock_required = False
    # default schema name is 'public' for postgres
    dsn = self.config['db_schema']
    self.config['db_schema'] = dsn or 'public'
    return uri, kwargs
python
def clear_java_home():
    """Clear JAVA_HOME environment or reset to BCBIO_JAVA_HOME.

    Avoids accidental java injection but respects a custom BCBIO_JAVA_HOME,
    which is honored only when it actually contains a bin/java executable.
    Returns a shell snippet to export or unset JAVA_HOME.
    """
    custom_home = os.environ.get("BCBIO_JAVA_HOME")
    if custom_home:
        candidate = os.path.join(custom_home, "bin", "java")
        if os.path.exists(candidate):
            return "export JAVA_HOME=%s" % custom_home
    return "unset JAVA_HOME"
java
/** Returns the localized display name for this entity, resolved via the message bundle. */
public String getName(final Locale locale) {
    final String messageKey = String.format("locale.%s.name", hexId);
    return messageResolver.getMessage(locale, messageKey);
}
java
/**
 * Runs a proxy script's request or response hook for the given message.
 * Script errors are reported through the script's writer and never abort
 * proxying: the method still returns {@code true} on failure so the
 * request/response is forwarded.
 */
public boolean invokeProxyScript(ScriptWrapper script, HttpMessage msg, boolean request) {
    validateScriptType(script, TYPE_PROXY);
    Writer writer = getWriters(script);
    try {
        // Dont need to check if enabled as it can only be invoked manually
        ProxyScript s = this.getInterface(script, ProxyScript.class);
        if (s != null) {
            if (request) {
                return s.proxyRequest(msg);
            } else {
                return s.proxyResponse(msg);
            }
        } else {
            // Script does not implement the ProxyScript interface.
            handleUnspecifiedScriptError(script, writer,
                    Constant.messages.getString("script.interface.proxy.error"));
        }
    } catch (Exception e) {
        handleScriptException(script, writer, e);
    }
    // Return true so that the request is submitted - if we returned false
    // all proxying would fail on script errors
    return true;
}
java
/**
 * Borrows a PondLife from the pool, optionally waiting up to
 * {@code timeoutMs} when the pool is full and nothing is available.
 * Returns {@code null} if the pool is shut down or the wait times out.
 */
public PondLife get(int timeoutMs) throws Exception {
    PondLife pl = null;
    // Defer to other threads before locking
    if (_available < _min)
        Thread.yield();
    int new_id = -1;
    // Try to get pondlife without creating new one.
    synchronized (this) {
        // Wait if none available.
        // NOTE(review): wait() is not re-checked in a loop, so a spurious
        // wakeup falls through to the availability checks below — confirm
        // the null return in that case is acceptable to callers.
        if (_running > 0 && _available == 0 && _size == _pondLife.length && timeoutMs > 0)
            wait(timeoutMs);
        // If still running
        if (_running > 0) {
            // if pondlife available
            if (_available > 0) {
                int id = _index[--_available];
                pl = _pondLife[id];
            } else if (_size < _pondLife.length) {
                // Reserve spot for a new one
                new_id = reservePondLife(false);
            }
        }
        // create reserved pondlife
        if (pl == null && new_id >= 0)
            pl = newPondLife(new_id);
    }
    return pl;
}
python
def modularity(matrix, clusters):
    """
    Compute the modularity

    :param matrix: The adjacency matrix
    :param clusters: The clusters returned by get_clusters
    :returns: modularity value
    """
    matrix = convert_to_adjacency_matrix(matrix)
    # m is the total edge weight (sum of all entries).
    m = matrix.sum()

    # Keep a CSR copy for efficient row slicing when the input is sparse.
    if isspmatrix(matrix):
        matrix_2 = matrix.tocsr(copy=True)
    else :
        matrix_2 = matrix

    if is_undirected(matrix):
        # Undirected: expected weight uses total degree (in + out) of each node.
        expected = lambda i,j : (( matrix_2[i,:].sum() + matrix[:,i].sum() )*
                                ( matrix[:,j].sum() + matrix_2[j,:].sum() ))
    else:
        # Directed: expected weight uses out-degree of i times in-degree of j.
        expected = lambda i,j : ( matrix_2[i,:].sum()*matrix[:,j].sum() )

    # delta is nonzero only for (i, j) pairs within the same cluster.
    delta   = delta_matrix(matrix, clusters)
    indices = np.array(delta.nonzero())

    Q = sum( matrix[i, j] - expected(i, j)/m for i, j in indices.T )/m

    return Q
java
public Iterable<HistoryPageEntry<T>> getRenderList() { if(trimmed) { List<HistoryPageEntry<T>> pageEntries = toPageEntries(baseList); if(pageEntries.size() > THRESHOLD) { return updateFirstTransientBuildKey(pageEntries.subList(0,THRESHOLD)); } else { trimmed=false; return updateFirstTransientBuildKey(pageEntries); } } else { // to prevent baseList's concrete type from getting picked up by <j:forEach> in view return updateFirstTransientBuildKey(toPageEntries(baseList)); } }
python
async def close(self):
    """
    Terminate the ICE agent, ending ICE processing and streams.

    Idempotent: subsequent calls return immediately. Stops senders and
    receivers first, then the underlying transports, then detaches all
    event listeners.
    """
    if self.__isClosed:
        return
    self.__isClosed = True
    self.__setSignalingState('closed')

    # stop senders / receivers
    for transceiver in self.__transceivers:
        await transceiver.stop()
    if self.__sctp:
        await self.__sctp.stop()

    # stop transports (DTLS first, then the ICE transport beneath it)
    for transceiver in self.__transceivers:
        await transceiver._transport.stop()
        await transceiver._transport.transport.stop()
    if self.__sctp:
        await self.__sctp.transport.stop()
        await self.__sctp.transport.transport.stop()
    self.__updateIceConnectionState()

    # no more events will be emitted, so remove all event listeners
    # to facilitate garbage collection.
    self.remove_all_listeners()
python
def p_namedblock_empty(self, p):
    # PLY grammar action: the docstring below IS the production rule and
    # must not be edited as documentation.
    'namedblock : BEGIN COLON ID END'
    # Build an empty named block; the ID (p[3]) is the block's name.
    p[0] = Block((), p[3], lineno=p.lineno(1))
    # Propagate the BEGIN token's line number onto the result symbol.
    p.set_lineno(0, p.lineno(1))
python
def update_xml_element(self):
    """ Updates the xml element contents to matches the instance contents.

    :returns: Updated XML element.
    :rtype: lxml.etree._Element
    """
    super(Description, self).update_xml_element()
    # xml:lang lives in the XML namespace, so use the Clark-notation name.
    xml_lang_attr = '{http://www.w3.org/XML/1998/namespace}lang'
    if hasattr(self, 'lang'):
        self.xml_element.set(xml_lang_attr, self.lang)
    if hasattr(self, 'override'):
        self.xml_element.set('override', str(self.override))
    return self.xml_element
python
def _encode(cls, data, charsets):
    """Encode the data using the character sets in charsets.

    :param data: Data to be encoded.
    :param charsets: Sequence of charsets that are used to encode the barcode. Must be the
        exact amount of symbols needed to encode the data.
    :return: List of the symbol values representing the barcode.
    """
    result = []
    charset = charsets[0]
    # Emit the start symbol for the initial charset.
    start_symbol = cls._start_codes[charset]
    result.append(cls._sym2val[charset][start_symbol])
    cur = 0  # position in `data`; advances 2 chars per symbol in charset C, else 1
    prev_charset = charsets[0]
    for symbol_num in range(len(charsets)):
        charset = charsets[symbol_num]
        if charset is not prev_charset:
            # Handle a special case of there being a single A in middle of two B's or the
            # other way around, where using a single shift character is more efficient
            # than using two character set switches.
            next_charset = charsets[symbol_num + 1] if symbol_num + 1 < len(charsets) else None
            if charset == 'A' and prev_charset == next_charset == 'B':
                result.append(cls._sym2val[prev_charset][cls.Special.SHIFT_A])
            elif charset == 'B' and prev_charset == next_charset == 'A':
                result.append(cls._sym2val[prev_charset][cls.Special.SHIFT_B])
            else:
                # This is the normal case: switch charsets for the remainder.
                charset_symbol = cls._char_codes[charset]
                result.append(cls._sym2val[prev_charset][charset_symbol])
                # Only a real switch updates prev_charset; a SHIFT applies to
                # this symbol alone.
                prev_charset = charset
        nxt = cur + (2 if charset == 'C' else 1)
        symbol = data[cur:nxt]
        cur = nxt
        result.append(cls._sym2val[charset][symbol])
    # Checksum is computed over everything emitted so far, then STOP ends it.
    result.append(cls._calc_checksum(result))
    result.append(cls._sym2val[charset][cls.Special.STOP])
    return result
java
/**
 * Returns the parsed max-age value, or {@code null} when the element is
 * absent or holds the literal string "null".
 */
public Integer getMaxAge() {
    // Look the text value up once instead of three separate times.
    String maxAge = childNode.getTextValueForPatternName("max-age");
    if (maxAge != null && !maxAge.equals("null")) {
        return Integer.valueOf(maxAge);
    }
    return null;
}
java
private void appendLinebreaks(Tag tag, boolean open) { String name = tag.getTagName(); int pos = TAG_LIST.indexOf(name); switch (pos) { case 0: // H1 setMarker("=", open); setIndentation(2, open); appendLinebreak(2); break; case 1: // H2 setMarker("==", open); setIndentation(3, open); appendLinebreak(2); break; case 2: // H3 setMarker("===", open); setIndentation(4, open); appendLinebreak(2); break; case 3: // H4 setMarker("====", open); setIndentation(5, open); appendLinebreak(2); break; case 4: // H5 setMarker("=====", open); setIndentation(6, open); appendLinebreak(2); break; case 5: // H6 setMarker("=======", open); setIndentation(7, open); appendLinebreak(2); break; case 6: // P case 7: // DIV appendLinebreak(2); break; case 8: // SPAN break; case 9: // BR appendLinebreak(1, true); break; case 10: // OL case 11: // UL appendLinebreak(2); break; case 12: // LI setMarker("*", open); setIndentation(5, open); appendLinebreak(1); break; case 13: // TABLE setIndentation(5, open); appendLinebreak(2); if (open) { appendLinebreak(1); appendText("-----"); appendLinebreak(1); } break; case 14: // TD setMarker("--", open); appendLinebreak(2); break; case 15: // TR if (!open) { appendLinebreak(1); appendText("-----"); appendLinebreak(1); } break; case 16: // TH case 17: // THEAD case 18: // TBODY case 19: // TFOOT appendLinebreak(1); break; default: // unknown tag (ignore) } }
java
/** Lazily resolves and caches the IfcFilter EClass from the registered Ifc4 package. */
@Override
public EClass getIfcFilter() {
    if (ifcFilterEClass == null) {
        // Classifier index 271 is IfcFilter in the generated Ifc4 package.
        ifcFilterEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc4Package.eNS_URI)
                .getEClassifiers()
                .get(271);
    }
    return ifcFilterEClass;
}
java
/**
 * Determines the content locale for a list configuration: the site default
 * when no folders are configured, otherwise the default locale of the
 * first configured folder.
 */
private Locale getContentLocale(ListConfigurationBean bean) {
    CmsObject cms = A_CmsUI.getCmsObject();
    if (bean.getFolders().isEmpty()) {
        return OpenCms.getLocaleManager().getDefaultLocale(cms, "/");
    } else {
        // NOTE(review): emptiness is checked on the 'bean' parameter but the
        // folder path is read from m_currentConfig — confirm these always
        // refer to the same configuration, otherwise this can throw on an
        // empty m_currentConfig folder list.
        return OpenCms.getLocaleManager().getDefaultLocale(
            cms,
            cms.getRequestContext().removeSiteRoot(m_currentConfig.getFolders().get(0)));
    }
}
java
/**
 * Checks whether this node is of the given node type, considering both
 * the primary type and all mixin types.
 *
 * @throws RepositoryException if the node state is no longer valid
 */
public boolean isNodeType(InternalQName qName) throws RepositoryException {
    // Fail fast if the underlying item was invalidated.
    checkValid();
    return session.getWorkspace().getNodeTypesHolder().isNodeType(qName,
        nodeData().getPrimaryTypeName(), nodeData().getMixinTypeNames());
}
java
/**
 * Recursively walks a configuration DOM, populating {@code mapFilter} with
 * per-type "&lt;type&gt;.filter", "&lt;type&gt;.tag" and "&lt;type&gt;.merge"
 * entry lists. The whole traversal runs under {@code mutex}.
 */
private final void parseDocument(Node currentNode) {
    try {
        // NOTE(review): acquire() is inside the try, so the finally releases
        // the mutex even if acquisition itself failed — confirm the mutex
        // implementation tolerates an unmatched release.
        mutex.acquire();
        String nodeName = currentNode.getNodeName();
        // A <type name="..."> element seeds empty filter/tag/merge lists.
        if ((currentNode.getNodeType() == Node.ELEMENT_NODE) && (nodeName.equals("type"))) {
            String typeName = ((Element) currentNode).getAttribute("name");
            mapFilter.put(typeName + ".filter", new ArrayList());
            mapFilter.put(typeName + ".tag", new ArrayList());
            mapFilter.put(typeName + ".merge", new ArrayList());
        }
        if ((currentNode.getNodeType() == Node.ELEMENT_NODE) && (currentNode.getParentNode() != null)) {
            if (nodeName.equals("filter")) {
                // <filter> sits two levels below its <type> ancestor.
                String typeName = ((Element) currentNode.getParentNode()
                        .getParentNode()).getAttribute("name");
                ArrayList vMapFilter = (ArrayList) mapFilter.get(typeName + ".filter");
                ArrayList vFilter = new ArrayList();
                // Explicit order attribute, else append at the end.
                int iOrder = ((Element) currentNode).getAttribute("order").equals("")
                        ? vMapFilter.size()
                        : Integer.parseInt(((Element) currentNode).getAttribute("order"));
                String replacement = "";
                NodeList nl = currentNode.getChildNodes();
                for (int i = 0; i < nl.getLength(); i++) {
                    Node childNode = nl.item(i);
                    if (childNode.getNodeName().equals("replacement")) {
                        // Concatenate all text children into the replacement string.
                        NodeList nl2 = childNode.getChildNodes();
                        for (int j = 0; j < nl2.getLength(); j++) {
                            Node child = nl2.item(j);
                            if (child.getNodeType() == Node.TEXT_NODE) {
                                replacement = replacement + child.getNodeValue();
                            }
                        }
                    } else if (childNode.getNodeName().equals("match")) {
                        String match = "";
                        NodeList nl2 = childNode.getChildNodes();
                        for (int j = 0; j < nl2.getLength(); j++) {
                            Node child = nl2.item(j);
                            if (child.getNodeType() == Node.TEXT_NODE) {
                                match = match + child.getNodeValue();
                            }
                        }
                        Pattern p = null;
                        try {
                            // Multiline + dotall so patterns can span lines.
                            p = Pattern.compile(match, Pattern.MULTILINE | Pattern.DOTALL);
                            vFilter.add(p);
                        } catch (Exception ignore) {
                            // Invalid pattern: silently skipped by design.
                        }
                    }
                }
                vMapFilter.add(iOrder, new Object[] { replacement, vFilter });
            } else if ((nodeName.equals("tag"))
                    && (currentNode.getParentNode().getNodeName().equals("tags"))) {
                // <tag> under <tags>: its <type> ancestor is three levels up.
                String typeName = ((Element) currentNode.getParentNode()
                        .getParentNode()
                        .getParentNode()).getAttribute("name");
                String tagName = ((Element) currentNode).getAttribute("name");
                ArrayList vMapTags = (ArrayList) mapFilter.get(typeName + ".tag");
                ArrayList vAttributes = new ArrayList();
                NodeList nl = currentNode.getChildNodes();
                for (int i = 0; i < nl.getLength(); i++) {
                    Node childNode = nl.item(i);
                    if (childNode.getNodeName().equals("attribute")) {
                        vAttributes.add((String) ((Element) childNode).getAttribute("name"));
                    }
                }
                vMapTags.add(new Object[] { tagName, vAttributes });
            } else if (nodeName.equals("source_attribute")) {
                // <source_attribute>: its <type> ancestor is four levels up.
                String typeName = ((Element) currentNode.getParentNode()
                        .getParentNode()
                        .getParentNode()
                        .getParentNode()).getAttribute("name");
                String tagName = ((Element) currentNode.getParentNode()).getAttribute("name");
                String attribName = ((Element) currentNode).getAttribute("name");
                String srcAttribNameInDest = ((Element) currentNode).getAttribute("name_in_destination");
                ArrayList vMapMerge = (ArrayList) mapFilter.get(typeName + ".merge");
                String separator = null;
                String destAttribName = null;
                ArrayList match = new ArrayList();
                ArrayList value = new ArrayList();
                NodeList nl = currentNode.getChildNodes();
                for (int i = 0; i < nl.getLength(); i++) {
                    Node childNode = nl.item(i);
                    if (childNode.getNodeName().equals("destination_attribute")) {
                        destAttribName = ((Element) childNode).getAttribute("name");
                    } else if (childNode.getNodeName().equals("separator")) {
                        separator = ((Element) childNode).getAttribute("value");
                    } else if (childNode.getNodeName().equals("replace")) {
                        match.add(((Element) childNode).getAttribute("match"));
                        value.add(((Element) childNode).getAttribute("value"));
                    }
                }
                vMapMerge.add(new Object[] { tagName, attribName, srcAttribNameInDest,
                        destAttribName, separator, match, value });
            }
        }
        // Depth-first recursion over all children.
        NodeList nl = currentNode.getChildNodes();
        for (int i = 0; i < nl.getLength(); i++) {
            parseDocument(nl.item(i));
        }
    } finally {
        // Best-effort release; never let a release failure mask the result.
        try { mutex.release(); } catch (Throwable ignore) { }
    }
}
java
/**
 * Permits, without authentication, the special URIs of form login: the
 * configured login/error pages, POSTs to j_security_check, and error-page
 * dispatches. Returns {@code PERMIT_REPLY} for those, {@code null} when
 * normal authorization should proceed.
 */
private WebReply unprotectedSpecialURI(WebRequest webRequest, String uriName, String methodName) {
    LoginConfiguration loginConfig = webRequest.getLoginConfig();
    if (loginConfig == null)
        return null;
    String authenticationMethod = loginConfig.getAuthenticationMethod();
    FormLoginConfiguration formLoginConfig = loginConfig.getFormLoginConfiguration();
    if (formLoginConfig == null || authenticationMethod == null)
        return null;
    String loginPage = formLoginConfig.getLoginPage();
    String errorPage = formLoginConfig.getErrorPage();

    // We check to see if we are either a FORM or CLIENT_CERT auth method.
    // These are the only valid auth methods supported (CLIENT_CERT can
    // fail over to FORM).
    if (isValidAuthMethodForFormLogin(authenticationMethod) && loginPage != null && errorPage != null) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, " We have a custom login or error page request, web app login URL:[" + loginPage
                    + "], errorPage URL:[" + errorPage + "], and the requested URI:[" + uriName + "]");
        }
        if (loginPage.equals(uriName) || errorPage.equals(uriName)) {
            // The login/error pages themselves must always be reachable.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                Tr.debug(tc, "authorize, login or error page[" + uriName + "] requested, permit: ", PERMIT_REPLY);
            return PERMIT_REPLY;
        } else if ((uriName != null && uriName.equals("/j_security_check"))
                && (methodName != null && methodName.equals("POST"))) {
            // The form-login submission endpoint; only POST is permitted.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                Tr.debug(tc, "authorize, login or error page[" + uriName + "] requested, permit: ", PERMIT_REPLY);
            return PERMIT_REPLY;
        }
    } else {
        // Not form login: still permit container error-page dispatches.
        if (webRequest.getHttpServletRequest().getDispatcherType().equals(DispatcherType.ERROR)) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                Tr.debug(tc, "authorize, error page[" + uriName + "] requested, permit: ", PERMIT_REPLY);
            return PERMIT_REPLY;
        }
    }
    return null;
}
java
/**
 * Maps a Java type to its Objective-C spelling: "void" for void,
 * "j"-prefixed names for primitives, and "id" for everything else.
 */
public static String getPrimitiveObjCType(TypeMirror type) {
    if (TypeUtil.isVoid(type)) {
        return "void";
    }
    if (type.getKind().isPrimitive()) {
        return "j" + TypeUtil.getName(type);
    }
    return "id";
}
java
/**
 * Fetches instances for every configured application, retrying each app
 * once on failure. Apps that fail twice are skipped (logged, not thrown).
 */
@Override
public Collection<Instance> getInstanceList() throws Exception {
    List<Instance> instances = new ArrayList<>();
    List<String> appNames = getApplications();
    if (appNames == null || appNames.size() == 0) {
        log.info("No apps configured, returning an empty instance list");
        return instances;
    }
    log.info("Fetching instance list for apps: " + appNames);
    for (String appName : appNames) {
        try {
            instances.addAll(getInstancesForApp(appName));
        } catch (Exception ex) {
            log.error("Failed to fetch instances for app: " + appName + ", retrying once more", ex);
            try {
                instances.addAll(getInstancesForApp(appName));
            } catch (Exception retryException) {
                // Bug fix: log the retry's own exception, not the first
                // attempt's (the original logged `ex` here).
                log.error("Failed again to fetch instances for app: " + appName + ", giving up", retryException);
            }
        }
    }
    return instances;
}
java
/**
 * JAXB object-factory method: wraps an {@link EnumACLPropagation} value in
 * a JAXBElement for the CMIS messaging "ACLPropagation" element scoped to
 * {@link ApplyACL}.
 */
@XmlElementDecl(namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", name = "ACLPropagation", scope = ApplyACL.class)
public JAXBElement<EnumACLPropagation> createApplyACLACLPropagation(EnumACLPropagation value) {
    return new JAXBElement<EnumACLPropagation>(
            _ApplyACLACLPropagation_QNAME, EnumACLPropagation.class, ApplyACL.class, value);
}
python
def focusInEvent(self, event):
    """
    When this widget gains focus, reset the changed-record marker so the
    record-changed signal can fire again later.

    (Previous docstring said "loses focus"; this is the focus-IN handler.)
    """
    self._changedRecord = -1
    super(XOrbRecordBox, self).focusInEvent(event)
java
/**
 * Registers a connection under the given key, wrapping it in a Sync
 * holder. Replaces any existing entry for the same key.
 */
public void register(K key, T connection) {
    connections.put(key, new Sync<>(key, connection));
}
java
/** Copies the boxed channel-id list into a primitive long array. */
public long[] getChannelIdArray() {
    final long[] ids = new long[channelIds.size()];
    for (int idx = 0; idx < channelIds.size(); idx++) {
        ids[idx] = channelIds.get(idx);
    }
    return ids;
}
java
/** Entry point for the contractiveness check: starts with an empty visited set. */
public boolean isContractive(QualifiedName nid, Type type) {
    return isContractive(nid, type, new HashSet<QualifiedName>());
}
java
/**
 * Returns the relative path composed of the name elements from index
 * {@code start} (inclusive) to {@code end} (exclusive).
 */
@Override
public UnixSshPath subpath(int start, int end) {
    String[] parts = new String[end - start];
    for (int i = start; i < end; i++) {
        // Bug fix: destination index must be relative to `start`; the
        // original wrote parts[i], which overruns the array (or leaves
        // leading nulls) whenever start > 0.
        parts[i - start] = getName(i).toString();
    }
    return new UnixSshPath(getFileSystem(), false, parts);
}
java
/**
 * Convenience overload: renders a default icon-button, reusing {@code id}
 * as the DOM id and delegating with singleSelect=false and no tag.
 */
public static String defaultButtonHtml(
    CmsHtmlIconButtonStyleEnum style,
    String id,
    String name,
    String helpText,
    boolean enabled,
    String iconPath,
    String confirmationMessage,
    String onClick) {

    return defaultButtonHtml(
        style,
        id,
        id,
        name,
        helpText,
        enabled,
        iconPath,
        confirmationMessage,
        onClick,
        false,
        null);
}
python
def p_load_code(p):
    # PLY grammar action: the docstring below IS the production rule set and
    # must stay exactly as written.
    """ statement : load_or_verify expr ID
                  | load_or_verify expr CODE
                  | load_or_verify expr CODE expr
                  | load_or_verify expr CODE expr COMMA expr
    """
    # The filename expression must be a string.
    if p[2].type_ != TYPE.string:
        api.errmsg.syntax_error_expected_string(p.lineno(3), p[2].type_)

    if len(p) == 4:
        # Bare ID form: only SCREEN/SCREEN$/CODE keywords are accepted.
        if p[3].upper() not in ('SCREEN', 'SCREEN$', 'CODE'):
            syntax_error(p.lineno(3), 'Unexpected "%s" ID. Expected "SCREEN$" instead' % p[3])
            return None
        else:
            if p[3].upper() == 'CODE':  # LOAD "..." CODE
                # Zero start/length mean "use the header values".
                start = make_number(0, lineno=p.lineno(3))
                length = make_number(0, lineno=p.lineno(3))
            else:  # SCREEN$
                # ZX Spectrum display file: address 16384, 6912 bytes.
                start = make_number(16384, lineno=p.lineno(3))
                length = make_number(6912, lineno=p.lineno(3))
    else:
        # CODE with explicit start (and optionally length) expressions.
        start = make_typecast(TYPE.uinteger, p[4], p.lineno(3))

        if len(p) == 5:
            length = make_number(0, lineno=p.lineno(3))
        else:
            length = make_typecast(TYPE.uinteger, p[6], p.lineno(5))

    p[0] = make_sentence(p[1], p[2], start, length)
java
/**
 * Serializes the labeled sequences and writes them UTF-8 encoded to the
 * given file. Failures are intentionally swallowed (best-effort write).
 */
@Override
public void writeFile(List lblSeqs, String filename) {
    String content = writeString(lblSeqs);
    // try-with-resources closes the stream even when write() throws,
    // fixing the handle leak of the previous manual close().
    try (BufferedWriter out = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(filename), "UTF-8"))) {
        out.write(content);
    } catch (Exception e) {
        // Preserve the original best-effort contract: errors are ignored.
    }
}
java
/** Sets the light colour (RGBA) and caches its packed float form; marks static lights dirty. */
public void setColor(float r, float g, float b, float a) {
    color.set(r, g, b, a);
    colorF = color.toFloatBits();
    if (staticLight) {
        dirty = true;
    }
}
python
def document(self, name, file_name, **kwargs):
    """Add Document data to Batch object.

    Args:
        name (str): The name for this Group.
        file_name (str): The name for the attached file for this Group.
        date_added (str, kwargs): The date timestamp the Indicator was created.
        file_content (str;method, kwargs): The file contents or callback method to retrieve
            file content.
        malware (bool, kwargs): If true the file is considered malware.
        password (bool, kwargs): If malware is true a password for the zip archive is
        xid (str, kwargs): The external id for this Group.

    Returns:
        obj: An instance of Document.
    """
    return self._group(Document(name, file_name, **kwargs))
python
def _createIndexRti(self, index, nodeName):
    """ Auxiliary method that creates a PandasIndexRti.

        The new item inherits this node's file name and icon color.

        :param index: the pandas index object to wrap.
        :param nodeName: display name for the new tree item.
        :returns: a new PandasIndexRti.
    """
    return PandasIndexRti(index=index, nodeName=nodeName,
                          fileName=self.fileName, iconColor=self._iconColor)
python
def _merge_files(self, input_files, output_file): """Combine the input files to a big output file""" # we assume that all the input files have the same charset with open(output_file, mode='wb') as out: for input_file in input_files: out.write(open(input_file, mode='rb').read())
python
def merge(intervals):
    """Merge two intervals into one.

    Intervals are objects with mutable ``start``/``end`` attributes; the
    returned list reuses (and may mutate) the input interval objects.
    """
    merged = []
    for interval in sorted(intervals, key=lambda iv: iv.start):
        if merged and interval.start <= merged[-1].end:
            # Overlaps (or touches) the previous interval: extend it in place.
            merged[-1].end = max(merged[-1].end, interval.end)
        else:
            merged.append(interval)
    return merged
java
/**
 * Removes the given device from its admin device's re-lock list, if that
 * admin device is being tracked. Silently gives up when the admin name
 * cannot be resolved.
 */
void removeToRelockList(final DeviceProxy dev) {
    // Check if admin device already exists.
    String adm;
    try {
        adm = dev.adm_name();
    } catch (final DevFailed e) {
        // Give up: without the admin name we cannot locate the list entry.
        return;
    }
    if (relockMap.containsKey(adm)) {
        final LockedDeviceAmin lda = relockMap.get(adm);
        lda.remove(dev.get_name());
    }
}
java
/**
 * Populates the subquery's row store for this session: directly from the
 * VALUES data expression when this is a table constructor, otherwise by
 * evaluating the query expression (limited to one row for EXISTS).
 */
public void materialise(Session session) {

    PersistentStore store;

    // table constructors
    if (isDataExpression) {
        store = session.sessionData.getSubqueryRowStore(table);

        dataExpression.insertValuesIntoSubqueryTable(session, store);

        return;
    }

    // For EXISTS predicates one row is enough to decide the result.
    Result result = queryExpression.getResult(session, isExistsPredicate ? 1 : 0);
    RowSetNavigatorData navigator = ((RowSetNavigatorData) result.getNavigator());

    if (uniqueRows) {
        // e.g. IN / quantified comparisons need duplicate-free rows.
        navigator.removeDuplicates();
    }

    store = session.sessionData.getSubqueryRowStore(table);

    table.insertResult(store, result);
    result.getNavigator().close();
}
java
/**
 * Forcefully terminates the process with the given pid, using taskkill on
 * Windows and kill elsewhere, and waits for the command to finish.
 */
protected void kill(String pid) throws IOException, InterruptedException {
    String os = System.getProperty("os.name");
    // Use the String[] form of exec so the pid is passed as a discrete
    // argument rather than being re-tokenized from a command string.
    String[] command = os.startsWith("Windows")
            ? new String[] { "taskkill", "/F", "/PID", pid }
            : new String[] { "kill", pid };
    Runtime.getRuntime().exec(command).waitFor();
}
java
/**
 * Removes every match of {@code regex} from {@code text} by replacing the
 * matches with the empty string.
 */
public static String removeAll(final String text, final Pattern regex) {
    return replaceAll(text, regex, N.EMPTY_STRING);
}
java
/**
 * Creates (or fetches) a Protege instance with the given name and class,
 * delegating to the shared INSTANCE_CREATOR strategy.
 *
 * @throws KnowledgeSourceReadException if the Protege backend cannot be read
 */
Instance createInstance(String name, Cls cls) throws KnowledgeSourceReadException {
    return getFromProtege(new InstanceSpec(name, cls), INSTANCE_CREATOR);
}
java
/**
 * Returns the cached visualization type, lazily reading it from the
 * attribute collection; falls back to {@code SHAPE_ONLY} when the
 * attribute is missing or unreadable.
 */
@Pure
public VisualizationType getVisualizationType() {
    if (this.vizualizationType == null) {
        final AttributeValue val = getAttributeCollection().getAttribute(ATTR_VISUALIZATION_TYPE);
        if (val != null) {
            try {
                this.vizualizationType = val.getJavaObject();
            } catch (Exception e) {
                // Deliberately ignored: an unreadable attribute simply
                // triggers the default below.
            }
        }
        if (this.vizualizationType == null) {
            this.vizualizationType = VisualizationType.SHAPE_ONLY;
        }
    }
    return this.vizualizationType;
}
java
/**
 * Persists the CVE list for the hashed key as a comma-joined file entry,
 * replacing any existing entry first.
 *
 * @throws VictimsException if the entry file cannot be written
 */
public void add(String key, Collection<String> cves) throws VictimsException {
    key = hash(key);
    // Replace semantics: drop any previous entry for this key.
    if (exists(key)) {
        delete(key);
    }
    String result = (cves != null) ? StringUtils.join(cves, ",") : "";
    // try-with-resources closes the stream on every path (the original
    // manual close leaked the handle if the constructor path threw between
    // open and the inner try).
    try (FileOutputStream fos = new FileOutputStream(FileUtils.getFile(location, key))) {
        fos.write(result.getBytes());
    } catch (IOException e) {
        throw new VictimsException(String.format(
                "Could not add disk entry for key: %s", key), e);
    }
}
python
def read_chunks(self, chunk_size, start, step, count) -> bytes:
    '''
    Read the content.

    Read and concatenate ``count`` chunks of size ``chunk_size``,
    starting at offset ``start`` and advancing by ``step`` between
    chunks.

    Args:
        chunk_size (int): The chunk size.
        start (int): First offset.
        step (int): Offset increment.
        count (int): The number of offsets.

    Returns:
        bytes
    '''
    # Thin wrapper: the native moderngl object does the actual reads.
    return self.mglo.read_chunks(chunk_size, start, step, count)
python
def get_asns(self, privaddr=0):
    """Obtain associated AS Numbers for IPv4 Addresses.

    privaddr: 0 - Normal display of AS numbers,
              1 - Do not show an associated AS Number bound box (cluster)
                  on graph for a private IPv4 Address.

    Populates self._asns (ASN -> list of IPs) and self._asds (ASN -> description).
    """
    ips = {}
    if privaddr:
        for k, v in self._ips.items():
            if not is_private_addr(k):
                ips[k] = v
    else:
        # Copy so deleting the loopback entry below does not mutate the
        # instance's _ips mapping (the original aliased it and did).
        ips = dict(self._ips)
    #
    # Special case for the loopback IP Address: 127.0.0.1 - Do not ASN resolve...
    if '127.0.0.1' in ips:
        del ips['127.0.0.1']
    #
    # ASN Lookup...
    asnquerylist = dict.fromkeys(map(lambda x: x.rsplit(" ", 1)[0], ips)).keys()
    # Initialize before the try so a resolver failure cannot leave the
    # name unbound (the original raised NameError in that path).
    asnlist = []
    if self._asres is not None:
        try:
            asnlist = self._asres.resolve(*asnquerylist)
        except Exception:
            # Best-effort resolution: failures just yield no ASN data.
            pass
    for ip, asn, desc in asnlist:
        if asn is None:
            continue
        iplist = self._asns.get(asn, [])  # Get previous ASN value
        iplist.append(ip)                 # Append IP Address to previous ASN
        #
        # If ASN is a string convert to a number: (i.e., 'AS3257' => 3257)
        if isinstance(asn, str):
            asn = asn.upper().replace('AS', '')
            try:
                asn = int(asn)
            except ValueError:
                continue
        self._asns[asn] = iplist
        self._asds[asn] = desc
python
def decrypt(self, ciphertext):
    """Given ``ciphertext`` returns a ``plaintext`` decrypted using the
    keys specified in ``__init__``.

    Layout: [W1: AES-ECB block carrying the length/IV header]
            [W2: CTR-encrypted payload of plaintext_length bytes]
            [T:  truncated HMAC-SHA512 over W1||W2]

    Raises ``CiphertextTypeError`` if the input ``ciphertext`` is not a string.
    Raises ``RecoverableDecryptionError`` if the input ``ciphertext`` has
    a non-negative message length greater than the ciphertext length.
    Raises ``UnrecoverableDecryptionError`` if invalid padding is detected,
    or the the MAC is invalid.
    """
    if not isinstance(ciphertext, str):
        raise CiphertextTypeError("Input ciphertext is not of type string")

    plaintext_length = self.getPlaintextLen(ciphertext)
    ciphertext_length = self.getCiphertextLen(ciphertext)
    # Recoverable: the caller may retry once more bytes have arrived.
    ciphertext_complete = (len(ciphertext) >= ciphertext_length)
    if ciphertext_complete is False:
        raise RecoverableDecryptionError('Incomplete ciphertext: ('
            + str(len(ciphertext)) + ' of ' + str(ciphertext_length) + ').')

    # Ignore any trailing bytes beyond the declared length.
    ciphertext = ciphertext[:ciphertext_length]

    # Slice the three regions: header block, payload, MAC tag.
    W1_start = 0
    W1_end = AES.block_size
    W1 = ciphertext[W1_start:W1_end]

    W2_start = AES.block_size
    W2_end = AES.block_size + plaintext_length
    W2 = ciphertext[W2_start:W2_end]

    T_start = AES.block_size + plaintext_length
    T_end = AES.block_size + plaintext_length + Encrypter._MAC_LENGTH
    T_expected = ciphertext[T_start:T_end]

    # Verify the (truncated) HMAC before doing any decryption of W2.
    mac = HMAC.new(self.K2, W1 + W2, SHA512)
    T_actual = mac.digest()[:Encrypter._MAC_LENGTH]
    if T_expected != T_actual:
        raise UnrecoverableDecryptionError('Failed to verify MAC.')

    # Derive the CTR IV from the decrypted header: fixed 0x02 prefix plus
    # seven bytes of the ECB-decrypted W1.
    iv2_bytes = '\x02' + self._ecb_enc_K1.decrypt(W1)[1:8]
    counter_val = fte.bit_ops.bytes_to_long(iv2_bytes)

    counter_length_in_bits = AES.block_size * 8
    counter = Counter.new(
        counter_length_in_bits, initial_value=counter_val)

    ctr_enc = AES.new(key=self.K1,
                      mode=AES.MODE_CTR,
                      IV='\x00' * 8 + iv2_bytes,
                      counter=counter)
    plaintext = ctr_enc.decrypt(W2)

    return plaintext
python
def run_task(func):
    """ Decorator to collect and return generator results, returning
        a list if there are multiple results
    """
    # Local import keeps this snippet self-contained.
    import functools

    @functools.wraps(func)  # preserve the wrapped function's name/docstring
    def _wrapped(*a, **k):
        gen = func(*a, **k)
        return _consume_task(gen)
    return _wrapped
python
def persist(self): """Stores the current configuration for pushing to W&B""" # In dryrun mode, without wandb run, we don't # save config on initial load, because the run directory # may not be created yet (because we don't know if we're # being used in a run context, or as an API). # TODO: Defer saving somehow, maybe via an events system path = self._config_path() if path is None: return with open(path, "w") as conf_file: conf_file.write(str(self))
python
async def artwork_save(self):
    """Download artwork and save it to artwork.png.

    Returns 0 on success, 1 when no artwork is available (CLI-style
    exit code).
    """
    artwork = await self.atv.metadata.artwork()
    if artwork is not None:
        # Artwork bytes are written as-is to the fixed filename.
        with open('artwork.png', 'wb') as file:
            file.write(artwork)
    else:
        print('No artwork is currently available.')
        return 1
    return 0
python
def load_profile_from_files(filenames=None, profile=None): """Load a profile from a list of D-Wave Cloud Client configuration files. .. note:: This method is not standardly used to set up D-Wave Cloud Client configuration. It is recommended you use :meth:`.Client.from_config` or :meth:`.config.load_config` instead. Configuration files comply with standard Windows INI-like format, parsable with Python's :mod:`configparser`. Each file in the list is progressively searched until the first profile is found. This function does not input profile information from environment variables. Args: filenames (list[str], default=None): D-Wave cloud client configuration files (path and name). If ``None``, searches for existing configuration files in the standard directories of :func:`get_configfile_paths`. profile (str, default=None): Name of profile to return from reading the configuration from the specified configuration file(s). If ``None``, progressively falls back in the following order: (1) ``profile`` key following ``[defaults]`` section. (2) First non-``[defaults]`` section. (3) ``[defaults]`` section. Returns: dict: Mapping of configuration keys to values. If no valid config/profile is found, returns an empty dict. Raises: :exc:`~dwave.cloud.exceptions.ConfigFileReadError`: Config file specified or detected could not be opened or read. :exc:`~dwave.cloud.exceptions.ConfigFileParseError`: Config file parse failed. :exc:`ValueError`: Profile name not found. Examples: This example loads a profile based on configurations from two files. It finds the first profile, dw2000a, in the first file, dwave_a.conf, and adds to the values of the defaults section, overwriting the existing client value, while ignoring the profile in the second file, dwave_b.conf. 
The files, which are located in the current working directory, are (1) dwave_a.conf:: [defaults] endpoint = https://url.of.some.dwavesystem.com/sapi client = qpu token = ABC-123456789123456789123456789 [dw2000a] client = sw solver = EXAMPLE_2000Q_SYSTEM_A token = DEF-987654321987654321987654321 and (2) dwave_b.conf:: [dw2000b] endpoint = https://url.of.some.other.dwavesystem.com/sapi client = qpu solver = EXAMPLE_2000Q_SYSTEM_B The following example code loads profile values from parsing both these files, by default loading the first profile encountered or an explicitly specified profile. >>> import dwave.cloud as dc >>> dc.config.load_profile_from_files(["./dwave_a.conf", "./dwave_b.conf"]) # doctest: +SKIP {'client': u'sw', 'endpoint': u'https://url.of.some.dwavesystem.com/sapi', 'solver': u'EXAMPLE_2000Q_SYSTEM_A', 'token': u'DEF-987654321987654321987654321'} >>> dc.config.load_profile_from_files(["./dwave_a.conf", "./dwave_b.conf"], ... profile='dw2000b') # doctest: +SKIP {'client': u'qpu', 'endpoint': u'https://url.of.some.other.dwavesystem.com/sapi', 'solver': u'EXAMPLE_2000Q_SYSTEM_B', 'token': u'ABC-123456789123456789123456789'} """ # progressively build config from a file, or a list of auto-detected files # raises ConfigFileReadError/ConfigFileParseError on error config = load_config_from_files(filenames) # determine profile name fallback: # (1) profile key under [defaults], # (2) first non-[defaults] section # (3) [defaults] section first_section = next(iter(config.sections() + [None])) config_defaults = config.defaults() if not profile: profile = config_defaults.get('profile', first_section) if profile: try: section = dict(config[profile]) except KeyError: raise ValueError("Config profile {!r} not found".format(profile)) else: # as the very last resort (unspecified profile name and # no profiles defined in config), try to use [defaults] if config_defaults: section = config_defaults else: section = {} return section
java
/**
 * Fetches the appfwpolicylabel resources that match the given filter.
 *
 * @param service the nitro service used to issue the request
 * @param filter  filter expression, e.g. "propname:propval,propname2:propval2"
 * @return the matching resources
 * @throws Exception if the underlying nitro call fails
 */
public static appfwpolicylabel[] get_filtered(nitro_service service, String filter) throws Exception{
    appfwpolicylabel resource = new appfwpolicylabel();
    options opts = new options();
    opts.set_filter(filter);
    return (appfwpolicylabel[]) resource.getfiltered(service, opts);
}
java
/**
 * Convolves each row of the source image with a one-dimensional kernel and
 * writes the result transposed (row y becomes column y) into {@code outPixels};
 * running this twice performs a separable 2D convolution.
 *
 * @param kernel        1D convolution kernel (its width defines the tap count)
 * @param inPixels      source ARGB pixels, row-major, width*height long
 * @param outPixels     destination pixels, written transposed
 * @param width         source image width
 * @param height        source image height
 * @param alpha         convolve the alpha channel too (otherwise output is opaque)
 * @param premultiply   premultiply RGB by alpha before accumulating
 * @param unpremultiply divide RGB by accumulated alpha afterwards
 * @param edgeAction    CLAMP_EDGES, WRAP_EDGES, or zero outside the image
 */
public static void convolveAndTranspose(Kernel kernel, int[] inPixels, int[] outPixels, int width, int height, boolean alpha, boolean premultiply, boolean unpremultiply, int edgeAction) {
    float[] matrix = kernel.getKernelData(null);
    int cols = kernel.getWidth();
    int cols2 = cols / 2;

    for (int y = 0; y < height; y++) {
        // index walks down an output column: transposed write position.
        int index = y;
        int ioffset = y * width;
        for (int x = 0; x < width; x++) {
            float r = 0, g = 0, b = 0, a = 0;
            int moffset = cols2;
            for (int col = -cols2; col <= cols2; col++) {
                float f = matrix[moffset + col];

                if (f != 0) {
                    int ix = x + col;
                    if (ix < 0) {
                        if (edgeAction == CLAMP_EDGES)
                            ix = 0;
                        else if (edgeAction == WRAP_EDGES)
                            // NOTE(review): wraps using x, not ix — for
                            // taps reaching past the image this reads the
                            // current column instead of a wrapped one; verify.
                            ix = (x + width) % width;
                    } else if (ix >= width) {
                        if (edgeAction == CLAMP_EDGES)
                            ix = width - 1;
                        else if (edgeAction == WRAP_EDGES)
                            ix = (x + width) % width;
                    }
                    int rgb = inPixels[ioffset + ix];
                    int pa = (rgb >> 24) & 0xff;
                    int pr = (rgb >> 16) & 0xff;
                    int pg = (rgb >> 8) & 0xff;
                    int pb = rgb & 0xff;
                    if (premultiply) {
                        float a255 = pa * (1.0f / 255.0f);
                        pr *= a255;
                        pg *= a255;
                        pb *= a255;
                    }
                    a += f * pa;
                    r += f * pr;
                    g += f * pg;
                    b += f * pb;
                }
            }
            if (unpremultiply && a != 0 && a != 255) {
                float f = 255.0f / a;
                r *= f;
                g *= f;
                b *= f;
            }
            // +0.5 rounds to nearest before clamping to [0, 255].
            int ia = alpha ? PixelUtils.clamp((int) (a + 0.5)) : 0xff;
            int ir = PixelUtils.clamp((int) (r + 0.5));
            int ig = PixelUtils.clamp((int) (g + 0.5));
            int ib = PixelUtils.clamp((int) (b + 0.5));
            outPixels[index] = (ia << 24) | (ir << 16) | (ig << 8) | ib;
            index += height;
        }
    }
}
python
def add_to_submenu(self, submenu_path, item):
    '''
    add an item to a submenu using a menu path array
    '''
    target = submenu_path[0]
    for entry in self.items:
        if entry.name == target:
            # Descend one level: the matching child handles the rest of
            # the path.
            entry.add_to_submenu(submenu_path[1:], item)
            return
    raise ValueError("No submenu (%s) found" % (target,))
python
def _rule_value(self): """ Parses the production rule:: value : TERM (',' TERM)* Returns list of string terms. """ terms = [self._get_token()] # consume additional terms if available while self._lookahead_token() == ',': self._get_token() # chomp the comma terms.append(self._get_token()) return terms
java
/**
 * Sets the field-naming rule used when mapping JSON properties.
 *
 * @param naming the naming rule to apply (must not be null)
 * @return this option instance, for call chaining
 * @throws IllegalArgumentException if the argument is null
 */
public JsonMappingOption asFieldNaming(JsonFieldNaming naming) {
    if (naming == null) {
        throw new IllegalArgumentException("The argument 'fieldNaming' should not be null.");
    }
    this.fieldNaming = OptionalThing.of(naming);
    return this;
}
python
def get_device_activity(self, type_p):
    """Gets the current activity type of given devices or device groups.

    in type_p of type :class:`DeviceType`

    return activity of type :class:`DeviceActivity`

    raises :class:`OleErrorInvalidarg`
        Invalid device type.

    """
    if not isinstance(type_p, list):
        raise TypeError("type_p can only be an instance of type list")
    # Only the first 10 entries are validated — presumably the backing
    # API accepts at most 10 device types per call; TODO confirm.
    for a in type_p[:10]:
        if not isinstance(a, DeviceType):
            raise TypeError(
                "array can only contain objects of type DeviceType")
    activity = self._call("getDeviceActivity",
                          in_p=[type_p])
    # Wrap each raw value returned by the call in the DeviceActivity enum.
    activity = [DeviceActivity(a) for a in activity]
    return activity
java
/**
 * Creates an aggregation operation over this collection's namespace.
 *
 * @param pipeline    the aggregation pipeline stages
 * @param resultClass the class each result document is decoded into
 * @param <ResultT>   the result document type
 * @return a new {@code AggregateOperation} bound to this namespace and synchronizer
 */
<ResultT> AggregateOperation<ResultT> aggregate(
    final List<? extends Bson> pipeline,
    final Class<ResultT> resultClass) {
  return new AggregateOperation<>(namespace, dataSynchronizer, pipeline, resultClass);
}
python
def bitonic_sort(arr, reverse=False):
    """
    bitonic sort is sorting algorithm to use multiple process, but this
    code not containing parallel process
    It can sort only array that sizes power of 2
    It can sort array in both increasing order and decreasing order by
    giving argument true(increasing) and false(decreasing)

    Worst-case in parallel: O(log(n)^2)
    Worst-case in non-parallel: O(nlog(n)^2)

    reference: https://en.wikipedia.org/wiki/Bitonic_sorter
    """
    def _swap_phase(seq, descending):
        # Compare elements half the sequence apart and swap out-of-order pairs.
        half = len(seq) // 2
        for idx in range(half):
            if descending != (seq[idx] > seq[idx + half]):
                seq[idx], seq[idx + half] = seq[idx + half], seq[idx]
        return seq

    def _merge(seq, descending):
        # Recursively merge a bitonic sequence into a sorted one.
        if len(seq) <= 1:
            return seq
        seq = _swap_phase(seq, descending)
        mid = len(seq) // 2
        return _merge(seq[:mid], descending) + _merge(seq[mid:], descending)

    size = len(arr)
    if size <= 1:
        return arr
    # Reject sizes that are not a power of two.
    if size & (size - 1):
        raise ValueError("the size of input should be power of two")

    # Build a bitonic sequence: first half ascending... (True half),
    # second half the opposite, then merge in the requested direction.
    first = bitonic_sort(arr[:size // 2], True)
    second = bitonic_sort(arr[size // 2:], False)
    return _merge(first + second, reverse)
java
/**
 * Appends the given content maps to this item's content list, creating the
 * backing list on first use, and returns {@code this} for chaining.
 *
 * @param values the content entries to append
 * @return this item
 */
public InventoryResultItem withContent(java.util.Map<String, String>... values) {
    if (this.content == null) {
        setContent(new com.amazonaws.internal.SdkInternalList<java.util.Map<String, String>>(values.length));
    }
    for (java.util.Map<String, String> entry : values) {
        this.content.add(entry);
    }
    return this;
}
java
/**
 * Resets the processor registry and registers the standard field processors
 * for each supported annotation type.
 */
public void init() {
    pocessorMap.clear();

    // Register the standard field processors.
    registerProcessor(XlsSheetName.class, new SheetNameProcessor());
    registerProcessor(XlsCell.class, new CellProcessor());
    registerProcessor(XlsLabelledCell.class, new LabelledCellProcessor());
    registerProcessor(XlsHorizontalRecords.class, new HorizontalRecordsProcessor());
    registerProcessor(XlsVerticalRecords.class, new VerticalRecordsProcessor());
    registerProcessor(XlsIterateTables.class, new IterateTablesProcessor());
    registerProcessor(XlsArrayCells.class, new ArrayCellsProcessor());
    registerProcessor(XlsLabelledArrayCells.class, new LabelledArrayCellsProcessor());
}
java
/**
 * Asynchronously flushes all table queues by building a table and loading it
 * on the executor thread.
 *
 * @return a Future that completes with {@code true} once the load has finished
 * @throws InterruptedException declared for callers; the submission itself does not block
 */
Future<?> flushAllTableQueues() throws InterruptedException {
    return m_es.submit(new Callable<Boolean>() {
        @Override
        public Boolean call() throws Exception {
            // Runs on the executor: build a fresh table and load it into m_table.
            loadTable(buildTable(), m_table);
            return true;
        }
    });
}
python
def get_curve_name(self, ecdh=False):
    """Return correct curve name for device operations."""
    if not ecdh:
        return self.curve_name
    # ECDH operations use the matching ECDH curve variant.
    return formats.get_ecdh_curve_name(self.curve_name)
python
def now(self):
    """
    Function to return just the current timestep from this forecast

    Returns the timestep of the first day whose name (minutes after
    midnight) is closest below the current time; falls back to the last
    timestep of the first day near midnight, and returns False otherwise.
    """
    # From the comments in issue 19: forecast.days[0] is dated for the
    # previous day shortly after midnight

    now = None
    # Set the time now to be in the same time zone as the first timestep in
    # the forecast. This shouldn't cause problems with daylight savings as
    # the change is far enough after midnight.
    d = datetime.datetime.now(tz=self.days[0].date.tzinfo)
    # d is something like datetime.datetime(2019, 1, 19, 17, 5, 28, 337439)
    # d.replace(...) is datetime.datetime(2019, 1, 19, 0, 0)
    # for_total_seconds is then: datetime.timedelta(seconds=61528,
    # microseconds=337439)
    # In this example, this is (17*60*60) + (5*60) + 28 = 61528
    # this is the number of seconds through the day
    for_total_seconds = d - \
        d.replace(hour=0, minute=0, second=0, microsecond=0)

    # In the example time,
    # for_total_seconds.total_seconds() = 61528 + 0.337439
    # This is the number of seconds after midnight
    # msm is then the number of minutes after midnight
    msm = for_total_seconds.total_seconds() / 60

    # If the date now and the date in the forecast are the same, proceed
    if self.days[0].date.strftime("%Y-%m-%dZ") == d.strftime("%Y-%m-%dZ"):
        # We have determined that the date in the forecast and the date now
        # are the same.
        #
        # Now, test if timestep.name is larger than the number of minutes
        # since midnight for each timestep.
        # The timestep we keep is the one with the largest timestep.name
        # which is less than the number of minutes since midnight
        for timestep in self.days[0].timesteps:
            if timestep.name > msm:
                # break here stops the for loop
                break
            # now is assigned to the last timestep that did not break the
            # loop
            now = timestep
        return now

    # Bodge to get around problems near midnight:
    # Previous method does not account for the end of the month. The test
    # trying to be evaluated is that the absolute difference between the
    # last timestep of the first day and the current time is less than 4
    # hours. 4 hours is because the final timestep of the previous day is
    # for 21:00
    elif abs(self.days[0].timesteps[-1].date - d).total_seconds() < 14400:
        # This is verbose to check that the returned data makes sense
        timestep_to_return = self.days[0].timesteps[-1]
        return timestep_to_return

    else:
        return False
python
def plot_pointings(self, pointings=None):
    """Plot pointings on canavs

    Draws each pointing's camera footprint (rectangles for 4-element CCD
    geometries, ovals otherwise) on the canvas and records the created
    canvas items and label back onto the pointing dict.
    """
    if pointings is None:
        pointings = self.pointings

    i = 0
    for pointing in pointings:
        items = []
        i = i + 1
        label = {}
        label['text'] = pointing['label']['text']
        # NOTE(review): the geometry is converted to radians here and a
        # 4-element ccd is converted again inside the loop — the double
        # conversion looks suspicious; confirm the geometry units.
        for ccd in numpy.radians(pointing["camera"].geometry):
            if len(ccd) == 4:
                # Rectangular footprint given by two corner coordinates.
                ccd = numpy.radians(numpy.array(ccd))
                (x1, y1) = self.p2c((ccd[0], ccd[1]))
                (x2, y2) = self.p2c((ccd[2], ccd[3]))
                item = self.create_rectangle(x1, y1, x2, y2,
                                             stipple='gray25',
                                             fill=pointing.get('color', ''))
            else:
                # Presumably a circular footprint (centre +/- radius) —
                # verify against the camera geometry definition.
                (x1, y1) = self.p2c((ccd[0] - ccd[2]), ccd[1] - ccd[2])
                (x2, y2) = self.p2c((ccd[0] + ccd[2]), ccd[1] + ccd[2])
                item = self.create_oval(x1, y1, x2, y2)
            items.append(item)
        if self.show_labels.get() == 1:
            label['id'] = self.label(pointing["camera"].ra.radian,
                                     pointing["camera"].dec.radian,
                                     label['text'])
        pointing["items"] = items
        pointing["label"] = label
java
/**
 * Parses the given bytes with a freshly created parser using the supplied
 * charset.
 *
 * @param bytes   the raw content to parse; must not be null
 * @param charset the character set used to decode the bytes; must not be null
 * @return the parsed content object
 * @throws IllegalArgumentException if either argument is null
 */
public Object parse(byte [] bytes, String charset) {
    if (bytes == null) {
        throw new IllegalArgumentException("bytes must not be null");
    }
    if (charset == null) {
        throw new IllegalArgumentException("charset must not be null");
    }
    return createParser().parse(bytes, charset);
}
python
def build_or_install_bokehjs(): ''' Build a new BokehJS (and install it) or install a previously build BokehJS. If no options ``--build-js`` or ``--install-js`` are detected, the user is prompted for what to do. If ``--existing-js`` is detected, then this setup.py is being run from a packaged sdist, no action is taken. Note that ``-build-js`` is only compatible with the following ``setup.py`` commands: install, develop, sdist, egg_info, build Returns: str : one of 'built', 'installed', 'packaged' How (or if) BokehJS was installed into the python source tree ''' # This happens when building from inside a published, pre-packaged sdist # The --existing-js option is not otherwise documented if '--existing-js' in sys.argv: sys.argv.remove('--existing-js') return "packaged" if '--build-js' not in sys.argv and '--install-js' not in sys.argv: jsbuild = jsbuild_prompt() elif '--build-js' in sys.argv: jsbuild = True sys.argv.remove('--build-js') # must be "--install-js" else: jsbuild = False sys.argv.remove('--install-js') jsbuild_ok = ('install', 'develop', 'sdist', 'egg_info', 'build') if jsbuild and not any(arg in sys.argv for arg in jsbuild_ok): print("Error: Option '--build-js' only valid with 'install', 'develop', 'sdist', or 'build', exiting.") sys.exit(1) if jsbuild: build_js() install_js() return "built" else: install_js() return "installed"
java
/**
 * Appends the given date filters to this object's firstObservedAt list,
 * creating the backing list on first use, and returns {@code this} for chaining.
 *
 * @param values the filters to append
 * @return this filters object
 */
public AwsSecurityFindingFilters withFirstObservedAt(DateFilter... values) {
    if (this.firstObservedAt == null) {
        setFirstObservedAt(new java.util.ArrayList<DateFilter>(values.length));
    }
    for (DateFilter entry : values) {
        this.firstObservedAt.add(entry);
    }
    return this;
}
java
/**
 * Copies the resource identified by the given URI to the target file.
 * Supports a custom {@code classpath:} scheme (resolved against the given
 * class loader) as well as any standard URL scheme.
 *
 * @param uri         the resource location ({@code classpath:...} or a regular URL)
 * @param classLoader class loader used to resolve classpath resources
 * @param target      destination file to write the resource to
 * @throws MojoExecutionException if the URI is malformed, the resource cannot
 *                                be found or opened, or the copy fails
 */
public void copyResources( URI uri, ClassLoader classLoader, File target )
    throws MojoExecutionException
{
    URL url;

    String scheme = uri.getScheme();
    if ( "classpath".equals( scheme ) )
    {
        // get resource from class-path
        String path = uri.getPath();

        if ( path == null )
        {
            // can happen when using classpath:myFile
            path = uri.toString().substring( scheme.length() + 1 );
        }

        if ( path.startsWith( "/" ) )
        {
            // remove first car
            path = path.substring( 1 );
        }
        url = classLoader.getResource( path );
    }
    else
    {
        // classic url from uri
        try
        {
            url = uri.toURL();
        }
        catch ( MalformedURLException e )
        {
            throw new MojoExecutionException( "Bad uri syntax " + uri, e );
        }
    }

    InputStream inputStream;

    try
    {
        inputStream = url.openStream();
    }
    catch ( IOException e )
    {
        throw new MojoExecutionException( "Could not open resource " + url, e );
    }

    // NOTE(review): openStream() does not return null on failure — this
    // branch looks unreachable; the NullPointerException from a null url
    // above would fire first. Verify.
    if ( inputStream == null )
    {
        throw new MojoExecutionException( "Could not find resource " + url );
    }

    try
    {
        OutputStream outputStream = null;

        try
        {
            outputStream = new FileOutputStream( target );
            org.codehaus.plexus.util.IOUtil.copy( inputStream, outputStream );
            outputStream.close();
            inputStream.close();
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Could not copy resource from " + url + " to " + target, e );
        }
        finally
        {
            if ( outputStream != null )
            {
                org.codehaus.plexus.util.IOUtil.close( outputStream );
            }
        }
    }
    finally
    {
        org.codehaus.plexus.util.IOUtil.close( inputStream );
    }
}
java
/**
 * Adds a topic ACL for the given destination into the matchspace, widening
 * the supplied topic with a trailing multi-level wildcard so the ACL also
 * covers sub-topics.
 *
 * @param destName the destination UUID (may be null)
 * @param acl      the ACL to register; its topic may be null
 * @throws SIDiscriminatorSyntaxException if the resulting topic expression is invalid
 */
public void addTopicAcl(SIBUuid12 destName, TopicAcl acl)
    throws SIDiscriminatorSyntaxException
{
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "addTopicAcl", new Object[] { destName, acl });

    String discriminator = null;
    // Postpend # wildcard to the fully qualified topic supplied
    // TO DO check the topicsyntax
    String theTopic = "";
    if(destName != null)
    {
        String destNameStr = destName.toString();
        theTopic = buildAddTopicExpression(destNameStr, acl.getTopic());
    }

    // Careful, only do this if the original topic is not null
    if(acl.getTopic() != null)
    {
        // "//." matches the topic itself and everything below it.
        discriminator = theTopic + "//.";
    }
    else
    {
        discriminator = theTopic;
    }

    // Put the acl into the matchspace
    try
    {
        addTarget(acl, // N.B we use the raw CP as key
                  discriminator, // wildcarded topic expression
                  null, // selector string
                  null, // selector domain
                  null, // this'll pick up the default resolver
                  acl,
                  null,
                  null); // selector properties
    }
    catch (QuerySyntaxException e)
    {
        // No FFDC code needed
    }
    catch (InvalidTopicSyntaxException e)
    {
        // No FFDC code needed
        SibTr.exception(tc, e);

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "addTopicAcl", "SIDiscriminatorSyntaxException");

        throw new SIDiscriminatorSyntaxException(
            nls.getFormattedMessage(
                "INVALID_TOPIC_ERROR_CWSIP0372",
                new Object[] { theTopic },
                null));
    }
    catch (MatchingException e)
    {
        // FFDC
        FFDCFilter.processException(
            e,
            "com.ibm.ws.sib.processor.matching.MessageProcessorMatching.addTopicAcl",
            "1:1954:1.117.1.11",
            this);

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "addTopicAcl", "SIErrorException");

        SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                    new Object[] {
                                  "com.ibm.ws.sib.processor.matching.MessageProcessorMatching",
                                  "1:1962:1.117.1.11",
                                  e });

        throw new SIErrorException(
            nls.getFormattedMessage(
                "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                new Object[] {
                              "com.ibm.ws.sib.processor.matching.MessageProcessorMatching",
                              "1:1970:1.117.1.11",
                              e },
                null),
            e);
    }

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "addTopicAcl");
}
java
/**
 * Creates an SVD decomposer using a default expected matrix size of 100x100.
 *
 * @param needU   true if the U matrix is required
 * @param needV   true if the V matrix is required
 * @param compact true for the compact (economy) decomposition
 * @return a new SVD decomposer
 */
public static SingularValueDecomposition_F64<DMatrixRMaj> svd(boolean needU, boolean needV, boolean compact) {
    final int defaultSize = 100;
    return svd(defaultSize, defaultSize, needU, needV, compact);
}
python
def balance(self, as_of=None, raw=False, leg_query=None, **kwargs):
    """Get the balance for this account, including child accounts

    Args:
        as_of (Date): Only include transactions on or before this date
        raw (bool): If true the returned balance should not have its sign
                    adjusted for display purposes.
        kwargs (dict): Will be used to filter the transaction legs

    Returns:
        Balance

    See Also:
        :meth:`simple_balance()`
    """
    total = Balance()
    # Accumulate the simple balance of every descendant (self included).
    for account in self.get_descendants(include_self=True):
        total = total + account.simple_balance(
            as_of=as_of, raw=raw, leg_query=leg_query, **kwargs
        )
    return total
python
def units(self) -> typing.Iterator['BaseUnit']:
    """
    Iterates over all units

    Returns: generator of Unit
    """
    # Flatten the units of every group into a single stream.
    for grp in self.groups:
        yield from grp.units
python
def __send_retry_requests(self, last_send_failure_time): """Called via Timer from __send_ready to resend requests which might not have been sent due to transport failure. This can happen since the current transport implementation does not received acknowledgements for sent messages.""" # make sure multiple failures having set multiple times do not run concurrently with self.__send_retry_requests_lock: with self.__requests: # produce list instead of generator as requests mapping can change during subsequent loop retry_reqs = [req for req in self.__requests.values() if req._sent_without_response(last_send_failure_time)] retry_req_count = 0 # don't continue if another network failure has occured (which will trigger this function again) while retry_reqs and self.__amqplink.last_send_exc_time <= last_send_failure_time: req = retry_reqs.pop() # lock individuallly so incoming request handling does not 'pause' for too long with self.__requests: # might have received a response (or finished since) if not (req.id_ in self.__requests and req._sent_without_response(last_send_failure_time)): logger.debug('Not resending request %s (finished or has received response)', req.id_) continue logger.debug('Resending request %s', req.id_) if not self.__retry_enqueue(PreparedMessage(req._inner_msg_out, req.id_)): # client shutdown break retry_req_count += 1 if retry_req_count: logger.debug('Resending of %d request(s) complete (before %s)', retry_req_count, last_send_failure_time)
python
def request(self, method, url, **kwargs):
    """Constructs a :class:`requests.Request`, prepares it and sends it.
    Raises HTTPErrors by default.

    :param method: method for the new :class:`Request` object.
    :type method: :class:`str`
    :param url: URL for the new :class:`Request` object.
    :type url: :class:`str`
    :param kwargs: keyword arguments of :meth:`requests.Session.request`
    :returns: a response object
    :rtype: :class:`requests.Response`
    :raises: :class:`requests.HTTPError`
    """
    if oauthlib.oauth2.is_secure_transport(url):
        # https: dispatch to the class after OAuthSession in the MRO so
        # the OAuth2 machinery attaches the token.
        m = super(OAuthSession, self).request
    else:
        # plain http: skip OAuth2Session in the MRO and call the base
        # requests.Session.request — presumably so credentials are never
        # sent over an insecure transport; verify.
        m = super(requests_oauthlib.OAuth2Session, self).request
    log.debug("%s \"%s\" with %s", method, url, kwargs)
    response = m(method, url, **kwargs)
    response.raise_for_status()
    return response
python
def rank(self): """convert a list of integers so that the lowest integer is 0, the next lowest is 1 ... note: modifies list in place""" # XXX FIX ME, should the lowest value be 1 or 0? symclasses = self.symclasses stableSort = map(None, symclasses, range(len(symclasses))) stableSort.sort() last = None x = -1 for order, i in stableSort: if order != last: x += 1 last = order symclasses[i] = x
python
def _key_name(self): # type: () -> str """Return the key referring to this object The default value is the lower case version of the class name :rtype: str """ if self._key is not None: return self._key return self.__class__.__name__.lower()
python
def b58encode_check(v: bytes) -> str:
    '''Encode a string using Base58 with a 4 character checksum'''
    # Checksum is the first 4 bytes of the double-SHA256 of the payload.
    checksum = sha256(sha256(v).digest()).digest()[:4]
    return b58encode(v + checksum)
java
/**
 * Invoked when a bus itinerary was removed; mirrors the removal onto the
 * map layers when automatic updating is enabled.
 *
 * @param itinerary the removed itinerary (not used here; kept for the callback signature)
 * @param index     index of the map layer to remove
 * @return {@code true} if the corresponding map layer was removed
 */
protected boolean onBusItineraryRemoved(BusItinerary itinerary, int index) {
    if (this.autoUpdate.get()) {
        try {
            removeMapLayerAt(index);
            return true;
        } catch (Throwable exception) {
            // Deliberately best-effort: any failure to update the layer
            // is swallowed and reported as false.
        }
    }
    return false;
}
java
/**
 * Builds extra setter Javadoc guidance for special member types: JSON-valued
 * fields, Base64-encoded fields, and mutable {@code ByteBuffer} fields.
 *
 * @return the accumulated HTML documentation fragments (may be empty)
 */
private String getSetterGuidanceDoc() {
    StringBuilder docBuilder = new StringBuilder();

    if (isJsonValue()) {
        docBuilder.append("<p>")
                  .append(LINE_SEPARATOR)
                  .append("This field's value must be valid JSON according to RFC 7159, including the opening and closing ")
                  .append("braces. For example: '{\"key\": \"value\"}'.")
                  .append(LINE_SEPARATOR)
                  .append("</p>")
                  .append(LINE_SEPARATOR);
    }

    boolean isByteBuffer = "java.nio.ByteBuffer".equals(this.getGetterModel().getReturnType());
    if (isByteBuffer || isJsonValue()) {
        docBuilder.append("<p>")
                  .append(LINE_SEPARATOR)
                  .append("The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the ")
                  .append("AWS service. Users of the SDK should not perform Base64 encoding on this field.")
                  .append(LINE_SEPARATOR)
                  .append("</p>")
                  .append(LINE_SEPARATOR);
    }

    if (isByteBuffer) {
        docBuilder.append("<p>")
                  .append(LINE_SEPARATOR)
                  .append("Warning: ByteBuffers returned by the SDK are mutable. " +
                          "Changes to the content or position of the byte buffer will be " +
                          "seen by all objects that have a reference to this object. " +
                          "It is recommended to call ByteBuffer.duplicate() or " +
                          "ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. " +
                          "This behavior will be changed in a future major version of the SDK.")
                  .append(LINE_SEPARATOR)
                  .append("</p>")
                  .append(LINE_SEPARATOR);
    }

    return docBuilder.toString();
}
python
async def list(self, setname=None):
    """
    Lists the existing ipsets. If setname is given, only lists this ipset.

    The resulting command looks like one of the following:

    * ``ipset list``
    * ``ipset list ellis_blacklist4``
    """
    # Only append the set name when one was requested.
    args = ['list'] if setname is None else ['list', setname]
    return await self.start(__class__.CMD, *args)
java
/**
 * Registers a metric under the given name for this task, bucketed by the
 * given time interval. Must be called from IBolt::prepare() or
 * ISpout::open(), before the component has started.
 *
 * @param name                 unique metric name for this task
 * @param metric               the metric instance to register (must not be null)
 * @param timeBucketSizeInSecs reporting bucket size in seconds (must be &gt;= 1)
 * @return the registered metric (same instance passed in)
 * @throws RuntimeException         if called too late or the name is already taken
 * @throws IllegalArgumentException if the metric is null or the bucket size is invalid
 */
@SuppressWarnings("unchecked")
public <T extends IMetric> T registerMetric(String name, T metric, int timeBucketSizeInSecs) {
    if ((Boolean) _openOrPrepareWasCalled.deref()) {
        throw new RuntimeException("TopologyContext.registerMetric can only be called from within overridden " +
                                   "IBolt::prepare() or ISpout::open() method.");
    }

    if (metric == null) {
        throw new IllegalArgumentException("Cannot register a null metric");
    }

    if (timeBucketSizeInSecs <= 0) {
        throw new IllegalArgumentException("TopologyContext.registerMetric can only be called with timeBucketSizeInSecs " +
                                           "greater than or equal to 1 second.");
    }

    if (getRegisteredMetricByName(name) != null) {
        throw new RuntimeException("The same metric name `" + name + "` was registered twice.");
    }

    // Registry layout: bucket size -> task id -> metric name -> metric.
    Map m1 = _registeredMetrics;
    if (!m1.containsKey(timeBucketSizeInSecs)) {
        m1.put(timeBucketSizeInSecs, new HashMap());
    }

    Map m2 = (Map) m1.get(timeBucketSizeInSecs);
    if (!m2.containsKey(_taskId)) {
        m2.put(_taskId, new HashMap());
    }

    Map m3 = (Map) m2.get(_taskId);
    if (m3.containsKey(name)) {
        throw new RuntimeException("The same metric name `" + name + "` was registered twice.");
    } else {
        m3.put(name, metric);
    }

    return metric;
}
java
/**
 * Builds an HTTP error {@link Response} whose body is the S3-style XML
 * serialization of the given exception.
 *
 * @param e the S3 exception to convert
 * @return a response with the exception's status code and an XML body, or a
 *         500 response if the XML serialization itself fails
 */
private static Response createErrorResponse(S3Exception e) {
    S3Error errorResponse = new S3Error(e.getResource(), e.getErrorCode());
    // Need to explicitly encode the string as XML because Jackson will not do it automatically.
    XmlMapper mapper = new XmlMapper();
    try {
        return Response.status(e.getErrorCode().getStatus())
            .entity(mapper.writeValueAsString(errorResponse)).build();
    } catch (JsonProcessingException e2) {
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
            .entity("Failed to encode XML: " + e2.getMessage()).build();
    }
}
python
def _landsat_stats(
    band,
    address_prefix,
    metadata,
    overview_level=None,
    max_size=1024,
    percentiles=(2, 98),
    dst_crs=CRS({"init": "EPSG:4326"}),
    histogram_bins=10,
    histogram_range=None,
):
    """
    Retrieve landsat dataset statistics.

    Attributes
    ----------
    band : str
        Landsat band number
    address_prefix : str
        A Landsat AWS S3 dataset prefix.
    metadata : dict
        Landsat metadata
    overview_level : int, optional
        Overview (decimation) level to fetch. ``0`` is a valid level; when
        ``None`` a level is chosen automatically from ``max_size``.
    max_size: int, optional
        Maximum size of dataset to retrieve
        (will be used to calculate the overview level to fetch).
    percentiles : tulple, optional
        Percentile or sequence of percentiles to compute,
        which must be between 0 and 100 inclusive (default: (2, 98)).
    dst_crs: CRS or dict
        Target coordinate reference system (default: EPSG:4326).
    histogram_bins: int, optional
        Defines the number of equal-width histogram bins (default: 10).
    histogram_range: tuple or list, optional
        The lower and upper range of the bins. If not provided, range is
        simply the min and max of the array.

    Returns
    -------
    out : dict
        (percentiles), min, max, stdev, histogram for each band,
        e.g.
        {
            "4": {
                'pc': [15, 121],
                'min': 1,
                'max': 162,
                'std': 27.22067722127997,
                'histogram': [
                    [102934, 135489, 20981, 13548, 11406, 8799, 7351, 5622, 2985, 662]
                    [1., 17.1, 33.2, 49.3, 65.4, 81.5, 97.6, 113.7, 129.8, 145.9, 162.]
                ]
            }
        }
    """
    src_path = "{}_B{}.TIF".format(address_prefix, band)
    with rasterio.open(src_path) as src:
        levels = src.overviews(1)
        width = src.width
        height = src.height
        bounds = transform_bounds(
            *[src.crs, dst_crs] + list(src.bounds), densify_pts=21
        )

        if len(levels):
            # BUGFIX: previously `if overview_level:` — that treated the
            # valid overview level 0 as "not given" and fell through to
            # the automatic selection. Compare against None explicitly.
            if overview_level is not None:
                decim = levels[overview_level]
            else:
                # determine which zoom level to read
                for ii, decim in enumerate(levels):
                    if max(width // decim, height // decim) < max_size:
                        break
        else:
            decim = 1
            warnings.warn(
                "Dataset has no overviews, reading the full dataset", NoOverviewWarning
            )

        out_shape = (height // decim, width // decim)
        vrt_params = dict(
            nodata=0, add_alpha=False, src_nodata=0, init_dest_nodata=False
        )
        with WarpedVRT(src, **vrt_params) as vrt:
            arr = vrt.read(out_shape=out_shape, indexes=[1], masked=True)

    if band in ["10", "11"]:  # TIRS
        multi_rad = metadata["RADIOMETRIC_RESCALING"].get(
            "RADIANCE_MULT_BAND_{}".format(band)
        )
        add_rad = metadata["RADIOMETRIC_RESCALING"].get(
            "RADIANCE_ADD_BAND_{}".format(band)
        )
        k1 = metadata["TIRS_THERMAL_CONSTANTS"].get("K1_CONSTANT_BAND_{}".format(band))
        k2 = metadata["TIRS_THERMAL_CONSTANTS"].get("K2_CONSTANT_BAND_{}".format(band))
        arr = brightness_temp.brightness_temp(arr, multi_rad, add_rad, k1, k2)
    else:
        multi_reflect = metadata["RADIOMETRIC_RESCALING"].get(
            "REFLECTANCE_MULT_BAND_{}".format(band)
        )
        add_reflect = metadata["RADIOMETRIC_RESCALING"].get(
            "REFLECTANCE_ADD_BAND_{}".format(band)
        )
        sun_elev = metadata["IMAGE_ATTRIBUTES"]["SUN_ELEVATION"]
        arr = 10000 * reflectance.reflectance(
            arr, multi_reflect, add_reflect, sun_elev, src_nodata=0
        )

    params = {}
    if histogram_bins:
        params.update(dict(bins=histogram_bins))
    if histogram_range:
        params.update(dict(range=histogram_range))

    stats = {band: utils._stats(arr, percentiles=percentiles, **params)}

    return {
        "bounds": {
            "value": bounds,
            "crs": dst_crs.to_string() if isinstance(dst_crs, CRS) else dst_crs,
        },
        "statistics": stats,
    }
python
def _ppf(self, qloc, cache, **kwargs):
    """
    Inverse CDF (percent point function) of the joint distribution,
    evaluating each component in dependency order so dependent
    dimensions see their parents' results via the cache.

    Example:
        >>> dist = chaospy.J(chaospy.Uniform(), chaospy.Normal())
        >>> print(numpy.around(dist.inv([[0.1, 0.2, 0.3], [0.3, 0.3, 0.4]]), 4))
        [[ 0.1     0.2     0.3   ]
         [-0.5244 -0.5244 -0.2533]]
        >>> d0 = chaospy.Uniform()
        >>> dist = chaospy.J(d0, d0+chaospy.Uniform())
        >>> print(numpy.around(dist.inv([[0.1, 0.2, 0.3], [0.3, 0.3, 0.4]]), 4))
        [[0.1 0.2 0.3]
         [0.4 0.5 0.7]]
    """
    xloc = numpy.zeros(qloc.shape)
    # Evaluate parents before children so dependent inverses can read
    # their parents' values from the cache.
    for dist in evaluation.sorted_dependencies(self, reverse=True):
        if dist not in self.inverse_map:
            continue
        idx = self.inverse_map[dist]
        qloc_ = qloc[idx].reshape(1, -1)
        xloc[idx] = evaluation.evaluate_inverse(
            dist, qloc_, cache=cache)[0]
    return xloc
python
def abort(self, count=2, timeout=60):
    '''
    Send an abort sequence using CAN bytes.

    :param count: number of CAN control bytes to send (default 2).
    :param timeout: timeout value passed through to ``putc`` for each
        write — units per putc's contract; presumably seconds.
    '''
    # NOTE: xrange — this module targets Python 2.
    for counter in xrange(0, count):
        self.putc(CAN, timeout)
java
/**
 * Sets the attribute only when both the name and the value are non-null;
 * otherwise the call is a no-op.
 *
 * @param attr  attribute name, ignored when {@code null}
 * @param value attribute value, ignored when {@code null}
 * @return this Dict, for call chaining
 */
public Dict setIgnoreNull(String attr, Object value) {
    if (attr != null && value != null) {
        set(attr, value);
    }
    return this;
}
java
/**
 * Copies {@code length} bytes from {@code source} starting at
 * {@code sourceIndex} into this buffer at {@code index} using a raw memory copy.
 *
 * @param index       destination offset within this buffer
 * @param source      the source byte array
 * @param sourceIndex starting offset within {@code source}
 * @param length      number of bytes to copy
 * @throws IndexOutOfBoundsException if the source range is out of bounds
 */
// NOTE(review): only the source range is bounds-checked here — presumably
// the caller or copyMemory guards the destination range; verify.
public void setBytes(int index, byte[] source, int sourceIndex, int length) {
    checkPositionIndexes(sourceIndex, sourceIndex + length, source.length);
    copyMemory(source, (long) SizeOf.ARRAY_BYTE_BASE_OFFSET + sourceIndex, base, address + index, length);
}
java
/**
 * Loads data, clearing the cache when its TTL has elapsed and consulting a
 * fallback source when the cache cannot be read.
 *
 * @return the result of {@code loadFallback()} when the cache read fails and
 *         a fallback read is allowed; {@code true} otherwise (including when
 *         the cache read failed but no fallback was attempted)
 */
public boolean load() {
    // Invalidate the cache once its time-to-live has expired.
    if ((lastLoaded + cacheTTLMillis) <= System.currentTimeMillis()) {
        clearCache();
    }
    if (!readCache()) {
        if (shouldReadDataFromFallback()) {
            return loadFallback();
        }
    }
    return true;
}
python
def search_script(self, script):
    """
    Search a script's contents for import statements and check if they're
    currently prevent in the list of all installed pip modules.

    Moves every installed library whose key matches an import found in
    ``script`` from ``self.libraries_installed`` to ``self.libraries_found``.

    :param script: string
    :return: void
    """
    if not self.import_statement.search(script):
        return
    found_names = set(self.import_statement.findall(script))
    # BUGFIX: iterate over a snapshot — removing entries from the list
    # being iterated skipped the element following each removal.
    for installed in list(self.libraries_installed):
        if installed.key in found_names:
            self.libraries_installed.remove(installed)
            self.libraries_found.append(installed)
java
/**
 * Resolves the class a debug message should be attributed to and, when a
 * caller is supplied, appends a "Name: " prefix to the text builder.
 *
 * @param caller a Class, a String name, an arbitrary object, or null
 * @param text   the builder receiving the prefix
 * @return the resolved class (defaults to {@code Debugger.class})
 */
private static Class<?> callerBuilder(Object caller, StringBuilder text) {
    Class<?> resolved = Debugger.class;
    if (caller == null) {
        return resolved;
    }
    if (caller instanceof Class) {
        resolved = (Class<?>) caller;
        text.append(resolved.getName()).append(": ");
    } else if (caller instanceof String) {
        text.append(caller).append(": ");
    } else {
        resolved = caller.getClass();
        text.append(resolved.getName()).append(": ");
    }
    return resolved;
}
python
def get_db_instance_info(self, dbid):
    '''
    Get DB instance info.

    :param dbid: RDS DB instance identifier to describe.
    :returns: the first DBInstance description on success, or ``False``
        when the connection or the describe call fails, or when no
        instance matches ``dbid``.
    '''
    if not self.connect_to_aws_rds():
        return False
    try:
        instances = self.rdsc.describe_db_instances(dbid).get('DBInstances')
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        return False
    if not instances:
        # Previously an empty result list raised IndexError here.
        return False
    return instances[0]
python
def wvcal_spectrum(sp, fxpeaks, poly_degree_wfit, wv_master, wv_ini_search=None, wv_end_search=None, wvmin_useful=None, wvmax_useful=None, geometry=None, debugplot=0): """Execute wavelength calibration of a spectrum using fixed line peaks. Parameters ---------- sp : 1d numpy array Spectrum to be wavelength calibrated. fxpeaks : 1d numpy array Refined location of peaks in array index scale, i.e, from 0 to naxis1 - 1. The wavelength calibration is performed using these line locations. poly_degree_wfit : int Degree for wavelength calibration polynomial. wv_master : 1d numpy array Array with arc line wavelengths. wv_ini_search : float or None Minimum expected wavelength in spectrum. wv_end_search : float or None Maximum expected wavelength in spectrum. wvmin_useful : float or None If not None, this value is used to clip detected lines below it. wvmax_useful : float or None If not None, this value is used to clip detected lines above it. geometry : tuple (4 integers) or None x, y, dx, dy values employed to set the Qt backend geometry. debugplot : int Determines whether intermediate computations and/or plots are displayed. The valid codes are defined in numina.array.display.pause_debugplot. Returns ------- solution_wv : instance of SolutionArcCalibration Wavelength calibration solution. 
""" # check there are enough lines for fit if len(fxpeaks) <= poly_degree_wfit: print(">>> Warning: not enough lines to fit spectrum") return None # spectrum dimension naxis1 = sp.shape[0] wv_master_range = wv_master[-1] - wv_master[0] delta_wv_master_range = 0.20 * wv_master_range if wv_ini_search is None: wv_ini_search = wv_master[0] - delta_wv_master_range if wv_end_search is None: wv_end_search = wv_master[-1] + delta_wv_master_range # use channels (pixels from 1 to naxis1) xchannel = fxpeaks + 1.0 # wavelength calibration list_of_wvfeatures = arccalibration( wv_master=wv_master, xpos_arc=xchannel, naxis1_arc=naxis1, crpix1=1.0, wv_ini_search=wv_ini_search, wv_end_search=wv_end_search, wvmin_useful=wvmin_useful, wvmax_useful=wvmax_useful, error_xpos_arc=3, times_sigma_r=3.0, frac_triplets_for_sum=0.50, times_sigma_theil_sen=10.0, poly_degree_wfit=poly_degree_wfit, times_sigma_polfilt=10.0, times_sigma_cook=10.0, times_sigma_inclusion=10.0, geometry=geometry, debugplot=debugplot ) title = "Wavelength calibration" solution_wv = fit_list_of_wvfeatures( list_of_wvfeatures=list_of_wvfeatures, naxis1_arc=naxis1, crpix1=1.0, poly_degree_wfit=poly_degree_wfit, weighted=False, plot_title=title, geometry=geometry, debugplot=debugplot ) if abs(debugplot) % 10 != 0: # final plot with identified lines xplot = np.arange(1, naxis1 + 1, dtype=float) ax = ximplotxy(xplot, sp, title=title, show=False, xlabel='pixel (from 1 to NAXIS1)', ylabel='number of counts', geometry=geometry) ymin = sp.min() ymax = sp.max() dy = ymax-ymin ymin -= dy/20. ymax += dy/20. ax.set_ylim([ymin, ymax]) # plot wavelength of each identified line for feature in solution_wv.features: xpos = feature.xpos reference = feature.reference ax.text(xpos, sp[int(xpos+0.5)-1], str(reference), fontsize=8, horizontalalignment='center') # show plot print('Plot with identified lines') pause_debugplot(12, pltshow=True) # return the wavelength calibration solution return solution_wv
java
/**
 * Replaces the cache key parameters with a defensive copy of the supplied
 * collection; passing {@code null} clears the field.
 *
 * @param cacheKeyParameters the new cache key parameters, may be {@code null}
 */
public void setCacheKeyParameters(java.util.Collection<String> cacheKeyParameters) {
    this.cacheKeyParameters = (cacheKeyParameters == null)
            ? null
            : new java.util.ArrayList<String>(cacheKeyParameters);
}
python
def save_batches(server_context, assay_id, batches):
    # type: (ServerContext, int, List[Batch]) -> Union[List[Batch], None]
    """
    Saves modified batches.

    :param server_context: A LabKey server context. See utils.create_server_context.
    :param assay_id: The assay protocol id.
    :param batches: The Batch(es) to save.
    :return: list of Batch instances as returned by the server, or None
        when there is nothing to save or the server returns no body.
    :raises TypeError: if any element of ``batches`` is not a Batch.
    """
    if batches is None:
        return None  # Nothing to save

    json_batches = []
    for batch in batches:
        if not isinstance(batch, Batch):
            # TypeError is more precise than a bare Exception and is still
            # caught by callers handling Exception. The original message
            # also named the wrong function ("save_batch()").
            raise TypeError('save_batches() "batches" expected to be a list of Batch instances')
        json_batches.append(batch.to_json())

    save_batch_url = server_context.build_url('assay', 'saveAssayBatch.api')
    payload = {
        'assayId': assay_id,
        'batches': json_batches
    }
    headers = {
        'Content-type': 'application/json',
        'Accept': 'text/plain'
    }

    json_body = server_context.make_request(save_batch_url, json_dumps(payload, sort_keys=True), headers=headers)
    if json_body is not None:
        resp_batches = json_body['batches']
        return [Batch.from_data(resp_batch) for resp_batch in resp_batches]
    return None
python
def DoesIDExist(tag_name):
    """
    Check whether a fully-qualified site.service.tag eDNA tag exists in
    any of the connected services.

    :param tag_name: fully-qualified (site.service.tag) eDNA tag
    :return: True if the point exists, False otherwise

    Example:

    >>> DoesIDExist("Site.Service.Tag")

    """
    # The eDNA API expects the tag as a C-style byte string, so encode the
    # name to UTF-8 and wrap it in a ctypes char pointer before the call.
    encoded_tag = c_char_p(tag_name.encode('utf-8'))
    return bool(dna_dll.DoesIdExist(encoded_tag))
python
def delete_alias(i):
    """
    Delete the alias <-> UID link files of a repository entry.

    Input:  {
              path         - path to the entry data
              data_uid     - data UID
              (data_alias) - data alias
              (repo_dict)  - repo cfg if available to check sync
              (share)      - if 'yes', try to rm via GIT
            }

    Output: {
              return       - return code =  0, if successful
                                         >  0, if error
              (error)      - error text if return > 0
            }
    """

    rd=i.get('repo_dict',{})
    rshared=rd.get('shared','')
    rsync=rd.get('sync','')

    # Explicit 'share' request forces git handling even when the repo
    # dict does not mark the repo as shared.
    if i.get('share','')=='yes': rshared='git'

    p=i['path']
    alias=i.get('data_alias','')
    uid=''

    if alias!='' and os.path.isdir(p):
        # The link files live in the entry's internal .cm-style subdirectory.
        p0=os.path.join(p, cfg['subdir_ck_ext'])
        p9=cfg['file_alias_a'] + alias
        p1=os.path.join(p0, p9)

        if rshared!='':
            # Remember the current directory; the repo's rm command is run
            # from inside p0 and the cwd is restored at the end.
            ppp=os.getcwd()
            os.chdir(p0)

        if os.path.isfile(p1):
            # The alias->uid file's first line stores the entry UID.
            try:
                f=open(p1)
                uid=f.readline().strip()
                f.close()
            except Exception as e:
                # Best-effort read: an unreadable file just leaves uid empty
                # and the caller-provided data_uid is used below.
                None

            if rshared!='':
                # Shared repo (git): remove the tracked file via the repo
                # type's configured rm command template.
                ss=cfg['repo_types'][rshared]['rm'].replace('$#files#$', p9)
                rx=os.system(ss)

            # Remove the file locally if the repo command did not.
            if os.path.isfile(p1): os.remove(p1)

        # Fall back to the explicitly supplied UID when the alias file
        # did not yield one.
        if uid=='': uid=i['data_uid']

        if uid!='':
            # Remove the reverse uid->alias link file the same way.
            p9=cfg['file_alias_u'] + uid
            p1=os.path.join(p0, p9)
            if os.path.isfile(p1):
                if rshared!='':
                    ss=cfg['repo_types'][rshared]['rm'].replace('$#files#$', p9)
                    rx=os.system(ss)

                if os.path.isfile(p1): os.remove(p1)

        if rshared!='':
            # Restore the original working directory.
            os.chdir(ppp)

    return {'return':0}
python
def _update_function_transition_graph(self, src_node_key, dst_node_key, jumpkind='Ijk_Boring', ins_addr=None,
                                      stmt_idx=None, confirmed=None):
    """
    Update transition graphs of functions in the function manager based on
    the CFG edge described by the arguments.

    :param src_node_key:  Key of the source CFG node, or None to only
                          register the destination node with its function.
    :param dst_node_key:  Key of the destination CFG node, or None.
    :param str jumpkind:  Jumpkind of the edge (e.g. 'Ijk_Call', 'Ijk_Ret',
                          'Ijk_FakeRet', 'Ijk_Boring', 'Ijk_Sys*').
    :param ins_addr:      Address of the source instruction, if known.
    :param stmt_idx:      Statement index of the jump, if known.
    :param confirmed:     For fake-return edges, whether the return is
                          confirmed.
    :return: None
    """
    if dst_node_key is not None:
        dst_node = self._graph_get_node(dst_node_key, terminator_for_nonexistent_node=True)
        dst_node_addr = dst_node.addr
        dst_codenode = dst_node.to_codenode()
        dst_node_func_addr = dst_node.function_address
    else:
        dst_node = None
        dst_node_addr = None
        dst_codenode = None
        dst_node_func_addr = None

    if src_node_key is None:
        if dst_node is None:
            raise ValueError("Either src_node_key or dst_node_key must be specified.")
        # No source: just make sure the destination node is registered with
        # its function (creating the function if necessary).
        self.kb.functions.function(dst_node.function_address, create=True)._register_nodes(True,
                                                                                           dst_codenode
                                                                                           )
        return

    src_node = self._graph_get_node(src_node_key, terminator_for_nonexistent_node=True)

    # Update the transition graph of current function
    if jumpkind == "Ijk_Call":
        ret_addr = src_node.return_target
        ret_node = self.kb.functions.function(
            src_node.function_address,
            create=True
        )._get_block(ret_addr).codenode if ret_addr else None

        self.kb.functions._add_call_to(
            function_addr=src_node.function_address,
            from_node=src_node.to_codenode(),
            to_addr=dst_node_addr,
            retn_node=ret_node,
            syscall=False,
            ins_addr=ins_addr,
            stmt_idx=stmt_idx,
        )

    # NOTE: this is a plain `if`, not `elif` — 'Ijk_Call' does not start
    # with 'Ijk_Sys', so the two call branches are mutually exclusive anyway.
    if jumpkind.startswith('Ijk_Sys'):

        self.kb.functions._add_call_to(
            function_addr=src_node.function_address,
            from_node=src_node.to_codenode(),
            to_addr=dst_node_addr,
            retn_node=src_node.to_codenode(),  # For syscalls, they are returning to the address of themselves
            syscall=True,
            ins_addr=ins_addr,
            stmt_idx=stmt_idx,
        )

    elif jumpkind == 'Ijk_Ret':
        # Create a return site for current function
        self.kb.functions._add_return_from(
            function_addr=src_node.function_address,
            from_node=src_node.to_codenode(),
            to_node=dst_codenode,
        )

        if dst_node is not None:
            # Create a returning edge in the caller function
            self.kb.functions._add_return_from_call(
                function_addr=dst_node_func_addr,
                src_function_addr=src_node.function_address,
                to_node=dst_codenode,
            )

    elif jumpkind == 'Ijk_FakeRet':
        # Fall-through edge after a call: the "fake" return to the
        # instruction following the call site.
        self.kb.functions._add_fakeret_to(
            function_addr=src_node.function_address,
            from_node=src_node.to_codenode(),
            to_node=dst_codenode,
            confirmed=confirmed,
        )

    elif jumpkind in ('Ijk_Boring', 'Ijk_InvalICache'):

        # Decide whether this jump leaves the current function.
        src_obj = self.project.loader.find_object_containing(src_node.addr)
        dest_obj = self.project.loader.find_object_containing(dst_node.addr) if dst_node is not None else None

        if src_obj is dest_obj:

            # Jump/branch within the same object. Might be an outside jump.
            to_outside = src_node.function_address != dst_node_func_addr

        else:

            # Jump/branch between different objects. Must be an outside jump.
            to_outside = True

        if not to_outside:

            self.kb.functions._add_transition_to(
                function_addr=src_node.function_address,
                from_node=src_node.to_codenode(),
                to_node=dst_codenode,
                ins_addr=ins_addr,
                stmt_idx=stmt_idx,
            )

        else:

            self.kb.functions._add_outside_transition_to(
                function_addr=src_node.function_address,
                from_node=src_node.to_codenode(),
                to_node=dst_codenode,
                to_function_addr=dst_node_func_addr,
                ins_addr=ins_addr,
                stmt_idx=stmt_idx,
            )
python
def disassociate_api_key_stagekeys(apiKey, stagekeyslist, region=None, key=None, keyid=None, profile=None):
    '''
    Disassociate the given stagekeyslist from the given apiKey.

    :param apiKey: the API key to modify.
    :param stagekeyslist: list of "restapi id/stage name" strings to remove.
    :return: dict with 'disassociated': True on success, or
        'disassociated': False plus an 'error' entry on failure.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_apigateway.disassociate_api_key_stagekeys \\
                api_key '["restapi id/stage name", ...]'

    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        # Each stage key is removed via a PATCH 'remove' op on the /stages path.
        pvlist = [('/stages', stagekey) for stagekey in stagekeyslist]
        _api_key_patch_remove(conn, apiKey, pvlist)
        return {'disassociated': True}
    except ClientError as e:
        return {'disassociated': False, 'error': __utils__['boto3.get_error'](e)}