code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { protected String getSerializerClass(String resource, String urlLastElement) { String serializerClass = null; String extension = this.utils.getExtension(urlLastElement); if (extension != null) { SerializerFinder finder = new SerializerFinder(config, extension); serializerClass = finder.findResource(resource); } return serializerClass; } }
public class class_name { protected String getSerializerClass(String resource, String urlLastElement) { String serializerClass = null; String extension = this.utils.getExtension(urlLastElement); if (extension != null) { SerializerFinder finder = new SerializerFinder(config, extension); serializerClass = finder.findResource(resource); // depends on control dependency: [if], data = [none] } return serializerClass; } }
public class class_name { public static String getInput(String prompt) { System.out.print(prompt); try { BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); return in.readLine(); } catch(IOException e) { return null; } } }
public class class_name { public static String getInput(String prompt) { System.out.print(prompt); try { BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); return in.readLine(); // depends on control dependency: [try], data = [none] } catch(IOException e) { return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public boolean accept(Message message) { Content content = message.getContent(); if (!userPredicate.test(message.getSender())) { return false; } if (endPredicate != null && endPredicate.test(context, content)) { end(); return true; } if (content.getType() != currentPrompt.type()) { return false; } if (repliesOnly) { Message repliedTo = message.getRepliedTo(); if (repliedTo == null || repliedTo.getSender().getUsername().equals(context.getBot().getBotUsername())) { return false; } } if (!currentPrompt.process(context, content)) { if (promptIndex + 1 == prompts.size()) { end(); return true; } currentPrompt = prompts.get(++promptIndex); } SendableMessage promptMessage = currentPrompt.promptMessage(context); if (!silent && promptMessage != null) { sendMessage(promptMessage); } context.getHistory().history.add(message); return true; } }
public class class_name { public boolean accept(Message message) { Content content = message.getContent(); if (!userPredicate.test(message.getSender())) { return false; // depends on control dependency: [if], data = [none] } if (endPredicate != null && endPredicate.test(context, content)) { end(); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } if (content.getType() != currentPrompt.type()) { return false; // depends on control dependency: [if], data = [none] } if (repliesOnly) { Message repliedTo = message.getRepliedTo(); if (repliedTo == null || repliedTo.getSender().getUsername().equals(context.getBot().getBotUsername())) { return false; // depends on control dependency: [if], data = [none] } } if (!currentPrompt.process(context, content)) { if (promptIndex + 1 == prompts.size()) { end(); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } currentPrompt = prompts.get(++promptIndex); // depends on control dependency: [if], data = [none] } SendableMessage promptMessage = currentPrompt.promptMessage(context); if (!silent && promptMessage != null) { sendMessage(promptMessage); // depends on control dependency: [if], data = [none] } context.getHistory().history.add(message); return true; } }
public class class_name { @Override public void moveObject(final FedoraSession session, final String source, final String destination) { final Session jcrSession = getJcrSession(session); try { final FedoraResource srcResource = find(session, source); final Node sourceNode = getJcrNode(srcResource); final String name = sourceNode.getName(); final Node parent = sourceNode.getDepth() > 0 ? sourceNode.getParent() : null; jcrSession.getWorkspace().move(source, destination); if (parent != null) { createTombstone(parent, name); } touchLdpMembershipResource(getJcrNode(find(session, source))); touchLdpMembershipResource(getJcrNode(find(session, destination))); } catch (final RepositoryException e) { throw new RepositoryRuntimeException(e); } } }
public class class_name { @Override public void moveObject(final FedoraSession session, final String source, final String destination) { final Session jcrSession = getJcrSession(session); try { final FedoraResource srcResource = find(session, source); final Node sourceNode = getJcrNode(srcResource); final String name = sourceNode.getName(); final Node parent = sourceNode.getDepth() > 0 ? sourceNode.getParent() : null; jcrSession.getWorkspace().move(source, destination); // depends on control dependency: [try], data = [none] if (parent != null) { createTombstone(parent, name); // depends on control dependency: [if], data = [(parent] } touchLdpMembershipResource(getJcrNode(find(session, source))); // depends on control dependency: [try], data = [none] touchLdpMembershipResource(getJcrNode(find(session, destination))); // depends on control dependency: [try], data = [none] } catch (final RepositoryException e) { throw new RepositoryRuntimeException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public boolean skipUntil(long txid) throws IOException { while (true) { FSEditLogOp op = readOp(); if (op == null) { return false; } if (op.getTransactionId() >= txid) { cachedOp = op; return true; } } } }
public class class_name { public boolean skipUntil(long txid) throws IOException { while (true) { FSEditLogOp op = readOp(); if (op == null) { return false; // depends on control dependency: [if], data = [none] } if (op.getTransactionId() >= txid) { cachedOp = op; // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } } } }
public class class_name { public void setDataFormat(final String pattern, final CellFormatter cellFormatter) { String currentPattern = POIUtils.getCellFormatPattern(cell, cellFormatter); if(currentPattern.equalsIgnoreCase(pattern)) { // 既に書式が同じ場合 return; } cloneStyle(); cell.getCellStyle().setDataFormat(POIUtils.getDataFormatIndex(cell.getSheet(), pattern)); } }
public class class_name { public void setDataFormat(final String pattern, final CellFormatter cellFormatter) { String currentPattern = POIUtils.getCellFormatPattern(cell, cellFormatter); if(currentPattern.equalsIgnoreCase(pattern)) { // 既に書式が同じ場合 return; // depends on control dependency: [if], data = [none] } cloneStyle(); cell.getCellStyle().setDataFormat(POIUtils.getDataFormatIndex(cell.getSheet(), pattern)); } }
public class class_name { public final BELScriptParser.argument_return argument() throws RecognitionException { BELScriptParser.argument_return retval = new BELScriptParser.argument_return(); retval.start = input.LT(1); Object root_0 = null; BELScriptParser.param_return param62 = null; BELScriptParser.term_return term63 = null; try { // BELScript.g:143:9: ( param | term ) int alt13=2; int LA13_0 = input.LA(1); if ( (LA13_0==OBJECT_IDENT||LA13_0==QUOTED_VALUE||LA13_0==NS_PREFIX) ) { alt13=1; } else if ( ((LA13_0>=44 && LA13_0<=102)) ) { alt13=2; } else { NoViableAltException nvae = new NoViableAltException("", 13, 0, input); throw nvae; } switch (alt13) { case 1 : // BELScript.g:144:5: param { root_0 = (Object)adaptor.nil(); pushFollow(FOLLOW_param_in_argument755); param62=param(); state._fsp--; adaptor.addChild(root_0, param62.getTree()); } break; case 2 : // BELScript.g:144:13: term { root_0 = (Object)adaptor.nil(); pushFollow(FOLLOW_term_in_argument759); term63=term(); state._fsp--; adaptor.addChild(root_0, term63.getTree()); } break; } retval.stop = input.LT(-1); retval.tree = (Object)adaptor.rulePostProcessing(root_0); adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); } catch (RecognitionException re) { reportError(re); recover(input,re); retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); } finally { } return retval; } }
public class class_name { public final BELScriptParser.argument_return argument() throws RecognitionException { BELScriptParser.argument_return retval = new BELScriptParser.argument_return(); retval.start = input.LT(1); Object root_0 = null; BELScriptParser.param_return param62 = null; BELScriptParser.term_return term63 = null; try { // BELScript.g:143:9: ( param | term ) int alt13=2; int LA13_0 = input.LA(1); if ( (LA13_0==OBJECT_IDENT||LA13_0==QUOTED_VALUE||LA13_0==NS_PREFIX) ) { alt13=1; // depends on control dependency: [if], data = [none] } else if ( ((LA13_0>=44 && LA13_0<=102)) ) { alt13=2; // depends on control dependency: [if], data = [none] } else { NoViableAltException nvae = new NoViableAltException("", 13, 0, input); throw nvae; } switch (alt13) { case 1 : // BELScript.g:144:5: param { root_0 = (Object)adaptor.nil(); pushFollow(FOLLOW_param_in_argument755); param62=param(); state._fsp--; adaptor.addChild(root_0, param62.getTree()); } break; case 2 : // BELScript.g:144:13: term { root_0 = (Object)adaptor.nil(); pushFollow(FOLLOW_term_in_argument759); term63=term(); state._fsp--; adaptor.addChild(root_0, term63.getTree()); } break; } retval.stop = input.LT(-1); retval.tree = (Object)adaptor.rulePostProcessing(root_0); adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); } catch (RecognitionException re) { reportError(re); recover(input,re); retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); } finally { } return retval; } }
public class class_name { @Override public Boolean parameterAsBoolean(String name, boolean defaultValue) { // We have to check if the map contains the key, as the retrieval method returns false on missing key. if (!request.params().contains(name)) { return defaultValue; } Boolean parameter = parameterAsBoolean(name); if (parameter == null) { return defaultValue; } return parameter; } }
public class class_name { @Override public Boolean parameterAsBoolean(String name, boolean defaultValue) { // We have to check if the map contains the key, as the retrieval method returns false on missing key. if (!request.params().contains(name)) { return defaultValue; // depends on control dependency: [if], data = [none] } Boolean parameter = parameterAsBoolean(name); if (parameter == null) { return defaultValue; // depends on control dependency: [if], data = [none] } return parameter; } }
public class class_name { static boolean arraySame(Object[] array1, Object[] array2) { if (array1 == null || array2 == null || array1.length != array2.length) { throw new IllegalArgumentException("array1 and array2 cannot be null and should have same length"); } for (int i = 0; i < array1.length; i++) { if (array1[i] != array2[i]) { return false; } } return true; } }
public class class_name { static boolean arraySame(Object[] array1, Object[] array2) { if (array1 == null || array2 == null || array1.length != array2.length) { throw new IllegalArgumentException("array1 and array2 cannot be null and should have same length"); } for (int i = 0; i < array1.length; i++) { if (array1[i] != array2[i]) { return false; // depends on control dependency: [if], data = [none] } } return true; } }
public class class_name { public static <S extends Sequence<S>> S wildcardsToRandomBasic(S sequence, long seed) { Alphabet<S> alphabet = sequence.getAlphabet(); SequenceBuilder<S> sequenceBuilder = alphabet.createBuilder().ensureCapacity(sequence.size()); for (int i = 0; i < sequence.size(); ++i) { byte code = sequence.codeAt(i); if (alphabet.isWildcard(code)) { seed = HashFunctions.JenkinWang64shift(seed + i); sequenceBuilder.append(alphabet.codeToWildcard(code).getUniformlyDistributedBasicCode(seed)); } else sequenceBuilder.append(code); } return sequenceBuilder.createAndDestroy(); } }
public class class_name { public static <S extends Sequence<S>> S wildcardsToRandomBasic(S sequence, long seed) { Alphabet<S> alphabet = sequence.getAlphabet(); SequenceBuilder<S> sequenceBuilder = alphabet.createBuilder().ensureCapacity(sequence.size()); for (int i = 0; i < sequence.size(); ++i) { byte code = sequence.codeAt(i); if (alphabet.isWildcard(code)) { seed = HashFunctions.JenkinWang64shift(seed + i); // depends on control dependency: [if], data = [none] sequenceBuilder.append(alphabet.codeToWildcard(code).getUniformlyDistributedBasicCode(seed)); // depends on control dependency: [if], data = [none] } else sequenceBuilder.append(code); } return sequenceBuilder.createAndDestroy(); } }
public class class_name { private void createQuickBox() { if (hasQuickSearch() || hasQuickFilter()) { m_quickSearch = new CmsTextBox(); // m_quickFilter.setVisible(hasQuickFilter()); m_quickSearch.addStyleName(DIALOG_CSS.quickFilterBox()); m_quickSearch.setTriggerChangeOnKeyPress(true); String message = hasQuickFilter() ? Messages.get().key(Messages.GUI_QUICK_FINDER_FILTER_0) : Messages.get().key(Messages.GUI_QUICK_FINDER_SEARCH_0); m_quickSearch.setGhostValue(message, true); m_quickSearch.setGhostModeClear(true); m_options.insert(m_quickSearch, 0); m_searchButton = new CmsPushButton(); m_searchButton.setImageClass(hasQuickFilter() ? I_CmsButton.FILTER : I_CmsButton.SEARCH_SMALL); m_searchButton.setButtonStyle(ButtonStyle.FONT_ICON, null); m_searchButton.getElement().getStyle().setFloat(Style.Float.RIGHT); m_searchButton.getElement().getStyle().setMarginTop(4, Unit.PX); m_options.insert(m_searchButton, 0); m_quickSearch.addValueChangeHandler(this); if (hasQuickFilter()) { m_filterTimer = new Timer() { @Override public void run() { getTabHandler().onSort( m_sortSelectBox.getFormValueAsString(), m_quickSearch.getFormValueAsString()); onContentChange(); } }; m_searchButton.setTitle(message); } else { m_quickSearch.addKeyPressHandler(new KeyPressHandler() { public void onKeyPress(KeyPressEvent event) { if (event.getNativeEvent().getKeyCode() == KeyCodes.KEY_ENTER) { quickSearch(); } } }); m_searchButton.addClickHandler(new ClickHandler() { public void onClick(ClickEvent arg0) { quickSearch(); } }); m_quickSearchRegistration = getTabHandler().addSearchChangeHandler( new ValueChangeHandler<CmsGallerySearchBean>() { public void onValueChange(ValueChangeEvent<CmsGallerySearchBean> event) { m_quickSearch.setFormValueAsString(event.getValue().getQuery()); } }); m_searchButton.setTitle(Messages.get().key(Messages.GUI_TAB_SEARCH_SEARCH_EXISTING_0)); } } } }
public class class_name { private void createQuickBox() { if (hasQuickSearch() || hasQuickFilter()) { m_quickSearch = new CmsTextBox(); // depends on control dependency: [if], data = [none] // m_quickFilter.setVisible(hasQuickFilter()); m_quickSearch.addStyleName(DIALOG_CSS.quickFilterBox()); // depends on control dependency: [if], data = [none] m_quickSearch.setTriggerChangeOnKeyPress(true); // depends on control dependency: [if], data = [none] String message = hasQuickFilter() ? Messages.get().key(Messages.GUI_QUICK_FINDER_FILTER_0) : Messages.get().key(Messages.GUI_QUICK_FINDER_SEARCH_0); m_quickSearch.setGhostValue(message, true); // depends on control dependency: [if], data = [none] m_quickSearch.setGhostModeClear(true); // depends on control dependency: [if], data = [none] m_options.insert(m_quickSearch, 0); // depends on control dependency: [if], data = [none] m_searchButton = new CmsPushButton(); // depends on control dependency: [if], data = [none] m_searchButton.setImageClass(hasQuickFilter() ? 
I_CmsButton.FILTER : I_CmsButton.SEARCH_SMALL); // depends on control dependency: [if], data = [none] m_searchButton.setButtonStyle(ButtonStyle.FONT_ICON, null); // depends on control dependency: [if], data = [none] m_searchButton.getElement().getStyle().setFloat(Style.Float.RIGHT); // depends on control dependency: [if], data = [none] m_searchButton.getElement().getStyle().setMarginTop(4, Unit.PX); // depends on control dependency: [if], data = [none] m_options.insert(m_searchButton, 0); // depends on control dependency: [if], data = [none] m_quickSearch.addValueChangeHandler(this); // depends on control dependency: [if], data = [none] if (hasQuickFilter()) { m_filterTimer = new Timer() { @Override public void run() { getTabHandler().onSort( m_sortSelectBox.getFormValueAsString(), m_quickSearch.getFormValueAsString()); onContentChange(); } }; // depends on control dependency: [if], data = [none] m_searchButton.setTitle(message); // depends on control dependency: [if], data = [none] } else { m_quickSearch.addKeyPressHandler(new KeyPressHandler() { public void onKeyPress(KeyPressEvent event) { if (event.getNativeEvent().getKeyCode() == KeyCodes.KEY_ENTER) { quickSearch(); // depends on control dependency: [if], data = [none] } } }); // depends on control dependency: [if], data = [none] m_searchButton.addClickHandler(new ClickHandler() { public void onClick(ClickEvent arg0) { quickSearch(); } }); // depends on control dependency: [if], data = [none] m_quickSearchRegistration = getTabHandler().addSearchChangeHandler( new ValueChangeHandler<CmsGallerySearchBean>() { public void onValueChange(ValueChangeEvent<CmsGallerySearchBean> event) { m_quickSearch.setFormValueAsString(event.getValue().getQuery()); } }); // depends on control dependency: [if], data = [none] m_searchButton.setTitle(Messages.get().key(Messages.GUI_TAB_SEARCH_SEARCH_EXISTING_0)); // depends on control dependency: [if], data = [none] } } } }
public class class_name { @SuppressWarnings("WeakerAccess") public static void reverse(int[] order, int offset, int length) { for (int i = 0; i < length / 2; i++) { int t = order[offset + i]; order[offset + i] = order[offset + length - i - 1]; order[offset + length - i - 1] = t; } } }
public class class_name { @SuppressWarnings("WeakerAccess") public static void reverse(int[] order, int offset, int length) { for (int i = 0; i < length / 2; i++) { int t = order[offset + i]; order[offset + i] = order[offset + length - i - 1]; // depends on control dependency: [for], data = [i] order[offset + length - i - 1] = t; // depends on control dependency: [for], data = [i] } } }
public class class_name { private void afterIndexFsync(Result<Boolean> result, FsyncType fsyncType, ArrayList<SegmentFsyncCallback> fsyncListeners) { try { // completePendingEntries(_position); if (fsyncType.isClose()) { _isClosed = true; _segment.finishWriting(); if (_pendingFlushEntries.size() > 0 || _pendingFsyncEntries.size() > 0) { System.out.println("BROKEN_PEND: flush=" + _pendingFlushEntries.size() + " fsync=" + _pendingFsyncEntries.size() + " " + _pendingFlushEntries); } _readWrite.afterSequenceClose(_segment.getSequence()); } for (SegmentFsyncCallback listener : _fsyncListeners) { listener.onFsync(); } result.ok(true); } catch (Throwable exn) { result.fail(exn); } } }
public class class_name { private void afterIndexFsync(Result<Boolean> result, FsyncType fsyncType, ArrayList<SegmentFsyncCallback> fsyncListeners) { try { // completePendingEntries(_position); if (fsyncType.isClose()) { _isClosed = true; // depends on control dependency: [if], data = [none] _segment.finishWriting(); // depends on control dependency: [if], data = [none] if (_pendingFlushEntries.size() > 0 || _pendingFsyncEntries.size() > 0) { System.out.println("BROKEN_PEND: flush=" + _pendingFlushEntries.size() + " fsync=" + _pendingFsyncEntries.size() + " " + _pendingFlushEntries); // depends on control dependency: [if], data = [none] } _readWrite.afterSequenceClose(_segment.getSequence()); // depends on control dependency: [if], data = [none] } for (SegmentFsyncCallback listener : _fsyncListeners) { listener.onFsync(); // depends on control dependency: [for], data = [listener] } result.ok(true); // depends on control dependency: [try], data = [none] } catch (Throwable exn) { result.fail(exn); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public StrPosition getBeforeOfWithDetails(String srcStr, String token) { final StrPosition retVal = new StrPosition(); if (isBlank(srcStr) || isBlank(token)) { return retVal; } int tokenStartIndex = srcStr.indexOf(token); if (tokenStartIndex < 0) { return null; } String beforeTokenStr = getLeftOf(srcStr, tokenStartIndex); retVal.startIndex = 0; retVal.endIndex = tokenStartIndex - 1; retVal.str = beforeTokenStr; return retVal; } }
public class class_name { public StrPosition getBeforeOfWithDetails(String srcStr, String token) { final StrPosition retVal = new StrPosition(); if (isBlank(srcStr) || isBlank(token)) { return retVal; // depends on control dependency: [if], data = [none] } int tokenStartIndex = srcStr.indexOf(token); if (tokenStartIndex < 0) { return null; // depends on control dependency: [if], data = [none] } String beforeTokenStr = getLeftOf(srcStr, tokenStartIndex); retVal.startIndex = 0; retVal.endIndex = tokenStartIndex - 1; retVal.str = beforeTokenStr; return retVal; } }
public class class_name { public static <T> Predicate<T> isEqual( final Object obj ) { if (obj == null) { return new Predicate<T>() { @Override public boolean test( T input ) { return input == null; } }; } return new Predicate<T>() { @Override public boolean test( T input ) { return Objects.equals(input, obj); } }; } }
public class class_name { public static <T> Predicate<T> isEqual( final Object obj ) { if (obj == null) { return new Predicate<T>() { @Override public boolean test( T input ) { return input == null; } }; // depends on control dependency: [if], data = [none] } return new Predicate<T>() { @Override public boolean test( T input ) { return Objects.equals(input, obj); } }; } }
public class class_name { public String ensureHeadingIds(String content, String idSeparator) { Element body = parseContent(content); // first find all existing IDs (to avoid generating duplicates) List<Element> idElems = body.select("*[id]"); Set<String> ids = new HashSet<String>(); boolean modified = false; for (Element idElem : idElems) { // fix all existing IDs - remove colon and other symbols which mess up jQuery String id = idElem.id(); idElem.attr("id", adaptSlug(id, idSeparator)); modified = true; ids.add(idElem.id()); } List<String> headNoIds = concat(HEADINGS, ":not([id])", true); // select all headings that do not have an ID List<Element> headingsNoId = body.select(StringUtil.join(headNoIds, ", ")); if (!headingsNoId.isEmpty() || modified) { for (Element heading : headingsNoId) { String headingText = heading.text(); String headingSlug = slug(headingText, idSeparator); // also limit slug to 50 symbols if (headingSlug.length() > 50) { headingSlug = headingSlug.substring(0, 50); } String headingId = generateUniqueId(ids, headingSlug); heading.attr("id", headingId); } return body.html(); } else { // nothing to update return content; } } }
public class class_name { public String ensureHeadingIds(String content, String idSeparator) { Element body = parseContent(content); // first find all existing IDs (to avoid generating duplicates) List<Element> idElems = body.select("*[id]"); Set<String> ids = new HashSet<String>(); boolean modified = false; for (Element idElem : idElems) { // fix all existing IDs - remove colon and other symbols which mess up jQuery String id = idElem.id(); idElem.attr("id", adaptSlug(id, idSeparator)); // depends on control dependency: [for], data = [idElem] modified = true; // depends on control dependency: [for], data = [none] ids.add(idElem.id()); // depends on control dependency: [for], data = [idElem] } List<String> headNoIds = concat(HEADINGS, ":not([id])", true); // select all headings that do not have an ID List<Element> headingsNoId = body.select(StringUtil.join(headNoIds, ", ")); if (!headingsNoId.isEmpty() || modified) { for (Element heading : headingsNoId) { String headingText = heading.text(); String headingSlug = slug(headingText, idSeparator); // also limit slug to 50 symbols if (headingSlug.length() > 50) { headingSlug = headingSlug.substring(0, 50); // depends on control dependency: [if], data = [50)] } String headingId = generateUniqueId(ids, headingSlug); heading.attr("id", headingId); // depends on control dependency: [for], data = [heading] } return body.html(); // depends on control dependency: [if], data = [none] } else { // nothing to update return content; // depends on control dependency: [if], data = [none] } } }
public class class_name { public static String readRequestBodyFromStream(final HttpServletRequest request) throws IOException { String charEncoding = request.getCharacterEncoding(); if (charEncoding == null) { charEncoding = JoddCore.encoding; } CharArrayWriter charArrayWriter = new CharArrayWriter(); BufferedReader bufferedReader = null; try { InputStream inputStream = request.getInputStream(); if (inputStream != null) { bufferedReader = new BufferedReader(new InputStreamReader(inputStream, charEncoding)); StreamUtil.copy(bufferedReader, charArrayWriter); } else { return StringPool.EMPTY; } } finally { StreamUtil.close(bufferedReader); } return charArrayWriter.toString(); } }
public class class_name { public static String readRequestBodyFromStream(final HttpServletRequest request) throws IOException { String charEncoding = request.getCharacterEncoding(); if (charEncoding == null) { charEncoding = JoddCore.encoding; } CharArrayWriter charArrayWriter = new CharArrayWriter(); BufferedReader bufferedReader = null; try { InputStream inputStream = request.getInputStream(); if (inputStream != null) { bufferedReader = new BufferedReader(new InputStreamReader(inputStream, charEncoding)); // depends on control dependency: [if], data = [(inputStream] StreamUtil.copy(bufferedReader, charArrayWriter); // depends on control dependency: [if], data = [none] } else { return StringPool.EMPTY; // depends on control dependency: [if], data = [none] } } finally { StreamUtil.close(bufferedReader); } return charArrayWriter.toString(); } }
public class class_name { @Override public void process(GrayF32 intensityImage, @Nullable QueueCorner localMin, @Nullable QueueCorner localMax) { if( localMin != null ) localMin.reset(); if( localMax != null ) localMax.reset(); // the defines the region that can be processed int endX = intensityImage.width - border; int endY = intensityImage.height - border; int step = configuration.radius+1; search.initialize(configuration,intensityImage,localMin,localMax); // Compute number of y iterations int range = endY-border; int N = range/step; if( range > N*step ) N += 1; BoofConcurrency.loopFor(0,N, iterY -> { NonMaxBlock.Search search; QueueCorner threadMin=null,threadMax=null; // get work space for this thread synchronized (lock) { if( searches.isEmpty() ) { search = this.search.newInstance(); } else { search = searches.remove( searches.size()-1 ); } if( search.isDetectMinimums() ) { threadMin = pop(); } if( search.isDetectMaximums() ) { threadMax = pop(); } } search.initialize(configuration,intensityImage,threadMin,threadMax); // search for local peaks along this block row int y = border + iterY*step; int y1 = y + step; if( y1 > endY) y1 = endY; for(int x = border; x < endX; x += step ) { int x1 = x + step; if( x1 > endX) x1 = endX; search.searchBlock(x,y,x1,y1); } // Save the results and recycle thread working space synchronized (lock) { saveResults(localMin, threadMin); saveResults(localMax, threadMax); searches.add(search); if( threadMin != null ) cornerLists.add(threadMin); if( threadMax != null ) cornerLists.add(threadMax); } }); } }
public class class_name { @Override public void process(GrayF32 intensityImage, @Nullable QueueCorner localMin, @Nullable QueueCorner localMax) { if( localMin != null ) localMin.reset(); if( localMax != null ) localMax.reset(); // the defines the region that can be processed int endX = intensityImage.width - border; int endY = intensityImage.height - border; int step = configuration.radius+1; search.initialize(configuration,intensityImage,localMin,localMax); // Compute number of y iterations int range = endY-border; int N = range/step; if( range > N*step ) N += 1; BoofConcurrency.loopFor(0,N, iterY -> { NonMaxBlock.Search search; QueueCorner threadMin=null,threadMax=null; // get work space for this thread synchronized (lock) { if( searches.isEmpty() ) { search = this.search.newInstance(); // depends on control dependency: [if], data = [none] } else { search = searches.remove( searches.size()-1 ); // depends on control dependency: [if], data = [none] } if( search.isDetectMinimums() ) { threadMin = pop(); // depends on control dependency: [if], data = [none] } if( search.isDetectMaximums() ) { threadMax = pop(); // depends on control dependency: [if], data = [none] } } search.initialize(configuration,intensityImage,threadMin,threadMax); // search for local peaks along this block row int y = border + iterY*step; int y1 = y + step; if( y1 > endY) y1 = endY; for(int x = border; x < endX; x += step ) { int x1 = x + step; if( x1 > endX) x1 = endX; search.searchBlock(x,y,x1,y1); // depends on control dependency: [for], data = [x] } // Save the results and recycle thread working space synchronized (lock) { saveResults(localMin, threadMin); saveResults(localMax, threadMax); searches.add(search); if( threadMin != null ) cornerLists.add(threadMin); if( threadMax != null ) cornerLists.add(threadMax); } }); } }
public class class_name { @JsonIgnore public void encodeAndSetContent(byte[] byteContent) { if (byteContent == null) { this.content = null; return; } this.content = Base64.getEncoder().encodeToString(byteContent); encoding = "base64"; } }
public class class_name { @JsonIgnore public void encodeAndSetContent(byte[] byteContent) { if (byteContent == null) { this.content = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.content = Base64.getEncoder().encodeToString(byteContent); encoding = "base64"; } }
public class class_name { private JsonEncoding getJsonEncoding(MediaType contentType) { if (contentType != null && contentType.getCharset() != null) { Charset charset = contentType.getCharset(); for (JsonEncoding encoding : JsonEncoding.values()) { if (charset.name().equals(encoding.getJavaName())) { return encoding; } } } return JsonEncoding.UTF8; } }
public class class_name { private JsonEncoding getJsonEncoding(MediaType contentType) { if (contentType != null && contentType.getCharset() != null) { Charset charset = contentType.getCharset(); for (JsonEncoding encoding : JsonEncoding.values()) { if (charset.name().equals(encoding.getJavaName())) { return encoding; // depends on control dependency: [if], data = [none] } } } return JsonEncoding.UTF8; } }
public class class_name { public static Long safeJsonToLong(Object obj) { Long longValue = null; try { String str = safeJsonToString(obj); longValue = str != null ? Long.parseLong(str) : null; } catch (NumberFormatException e) { LOGGER.warn("Safe JSON conversion to Long failed", e); } return longValue; } }
public class class_name { public static Long safeJsonToLong(Object obj) { Long longValue = null; try { String str = safeJsonToString(obj); longValue = str != null ? Long.parseLong(str) : null; // depends on control dependency: [try], data = [none] } catch (NumberFormatException e) { LOGGER.warn("Safe JSON conversion to Long failed", e); } // depends on control dependency: [catch], data = [none] return longValue; } }
public class class_name { public List<FogbugzEvent> getEventsForCase(int id) { try { HashMap<String, String> params = new HashMap<String, String>(); // Hashmap defaults to <String, String> params.put("cmd", "search"); params.put("q", Integer.toString(id)); params.put("cols", "events"); Document doc = this.getFogbugzDocument(params); List<FogbugzEvent> eventList = new ArrayList<FogbugzEvent>(); NodeList eventsNodeList = doc.getElementsByTagName("event"); if (eventsNodeList != null && eventsNodeList.getLength() != 0) { for (int i = 0; i < eventsNodeList.getLength(); i++) { Element currentNode = (Element) eventsNodeList.item(i); // Construct event object from retrieved data. eventList.add(new FogbugzEvent( Integer.parseInt(currentNode.getElementsByTagName("ixBugEvent").item(0).getTextContent()), // eventid id, // caseid currentNode.getElementsByTagName("sVerb").item(0).getTextContent(), // verb Integer.parseInt(currentNode.getElementsByTagName("ixPerson").item(0).getTextContent()), // person Integer.parseInt(currentNode.getElementsByTagName("ixPersonAssignedTo").item(0).getTextContent()), // personAssignedTo DatatypeConverter.parseDateTime(currentNode.getElementsByTagName("dt").item(0).getTextContent()).getTime(), // dateTime currentNode.getElementsByTagName("evtDescription").item(0).getTextContent(), // evtDescription currentNode.getElementsByTagName("sPerson").item(0).getTextContent() // sPerson )); } } return eventList; } catch (Exception e) { FogbugzManager.log.log(Level.SEVERE, "Exception while fetching case " + Integer.toString(id), e); } return null; } }
public class class_name { public List<FogbugzEvent> getEventsForCase(int id) { try { HashMap<String, String> params = new HashMap<String, String>(); // Hashmap defaults to <String, String> params.put("cmd", "search"); // depends on control dependency: [try], data = [none] params.put("q", Integer.toString(id)); // depends on control dependency: [try], data = [none] params.put("cols", "events"); // depends on control dependency: [try], data = [none] Document doc = this.getFogbugzDocument(params); List<FogbugzEvent> eventList = new ArrayList<FogbugzEvent>(); NodeList eventsNodeList = doc.getElementsByTagName("event"); if (eventsNodeList != null && eventsNodeList.getLength() != 0) { for (int i = 0; i < eventsNodeList.getLength(); i++) { Element currentNode = (Element) eventsNodeList.item(i); // Construct event object from retrieved data. eventList.add(new FogbugzEvent( Integer.parseInt(currentNode.getElementsByTagName("ixBugEvent").item(0).getTextContent()), // eventid id, // caseid currentNode.getElementsByTagName("sVerb").item(0).getTextContent(), // verb Integer.parseInt(currentNode.getElementsByTagName("ixPerson").item(0).getTextContent()), // person Integer.parseInt(currentNode.getElementsByTagName("ixPersonAssignedTo").item(0).getTextContent()), // personAssignedTo DatatypeConverter.parseDateTime(currentNode.getElementsByTagName("dt").item(0).getTextContent()).getTime(), // dateTime currentNode.getElementsByTagName("evtDescription").item(0).getTextContent(), // evtDescription currentNode.getElementsByTagName("sPerson").item(0).getTextContent() // sPerson )); // depends on control dependency: [for], data = [none] } } return eventList; // depends on control dependency: [try], data = [none] } catch (Exception e) { FogbugzManager.log.log(Level.SEVERE, "Exception while fetching case " + Integer.toString(id), e); } // depends on control dependency: [catch], data = [none] return null; } }
public class class_name { public EClass getIfcShapeModel() { if (ifcShapeModelEClass == null) { ifcShapeModelEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(511); } return ifcShapeModelEClass; } }
public class class_name { public EClass getIfcShapeModel() { if (ifcShapeModelEClass == null) { ifcShapeModelEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(511); // depends on control dependency: [if], data = [none] } return ifcShapeModelEClass; } }
public class class_name { public List<Excerpt> getAccessorAnnotations() { if (accessorAnnotations instanceof ImmutableList) { accessorAnnotations = new ArrayList<>(accessorAnnotations); } return Collections.unmodifiableList(accessorAnnotations); } }
public class class_name { public List<Excerpt> getAccessorAnnotations() { if (accessorAnnotations instanceof ImmutableList) { accessorAnnotations = new ArrayList<>(accessorAnnotations); // depends on control dependency: [if], data = [none] } return Collections.unmodifiableList(accessorAnnotations); } }
public class class_name { private void injectCache(DoubleConsumer cacheAware) { if (cacheAware instanceof CacheAware) { ((CacheAware) cacheAware).injectCache(registry.getComponent(Cache.class)); } } }
public class class_name { private void injectCache(DoubleConsumer cacheAware) { if (cacheAware instanceof CacheAware) { ((CacheAware) cacheAware).injectCache(registry.getComponent(Cache.class)); // depends on control dependency: [if], data = [none] } } }
public class class_name { private static int determineCommandNameWidth(List<Command.Descriptor> descriptors) { int max = 0; for (Command.Descriptor d : descriptors) { max = Math.max(max, d.getName().length()); } return max; } }
public class class_name { private static int determineCommandNameWidth(List<Command.Descriptor> descriptors) { int max = 0; for (Command.Descriptor d : descriptors) { max = Math.max(max, d.getName().length()); // depends on control dependency: [for], data = [d] } return max; } }
public class class_name { public UTF8String translate(Map<Character, Character> dict) { String srcStr = this.toString(); StringBuilder sb = new StringBuilder(); for(int k = 0; k< srcStr.length(); k++) { if (null == dict.get(srcStr.charAt(k))) { sb.append(srcStr.charAt(k)); } else if ('\0' != dict.get(srcStr.charAt(k))){ sb.append(dict.get(srcStr.charAt(k))); } } return fromString(sb.toString()); } }
public class class_name { public UTF8String translate(Map<Character, Character> dict) { String srcStr = this.toString(); StringBuilder sb = new StringBuilder(); for(int k = 0; k< srcStr.length(); k++) { if (null == dict.get(srcStr.charAt(k))) { sb.append(srcStr.charAt(k)); // depends on control dependency: [if], data = [none] } else if ('\0' != dict.get(srcStr.charAt(k))){ sb.append(dict.get(srcStr.charAt(k))); // depends on control dependency: [if], data = [dict.get(srcStr.charAt(k)))] } } return fromString(sb.toString()); } }
public class class_name { public int numberOfRunsAdjustment() { int ans = 0; if (BufferUtil.isBackedBySimpleArray(bitmap)) { long[] b = bitmap.array(); long nextWord = b[0]; for (int i = 0; i < b.length - 1; i++) { final long word = nextWord; nextWord = b[i + 1]; ans += ((word >>> 63) & ~nextWord); } final long word = nextWord; if ((word & 0x8000000000000000L) != 0) { ans++; } } else { long nextWord = bitmap.get(0); int len = bitmap.limit(); for (int i = 0; i < len - 1; i++) { final long word = nextWord; nextWord = bitmap.get(i + 1); ans += ((word >>> 63) & ~nextWord); } final long word = nextWord; if ((word & 0x8000000000000000L) != 0) { ans++; } } return ans; } }
public class class_name { public int numberOfRunsAdjustment() { int ans = 0; if (BufferUtil.isBackedBySimpleArray(bitmap)) { long[] b = bitmap.array(); long nextWord = b[0]; for (int i = 0; i < b.length - 1; i++) { final long word = nextWord; nextWord = b[i + 1]; // depends on control dependency: [for], data = [i] ans += ((word >>> 63) & ~nextWord); // depends on control dependency: [for], data = [none] } final long word = nextWord; if ((word & 0x8000000000000000L) != 0) { ans++; // depends on control dependency: [if], data = [none] } } else { long nextWord = bitmap.get(0); int len = bitmap.limit(); for (int i = 0; i < len - 1; i++) { final long word = nextWord; nextWord = bitmap.get(i + 1); // depends on control dependency: [for], data = [i] ans += ((word >>> 63) & ~nextWord); // depends on control dependency: [for], data = [none] } final long word = nextWord; if ((word & 0x8000000000000000L) != 0) { ans++; // depends on control dependency: [if], data = [none] } } return ans; } }
public class class_name { public BaseLuceneStorage setIndexManager(IndexManager indexManager) { if (this.indexManager != null && myOwnIndexManager) { this.indexManager.close(); myOwnIndexManager = false; } this.indexManager = indexManager; return this; } }
public class class_name { public BaseLuceneStorage setIndexManager(IndexManager indexManager) { if (this.indexManager != null && myOwnIndexManager) { this.indexManager.close(); // depends on control dependency: [if], data = [none] myOwnIndexManager = false; // depends on control dependency: [if], data = [none] } this.indexManager = indexManager; return this; } }
public class class_name { public Action getAction(String url, String[] urlPara) { Action action = mapping.get(url); if (action != null) { return action; } // -------- int i = url.lastIndexOf('/'); if (i != -1) { action = mapping.get(url.substring(0, i)); if (action != null) { urlPara[0] = url.substring(i + 1); } } return action; } }
public class class_name { public Action getAction(String url, String[] urlPara) { Action action = mapping.get(url); if (action != null) { return action; // depends on control dependency: [if], data = [none] } // -------- int i = url.lastIndexOf('/'); if (i != -1) { action = mapping.get(url.substring(0, i)); // depends on control dependency: [if], data = [none] if (action != null) { urlPara[0] = url.substring(i + 1); // depends on control dependency: [if], data = [none] } } return action; } }
public class class_name { protected String getLanguageSimpleName() { final String name = getGrammar().getName(); final int index = name.lastIndexOf('.'); if (index > 0) { return name.substring(index + 1); } return name; } }
public class class_name { protected String getLanguageSimpleName() { final String name = getGrammar().getName(); final int index = name.lastIndexOf('.'); if (index > 0) { return name.substring(index + 1); // depends on control dependency: [if], data = [(index] } return name; } }
public class class_name { @FFDCIgnore(NoSuchMethodException.class) public Object activateEndpoint(WSMessageEndpointFactory mef, @Sensitive Properties activationProperties, String authenticationAlias, String destinationIDOrJNDIName, AdminObjectService adminObjSvc, String adminObjSvcRefId) throws ResourceException { final boolean trace = TraceComponent.isAnyTracingEnabled(); if (trace && tc.isEntryEnabled()) Tr.entry(this, tc, "activateEndpoint", mef, activationProperties, authenticationAlias, destinationIDOrJNDIName, adminObjSvc, adminObjSvcRefId); // Identify the RA Service to call endpointActivation on Object activationSpec; BootstrapContextImpl bootstrapContext = null; try { bootstrapContext = bootstrapContextRef.getServiceWithException(); String adapterPid = (String) bootstrapContextRef.getReference().getProperty(Constants.SERVICE_PID); // If the mdb runtime found the admin object service, then get the id from the service, // otherwise pass in the destinationID if (adminObjSvcRefId == null) adminObjSvcRefId = destinationIDOrJNDIName; activationSpec = createActivationSpec(activationProperties, authenticationAlias, adminObjSvc, adminObjSvcRefId, destinationIDOrJNDIName, mef.getJ2EEName().getApplication()); int[] fullJCAVersionArray = getFullJCAVersion(bootstrapContext); mef.setJCAVersion(fullJCAVersionArray[0], // Major Version fullJCAVersionArray[1]); // Minor Version mef.setRAKey(adapterPid); ActivationConfig config = new ActivationConfig(activationProperties, adminObjSvcRefId, authenticationAlias, mef.getJ2EEName().getApplication()); if (qmidenabled) { Class<? 
extends Object> mcImplClass = activationSpec.getClass(); try { Method m = mcImplClass.getMethod("getQmid", (Class<?>[]) null); String qmid = (String) m.invoke(activationSpec, (Object[]) null); if (qmid != null) { config.setQmid(qmid); } } catch (NoSuchMethodException nsme) { qmidenabled = false; } catch (InvocationTargetException ite) { qmidenabled = false; } } // register with the TM int recoveryId = isRRSTransactional(activationSpec) ? registerRRSXAResourceInfo(id) : registerXAResourceInfo(config); mef.setRecoveryID(recoveryId); //TODO Add support for deferred Endpoint Activation. if (activationSpec instanceof ActivationSpec) { appsToRecycle.add(mef.getJ2EEName().getApplication()); jcasu = new JcaServiceUtilities(); raClassLoader = bootstrapContext.getRaClassLoader(); ClassLoader previousClassLoader = jcasu.beginContextClassLoader(raClassLoader); try { bootstrapContext.resourceAdapter.endpointActivation(mef, (ActivationSpec) activationSpec); } finally { jcasu.endContextClassLoader(raClassLoader, previousClassLoader); } if (qmidenabled && config.getQmid() == null) { Class<? extends Object> mcImplClass = activationSpec.getClass(); try { Method m = mcImplClass.getMethod("getQmid", (Class<?>[]) null); String qmid = (String) m.invoke(activationSpec, (Object[]) null); config.setQmid(qmid); // TODO - Need to finish this code // recoveryId = isRRSTransactional(activationSpec) ? registerRRSXAResourceInfo(id) : registerXAResourceInfo(config); // mef.setRecoveryID(recoveryId); } catch (NoSuchMethodException nsme) { qmidenabled = false; } catch (InvocationTargetException ite) { qmidenabled = false; } } endpointActivationParams.add(new ActivationParams(activationSpec, mef)); } else { //TODO We need to handle the case when @Activation is used. throw new UnsupportedOperationException(); } Tr.info(tc, "J2CA8801.act.spec.active", id, mef.getJ2EEName()); } catch (Exception ex) { Tr.error(tc, "J2CA8802.activation.failed", bootstrapContext == null ? 
null : bootstrapContext.getResourceAdapterName(), ex); throw ex instanceof ResourceException ? (ResourceException) ex : new ResourceException(ex); } if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, "activateEndpoint", activationSpec); return activationSpec; } }
public class class_name { @FFDCIgnore(NoSuchMethodException.class) public Object activateEndpoint(WSMessageEndpointFactory mef, @Sensitive Properties activationProperties, String authenticationAlias, String destinationIDOrJNDIName, AdminObjectService adminObjSvc, String adminObjSvcRefId) throws ResourceException { final boolean trace = TraceComponent.isAnyTracingEnabled(); if (trace && tc.isEntryEnabled()) Tr.entry(this, tc, "activateEndpoint", mef, activationProperties, authenticationAlias, destinationIDOrJNDIName, adminObjSvc, adminObjSvcRefId); // Identify the RA Service to call endpointActivation on Object activationSpec; BootstrapContextImpl bootstrapContext = null; try { bootstrapContext = bootstrapContextRef.getServiceWithException(); String adapterPid = (String) bootstrapContextRef.getReference().getProperty(Constants.SERVICE_PID); // If the mdb runtime found the admin object service, then get the id from the service, // otherwise pass in the destinationID if (adminObjSvcRefId == null) adminObjSvcRefId = destinationIDOrJNDIName; activationSpec = createActivationSpec(activationProperties, authenticationAlias, adminObjSvc, adminObjSvcRefId, destinationIDOrJNDIName, mef.getJ2EEName().getApplication()); int[] fullJCAVersionArray = getFullJCAVersion(bootstrapContext); mef.setJCAVersion(fullJCAVersionArray[0], // Major Version fullJCAVersionArray[1]); // Minor Version mef.setRAKey(adapterPid); ActivationConfig config = new ActivationConfig(activationProperties, adminObjSvcRefId, authenticationAlias, mef.getJ2EEName().getApplication()); if (qmidenabled) { Class<? 
extends Object> mcImplClass = activationSpec.getClass(); try { Method m = mcImplClass.getMethod("getQmid", (Class<?>[]) null); String qmid = (String) m.invoke(activationSpec, (Object[]) null); if (qmid != null) { config.setQmid(qmid); } } catch (NoSuchMethodException nsme) { qmidenabled = false; } catch (InvocationTargetException ite) { qmidenabled = false; } } // register with the TM int recoveryId = isRRSTransactional(activationSpec) ? registerRRSXAResourceInfo(id) : registerXAResourceInfo(config); mef.setRecoveryID(recoveryId); //TODO Add support for deferred Endpoint Activation. if (activationSpec instanceof ActivationSpec) { appsToRecycle.add(mef.getJ2EEName().getApplication()); jcasu = new JcaServiceUtilities(); raClassLoader = bootstrapContext.getRaClassLoader(); ClassLoader previousClassLoader = jcasu.beginContextClassLoader(raClassLoader); try { bootstrapContext.resourceAdapter.endpointActivation(mef, (ActivationSpec) activationSpec); // depends on control dependency: [try], data = [none] } finally { jcasu.endContextClassLoader(raClassLoader, previousClassLoader); } if (qmidenabled && config.getQmid() == null) { Class<? extends Object> mcImplClass = activationSpec.getClass(); try { Method m = mcImplClass.getMethod("getQmid", (Class<?>[]) null); String qmid = (String) m.invoke(activationSpec, (Object[]) null); config.setQmid(qmid); // depends on control dependency: [try], data = [none] // TODO - Need to finish this code // recoveryId = isRRSTransactional(activationSpec) ? registerRRSXAResourceInfo(id) : registerXAResourceInfo(config); // mef.setRecoveryID(recoveryId); } catch (NoSuchMethodException nsme) { qmidenabled = false; } catch (InvocationTargetException ite) { // depends on control dependency: [catch], data = [none] qmidenabled = false; } // depends on control dependency: [catch], data = [none] } endpointActivationParams.add(new ActivationParams(activationSpec, mef)); } else { //TODO We need to handle the case when @Activation is used. 
throw new UnsupportedOperationException(); } Tr.info(tc, "J2CA8801.act.spec.active", id, mef.getJ2EEName()); } catch (Exception ex) { Tr.error(tc, "J2CA8802.activation.failed", bootstrapContext == null ? null : bootstrapContext.getResourceAdapterName(), ex); throw ex instanceof ResourceException ? (ResourceException) ex : new ResourceException(ex); } if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, "activateEndpoint", activationSpec); return activationSpec; } }
public class class_name { public static String toStringPart(Field field) { String getterName; if (field.isMap()) { getterName = getMapGetterName(field); } else if (field.isRepeated()) { getterName = getRepeatedFieldGetterName(field); } else { getterName = getFieldGetterName(field); } if (field.getType().isEnum() && !field.isRepeated()) { return "\"" + getFieldName(field) + "=\" + " + getterName + "() + '(' + " + getEnumFieldValueGetterName(field) + "() + ')'"; } return "\"" + getFieldName(field) + "=\" + " + getterName + "()"; } }
public class class_name { public static String toStringPart(Field field) { String getterName; if (field.isMap()) { getterName = getMapGetterName(field); // depends on control dependency: [if], data = [none] } else if (field.isRepeated()) { getterName = getRepeatedFieldGetterName(field); // depends on control dependency: [if], data = [none] } else { getterName = getFieldGetterName(field); // depends on control dependency: [if], data = [none] } if (field.getType().isEnum() && !field.isRepeated()) { return "\"" + getFieldName(field) + "=\" + " + getterName + "() + '(' + " + getEnumFieldValueGetterName(field) + "() + ')'"; } return "\"" + getFieldName(field) + "=\" + " + getterName + "()"; // depends on control dependency: [if], data = [none] } }
public class class_name { private boolean isSelectMany(String textToCheckParam) { if(textToCheckParam == null || textToCheckParam.trim().isEmpty()) { return false; } String toCheckLower = textToCheckParam.toLowerCase(); return toCheckLower.startsWith(SELECT_MANY.toLowerCase()); } }
public class class_name { private boolean isSelectMany(String textToCheckParam) { if(textToCheckParam == null || textToCheckParam.trim().isEmpty()) { return false; // depends on control dependency: [if], data = [none] } String toCheckLower = textToCheckParam.toLowerCase(); return toCheckLower.startsWith(SELECT_MANY.toLowerCase()); } }
public class class_name { static ArrayList<DbStats> getDbStats() { ArrayList<DbStats> dbStatsList = new ArrayList<DbStats>(); for (SQLiteDatabase db : getActiveDatabases()) { db.collectDbStats(dbStatsList); } return dbStatsList; } }
public class class_name { static ArrayList<DbStats> getDbStats() { ArrayList<DbStats> dbStatsList = new ArrayList<DbStats>(); for (SQLiteDatabase db : getActiveDatabases()) { db.collectDbStats(dbStatsList); // depends on control dependency: [for], data = [db] } return dbStatsList; } }
public class class_name { public synchronized File getLogFile() { File dir = getStorageDir(); if (dir == null) { Log.e(TAG, "Unable to open log file from external storage"); return null; } switch (wraper.nameFormat) { case WrapFormatter.NAME_FORMAT_DATE: { String file = getFormattedNameWithDate(dir); mLogFile = new File(dir, file); } break; case WrapFormatter.NAME_FORMAT_PLUSINDEX: { String file = getFormattedNameWithPlusIndex(dir); mLogFile = new File(dir, file); } break; case WrapFormatter.NAME_FORMAT_NONE: { mLogFile = new File(dir, getFileName(fileName).concat(DEFUALT_FILE_EXT)); } break; default: { mLogFile = new File(dir, getFileName(fileName).concat(DEFUALT_FILE_EXT)); } break; } try { mLogFile.getParentFile().mkdirs(); mLogFile.createNewFile(); } catch (IOException e) { Log.e(TAG, "创建文件失败了..." + e.toString() + " mLogFile.getParentFile()"); } return mLogFile; } }
public class class_name { public synchronized File getLogFile() { File dir = getStorageDir(); if (dir == null) { Log.e(TAG, "Unable to open log file from external storage"); // depends on control dependency: [if], data = [none] return null; // depends on control dependency: [if], data = [none] } switch (wraper.nameFormat) { case WrapFormatter.NAME_FORMAT_DATE: { String file = getFormattedNameWithDate(dir); mLogFile = new File(dir, file); } break; case WrapFormatter.NAME_FORMAT_PLUSINDEX: { String file = getFormattedNameWithPlusIndex(dir); mLogFile = new File(dir, file); } break; case WrapFormatter.NAME_FORMAT_NONE: { mLogFile = new File(dir, getFileName(fileName).concat(DEFUALT_FILE_EXT)); } break; default: { mLogFile = new File(dir, getFileName(fileName).concat(DEFUALT_FILE_EXT)); } break; } try { mLogFile.getParentFile().mkdirs(); mLogFile.createNewFile(); } catch (IOException e) { Log.e(TAG, "创建文件失败了..." + e.toString() + " mLogFile.getParentFile()"); } return mLogFile; } }
public class class_name { public JobUpdateOptions withOcpDate(DateTime ocpDate) { if (ocpDate == null) { this.ocpDate = null; } else { this.ocpDate = new DateTimeRfc1123(ocpDate); } return this; } }
public class class_name { public JobUpdateOptions withOcpDate(DateTime ocpDate) { if (ocpDate == null) { this.ocpDate = null; // depends on control dependency: [if], data = [none] } else { this.ocpDate = new DateTimeRfc1123(ocpDate); // depends on control dependency: [if], data = [(ocpDate] } return this; } }
public class class_name { public int run(String argv[]) throws Exception { if (argv.length < 1) { printUsage(""); return -1; } int exitCode = -1; int i = 0; String cmd = argv[i++]; // // verify that we have enough command line parameters // if ("-showConfig".equals(cmd)) { if (argv.length < 1) { printUsage(cmd); return exitCode; } } try { if ("-showConfig".equals(cmd)) { initializeRpc(conf, HighTideNode.getAddress(conf)); exitCode = showConfig(cmd, argv, i); } else { exitCode = -1; System.err.println(cmd.substring(1) + ": Unknown command"); printUsage(""); } } catch (IllegalArgumentException arge) { exitCode = -1; System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage()); printUsage(cmd); } catch (RemoteException e) { // // This is a error returned by hightidenode server. Print // out the first line of the error mesage, ignore the stack trace. exitCode = -1; try { String[] content; content = e.getLocalizedMessage().split("\n"); System.err.println(cmd.substring(1) + ": " + content[0]); } catch (Exception ex) { System.err.println(cmd.substring(1) + ": " + ex.getLocalizedMessage()); } } catch (IOException e) { // // IO exception encountered locally. // exitCode = -1; System.err.println(cmd.substring(1) + ": " + e.getLocalizedMessage()); } catch (Exception re) { exitCode = -1; System.err.println(cmd.substring(1) + ": " + re.getLocalizedMessage()); } finally { } return exitCode; } }
public class class_name { public int run(String argv[]) throws Exception { if (argv.length < 1) { printUsage(""); return -1; } int exitCode = -1; int i = 0; String cmd = argv[i++]; // // verify that we have enough command line parameters // if ("-showConfig".equals(cmd)) { if (argv.length < 1) { printUsage(cmd); // depends on control dependency: [if], data = [none] return exitCode; // depends on control dependency: [if], data = [none] } } try { if ("-showConfig".equals(cmd)) { initializeRpc(conf, HighTideNode.getAddress(conf)); // depends on control dependency: [if], data = [none] exitCode = showConfig(cmd, argv, i); // depends on control dependency: [if], data = [none] } else { exitCode = -1; // depends on control dependency: [if], data = [none] System.err.println(cmd.substring(1) + ": Unknown command"); // depends on control dependency: [if], data = [none] printUsage(""); // depends on control dependency: [if], data = [none] } } catch (IllegalArgumentException arge) { exitCode = -1; System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage()); printUsage(cmd); } catch (RemoteException e) { // // This is a error returned by hightidenode server. Print // out the first line of the error mesage, ignore the stack trace. exitCode = -1; try { String[] content; content = e.getLocalizedMessage().split("\n"); // depends on control dependency: [try], data = [none] System.err.println(cmd.substring(1) + ": " + content[0]); // depends on control dependency: [try], data = [none] } catch (Exception ex) { System.err.println(cmd.substring(1) + ": " + ex.getLocalizedMessage()); } // depends on control dependency: [catch], data = [none] } catch (IOException e) { // // IO exception encountered locally. // exitCode = -1; System.err.println(cmd.substring(1) + ": " + e.getLocalizedMessage()); } catch (Exception re) { exitCode = -1; System.err.println(cmd.substring(1) + ": " + re.getLocalizedMessage()); } finally { } return exitCode; } }
public class class_name { static void decrementKey(byte[] key) { for (int i = key.length - 1; i >= 0; i--) { int v = key[i] & 0xff; if (v > 0) { key[i] = (byte) (v - 1); return; } key[i] = (byte) 0xff; } } }
public class class_name { static void decrementKey(byte[] key) { for (int i = key.length - 1; i >= 0; i--) { int v = key[i] & 0xff; if (v > 0) { key[i] = (byte) (v - 1); // depends on control dependency: [if], data = [(v] return; // depends on control dependency: [if], data = [none] } key[i] = (byte) 0xff; // depends on control dependency: [for], data = [i] } } }
public class class_name { private static byte[] digest(final byte[] input, final String algorithm, final byte[] salt, final int iterations) { try { MessageDigest digest = MessageDigest.getInstance(algorithm); if (salt != null) { digest.update(salt); } byte[] result = digest.digest(input); for (int i = 1; i < iterations; i++) { digest.reset(); result = digest.digest(result); } return result; } catch (NoSuchAlgorithmException e) { throw new ImpossibleException(e); } } }
public class class_name { private static byte[] digest(final byte[] input, final String algorithm, final byte[] salt, final int iterations) { try { MessageDigest digest = MessageDigest.getInstance(algorithm); if (salt != null) { digest.update(salt); // depends on control dependency: [if], data = [(salt] } byte[] result = digest.digest(input); for (int i = 1; i < iterations; i++) { digest.reset(); // depends on control dependency: [for], data = [none] result = digest.digest(result); // depends on control dependency: [for], data = [none] } return result; // depends on control dependency: [try], data = [none] } catch (NoSuchAlgorithmException e) { throw new ImpossibleException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public String create( JClassType interfaceClass ) throws UnableToCompleteException { // We concatenate the name of all the enclosing class. StringBuilder builder = new StringBuilder( interfaceClass.getSimpleSourceName() + "Impl" ); JClassType enclosingType = interfaceClass.getEnclosingType(); while ( null != enclosingType ) { builder.insert( 0, enclosingType.getSimpleSourceName() + "_" ); enclosingType = enclosingType.getEnclosingType(); } String mapperClassSimpleName = builder.toString(); String packageName = interfaceClass.getPackage().getName(); String qualifiedMapperClassName = packageName + "." + mapperClassSimpleName; PrintWriter printWriter = getPrintWriter( packageName, mapperClassSimpleName ); // The class already exists, no need to continue. if ( printWriter == null ) { return qualifiedMapperClassName; } try { // Extract the type of the object to map. JClassType mappedTypeClass = extractMappedType( interfaceClass ); boolean reader = typeOracle.isObjectReader( interfaceClass ); boolean writer = typeOracle.isObjectWriter( interfaceClass ); Class<?> abstractClass; if ( reader ) { if ( writer ) { abstractClass = AbstractObjectMapper.class; } else { abstractClass = AbstractObjectReader.class; } } else { abstractClass = AbstractObjectWriter.class; } TypeSpec.Builder mapperBuilder = TypeSpec.classBuilder( mapperClassSimpleName ) .addModifiers( Modifier.PUBLIC, Modifier.FINAL ) .addSuperinterface( typeName( interfaceClass ) ) .superclass( parameterizedName( abstractClass, mappedTypeClass ) ) .addMethod( buildConstructor( mappedTypeClass ) ); if ( reader ) { mapperBuilder.addMethod( buildNewDeserializerMethod( mappedTypeClass ) ); } if ( writer ) { mapperBuilder.addMethod( buildNewSerializerMethod( mappedTypeClass ) ); } write( packageName, mapperBuilder.build(), printWriter ); } finally { printWriter.close(); } return qualifiedMapperClassName; } }
public class class_name { public String create( JClassType interfaceClass ) throws UnableToCompleteException { // We concatenate the name of all the enclosing class. StringBuilder builder = new StringBuilder( interfaceClass.getSimpleSourceName() + "Impl" ); JClassType enclosingType = interfaceClass.getEnclosingType(); while ( null != enclosingType ) { builder.insert( 0, enclosingType.getSimpleSourceName() + "_" ); enclosingType = enclosingType.getEnclosingType(); } String mapperClassSimpleName = builder.toString(); String packageName = interfaceClass.getPackage().getName(); String qualifiedMapperClassName = packageName + "." + mapperClassSimpleName; PrintWriter printWriter = getPrintWriter( packageName, mapperClassSimpleName ); // The class already exists, no need to continue. if ( printWriter == null ) { return qualifiedMapperClassName; } try { // Extract the type of the object to map. JClassType mappedTypeClass = extractMappedType( interfaceClass ); boolean reader = typeOracle.isObjectReader( interfaceClass ); boolean writer = typeOracle.isObjectWriter( interfaceClass ); Class<?> abstractClass; if ( reader ) { if ( writer ) { abstractClass = AbstractObjectMapper.class; // depends on control dependency: [if], data = [none] } else { abstractClass = AbstractObjectReader.class; // depends on control dependency: [if], data = [none] } } else { abstractClass = AbstractObjectWriter.class; // depends on control dependency: [if], data = [none] } TypeSpec.Builder mapperBuilder = TypeSpec.classBuilder( mapperClassSimpleName ) .addModifiers( Modifier.PUBLIC, Modifier.FINAL ) .addSuperinterface( typeName( interfaceClass ) ) .superclass( parameterizedName( abstractClass, mappedTypeClass ) ) .addMethod( buildConstructor( mappedTypeClass ) ); if ( reader ) { mapperBuilder.addMethod( buildNewDeserializerMethod( mappedTypeClass ) ); // depends on control dependency: [if], data = [none] } if ( writer ) { mapperBuilder.addMethod( buildNewSerializerMethod( mappedTypeClass ) ); // 
depends on control dependency: [if], data = [none] } write( packageName, mapperBuilder.build(), printWriter ); } finally { printWriter.close(); } return qualifiedMapperClassName; } }
public class class_name { @SuppressWarnings("resource") @Override public Reader createResource(GeneratorContext context) { String path = context.getPath(); StopWatch stopWatch = null; if (PERF_LOGGER.isDebugEnabled()) { stopWatch = new StopWatch("Generating resource '" + path + "' with " + getName() + " generator"); stopWatch.start(); } Reader rd = null; if (useCache) { List<FilePathMapping> fMappings = linkedResourceMap.get(getResourceCacheKey(path, context)); if (fMappings != null && !checkResourcesModified(context, fMappings)) { // Retrieve from cache // Checks if temp resource is already created if (context.isProcessingBundle()) { if (cacheMode.equals(CacheMode.PROD) || cacheMode.equals(CacheMode.ALL)) { rd = retrieveFromCache(path, context, CacheMode.PROD); } } else { if (cacheMode.equals(CacheMode.DEBUG) || cacheMode.equals(CacheMode.ALL)) { rd = retrieveFromCache(path, context, CacheMode.DEBUG); } } // Update the file path mapping addLinkedResources(path, context, fMappings); } } if (rd == null) { rd = generateResource(path, context); if (useCache) { if (rd != null) { if (cacheMode.equals(CacheMode.PROD) || cacheMode.equals(CacheMode.ALL)) { rd = createTempResource(context, CacheMode.PROD, rd); } } } if (context.isProcessingBundle()) { if (useCache && (cacheMode.equals(CacheMode.DEBUG) || cacheMode.equals(CacheMode.ALL))) { // Create debug cache while processing bundle if cache is // allowed in debug String content = null; try { content = IOUtils.toString(rd); } catch (IOException e) { throw new BundlingProcessException(e); } Reader dRd = generateResourceForDebug(new StringReader(content), context); createTempResource(context, CacheMode.DEBUG, dRd); rd = new StringReader(content); } } else { rd = generateResourceForDebug(rd, context); if (useCache && (cacheMode.equals(CacheMode.DEBUG) || cacheMode.equals(CacheMode.ALL))) { rd = createTempResource(context, CacheMode.DEBUG, rd); } } } if (PERF_LOGGER.isDebugEnabled()) { stopWatch.stop(); 
PERF_LOGGER.debug(stopWatch.shortSummary()); } return rd; } }
public class class_name { @SuppressWarnings("resource") @Override public Reader createResource(GeneratorContext context) { String path = context.getPath(); StopWatch stopWatch = null; if (PERF_LOGGER.isDebugEnabled()) { stopWatch = new StopWatch("Generating resource '" + path + "' with " + getName() + " generator"); // depends on control dependency: [if], data = [none] stopWatch.start(); // depends on control dependency: [if], data = [none] } Reader rd = null; if (useCache) { List<FilePathMapping> fMappings = linkedResourceMap.get(getResourceCacheKey(path, context)); if (fMappings != null && !checkResourcesModified(context, fMappings)) { // Retrieve from cache // Checks if temp resource is already created if (context.isProcessingBundle()) { if (cacheMode.equals(CacheMode.PROD) || cacheMode.equals(CacheMode.ALL)) { rd = retrieveFromCache(path, context, CacheMode.PROD); // depends on control dependency: [if], data = [none] } } else { if (cacheMode.equals(CacheMode.DEBUG) || cacheMode.equals(CacheMode.ALL)) { rd = retrieveFromCache(path, context, CacheMode.DEBUG); // depends on control dependency: [if], data = [none] } } // Update the file path mapping addLinkedResources(path, context, fMappings); // depends on control dependency: [if], data = [none] } } if (rd == null) { rd = generateResource(path, context); // depends on control dependency: [if], data = [none] if (useCache) { if (rd != null) { if (cacheMode.equals(CacheMode.PROD) || cacheMode.equals(CacheMode.ALL)) { rd = createTempResource(context, CacheMode.PROD, rd); // depends on control dependency: [if], data = [none] } } } if (context.isProcessingBundle()) { if (useCache && (cacheMode.equals(CacheMode.DEBUG) || cacheMode.equals(CacheMode.ALL))) { // Create debug cache while processing bundle if cache is // allowed in debug String content = null; try { content = IOUtils.toString(rd); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new BundlingProcessException(e); } // 
depends on control dependency: [catch], data = [none] Reader dRd = generateResourceForDebug(new StringReader(content), context); createTempResource(context, CacheMode.DEBUG, dRd); // depends on control dependency: [if], data = [none] rd = new StringReader(content); // depends on control dependency: [if], data = [none] } } else { rd = generateResourceForDebug(rd, context); // depends on control dependency: [if], data = [none] if (useCache && (cacheMode.equals(CacheMode.DEBUG) || cacheMode.equals(CacheMode.ALL))) { rd = createTempResource(context, CacheMode.DEBUG, rd); // depends on control dependency: [if], data = [none] } } } if (PERF_LOGGER.isDebugEnabled()) { stopWatch.stop(); // depends on control dependency: [if], data = [none] PERF_LOGGER.debug(stopWatch.shortSummary()); // depends on control dependency: [if], data = [none] } return rd; } }
public class class_name { private UndiGraph<Var, Void> computeVariableNamesInterferenceGraph( ControlFlowGraph<Node> cfg, Set<? extends Var> escaped) { UndiGraph<Var, Void> interferenceGraph = LinkedUndirectedGraph.create(); // First create a node for each non-escaped variable. We add these nodes in the order in which // they appear in the code because we want the names that appear earlier in the code to be used // when coalescing to variables that appear later in the code. List<Var> orderedVariables = liveness.getAllVariablesInOrder(); for (Var v : orderedVariables) { if (escaped.contains(v)) { continue; } // NOTE(user): In theory, we CAN coalesce function names just like any variables. Our // Liveness analysis captures this just like it as described in the specification. However, we // saw some zipped and unzipped size increase after this. We are not totally sure why // that is but, for now, we will respect the dead functions and not play around with it if (v.getParentNode().isFunction()) { continue; } // NOTE: we skip class declarations for a combination of two reasons: // 1. they are block-scoped, so we would need to rewrite them as class expressions // e.g. `class C {}` -> `var C = class {}` to avoid incorrect semantics // (see testDontCoalesceClassDeclarationsWithDestructuringDeclaration). // This is possible but increases pre-gzip code size and complexity. // 2. since function declaration coalescing seems to cause a size regression (as discussed // above) we assume that coalescing class names may cause a similar size regression. if (v.getParentNode().isClass()) { continue; } // Skip lets and consts that have multiple variables declared in them, otherwise this produces // incorrect semantics. See test case "testCapture". 
// Skipping vars technically isn't needed for correct semantics, but works around a Safari // bug for var redeclarations (https://github.com/google/closure-compiler/issues/3164) if (isInMultipleLvalueDecl(v)) { continue; } interferenceGraph.createNode(v); } // Go through each variable and try to connect them. int v1Index = -1; for (Var v1 : orderedVariables) { v1Index++; int v2Index = -1; NEXT_VAR_PAIR: for (Var v2 : orderedVariables) { v2Index++; // Skip duplicate pairs. if (v1Index > v2Index) { continue; } if (!interferenceGraph.hasNode(v1) || !interferenceGraph.hasNode(v2)) { // Skip nodes that were not added. They are globals and escaped // locals. Also avoid merging a variable with itself. continue NEXT_VAR_PAIR; } if (v1.isParam() && v2.isParam()) { interferenceGraph.connectIfNotFound(v1, null, v2); continue NEXT_VAR_PAIR; } // Go through every CFG node in the program and look at // this variable pair. If they are both live at the same // time, add an edge between them and continue to the next pair. NEXT_CROSS_CFG_NODE: for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) { if (cfg.isImplicitReturn(cfgNode)) { continue NEXT_CROSS_CFG_NODE; } FlowState<LiveVariableLattice> state = cfgNode.getAnnotation(); // Check the live states and add edge when possible. if ((state.getIn().isLive(v1Index) && state.getIn().isLive(v2Index)) || (state.getOut().isLive(v1Index) && state.getOut().isLive(v2Index))) { interferenceGraph.connectIfNotFound(v1, null, v2); continue NEXT_VAR_PAIR; } } // v1 and v2 might not have an edge between them! woohoo. there's // one last sanity check that we have to do: we have to check // if there's a collision *within* the cfg node. 
NEXT_INTRA_CFG_NODE: for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) { if (cfg.isImplicitReturn(cfgNode)) { continue NEXT_INTRA_CFG_NODE; } FlowState<LiveVariableLattice> state = cfgNode.getAnnotation(); boolean v1OutLive = state.getOut().isLive(v1Index); boolean v2OutLive = state.getOut().isLive(v2Index); CombinedLiveRangeChecker checker = new CombinedLiveRangeChecker( cfgNode.getValue(), new LiveRangeChecker(v1, v2OutLive ? null : v2), new LiveRangeChecker(v2, v1OutLive ? null : v1)); checker.check(cfgNode.getValue()); if (checker.connectIfCrossed(interferenceGraph)) { continue NEXT_VAR_PAIR; } } } } return interferenceGraph; } }
public class class_name { private UndiGraph<Var, Void> computeVariableNamesInterferenceGraph( ControlFlowGraph<Node> cfg, Set<? extends Var> escaped) { UndiGraph<Var, Void> interferenceGraph = LinkedUndirectedGraph.create(); // First create a node for each non-escaped variable. We add these nodes in the order in which // they appear in the code because we want the names that appear earlier in the code to be used // when coalescing to variables that appear later in the code. List<Var> orderedVariables = liveness.getAllVariablesInOrder(); for (Var v : orderedVariables) { if (escaped.contains(v)) { continue; } // NOTE(user): In theory, we CAN coalesce function names just like any variables. Our // Liveness analysis captures this just like it as described in the specification. However, we // saw some zipped and unzipped size increase after this. We are not totally sure why // that is but, for now, we will respect the dead functions and not play around with it if (v.getParentNode().isFunction()) { continue; } // NOTE: we skip class declarations for a combination of two reasons: // 1. they are block-scoped, so we would need to rewrite them as class expressions // e.g. `class C {}` -> `var C = class {}` to avoid incorrect semantics // (see testDontCoalesceClassDeclarationsWithDestructuringDeclaration). // This is possible but increases pre-gzip code size and complexity. // 2. since function declaration coalescing seems to cause a size regression (as discussed // above) we assume that coalescing class names may cause a similar size regression. if (v.getParentNode().isClass()) { continue; } // Skip lets and consts that have multiple variables declared in them, otherwise this produces // incorrect semantics. See test case "testCapture". 
// Skipping vars technically isn't needed for correct semantics, but works around a Safari // bug for var redeclarations (https://github.com/google/closure-compiler/issues/3164) if (isInMultipleLvalueDecl(v)) { continue; } interferenceGraph.createNode(v); // depends on control dependency: [for], data = [v] } // Go through each variable and try to connect them. int v1Index = -1; for (Var v1 : orderedVariables) { v1Index++; // depends on control dependency: [for], data = [v1] int v2Index = -1; NEXT_VAR_PAIR: for (Var v2 : orderedVariables) { v2Index++; // depends on control dependency: [for], data = [v2] // Skip duplicate pairs. if (v1Index > v2Index) { continue; } if (!interferenceGraph.hasNode(v1) || !interferenceGraph.hasNode(v2)) { // Skip nodes that were not added. They are globals and escaped // locals. Also avoid merging a variable with itself. continue NEXT_VAR_PAIR; } if (v1.isParam() && v2.isParam()) { interferenceGraph.connectIfNotFound(v1, null, v2); // depends on control dependency: [if], data = [none] continue NEXT_VAR_PAIR; } // Go through every CFG node in the program and look at // this variable pair. If they are both live at the same // time, add an edge between them and continue to the next pair. NEXT_CROSS_CFG_NODE: for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) { if (cfg.isImplicitReturn(cfgNode)) { continue NEXT_CROSS_CFG_NODE; } FlowState<LiveVariableLattice> state = cfgNode.getAnnotation(); // Check the live states and add edge when possible. if ((state.getIn().isLive(v1Index) && state.getIn().isLive(v2Index)) || (state.getOut().isLive(v1Index) && state.getOut().isLive(v2Index))) { interferenceGraph.connectIfNotFound(v1, null, v2); // depends on control dependency: [if], data = [none] continue NEXT_VAR_PAIR; } } // v1 and v2 might not have an edge between them! woohoo. there's // one last sanity check that we have to do: we have to check // if there's a collision *within* the cfg node. 
NEXT_INTRA_CFG_NODE: for (DiGraphNode<Node, Branch> cfgNode : cfg.getDirectedGraphNodes()) { if (cfg.isImplicitReturn(cfgNode)) { continue NEXT_INTRA_CFG_NODE; } FlowState<LiveVariableLattice> state = cfgNode.getAnnotation(); boolean v1OutLive = state.getOut().isLive(v1Index); boolean v2OutLive = state.getOut().isLive(v2Index); CombinedLiveRangeChecker checker = new CombinedLiveRangeChecker( cfgNode.getValue(), new LiveRangeChecker(v1, v2OutLive ? null : v2), new LiveRangeChecker(v2, v1OutLive ? null : v1)); checker.check(cfgNode.getValue()); // depends on control dependency: [for], data = [cfgNode] if (checker.connectIfCrossed(interferenceGraph)) { continue NEXT_VAR_PAIR; } } } } return interferenceGraph; } }
public class class_name { @Deprecated public static DiscordRecords series2DiscordsDeprecated(double[] series, int discordsNumToReport, int windowSize, int paaSize, int alphabetSize, SlidingWindowMarkerAlgorithm markerAlgorithm, NumerosityReductionStrategy strategy, double nThreshold) throws Exception { Date start = new Date(); // get the SAX transform NormalAlphabet normalA = new NormalAlphabet(); SAXRecords sax = sp.ts2saxViaWindow(series, windowSize, alphabetSize, normalA.getCuts(alphabetSize), strategy, nThreshold); Date saxEnd = new Date(); LOGGER.debug("discretized in {}, words: {}, indexes: {}", SAXProcessor.timeToString(start.getTime(), saxEnd.getTime()), sax.getRecords().size(), sax.getIndexes().size()); // instantiate the hash HashMap<String, ArrayList<Integer>> hash = new HashMap<String, ArrayList<Integer>>(); // fill the hash for (SAXRecord sr : sax.getRecords()) { for (Integer pos : sr.getIndexes()) { // add to hash String word = String.valueOf(sr.getPayload()); if (!(hash.containsKey(word))) { hash.put(word, new ArrayList<Integer>()); } hash.get(String.valueOf(word)).add(pos); } } Date hashEnd = new Date(); LOGGER.debug("Hash filled in : {}", SAXProcessor.timeToString(saxEnd.getTime(), hashEnd.getTime())); DiscordRecords discords = getDiscordsWithHash(series, windowSize, hash, discordsNumToReport, markerAlgorithm, nThreshold); Date end = new Date(); LOGGER.info("{} discords found in {}", discords.getSize(), SAXProcessor.timeToString(start.getTime(), end.getTime())); return discords; } }
public class class_name { @Deprecated public static DiscordRecords series2DiscordsDeprecated(double[] series, int discordsNumToReport, int windowSize, int paaSize, int alphabetSize, SlidingWindowMarkerAlgorithm markerAlgorithm, NumerosityReductionStrategy strategy, double nThreshold) throws Exception { Date start = new Date(); // get the SAX transform NormalAlphabet normalA = new NormalAlphabet(); SAXRecords sax = sp.ts2saxViaWindow(series, windowSize, alphabetSize, normalA.getCuts(alphabetSize), strategy, nThreshold); Date saxEnd = new Date(); LOGGER.debug("discretized in {}, words: {}, indexes: {}", SAXProcessor.timeToString(start.getTime(), saxEnd.getTime()), sax.getRecords().size(), sax.getIndexes().size()); // instantiate the hash HashMap<String, ArrayList<Integer>> hash = new HashMap<String, ArrayList<Integer>>(); // fill the hash for (SAXRecord sr : sax.getRecords()) { for (Integer pos : sr.getIndexes()) { // add to hash String word = String.valueOf(sr.getPayload()); if (!(hash.containsKey(word))) { hash.put(word, new ArrayList<Integer>()); // depends on control dependency: [if], data = [none] } hash.get(String.valueOf(word)).add(pos); } } Date hashEnd = new Date(); LOGGER.debug("Hash filled in : {}", SAXProcessor.timeToString(saxEnd.getTime(), hashEnd.getTime())); DiscordRecords discords = getDiscordsWithHash(series, windowSize, hash, discordsNumToReport, markerAlgorithm, nThreshold); Date end = new Date(); LOGGER.info("{} discords found in {}", discords.getSize(), SAXProcessor.timeToString(start.getTime(), end.getTime())); return discords; } }
public class class_name { public String processLink(final String src) { if (newAction) { if (bundleId == null) { bundleId = bundlesManager.registerNewBundleId(); bundleId += '.' + bundleContentType; } sources.add(src); } if (firstScriptTag) { // this is the first tag, change the url to point to the bundle firstScriptTag = false; return buildStaplerUrl(); } else { // ignore all other script tags return null; } } }
public class class_name { public String processLink(final String src) { if (newAction) { if (bundleId == null) { bundleId = bundlesManager.registerNewBundleId(); // depends on control dependency: [if], data = [none] bundleId += '.' + bundleContentType; // depends on control dependency: [if], data = [none] } sources.add(src); // depends on control dependency: [if], data = [none] } if (firstScriptTag) { // this is the first tag, change the url to point to the bundle firstScriptTag = false; // depends on control dependency: [if], data = [none] return buildStaplerUrl(); // depends on control dependency: [if], data = [none] } else { // ignore all other script tags return null; // depends on control dependency: [if], data = [none] } } }
public class class_name { private static void updateOptionDescriptions() { Collection<Option> allOptions = options.getOptions(); for (Option option : allOptions) { String key = option.getOpt(); if (key == null) { key = option.getLongOpt(); } if (key != null) { String description = I18n.getMessage(key); if (description != null) { if (key.equals(Opt.COWFILE.text)) { description = String.format(description, File.separatorChar); } option.setDescription(description); } } } } }
public class class_name { private static void updateOptionDescriptions() { Collection<Option> allOptions = options.getOptions(); for (Option option : allOptions) { String key = option.getOpt(); if (key == null) { key = option.getLongOpt(); // depends on control dependency: [if], data = [none] } if (key != null) { String description = I18n.getMessage(key); if (description != null) { if (key.equals(Opt.COWFILE.text)) { description = String.format(description, File.separatorChar); // depends on control dependency: [if], data = [none] } option.setDescription(description); // depends on control dependency: [if], data = [(description] } } } } }
public class class_name { public Policies withLBCookieStickinessPolicies(LBCookieStickinessPolicy... lBCookieStickinessPolicies) { if (this.lBCookieStickinessPolicies == null) { setLBCookieStickinessPolicies(new com.amazonaws.internal.SdkInternalList<LBCookieStickinessPolicy>(lBCookieStickinessPolicies.length)); } for (LBCookieStickinessPolicy ele : lBCookieStickinessPolicies) { this.lBCookieStickinessPolicies.add(ele); } return this; } }
public class class_name { public Policies withLBCookieStickinessPolicies(LBCookieStickinessPolicy... lBCookieStickinessPolicies) { if (this.lBCookieStickinessPolicies == null) { setLBCookieStickinessPolicies(new com.amazonaws.internal.SdkInternalList<LBCookieStickinessPolicy>(lBCookieStickinessPolicies.length)); // depends on control dependency: [if], data = [none] } for (LBCookieStickinessPolicy ele : lBCookieStickinessPolicies) { this.lBCookieStickinessPolicies.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public <T extends Serializable> List<T> dequeueFromBuffer(String topic, Class<T> type, int timeout, int limit) { List<T> result = new ArrayList<T>(); long cutoff = System.currentTimeMillis() + timeout; BlockingQueue<String> queue = _topics.get(topic).getMessages(); while (System.currentTimeMillis() < cutoff && (limit < 0 || result.size() < limit)) { if (Thread.currentThread().isInterrupted()) { break; } try { String message = queue.poll(timeout, TimeUnit.MILLISECONDS); if (message != null && !message.isEmpty()) { if (String.class.isAssignableFrom(type)) { result.add(type.cast(message)); } else { result.add(_mapper.readValue(message, type)); } if (result.size() % 1000 == 0) { _logger.debug("Dequeued {} messages from local buffer.", result.size()); } } } catch (InterruptedException e) { _logger.warn("Interrupted while waiting for poll() to return a message."); Thread.currentThread().interrupt(); } catch (IOException e) { _logger.warn("Exception while deserializing message to type: " + type + ". Skipping this message.", e); } } return result; } }
public class class_name { public <T extends Serializable> List<T> dequeueFromBuffer(String topic, Class<T> type, int timeout, int limit) { List<T> result = new ArrayList<T>(); long cutoff = System.currentTimeMillis() + timeout; BlockingQueue<String> queue = _topics.get(topic).getMessages(); while (System.currentTimeMillis() < cutoff && (limit < 0 || result.size() < limit)) { if (Thread.currentThread().isInterrupted()) { break; } try { String message = queue.poll(timeout, TimeUnit.MILLISECONDS); if (message != null && !message.isEmpty()) { if (String.class.isAssignableFrom(type)) { result.add(type.cast(message)); // depends on control dependency: [if], data = [none] } else { result.add(_mapper.readValue(message, type)); // depends on control dependency: [if], data = [none] } if (result.size() % 1000 == 0) { _logger.debug("Dequeued {} messages from local buffer.", result.size()); // depends on control dependency: [if], data = [none] } } } catch (InterruptedException e) { _logger.warn("Interrupted while waiting for poll() to return a message."); Thread.currentThread().interrupt(); } catch (IOException e) { // depends on control dependency: [catch], data = [none] _logger.warn("Exception while deserializing message to type: " + type + ". Skipping this message.", e); } // depends on control dependency: [catch], data = [none] } return result; } }
public class class_name { @Override public void addTypeParamInfo(Content classInfoTree) { if (!utils.getTypeParamTrees(typeElement).isEmpty()) { Content typeParam = (new ParamTaglet()).getTagletOutput(typeElement, getTagletWriterInstance(false)); Content dl = HtmlTree.DL(typeParam); classInfoTree.addContent(dl); } } }
public class class_name { @Override public void addTypeParamInfo(Content classInfoTree) { if (!utils.getTypeParamTrees(typeElement).isEmpty()) { Content typeParam = (new ParamTaglet()).getTagletOutput(typeElement, getTagletWriterInstance(false)); Content dl = HtmlTree.DL(typeParam); classInfoTree.addContent(dl); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static MediaType parse(String string) { if (string == null) { return null; } // Optimization for the common cases synchronized (SIMPLE_TYPES) { MediaType type = SIMPLE_TYPES.get(string); if (type == null) { int slash = string.indexOf('/'); if (slash == -1) { return null; } else if (SIMPLE_TYPES.size() < 10000 && isSimpleName(string.substring(0, slash)) && isSimpleName(string.substring(slash + 1))) { type = new MediaType(string, slash); SIMPLE_TYPES.put(string, type); } } if (type != null) { return type; } } Matcher matcher; matcher = TYPE_PATTERN.matcher(string); if (matcher.matches()) { return new MediaType( matcher.group(1), matcher.group(2), parseParameters(matcher.group(3))); } matcher = CHARSET_FIRST_PATTERN.matcher(string); if (matcher.matches()) { return new MediaType( matcher.group(2), matcher.group(3), parseParameters(matcher.group(1))); } return null; } }
public class class_name { public static MediaType parse(String string) { if (string == null) { return null; // depends on control dependency: [if], data = [none] } // Optimization for the common cases synchronized (SIMPLE_TYPES) { MediaType type = SIMPLE_TYPES.get(string); if (type == null) { int slash = string.indexOf('/'); if (slash == -1) { return null; // depends on control dependency: [if], data = [none] } else if (SIMPLE_TYPES.size() < 10000 && isSimpleName(string.substring(0, slash)) && isSimpleName(string.substring(slash + 1))) { type = new MediaType(string, slash); // depends on control dependency: [if], data = [none] SIMPLE_TYPES.put(string, type); // depends on control dependency: [if], data = [none] } } if (type != null) { return type; // depends on control dependency: [if], data = [none] } } Matcher matcher; matcher = TYPE_PATTERN.matcher(string); if (matcher.matches()) { return new MediaType( matcher.group(1), matcher.group(2), parseParameters(matcher.group(3))); // depends on control dependency: [if], data = [none] } matcher = CHARSET_FIRST_PATTERN.matcher(string); if (matcher.matches()) { return new MediaType( matcher.group(2), matcher.group(3), parseParameters(matcher.group(1))); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public void popRTFContext() { int previous=m_last_pushed_rtfdtm.pop(); if(null==m_rtfdtm_stack) return; if(m_which_rtfdtm==previous) { if(previous>=0) // guard against none-active { boolean isEmpty=((SAX2RTFDTM)(m_rtfdtm_stack.elementAt(previous))).popRewindMark(); } } else while(m_which_rtfdtm!=previous) { // Empty each DTM before popping, so it's ready for reuse // _DON'T_ pop the previous, since it's still open (which is why we // stacked up more of these) and did not receive a mark. boolean isEmpty=((SAX2RTFDTM)(m_rtfdtm_stack.elementAt(m_which_rtfdtm))).popRewindMark(); --m_which_rtfdtm; } } }
public class class_name { public void popRTFContext() { int previous=m_last_pushed_rtfdtm.pop(); if(null==m_rtfdtm_stack) return; if(m_which_rtfdtm==previous) { if(previous>=0) // guard against none-active { boolean isEmpty=((SAX2RTFDTM)(m_rtfdtm_stack.elementAt(previous))).popRewindMark(); } } else while(m_which_rtfdtm!=previous) { // Empty each DTM before popping, so it's ready for reuse // _DON'T_ pop the previous, since it's still open (which is why we // stacked up more of these) and did not receive a mark. boolean isEmpty=((SAX2RTFDTM)(m_rtfdtm_stack.elementAt(m_which_rtfdtm))).popRewindMark(); --m_which_rtfdtm; // depends on control dependency: [while], data = [none] } } }
public class class_name { public void pushParameterStack(Map parameters) { if (parameters == null) { _paramStack.push(null); } else { _paramStack.push(((Hashtable)parameters).clone()); } } }
public class class_name { public void pushParameterStack(Map parameters) { if (parameters == null) { _paramStack.push(null); // depends on control dependency: [if], data = [null)] } else { _paramStack.push(((Hashtable)parameters).clone()); // depends on control dependency: [if], data = [none] } } }
public class class_name { private static String dealName(String fullName) { Matcher matcher = SUPER_PATTERN.matcher(fullName); String name; if (matcher.find()) { name = matcher.group(1); } else { matcher = EXTENDS_PATTERN.matcher(fullName); if (matcher.find()) { name = matcher.group(1); } else { name = fullName; } } return name; } }
public class class_name { private static String dealName(String fullName) { Matcher matcher = SUPER_PATTERN.matcher(fullName); String name; if (matcher.find()) { name = matcher.group(1); // depends on control dependency: [if], data = [none] } else { matcher = EXTENDS_PATTERN.matcher(fullName); // depends on control dependency: [if], data = [none] if (matcher.find()) { name = matcher.group(1); // depends on control dependency: [if], data = [none] } else { name = fullName; // depends on control dependency: [if], data = [none] } } return name; } }
public class class_name { private static int indexOfBlock(Block block, List<Block> blocks) { int position = 0; for (Block child : blocks) { if (child == block) { return position; } ++position; } return -1; } }
public class class_name { private static int indexOfBlock(Block block, List<Block> blocks) { int position = 0; for (Block child : blocks) { if (child == block) { return position; // depends on control dependency: [if], data = [none] } ++position; // depends on control dependency: [for], data = [none] } return -1; } }
public class class_name { private void sendOneAsyncHint(final ByteArray slopKey, final Versioned<byte[]> slopVersioned, final List<Node> nodesToTry) { Node nodeToHostHint = null; boolean foundNode = false; while(nodesToTry.size() > 0) { nodeToHostHint = nodesToTry.remove(0); if(!failedNodes.contains(nodeToHostHint) && failureDetector.isAvailable(nodeToHostHint)) { foundNode = true; break; } } if(!foundNode) { Slop slop = slopSerializer.toObject(slopVersioned.getValue()); logger.error("Trying to send an async hint but used up all nodes. key: " + slop.getKey() + " version: " + slopVersioned.getVersion().toString()); return; } final Node node = nodeToHostHint; int nodeId = node.getId(); NonblockingStore nonblockingStore = nonblockingSlopStores.get(nodeId); Utils.notNull(nonblockingStore); final Long startNs = System.nanoTime(); NonblockingStoreCallback callback = new NonblockingStoreCallback() { @Override public void requestComplete(Object result, long requestTime) { Slop slop = null; boolean loggerDebugEnabled = logger.isDebugEnabled(); if(loggerDebugEnabled) { slop = slopSerializer.toObject(slopVersioned.getValue()); } Response<ByteArray, Object> response = new Response<ByteArray, Object>(node, slopKey, result, requestTime); if(response.getValue() instanceof Exception && !(response.getValue() instanceof ObsoleteVersionException)) { if(!failedNodes.contains(node)) failedNodes.add(node); if(response.getValue() instanceof UnreachableStoreException) { UnreachableStoreException use = (UnreachableStoreException) response.getValue(); if(loggerDebugEnabled) { logger.debug("Write of key " + slop.getKey() + " for " + slop.getNodeId() + " to node " + node + " failed due to unreachable: " + use.getMessage()); } failureDetector.recordException(node, (System.nanoTime() - startNs) / Time.NS_PER_MS, use); } sendOneAsyncHint(slopKey, slopVersioned, nodesToTry); } if(loggerDebugEnabled) logger.debug("Slop write of key " + slop.getKey() + " for node " + slop.getNodeId() + " to node " 
+ node + " succeeded in " + (System.nanoTime() - startNs) + " ns"); failureDetector.recordSuccess(node, (System.nanoTime() - startNs) / Time.NS_PER_MS); } }; nonblockingStore.submitPutRequest(slopKey, slopVersioned, null, callback, timeoutMs); } }
public class class_name { private void sendOneAsyncHint(final ByteArray slopKey, final Versioned<byte[]> slopVersioned, final List<Node> nodesToTry) { Node nodeToHostHint = null; boolean foundNode = false; while(nodesToTry.size() > 0) { nodeToHostHint = nodesToTry.remove(0); // depends on control dependency: [while], data = [0)] if(!failedNodes.contains(nodeToHostHint) && failureDetector.isAvailable(nodeToHostHint)) { foundNode = true; // depends on control dependency: [if], data = [none] break; } } if(!foundNode) { Slop slop = slopSerializer.toObject(slopVersioned.getValue()); logger.error("Trying to send an async hint but used up all nodes. key: " + slop.getKey() + " version: " + slopVersioned.getVersion().toString()); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } final Node node = nodeToHostHint; int nodeId = node.getId(); NonblockingStore nonblockingStore = nonblockingSlopStores.get(nodeId); Utils.notNull(nonblockingStore); final Long startNs = System.nanoTime(); NonblockingStoreCallback callback = new NonblockingStoreCallback() { @Override public void requestComplete(Object result, long requestTime) { Slop slop = null; boolean loggerDebugEnabled = logger.isDebugEnabled(); if(loggerDebugEnabled) { slop = slopSerializer.toObject(slopVersioned.getValue()); // depends on control dependency: [if], data = [none] } Response<ByteArray, Object> response = new Response<ByteArray, Object>(node, slopKey, result, requestTime); if(response.getValue() instanceof Exception && !(response.getValue() instanceof ObsoleteVersionException)) { if(!failedNodes.contains(node)) failedNodes.add(node); if(response.getValue() instanceof UnreachableStoreException) { UnreachableStoreException use = (UnreachableStoreException) response.getValue(); if(loggerDebugEnabled) { logger.debug("Write of key " + slop.getKey() + " for " + slop.getNodeId() + " to node " + node + " failed due to unreachable: " + use.getMessage()); // 
depends on control dependency: [if], data = [none] } failureDetector.recordException(node, (System.nanoTime() - startNs) / Time.NS_PER_MS, use); // depends on control dependency: [if], data = [none] } sendOneAsyncHint(slopKey, slopVersioned, nodesToTry); // depends on control dependency: [if], data = [none] } if(loggerDebugEnabled) logger.debug("Slop write of key " + slop.getKey() + " for node " + slop.getNodeId() + " to node " + node + " succeeded in " + (System.nanoTime() - startNs) + " ns"); failureDetector.recordSuccess(node, (System.nanoTime() - startNs) / Time.NS_PER_MS); } }; nonblockingStore.submitPutRequest(slopKey, slopVersioned, null, callback, timeoutMs); } }
public class class_name {

    /**
     * Timer callback that drives the LED blink animation: on every tick from the
     * blink timer it swaps the LED image and schedules a repaint.
     *
     * @param event the timer's action event; ticks from other sources are ignored
     */
    @Override
    public void actionPerformed(ActionEvent event) {
        if (event.getSource().equals(LED_BLINKING_TIMER)) {
            // Alternate between the "on" and "off" images on each tick.
            currentLedImage = ledOn ? getLedImageOn() : getLedImageOff();
            ledOn = !ledOn;
            repaint();
        }
    }
}
public class class_name { @Override public void actionPerformed(ActionEvent event) { if (event.getSource().equals(LED_BLINKING_TIMER)) { currentLedImage = ledOn == true ? getLedImageOn() : getLedImageOff(); // depends on control dependency: [if], data = [none] ledOn ^= true; // depends on control dependency: [if], data = [none] repaint(); // depends on control dependency: [if], data = [none] } } }
public class class_name {

    /**
     * Returns the rule names, lazily creating the backing list on first access.
     * Never returns {@code null}.
     *
     * @return the (possibly freshly created) list of rule names
     */
    public java.util.List<String> getRuleNames() {
        java.util.List<String> names = ruleNames;
        if (names == null) {
            names = new com.amazonaws.internal.SdkInternalList<String>();
            ruleNames = names;
        }
        return names;
    }
}
public class class_name { public java.util.List<String> getRuleNames() { if (ruleNames == null) { ruleNames = new com.amazonaws.internal.SdkInternalList<String>(); // depends on control dependency: [if], data = [none] } return ruleNames; } }
public class class_name { public void setScope(java.util.Collection<String> scope) { if (scope == null) { this.scope = null; return; } this.scope = new java.util.ArrayList<String>(scope); } }
public class class_name { public void setScope(java.util.Collection<String> scope) { if (scope == null) { this.scope = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.scope = new java.util.ArrayList<String>(scope); } }
public class class_name {

    /**
     * Loads the logspace configuration from the given URL, if one is provided.
     * A {@code null} URL is silently ignored. Synchronized to serialize
     * concurrent configuration updates.
     *
     * @param descriptionURL location of the configuration to load; may be {@code null}
     * @throws AgentControllerInitializationException if the URL cannot be read
     */
    public static synchronized void setDescription(URL descriptionURL) {
        if (descriptionURL != null) {
            try {
                // Delegate to the stream-based overload.
                setDescription(descriptionURL.openStream());
                ConsoleWriter.writeSystem(MessageFormat.format("Loaded logspace configuration from ''{0}''.", descriptionURL));
            } catch (IOException ioex) {
                throw new AgentControllerInitializationException(
                        "Could not load logspace configuration from URL '" + descriptionURL + "'.", ioex);
            }
        }
    }
}
public class class_name { public static synchronized void setDescription(URL descriptionURL) { if (descriptionURL == null) { return; // depends on control dependency: [if], data = [none] } try { setDescription(descriptionURL.openStream()); // depends on control dependency: [try], data = [none] ConsoleWriter.writeSystem(MessageFormat.format("Loaded logspace configuration from ''{0}''.", descriptionURL)); // depends on control dependency: [try], data = [none] } catch (IOException ioex) { throw new AgentControllerInitializationException( "Could not load logspace configuration from URL '" + descriptionURL + "'.", ioex); } // depends on control dependency: [catch], data = [none] } }
public class class_name {

    /**
     * Rebuilds this permutation group's internal tables relative to a new base.
     *
     * NOTE(review): this appears to be a base-change step of a Schreier-Sims style
     * representation (coset representatives stored per base point in
     * {@code permutations[j][a]}) — confirm against the surrounding class docs.
     *
     * @param newBase the base the group should be expressed over
     */
    public void changeBase(Permutation newBase) {
        PermutationGroup h = new PermutationGroup(newBase);
        int firstDiffIndex = base.firstIndexOfDifference(newBase);
        // From the first point where the bases differ onward, re-enter every stored
        // representative so h recomputes its tables for the new base ordering.
        for (int j = firstDiffIndex; j < size; j++) {
            for (int a = 0; a < size; a++) {
                Permutation g = permutations[j][a];
                if (g != null) {
                    h.enter(g);
                }
            }
        }
        // For the prefix where both bases agree, representatives can be copied
        // directly into the slot indexed by the image of the (shared) base point.
        for (int j = 0; j < firstDiffIndex; j++) {
            for (int a = 0; a < size; a++) {
                Permutation g = permutations[j][a];
                if (g != null) {
                    int hj = h.base.get(j);
                    int x = g.get(hj);
                    h.permutations[j][x] = new Permutation(g);
                }
            }
        }
        this.base = new Permutation(h.base);
        // NOTE(review): clone() on a 2-D array is a shallow copy — the inner rows are
        // shared with h. Safe only because h is discarded here; confirm if h ever escapes.
        this.permutations = h.permutations.clone();
    }
}
public class class_name { public void changeBase(Permutation newBase) { PermutationGroup h = new PermutationGroup(newBase); int firstDiffIndex = base.firstIndexOfDifference(newBase); for (int j = firstDiffIndex; j < size; j++) { for (int a = 0; a < size; a++) { Permutation g = permutations[j][a]; if (g != null) { h.enter(g); // depends on control dependency: [if], data = [(g] } } } for (int j = 0; j < firstDiffIndex; j++) { for (int a = 0; a < size; a++) { Permutation g = permutations[j][a]; if (g != null) { int hj = h.base.get(j); int x = g.get(hj); h.permutations[j][x] = new Permutation(g); // depends on control dependency: [if], data = [(g] } } } this.base = new Permutation(h.base); this.permutations = h.permutations.clone(); } }
public class class_name {

    /**
     * Moves the given layer one step down in this scene's stacking order by
     * re-inserting its DOM element one child position earlier, then mirrors the
     * move in the scene's child-node list.
     *
     * No-op when the layer is {@code null}, the canvas is unsupported, there are
     * fewer than two layers, or the layer is already at the bottom.
     *
     * @param layer the layer to move down; may be {@code null}
     * @return this scene, for call chaining
     */
    @Override
    public final Scene moveDown(final Layer layer) {
        if ((null != layer) && (LienzoCore.IS_CANVAS_SUPPORTED)) {
            final int size = getElement().getChildCount();
            // With fewer than two children there is nothing to reorder.
            if (size < 2) {
                return this;
            }
            final DivElement element = layer.getElement();
            // Locate the layer's element among the scene's DOM children.
            for (int i = 0; i < size; i++) {
                final DivElement look = getElement().getChild(i).cast();
                if (look == element) {
                    if (i == 0) {
                        // already at bottom
                        break;
                    }
                    // Re-insert before the previous sibling, i.e. one position down.
                    getElement().insertBefore(element, getElement().getChild(i - 1));
                    break;
                }
            }
            // Keep the logical child list in sync with the DOM order.
            final NFastArrayList<Layer> layers = getChildNodes();
            if (null != layers) {
                layers.moveDown(layer);
            }
        }
        return this;
    }
}
public class class_name { @Override public final Scene moveDown(final Layer layer) { if ((null != layer) && (LienzoCore.IS_CANVAS_SUPPORTED)) { final int size = getElement().getChildCount(); if (size < 2) { return this; // depends on control dependency: [if], data = [none] } final DivElement element = layer.getElement(); for (int i = 0; i < size; i++) { final DivElement look = getElement().getChild(i).cast(); if (look == element) { if (i == 0) { // already at bottom break; } getElement().insertBefore(element, getElement().getChild(i - 1)); // depends on control dependency: [if], data = [none] break; } } final NFastArrayList<Layer> layers = getChildNodes(); if (null != layers) { layers.moveDown(layer); // depends on control dependency: [if], data = [none] } } return this; } }
public class class_name {

    /**
     * Wraps the given entry collection in an unmodifiable view, preserving the
     * {@link Set} contract when the input is a set.
     *
     * @param entries the entries to wrap
     * @return an unmodifiable view of {@code entries}
     */
    private static <K, V> Collection<Entry<K, V>> unmodifiableEntries(
            Collection<Entry<K, V>> entries) {
        return (entries instanceof Set)
                ? Maps.unmodifiableEntrySet((Set<Entry<K, V>>) entries)
                : new Maps.UnmodifiableEntries<>(Collections.unmodifiableCollection(entries));
    }
}
public class class_name { private static <K, V> Collection<Entry<K, V>> unmodifiableEntries( Collection<Entry<K, V>> entries) { if (entries instanceof Set) { return Maps.unmodifiableEntrySet((Set<Entry<K, V>>) entries); // depends on control dependency: [if], data = [none] } return new Maps.UnmodifiableEntries<>(Collections.unmodifiableCollection(entries)); } }
public class class_name {

    /**
     * Returns the first class in {@code classList} that the given type mirror
     * matches, or {@code null} if none matches.
     *
     * @param type      the type mirror to test
     * @param classList candidate classes, checked in order
     * @return the first matching class, or {@code null}
     */
    public static Class<?> isTypeOf(TypeMirror type, List<Class<?>> classList) {
        return classList.stream()
                .filter(candidate -> isTypeOf(type, candidate))
                .findFirst()
                .orElse(null);
    }
}
public class class_name { public static Class<?> isTypeOf(TypeMirror type, List<Class<?>> classList) { for (Class<?> c : classList) { if (isTypeOf(type, c)) { return c; // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name {

    /**
     * Returns a validator that accepts only strings naming a {@link KeyStore}
     * type supported by the current JVM.
     *
     * @return the keystore-type validator
     */
    public static Validator validKeyStoreType() {
        return (s, o) -> {
            if (!(o instanceof String)) {
                throw new ConfigException(s, o, "Must be a string.");
            }
            try {
                // Probing getInstance is the authoritative supported-type check.
                KeyStore.getInstance(o.toString());
            } catch (KeyStoreException e) {
                ConfigException exception = new ConfigException(s, o, "Invalid KeyStore type");
                exception.initCause(e);
                throw exception;
            }
        };
    }
}
public class class_name { public static Validator validKeyStoreType() { return (s, o) -> { if (!(o instanceof String)) { throw new ConfigException(s, o, "Must be a string."); } String keyStoreType = o.toString(); try { KeyStore.getInstance(keyStoreType); // depends on control dependency: [try], data = [none] } catch (KeyStoreException e) { ConfigException exception = new ConfigException(s, o, "Invalid KeyStore type"); exception.initCause(e); throw exception; } // depends on control dependency: [catch], data = [none] }; } }
public class class_name { public static ExpiringCode getExpiringCode( ExpiringCodeStore codeStore, String userId, String email, String clientId, String redirectUri, ExpiringCodeType intent, String currentZoneId) { Assert.notNull(codeStore, "codeStore must not be null"); Assert.notNull(userId, "userId must not be null"); Assert.notNull(email, "email must not be null"); Assert.notNull(intent, "intent must not be null"); Map<String, String> codeData = new HashMap<>(); codeData.put("user_id", userId); codeData.put("email", email); codeData.put("client_id", clientId); if (redirectUri != null) { codeData.put("redirect_uri", redirectUri); } String codeDataString = JsonUtils.writeValueAsString(codeData); Timestamp expiresAt = new Timestamp(System.currentTimeMillis() + (60 * 60 * 1000)); // 1 hour return codeStore.generateCode( codeDataString, expiresAt, intent.name(), currentZoneId); } }
public class class_name { public static ExpiringCode getExpiringCode( ExpiringCodeStore codeStore, String userId, String email, String clientId, String redirectUri, ExpiringCodeType intent, String currentZoneId) { Assert.notNull(codeStore, "codeStore must not be null"); Assert.notNull(userId, "userId must not be null"); Assert.notNull(email, "email must not be null"); Assert.notNull(intent, "intent must not be null"); Map<String, String> codeData = new HashMap<>(); codeData.put("user_id", userId); codeData.put("email", email); codeData.put("client_id", clientId); if (redirectUri != null) { codeData.put("redirect_uri", redirectUri); // depends on control dependency: [if], data = [none] } String codeDataString = JsonUtils.writeValueAsString(codeData); Timestamp expiresAt = new Timestamp(System.currentTimeMillis() + (60 * 60 * 1000)); // 1 hour return codeStore.generateCode( codeDataString, expiresAt, intent.name(), currentZoneId); } }
public class class_name {

    /**
     * Renders the collection as {@code [e1, e2, ...]}, printing
     * {@code "(this Collection)"} for any element that is the collection itself
     * (avoids infinite recursion on self-referential collections).
     *
     * @param collection the collection to render
     * @return the bracketed, comma-separated representation
     */
    static String toStringImpl(final Collection<?> collection) {
        StringBuilder sb = newStringBuilderForCollection(collection.size()).append('[');
        String separator = "";
        for (Object element : collection) {
            sb.append(separator);
            separator = ", ";
            sb.append(element == collection ? "(this Collection)" : element);
        }
        return sb.append(']').toString();
    }
}
public class class_name { static String toStringImpl(final Collection<?> collection) { StringBuilder sb = newStringBuilderForCollection(collection.size()).append('['); boolean first = true; for (Object o : collection) { if (!first) { sb.append(", "); // depends on control dependency: [if], data = [none] } first = false; // depends on control dependency: [for], data = [none] if (o == collection) { sb.append("(this Collection)"); // depends on control dependency: [if], data = [none] } else { sb.append(o); // depends on control dependency: [if], data = [(o] } } return sb.append(']').toString(); } }
public class class_name {

    /**
     * Instantiates the given class via its zero-argument constructor.
     *
     * @param clazz the class to instantiate
     * @return a new instance of {@code clazz}
     * @throws BundlingProcessException if the class cannot be instantiated
     *         (missing from classpath, no accessible zero-arg constructor, or the
     *         constructor itself fails)
     */
    public static Object buildObjectInstance(Class<?> clazz) {
        try {
            // Class.newInstance() is deprecated (it rethrows checked constructor
            // exceptions unchecked); invoke the zero-arg constructor explicitly.
            return clazz.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new BundlingProcessException(e.getMessage() + " [The custom class "
                    + clazz.getName()
                    + " could not be instantiated, check whether it is available on the classpath and"
                    + " verify that it has a zero-arg constructor].\n"
                    + " The specific error message is: " + e.getClass().getName() + ":" + e.getMessage(), e);
        }
    }
}
public class class_name { public static Object buildObjectInstance(Class<?> clazz) { Object rets = null; try { rets = clazz.newInstance(); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new BundlingProcessException(e.getMessage() + " [The custom class " + clazz.getName() + " could not be instantiated, check whether it is available on the classpath and" + " verify that it has a zero-arg constructor].\n" + " The specific error message is: " + e.getClass().getName() + ":" + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] return rets; } }
public class class_name {

    /**
     * Returns the binomial probability mass at {@code x}.
     *
     * @param x the number of successes
     * @return P(X = x); zero outside the support [0, numberOfTrials]
     */
    public double probability(int x) {
        // Outside the support the mass is zero.
        if (x < 0 || x > numberOfTrials) {
            return 0.0;
        }
        // Log-probability via the saddle-point expansion, then exponentiate.
        return FastMath.exp(SaddlePointExpansion.logBinomialProbability(x,
                numberOfTrials, probabilityOfSuccess,
                1.0 - probabilityOfSuccess));
    }
}
public class class_name { public double probability(int x) { double ret; if (x < 0 || x > numberOfTrials) { ret = 0.0; // depends on control dependency: [if], data = [none] } else { ret = FastMath.exp(SaddlePointExpansion.logBinomialProbability(x, numberOfTrials, probabilityOfSuccess, 1.0 - probabilityOfSuccess)); // depends on control dependency: [if], data = [none] } return ret; } }
public class class_name {

    /**
     * Adds the given number of nanoseconds, carrying any overflow into the
     * seconds component. Uses floor semantics so negative deltas borrow
     * correctly from the seconds.
     *
     * @param delta nanoseconds to add; may be negative
     */
    public void plusNanoSeconds(long delta) {
        if (delta == 0) {
            return;
        }
        long totalNanos = getNanoSecond() + delta;
        // Keep the nano field in [0, 1e9) and push whole seconds upward.
        setNanoSecond((int) Math.floorMod(totalNanos, 1000000000));
        plusSeconds(Math.floorDiv(totalNanos, 1000000000));
    }
}
public class class_name { public void plusNanoSeconds(long delta) { if (delta != 0) { long result = getNanoSecond() + delta; setNanoSecond((int) Math.floorMod(result, 1000000000)); // depends on control dependency: [if], data = [0)] plusSeconds(Math.floorDiv(result, 1000000000)); // depends on control dependency: [if], data = [0)] } } }
public class class_name {

    /**
     * Moves focus to the given attribute value view: the previously focused view
     * (if any) loses focus and the target gains it. Re-focusing the already
     * focused view is a no-op.
     *
     * @param target the view that should receive focus
     */
    public void setFocus(CmsAttributeValueView target) {
        if (m_currentFocus != target) {
            if (m_currentFocus != null) {
                m_currentFocus.toggleFocus(false);
            }
            m_currentFocus = target;
            m_currentFocus.toggleFocus(true);
        }
    }
}
public class class_name { public void setFocus(CmsAttributeValueView target) { if (m_currentFocus == target) { return; // depends on control dependency: [if], data = [none] } if ((m_currentFocus != null)) { m_currentFocus.toggleFocus(false); // depends on control dependency: [if], data = [none] } m_currentFocus = target; m_currentFocus.toggleFocus(true); } }
public class class_name {

    /**
     * Replaces the displayed search parameter panels. An empty or {@code null}
     * list hides the parameter area entirely; the list size is refreshed in
     * either case.
     *
     * @param paramPanels the panels to show; may be {@code null} or empty
     */
    private void showParams(List<CmsSearchParamPanel> paramPanels) {
        m_params.clear();
        boolean hasParams = (paramPanels != null) && !paramPanels.isEmpty();
        m_params.setVisible(hasParams);
        if (hasParams) {
            for (CmsSearchParamPanel panel : paramPanels) {
                m_params.add(panel);
            }
        }
        updateListSize();
    }
}
public class class_name { private void showParams(List<CmsSearchParamPanel> paramPanels) { m_params.clear(); if ((paramPanels == null) || (paramPanels.size() == 0)) { m_params.setVisible(false); // depends on control dependency: [if], data = [none] updateListSize(); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } m_params.setVisible(true); for (CmsSearchParamPanel panel : paramPanels) { m_params.add(panel); // depends on control dependency: [for], data = [panel] } updateListSize(); } }
public class class_name {

    /**
     * Registers one Jackson subtype per credential type so credential details
     * (de)serialize polymorphically: each entry pairs the model class with its
     * wire-format type discriminator.
     *
     * @param objectMapper the mapper to configure
     */
    private static void configureCredentialDetailTypeMapping(ObjectMapper objectMapper) {
        List<NamedType> namedTypes = new ArrayList<>();
        for (CredentialType credentialType : CredentialType.values()) {
            namedTypes.add(
                new NamedType(credentialType.getModelClass(), credentialType.getValueType()));
        }
        registerSubtypes(objectMapper, namedTypes);
    }
}
public class class_name { private static void configureCredentialDetailTypeMapping(ObjectMapper objectMapper) { List<NamedType> subtypes = new ArrayList<>(); for (CredentialType type : CredentialType.values()) { subtypes.add(new NamedType(type.getModelClass(), type.getValueType())); // depends on control dependency: [for], data = [type] } registerSubtypes(objectMapper, subtypes); } }
public class class_name {

    /**
     * Removes every commerce address matching the given group, class name/PK and
     * default-shipping flag.
     *
     * @param groupId         the group id
     * @param classNameId     the class name id
     * @param classPK         the class primary key
     * @param defaultShipping the default-shipping flag to match
     */
    @Override
    public void removeByG_C_C_DS(long groupId, long classNameId, long classPK,
            boolean defaultShipping) {
        // Fetch all matches (no pagination, no ordering) and delete them one by one.
        for (CommerceAddress address :
                findByG_C_C_DS(
                    groupId, classNameId, classPK, defaultShipping,
                    QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
            remove(address);
        }
    }
}
public class class_name { @Override public void removeByG_C_C_DS(long groupId, long classNameId, long classPK, boolean defaultShipping) { for (CommerceAddress commerceAddress : findByG_C_C_DS(groupId, classNameId, classPK, defaultShipping, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) { remove(commerceAddress); // depends on control dependency: [for], data = [commerceAddress] } } }
public class class_name {

    /**
     * Returns the {@code IfcBuildingElementPart} EClass, resolving it lazily from
     * the registered Ifc4 package on first access.
     *
     * @return the cached or freshly resolved EClass
     */
    @Override
    public EClass getIfcBuildingElementPart() {
        if (ifcBuildingElementPartEClass != null) {
            return ifcBuildingElementPartEClass;
        }
        // Classifier index 59 is the generated position of IfcBuildingElementPart
        // within the Ifc4 package's classifier list.
        EPackage ifc4Package = EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI);
        ifcBuildingElementPartEClass = (EClass) ifc4Package.getEClassifiers().get(59);
        return ifcBuildingElementPartEClass;
    }
}
public class class_name { @Override public EClass getIfcBuildingElementPart() { if (ifcBuildingElementPartEClass == null) { ifcBuildingElementPartEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(59); // depends on control dependency: [if], data = [none] } return ifcBuildingElementPartEClass; } }
public class class_name {

    /**
     * CDI extension observer: inspects every processed injection point for a
     * {@code @Claim} qualifier and records the "raw" injected types (final
     * classes, or qualifying parameterized types) together with their qualifier,
     * so matching producers can be registered later.
     *
     * Also raises a definition error when a raw (final-class) claim type is
     * injected into an application- or session-scoped bean.
     *
     * @param pip the injection point event fired by the CDI container
     */
    void processClaimInjections(@Observes ProcessInjectionPoint pip) {
        log.debugf("pipRaw: %s", pip.getInjectionPoint());
        InjectionPoint ip = pip.getInjectionPoint();
        if (ip.getAnnotated().isAnnotationPresent(Claim.class)) {
            Claim claim = ip.getAnnotated().getAnnotation(Claim.class);
            if (ip.getType() instanceof Class) {
                Class rawClass = (Class) ip.getType();
                // Primitive wrapper / simple value types.
                // NOTE(review): Modifier.isFinal is used as the proxy for "raw value
                // type" — any final class matches, not only wrappers; confirm intent.
                if (Modifier.isFinal(rawClass.getModifiers())) {
                    rawTypes.add(ip.getType());
                    rawTypeQualifiers.add(claim);
                    log.debugf("+++ Added Claim raw type: %s", ip.getType());
                    Class declaringClass = ip.getMember().getDeclaringClass();
                    Annotation[] appScoped = declaringClass.getAnnotationsByType(ApplicationScoped.class);
                    Annotation[] sessionScoped = declaringClass.getAnnotationsByType(SessionScoped.class);
                    // Final classes cannot be proxied, so they must not be injected
                    // into normal (proxied) scopes — fail the deployment.
                    if ((appScoped != null && appScoped.length > 0)
                        || (sessionScoped != null && sessionScoped.length > 0)) {
                        String err = String.format("A raw type cannot be injected into application/session scope: IP=%s", ip);
                        pip.addDefinitionError(new DeploymentException(err));
                    }
                }
            // This handles collections of primitive types.
            } else if (isRawParameterizedType(ip.getType())) {
                log.debugf("+++ Added Claim ParameterizedType: %s", ip.getType());
                rawTypes.add(ip.getType());
                rawTypeQualifiers.add(claim);
            }
        } else {
            // Not a @Claim injection point — nothing to record.
            log.debugf("Skipping pip: %s, type: %s/%s", ip, ip.getType(), ip.getType().getClass());
        }
    }
}
public class class_name { void processClaimInjections(@Observes ProcessInjectionPoint pip) { log.debugf("pipRaw: %s", pip.getInjectionPoint()); InjectionPoint ip = pip.getInjectionPoint(); if (ip.getAnnotated().isAnnotationPresent(Claim.class)) { Claim claim = ip.getAnnotated().getAnnotation(Claim.class); if (ip.getType() instanceof Class) { Class rawClass = (Class) ip.getType(); // depends on control dependency: [if], data = [none] // Primative types if (Modifier.isFinal(rawClass.getModifiers())) { rawTypes.add(ip.getType()); // depends on control dependency: [if], data = [none] rawTypeQualifiers.add(claim); // depends on control dependency: [if], data = [none] log.debugf("+++ Added Claim raw type: %s", ip.getType()); // depends on control dependency: [if], data = [none] Class declaringClass = ip.getMember().getDeclaringClass(); Annotation[] appScoped = declaringClass.getAnnotationsByType(ApplicationScoped.class); Annotation[] sessionScoped = declaringClass.getAnnotationsByType(SessionScoped.class); if ((appScoped != null && appScoped.length > 0) || (sessionScoped != null && sessionScoped.length > 0)) { String err = String.format("A raw type cannot be injected into application/session scope: IP=%s", ip); pip.addDefinitionError(new DeploymentException(err)); // depends on control dependency: [if], data = [none] } } // This handles collections of primative types } else if (isRawParameterizedType(ip.getType())) { log.debugf("+++ Added Claim ParameterizedType: %s", ip.getType()); // depends on control dependency: [if], data = [none] rawTypes.add(ip.getType()); // depends on control dependency: [if], data = [none] rawTypeQualifiers.add(claim); // depends on control dependency: [if], data = [none] } } else { log.debugf("Skipping pip: %s, type: %s/%s", ip, ip.getType(), ip.getType().getClass()); // depends on control dependency: [if], data = [none] } } }
public class class_name {

    /**
     * Attempts to flush the buffered bulk payload to Elasticsearch.
     *
     * Each rejected document in the bulk response is routed through the
     * configured bulk-write error handlers, which may retry it (possibly with
     * edited content), skip it, or abort it. The bulk request is re-issued until
     * no retries remain or the retry limit is exceeded.
     *
     * Side effects: updates {@code stats} counters, sets
     * {@code executedBulkWrite}/{@code hadWriteErrors}, and always discards the
     * in-flight tracking buffer before returning (hard reset if it grew).
     *
     * @return the aggregated {@code BulkResponse}; an empty "complete" response
     *         when there was nothing to flush
     * @throws EsHadoopException if the retry limit is exhausted, an error handler
     *         throws, or the underlying REST call fails
     */
    public BulkResponse tryFlush() {
        BulkResponse bulkResult = null;
        // Set when a retried document is rewritten and the backing array must grow.
        boolean trackingArrayExpanded = false;
        String bulkLoggingID = createDebugTxnID();
        try {
            // double check data - it might be a false flush (called on clean-up)
            if (data.length() > 0) {
                int totalDocs = data.entries();
                int docsSent = 0;
                int docsSkipped = 0;
                int docsAborted = 0;
                long totalTime = 0L;
                boolean retryOperation = false;
                int totalAttempts = 0;
                long waitTime = 0L;
                List<BulkAttempt> retries = new ArrayList<BulkAttempt>();
                List<BulkResponse.BulkError> abortErrors = new ArrayList<BulkResponse.BulkError>();
                do {
                    // Throw to break out of a possible infinite loop, but only if the limit is a positive number
                    if (retryLimit >= 0 && totalAttempts > retryLimit) {
                        throw new EsHadoopException("Executed too many bulk requests without success. Attempted ["
                                + totalAttempts + "] write operations, which exceeds the bulk request retry limit specified"
                                + "by [" + ConfigurationOptions.ES_BATCH_WRITE_RETRY_LIMIT + "], and found data still "
                                + "not accepted. Perhaps there is an error handler that is not terminating? Bailing out..."
                        );
                    }

                    // Log messages, and if wait time is set, perform the thread sleep.
                    initFlushOperation(bulkLoggingID, retryOperation, retries.size(), waitTime);

                    // Exec bulk operation to ES, get response.
                    debugLog(bulkLoggingID, "Submitting request");
                    RestClient.BulkActionResponse bar = restClient.bulk(resource, data);
                    debugLog(bulkLoggingID, "Response received");
                    totalAttempts++;
                    totalTime += bar.getTimeSpent();

                    // Log retry stats if relevant
                    if (retryOperation) {
                        stats.docsRetried += data.entries();
                        stats.bytesRetried += data.length();
                        stats.bulkRetries++;
                        stats.bulkRetriesTotalTime += bar.getTimeSpent();
                    }
                    executedBulkWrite = true;

                    // Handle bulk write failures
                    if (!bar.getEntries().hasNext()) {
                        // Legacy Case:
                        // If no items on response, assume all documents made it in.
                        // Recorded bytes are ack'd here
                        stats.bytesAccepted += data.length();
                        stats.docsAccepted += data.entries();
                        retryOperation = false;
                        bulkResult = BulkResponse.complete(bar.getResponseCode(), totalTime, totalDocs, totalDocs, 0);
                    } else {
                        // Base Case:
                        // Iterate over the response and the data in the tracking bytes array at the same time, passing
                        // errors to error handlers for resolution.

                        // Keep track of which document we are on as well as where we are in the tracking bytes array.
                        int documentNumber = 0;
                        int trackingBytesPosition = 0;

                        // Hand off the previous list of retries so that we can track the next set of retries (if any).
                        List<BulkAttempt> previousRetries = retries;
                        retries = new ArrayList<BulkAttempt>();

                        // If a document is edited and retried then it is added at the end of the buffer. Keep a tail list of these new retry attempts.
                        List<BulkAttempt> newDocumentRetries = new ArrayList<BulkAttempt>();

                        BulkWriteErrorCollector errorCollector = new BulkWriteErrorCollector();

                        // Iterate over all entries, and for each error found, attempt to handle the problem.
                        for (Iterator<Map> iterator = bar.getEntries(); iterator.hasNext(); ) {
                            // The array of maps are (operation -> document info) maps
                            Map map = iterator.next();
                            // Get the underlying document information as a map and extract the error information.
                            Map values = (Map) map.values().iterator().next();
                            Integer docStatus = (Integer) values.get("status");
                            EsHadoopException error = errorExtractor.extractError(values);

                            if (error == null) {
                                // Write operation for this entry succeeded
                                stats.bytesAccepted += data.length(trackingBytesPosition);
                                stats.docsAccepted += 1;
                                docsSent += 1;
                                data.remove(trackingBytesPosition);
                            } else {
                                // Found a failed write
                                BytesArray document = data.entry(trackingBytesPosition);

                                // In pre-2.x ES versions, the status is not included.
                                int status = docStatus == null ? -1 : docStatus;

                                // Figure out which attempt number sending this document was and which position the doc was in
                                BulkAttempt previousAttempt;
                                if (previousRetries.isEmpty()) {
                                    // No previous retries, create an attempt for the first run
                                    previousAttempt = new BulkAttempt(1, documentNumber);
                                } else {
                                    // Grab the previous attempt for the document we're processing, and bump the attempt number.
                                    previousAttempt = previousRetries.get(documentNumber);
                                    previousAttempt.attemptNumber++;
                                }

                                // Handle bulk write failures
                                // Todo: We should really do more with these bulk error pass reasons if the final outcome is an ABORT.
                                List<String> bulkErrorPassReasons = new ArrayList<String>();
                                BulkWriteFailure failure = new BulkWriteFailure(
                                        status,
                                        error,
                                        document,
                                        previousAttempt.attemptNumber,
                                        bulkErrorPassReasons
                                );

                                // Label the loop since we'll be breaking to/from it within a switch block.
                                handlerLoop:
                                for (IBulkWriteErrorHandler errorHandler : documentBulkErrorHandlers) {
                                    HandlerResult result;
                                    try {
                                        result = errorHandler.onError(failure, errorCollector);
                                    } catch (EsHadoopAbortHandlerException ahe) {
                                        // Count this as an abort operation, but capture the error message from the
                                        // exception as the reason. Log any cause since it will be swallowed.
                                        Throwable cause = ahe.getCause();
                                        if (cause != null) {
                                            LOG.error("Bulk write error handler abort exception caught with underlying cause:", cause);
                                        }
                                        result = HandlerResult.ABORT;
                                        error = ahe;
                                    } catch (Exception e) {
                                        throw new EsHadoopException("Encountered exception during error handler.", e);
                                    }

                                    switch (result) {
                                        case HANDLED:
                                            Assert.isTrue(errorCollector.getAndClearMessage() == null,
                                                    "Found pass message with Handled response. Be sure to return the value "
                                                            + "returned from pass(String) call.");
                                            // Check for document retries
                                            if (errorCollector.receivedRetries()) {
                                                byte[] retryDataBuffer = errorCollector.getAndClearRetryValue();
                                                if (retryDataBuffer == null || document.bytes() == retryDataBuffer) {
                                                    // Retry the same data.
                                                    // Continue to track the previous attempts.
                                                    retries.add(previousAttempt);
                                                    trackingBytesPosition++;
                                                } else {
                                                    // Check document contents to see if it was deserialized and reserialized.
                                                    if (ArrayUtils.sliceEquals(document.bytes(), document.offset(), document.length(), retryDataBuffer, 0, retryDataBuffer.length)) {
                                                        // Same document content. Leave the data as is in tracking buffer,
                                                        // and continue tracking previous attempts.
                                                        retries.add(previousAttempt);
                                                        trackingBytesPosition++;
                                                    } else {
                                                        // Document has changed.
                                                        // Track new attempts.
                                                        BytesRef newEntry = validateEditedEntry(retryDataBuffer);
                                                        data.remove(trackingBytesPosition);
                                                        data.copyFrom(newEntry);
                                                        // Determine if our tracking bytes array is going to expand.
                                                        if (ba.available() < newEntry.length()) {
                                                            trackingArrayExpanded = true;
                                                        }
                                                        previousAttempt.attemptNumber = 0;
                                                        newDocumentRetries.add(previousAttempt);
                                                    }
                                                }
                                            } else {
                                                // Handled but not retried means we won't have sent that document.
                                                data.remove(trackingBytesPosition);
                                                docsSkipped += 1;
                                            }
                                            break handlerLoop;
                                        case PASS:
                                            String reason = errorCollector.getAndClearMessage();
                                            if (reason != null) {
                                                bulkErrorPassReasons.add(reason);
                                            }
                                            continue handlerLoop;
                                        case ABORT:
                                            errorCollector.getAndClearMessage(); // Sanity clearing
                                            data.remove(trackingBytesPosition);
                                            docsAborted += 1;
                                            abortErrors.add(new BulkResponse.BulkError(previousAttempt.originalPosition, document, status, error));
                                            break handlerLoop;
                                    }
                                }
                            }
                            documentNumber++;
                        }

                        // Place any new documents that have been added at the end of the data buffer at the end of the retry list.
                        retries.addAll(newDocumentRetries);

                        if (!retries.isEmpty()) {
                            retryOperation = true;
                            waitTime = errorCollector.getDelayTimeBetweenRetries();
                        } else {
                            retryOperation = false;
                            if (docsAborted > 0) {
                                bulkResult = BulkResponse.partial(bar.getResponseCode(), totalTime, totalDocs, docsSent, docsSkipped, docsAborted, abortErrors);
                            } else {
                                bulkResult = BulkResponse.complete(bar.getResponseCode(), totalTime, totalDocs, docsSent, docsSkipped);
                            }
                        }
                    }
                } while (retryOperation);
                debugLog(bulkLoggingID, "Completed. [%d] Original Entries. [%d] Attempts. [%d/%d] Docs Sent. [%d/%d] Docs Skipped. [%d/%d] Docs Aborted.",
                        totalDocs,
                        totalAttempts,
                        docsSent, totalDocs,
                        docsSkipped, totalDocs,
                        docsAborted, totalDocs
                );
            } else {
                bulkResult = BulkResponse.complete();
            }
        } catch (EsHadoopException ex) {
            debugLog(bulkLoggingID, "Failed. %s", ex.getMessage());
            hadWriteErrors = true;
            throw ex;
        }

        // always discard data since there's no code path that uses the in flight data
        // during retry operations, the tracking bytes array may grow. In that case, do a hard reset.
        // TODO: Perhaps open an issue to limit the expansion of a single byte array (for repeated rewrite-retries)
        if (trackingArrayExpanded) {
            ba = new BytesArray(new byte[settings.getBatchSizeInBytes()], 0);
            data = new TrackingBytesArray(ba);
        } else {
            data.reset();
            dataEntries = 0;
        }
        return bulkResult;
    }
}
public class class_name { public BulkResponse tryFlush() { BulkResponse bulkResult = null; boolean trackingArrayExpanded = false; String bulkLoggingID = createDebugTxnID(); try { // double check data - it might be a false flush (called on clean-up) if (data.length() > 0) { int totalDocs = data.entries(); int docsSent = 0; int docsSkipped = 0; int docsAborted = 0; long totalTime = 0L; boolean retryOperation = false; int totalAttempts = 0; long waitTime = 0L; List<BulkAttempt> retries = new ArrayList<BulkAttempt>(); List<BulkResponse.BulkError> abortErrors = new ArrayList<BulkResponse.BulkError>(); do { // Throw to break out of a possible infinite loop, but only if the limit is a positive number if (retryLimit >= 0 && totalAttempts > retryLimit) { throw new EsHadoopException("Executed too many bulk requests without success. Attempted [" + totalAttempts + "] write operations, which exceeds the bulk request retry limit specified" + "by [" + ConfigurationOptions.ES_BATCH_WRITE_RETRY_LIMIT + "], and found data still " + "not accepted. Perhaps there is an error handler that is not terminating? Bailing out..." ); } // Log messages, and if wait time is set, perform the thread sleep. initFlushOperation(bulkLoggingID, retryOperation, retries.size(), waitTime); // Exec bulk operation to ES, get response. debugLog(bulkLoggingID, "Submitting request"); RestClient.BulkActionResponse bar = restClient.bulk(resource, data); debugLog(bulkLoggingID, "Response received"); totalAttempts++; totalTime += bar.getTimeSpent(); // Log retry stats if relevant if (retryOperation) { stats.docsRetried += data.entries(); stats.bytesRetried += data.length(); stats.bulkRetries++; stats.bulkRetriesTotalTime += bar.getTimeSpent(); } executedBulkWrite = true; // Handle bulk write failures if (!bar.getEntries().hasNext()) { // Legacy Case: // If no items on response, assume all documents made it in. 
// Recorded bytes are ack'd here stats.bytesAccepted += data.length(); stats.docsAccepted += data.entries(); retryOperation = false; bulkResult = BulkResponse.complete(bar.getResponseCode(), totalTime, totalDocs, totalDocs, 0); } else { // Base Case: // Iterate over the response and the data in the tracking bytes array at the same time, passing // errors to error handlers for resolution. // Keep track of which document we are on as well as where we are in the tracking bytes array. int documentNumber = 0; int trackingBytesPosition = 0; // Hand off the previous list of retries so that we can track the next set of retries (if any). List<BulkAttempt> previousRetries = retries; retries = new ArrayList<BulkAttempt>(); // If a document is edited and retried then it is added at the end of the buffer. Keep a tail list of these new retry attempts. List<BulkAttempt> newDocumentRetries = new ArrayList<BulkAttempt>(); BulkWriteErrorCollector errorCollector = new BulkWriteErrorCollector(); // Iterate over all entries, and for each error found, attempt to handle the problem. for (Iterator<Map> iterator = bar.getEntries(); iterator.hasNext(); ) { // The array of maps are (operation -> document info) maps Map map = iterator.next(); // Get the underlying document information as a map and extract the error information. Map values = (Map) map.values().iterator().next(); Integer docStatus = (Integer) values.get("status"); EsHadoopException error = errorExtractor.extractError(values); if (error == null){ // Write operation for this entry succeeded stats.bytesAccepted += data.length(trackingBytesPosition); stats.docsAccepted += 1; docsSent += 1; data.remove(trackingBytesPosition); } else { // Found a failed write BytesArray document = data.entry(trackingBytesPosition); // In pre-2.x ES versions, the status is not included. int status = docStatus == null ? 
-1 : docStatus; // Figure out which attempt number sending this document was and which position the doc was in BulkAttempt previousAttempt; if (previousRetries.isEmpty()) { // No previous retries, create an attempt for the first run previousAttempt = new BulkAttempt(1, documentNumber); } else { // Grab the previous attempt for the document we're processing, and bump the attempt number. previousAttempt = previousRetries.get(documentNumber); previousAttempt.attemptNumber++; } // Handle bulk write failures // Todo: We should really do more with these bulk error pass reasons if the final outcome is an ABORT. List<String> bulkErrorPassReasons = new ArrayList<String>(); BulkWriteFailure failure = new BulkWriteFailure( status, error, document, previousAttempt.attemptNumber, bulkErrorPassReasons ); // Label the loop since we'll be breaking to/from it within a switch block. handlerLoop: for (IBulkWriteErrorHandler errorHandler : documentBulkErrorHandlers) { HandlerResult result; try { result = errorHandler.onError(failure, errorCollector); } catch (EsHadoopAbortHandlerException ahe) { // Count this as an abort operation, but capture the error message from the // exception as the reason. Log any cause since it will be swallowed. Throwable cause = ahe.getCause(); if (cause != null) { LOG.error("Bulk write error handler abort exception caught with underlying cause:", cause); // depends on control dependency: [if], data = [none] } result = HandlerResult.ABORT; error = ahe; } catch (Exception e) { throw new EsHadoopException("Encountered exception during error handler.", e); } switch (result) { case HANDLED: Assert.isTrue(errorCollector.getAndClearMessage() == null, "Found pass message with Handled response. 
Be sure to return the value " + "returned from pass(String) call."); // Check for document retries if (errorCollector.receivedRetries()) { byte[] retryDataBuffer = errorCollector.getAndClearRetryValue(); if (retryDataBuffer == null || document.bytes() == retryDataBuffer) { // Retry the same data. // Continue to track the previous attempts. retries.add(previousAttempt); // depends on control dependency: [if], data = [none] trackingBytesPosition++; // depends on control dependency: [if], data = [none] } else { // Check document contents to see if it was deserialized and reserialized. if (ArrayUtils.sliceEquals(document.bytes(), document.offset(), document.length(), retryDataBuffer, 0, retryDataBuffer.length)) { // Same document content. Leave the data as is in tracking buffer, // and continue tracking previous attempts. retries.add(previousAttempt); // depends on control dependency: [if], data = [none] trackingBytesPosition++; // depends on control dependency: [if], data = [none] } else { // Document has changed. // Track new attempts. BytesRef newEntry = validateEditedEntry(retryDataBuffer); data.remove(trackingBytesPosition); // depends on control dependency: [if], data = [none] data.copyFrom(newEntry); // depends on control dependency: [if], data = [none] // Determine if our tracking bytes array is going to expand. if (ba.available() < newEntry.length()) { trackingArrayExpanded = true; // depends on control dependency: [if], data = [none] } previousAttempt.attemptNumber = 0; // depends on control dependency: [if], data = [none] newDocumentRetries.add(previousAttempt); // depends on control dependency: [if], data = [none] } } } else { // Handled but not retried means we won't have sent that document. 
data.remove(trackingBytesPosition); // depends on control dependency: [if], data = [none] docsSkipped += 1; // depends on control dependency: [if], data = [none] } break handlerLoop; case PASS: String reason = errorCollector.getAndClearMessage(); if (reason != null) { bulkErrorPassReasons.add(reason); // depends on control dependency: [if], data = [(reason] } continue handlerLoop; case ABORT: errorCollector.getAndClearMessage(); // Sanity clearing data.remove(trackingBytesPosition); docsAborted += 1; abortErrors.add(new BulkResponse.BulkError(previousAttempt.originalPosition, document, status, error)); break handlerLoop; } } } documentNumber++; } // Place any new documents that have been added at the end of the data buffer at the end of the retry list. retries.addAll(newDocumentRetries); if (!retries.isEmpty()) { retryOperation = true; waitTime = errorCollector.getDelayTimeBetweenRetries(); } else { retryOperation = false; if (docsAborted > 0) { bulkResult = BulkResponse.partial(bar.getResponseCode(), totalTime, totalDocs, docsSent, docsSkipped, docsAborted, abortErrors); } else { bulkResult = BulkResponse.complete(bar.getResponseCode(), totalTime, totalDocs, docsSent, docsSkipped); } } } } while (retryOperation); debugLog(bulkLoggingID, "Completed. [%d] Original Entries. [%d] Attempts. [%d/%d] Docs Sent. [%d/%d] Docs Skipped. [%d/%d] Docs Aborted.", totalDocs, totalAttempts, docsSent, totalDocs, docsSkipped, totalDocs, docsAborted, totalDocs ); // depends on control dependency: [if], data = [none] } else { bulkResult = BulkResponse.complete(); // depends on control dependency: [if], data = [none] } } catch (EsHadoopException ex) { debugLog(bulkLoggingID, "Failed. %s", ex.getMessage()); hadWriteErrors = true; throw ex; } // depends on control dependency: [catch], data = [none] // always discard data since there's no code path that uses the in flight data // during retry operations, the tracking bytes array may grow. In that case, do a hard reset. 
// TODO: Perhaps open an issue to limit the expansion of a single byte array (for repeated rewrite-retries) if (trackingArrayExpanded) { ba = new BytesArray(new byte[settings.getBatchSizeInBytes()], 0); // depends on control dependency: [if], data = [none] data = new TrackingBytesArray(ba); // depends on control dependency: [if], data = [none] } else { data.reset(); // depends on control dependency: [if], data = [none] dataEntries = 0; // depends on control dependency: [if], data = [none] } return bulkResult; } }
public class class_name {
    /**
     * Releases every row of the {@code memory} buffer and then drops the
     * buffer itself so it can be garbage collected.
     *
     * @return the total number of elements held across all rows before release
     */
    public int freeMemory() {
        int released = 0;
        int row = 0;
        while (row < memory.length) {
            released += memory[row].length;
            memory[row] = null;
            row++;
        }
        memory = null;
        return released;
    }
}
public class class_name {
    /**
     * Releases every row of the {@code memory} buffer and then drops the
     * buffer itself.
     * NOTE: the trailing "// depends on ..." comments are machine-generated
     * control/data-dependency labels, not functional documentation.
     *
     * @return the total number of elements held across all rows before release
     */
    public int freeMemory() {
        int size = 0;
        for (int m = 0; m < memory.length; m++) {
            size += memory[m].length; // depends on control dependency: [for], data = [m]
            memory[m] = null; // depends on control dependency: [for], data = [m]
        }
        memory = null;
        return size;
    }
}
public class class_name { @Override public Character fromString(Class targetClass, String s) { try { if (s == null) { log.error("Can't convert String " + s + " to character"); throw new PropertyAccessException("Can't convert String " + s + " to character"); } Character c = null; if (s.length() == 1) { c = s.charAt(0); } else { c = Character.MIN_VALUE; } return c; } catch (NumberFormatException e) { log.error("Number format exception caught,Caused by {}.", e); throw new PropertyAccessException(e); } } }
public class class_name {
    /**
     * Converts a string to a {@link Character}: a one-character string maps to
     * that character, anything else collapses to {@code Character.MIN_VALUE};
     * a null input is rejected with a {@code PropertyAccessException}.
     * NOTE: the trailing "// depends on ..." comments are machine-generated
     * control/data-dependency labels, not functional documentation.
     */
    @Override
    public Character fromString(Class targetClass, String s) {
        try {
            if (s == null) {
                log.error("Can't convert String " + s + " to character");
                throw new PropertyAccessException("Can't convert String " + s + " to character"); // depends on control dependency: [if], data = [none]
            }
            Character c = null;
            if (s.length() == 1) {
                c = s.charAt(0); // depends on control dependency: [if], data = [none]
            } else {
                c = Character.MIN_VALUE; // depends on control dependency: [if], data = [none]
            }
            return c; // depends on control dependency: [try], data = [none]
        } catch (NumberFormatException e) {
            log.error("Number format exception caught,Caused by {}.", e);
            throw new PropertyAccessException(e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Appends the given compliance types to this request, lazily creating the
     * backing list on first use.
     *
     * @param complianceTypes compliance types to add
     * @return {@code this} for fluent call chaining
     */
    public DescribeComplianceByResourceRequest withComplianceTypes(String... complianceTypes) {
        if (this.complianceTypes == null) {
            setComplianceTypes(new com.amazonaws.internal.SdkInternalList<String>(complianceTypes.length));
        }
        for (int i = 0; i < complianceTypes.length; i++) {
            this.complianceTypes.add(complianceTypes[i]);
        }
        return this;
    }
}
public class class_name {
    /**
     * Appends the given compliance types to this request, lazily creating the
     * backing list on first use, and returns {@code this} for call chaining.
     * NOTE: the trailing "// depends on ..." comments are machine-generated
     * control/data-dependency labels, not functional documentation.
     */
    public DescribeComplianceByResourceRequest withComplianceTypes(String... complianceTypes) {
        if (this.complianceTypes == null) {
            setComplianceTypes(new com.amazonaws.internal.SdkInternalList<String>(complianceTypes.length)); // depends on control dependency: [if], data = [none]
        }
        for (String ele : complianceTypes) {
            this.complianceTypes.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    }
}
public class class_name {
    /**
     * Scans a "no parse" block delimited by {@code #[[} ... {@code ]]#}.
     * State 300 matches the three-character opening delimiter; state 301
     * consumes characters until the closing delimiter. Reaching EOF before
     * the closing {@code ]]#} is a parse error.
     *
     * @return the result of {@code prepareNextScan(3)} on success, or
     *         {@code fail()} when the opening delimiter does not match
     * @throws ParseException if EOF is reached before the closing delimiter
     */
    boolean scanNoParse() {
        while (true) {
            switch (state) {
                case 300:
                    // Expect the opening delimiter "#[[".
                    if (peek() == '#' && next() == '[' && next() == '[') {
                        state = 301;
                        continue ;
                    }
                    return fail();
                case 301:
                    // Consume until "]]#" is found (look ahead two chars past c).
                    for (char c=next(); true; c=next()) {
                        if (c == ']' && buf[forward + 1] == ']' && buf[forward + 2] == '#') {
                            addTextToken(subBuf(lexemeBegin + 3, forward - 1)); // the "no parse" block is emitted as a TextToken
                            return prepareNextScan(3);
                        }
                        if (c == EOF) {
                            throw new ParseException("The \"no parse\" start block \"#[[\" can not match the end block: \"]]#\"", new Location(fileName, beginRow));
                        }
                    }
                default :
                    return fail();
            }
        }
    }
}
public class class_name {
    /**
     * Scans a "no parse" block delimited by {@code #[[} ... {@code ]]#}
     * (state 300 matches the opening delimiter, state 301 consumes until the
     * closing one; EOF before the close is a parse error).
     * NOTE: the trailing "// depends on ..." comments are machine-generated
     * control/data-dependency labels, not functional documentation.
     */
    boolean scanNoParse() {
        while (true) {
            switch (state) {
                case 300:
                    if (peek() == '#' && next() == '[' && next() == '[') {
                        state = 301; // depends on control dependency: [if], data = [none]
                        continue ;
                    }
                    return fail();
                case 301:
                    for (char c=next(); true; c=next()) {
                        if (c == ']' && buf[forward + 1] == ']' && buf[forward + 2] == '#') {
                            addTextToken(subBuf(lexemeBegin + 3, forward - 1)); // the "no parse" block is emitted as a TextToken // depends on control dependency: [if], data = [none]
                            return prepareNextScan(3); // depends on control dependency: [if], data = [none]
                        }
                        if (c == EOF) {
                            throw new ParseException("The \"no parse\" start block \"#[[\" can not match the end block: \"]]#\"", new Location(fileName, beginRow));
                        }
                    }
                default :
                    return fail();
            }
        }
    }
}
public class class_name {
    /**
     * Reads the optional fourth command-line argument as the normalization flag.
     * When fewer than four arguments are supplied the flag defaults to false.
     *
     * @param args command-line arguments; args[3], if present, must be "TRUE" or "FALSE"
     * @return the parsed normalization flag
     * @throws JMetalException if args[3] is neither "TRUE" nor "FALSE"
     */
    private static boolean checkAboutNormalization(String args[]) {
        if (args.length == 4) {
            String flag = args[3];
            if (flag.equals("TRUE")) {
                return true;
            }
            if (flag.equals("FALSE")) {
                return false;
            }
            throw new JMetalException("The value for normalizing must be TRUE or FALSE");
        }
        return false;
    }
}
public class class_name {
    /**
     * Reads the optional fourth command-line argument ("TRUE"/"FALSE") as the
     * normalization flag; defaults to false when absent, throws otherwise.
     * NOTE: the trailing "// depends on ..." comments are machine-generated
     * control/data-dependency labels, not functional documentation.
     */
    private static boolean checkAboutNormalization(String args[]) {
        boolean normalize = false ;
        if (args.length == 4) {
            if (args[3].equals("TRUE")) {
                normalize = true; // depends on control dependency: [if], data = [none]
            } else if (args[3].equals("FALSE")) {
                normalize = false; // depends on control dependency: [if], data = [none]
            } else {
                throw new JMetalException("The value for normalizing must be TRUE or FALSE");
            }
        }
        return normalize ;
    }
}
public class class_name {
    /**
     * Looks up candidate map-matching locations for every GPX observation.
     * For each observation the location index returns the closest results
     * within {@code measurementErrorSigma}, restricted by the edge filter.
     *
     * @param gpxList    observations to resolve
     * @param edgeFilter filter restricting which edges may match
     * @return one collection of candidate query results per observation, in input order
     */
    private List<Collection<QueryResult>> lookupGPXEntries(List<Observation> gpxList, EdgeFilter edgeFilter) {
        final List<Collection<QueryResult>> candidates = new ArrayList<>();
        for (Observation observation : gpxList) {
            candidates.add(locationIndex.findNClosest(
                    observation.getPoint().lat, observation.getPoint().lon,
                    edgeFilter, measurementErrorSigma));
        }
        return candidates;
    }
}
public class class_name {
    /**
     * Looks up candidate map-matching locations for every GPX observation via
     * the location index, one collection of results per observation.
     * NOTE: the trailing "// depends on ..." comments are machine-generated
     * control/data-dependency labels, not functional documentation.
     */
    private List<Collection<QueryResult>> lookupGPXEntries(List<Observation> gpxList, EdgeFilter edgeFilter) {
        final List<Collection<QueryResult>> gpxEntryLocations = new ArrayList<>();
        for (Observation gpxEntry : gpxList) {
            final List<QueryResult> queryResults = locationIndex.findNClosest(
                gpxEntry.getPoint().lat, gpxEntry.getPoint().lon, edgeFilter, measurementErrorSigma);
            gpxEntryLocations.add(queryResults); // depends on control dependency: [for], data = [gpxEntry]
        }
        return gpxEntryLocations;
    }
}
public class class_name { private PersistenceBrokerException createException(final Exception ex, String message, final Object objectToIdentify, Class topLevelClass, Class realClass, Object[] pks) { final String eol = SystemUtils.LINE_SEPARATOR; StringBuffer msg = new StringBuffer(); if(message == null) { msg.append("Unexpected error: "); } else { msg.append(message).append(" :"); } if(topLevelClass != null) msg.append(eol).append("objectTopLevelClass=").append(topLevelClass.getName()); if(realClass != null) msg.append(eol).append("objectRealClass=").append(realClass.getName()); if(pks != null) msg.append(eol).append("pkValues=").append(ArrayUtils.toString(pks)); if(objectToIdentify != null) msg.append(eol).append("object to identify: ").append(objectToIdentify); if(ex != null) { // add causing stack trace Throwable rootCause = ExceptionUtils.getRootCause(ex); if(rootCause != null) { msg.append(eol).append("The root stack trace is --> "); String rootStack = ExceptionUtils.getStackTrace(rootCause); msg.append(eol).append(rootStack); } return new PersistenceBrokerException(msg.toString(), ex); } else { return new PersistenceBrokerException(msg.toString()); } } }
public class class_name {
    /**
     * Builds a PersistenceBrokerException whose message aggregates the
     * available identification context and, when a causing exception is given,
     * the root-cause stack trace.
     * NOTE: the trailing "// depends on ..." comments are machine-generated
     * control/data-dependency labels, not functional documentation.
     */
    private PersistenceBrokerException createException(final Exception ex, String message,
            final Object objectToIdentify, Class topLevelClass, Class realClass, Object[] pks) {
        final String eol = SystemUtils.LINE_SEPARATOR;
        StringBuffer msg = new StringBuffer();
        if(message == null) {
            msg.append("Unexpected error: "); // depends on control dependency: [if], data = [none]
        } else {
            msg.append(message).append(" :"); // depends on control dependency: [if], data = [(message]
        }
        if(topLevelClass != null) msg.append(eol).append("objectTopLevelClass=").append(topLevelClass.getName());
        if(realClass != null) msg.append(eol).append("objectRealClass=").append(realClass.getName());
        if(pks != null) msg.append(eol).append("pkValues=").append(ArrayUtils.toString(pks));
        if(objectToIdentify != null) msg.append(eol).append("object to identify: ").append(objectToIdentify);
        if(ex != null) {
            // add causing stack trace
            Throwable rootCause = ExceptionUtils.getRootCause(ex);
            if(rootCause != null) {
                msg.append(eol).append("The root stack trace is --> "); // depends on control dependency: [if], data = [none]
                String rootStack = ExceptionUtils.getStackTrace(rootCause);
                msg.append(eol).append(rootStack); // depends on control dependency: [if], data = [none]
            }
            return new PersistenceBrokerException(msg.toString(), ex); // depends on control dependency: [if], data = [none]
        } else {
            return new PersistenceBrokerException(msg.toString()); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Determines asynchronously whether a Table Segment holds no live entries.
     * Combines the in-memory tail index (recent, not-yet-indexed updates) with
     * the durable index's bucket count: any non-removal tail update means
     * non-empty; otherwise removals in the tail must be resolved against real
     * buckets before comparing counts.
     *
     * @param segment direct access to the segment under inspection
     * @param timer   remaining-time budget for the bucket lookups
     * @return a future completing with true iff the segment is empty
     */
    private CompletableFuture<Boolean> isTableSegmentEmpty(DirectSegmentAccess segment, TimeoutTimer timer) {
        // Get a snapshot of the Tail Index and identify all bucket removals.
        val tailHashes = this.cache.getTailHashes(segment.getSegmentId());
        val tailRemovals = tailHashes.entrySet().stream()
                .filter(e -> e.getValue().isRemoval())
                .map(Map.Entry::getKey)
                .collect(Collectors.toList());
        if (tailHashes.size() > tailRemovals.size()) {
            // Tail Index has at least one update, which implies the Table Segment is not empty.
            return CompletableFuture.completedFuture(false);
        } else {
            // Get the number of indexed Table Buckets.
            SegmentProperties sp = segment.getInfo();
            long indexedBucketCount = this.indexReader.getBucketCount(sp);
            if (tailRemovals.isEmpty()) {
                // No removals in the Tail index, so we can derive our response from the total number of indexed buckets.
                return CompletableFuture.completedFuture(indexedBucketCount <= 0);
            } else {
                // Tail Index has at least one removal. We need to check which of these removals point to a real Table Bucket.
                // It is possible that we received an unconditional remove for a Table Bucket that does not exist or a Table
                // Bucket has been created and immediately deleted (before being included in the main Index). In order to
                // determine if the table is empty, we need to figure out the exact count of removed buckets.
                return this.indexReader.locateBuckets(segment, tailRemovals, timer)
                        .thenApply(buckets -> {
                            long removedCount = buckets.values().stream().filter(TableBucket::exists).count();
                            return indexedBucketCount <= removedCount;
                        });
            }
        }
    }
}
public class class_name {
    /**
     * Determines asynchronously whether a Table Segment holds no live entries,
     * reconciling the in-memory tail index against the durable bucket count.
     * NOTE: the trailing "// depends on ..." comments are machine-generated
     * control/data-dependency labels, not functional documentation.
     */
    private CompletableFuture<Boolean> isTableSegmentEmpty(DirectSegmentAccess segment, TimeoutTimer timer) {
        // Get a snapshot of the Tail Index and identify all bucket removals.
        val tailHashes = this.cache.getTailHashes(segment.getSegmentId());
        val tailRemovals = tailHashes.entrySet().stream()
                .filter(e -> e.getValue().isRemoval())
                .map(Map.Entry::getKey)
                .collect(Collectors.toList());
        if (tailHashes.size() > tailRemovals.size()) {
            // Tail Index has at least one update, which implies the Table Segment is not empty.
            return CompletableFuture.completedFuture(false); // depends on control dependency: [if], data = [none]
        } else {
            // Get the number of indexed Table Buckets.
            SegmentProperties sp = segment.getInfo();
            long indexedBucketCount = this.indexReader.getBucketCount(sp);
            if (tailRemovals.isEmpty()) {
                // No removals in the Tail index, so we can derive our response from the total number of indexed buckets.
                return CompletableFuture.completedFuture(indexedBucketCount <= 0); // depends on control dependency: [if], data = [none]
            } else {
                // Tail Index has at least one removal. We need to check which of these removals point to a real Table Bucket.
                // It is possible that we received an unconditional remove for a Table Bucket that does not exist or a Table
                // Bucket has been created and immediately deleted (before being included in the main Index). In order to
                // determine if the table is empty, we need to figure out the exact count of removed buckets.
                return this.indexReader.locateBuckets(segment, tailRemovals, timer)
                        .thenApply(buckets -> {
                            long removedCount = buckets.values().stream().filter(TableBucket::exists).count(); // depends on control dependency: [if], data = [none]
                            return indexedBucketCount <= removedCount; // depends on control dependency: [if], data = [none]
                        });
            }
        }
    }
}
public class class_name {
    /**
     * Fetches a page of people from the REST adapter and wires the adapter
     * into each returned person so they can make follow-up calls.
     *
     * @return the paginated result with every person's adapter set
     */
    @NotNull
    public PaginatedResult<Person> listPeople() {
        final PaginatedResult<Person> page = restAdapter.listPeople();
        for (final Person person : page.getData()) {
            person.setAdapter(restAdapter);
        }
        return page;
    }
}
public class class_name {
    /**
     * Fetches a page of people from the REST adapter and wires the adapter
     * into each returned person.
     * NOTE: the trailing "// depends on ..." comment is a machine-generated
     * control/data-dependency label, not functional documentation.
     */
    @NotNull
    public PaginatedResult<Person> listPeople() {
        PaginatedResult<Person> result = restAdapter.listPeople();
        for (Person person : result.getData()) {
            person.setAdapter(restAdapter); // depends on control dependency: [for], data = [person]
        }
        return result;
    }
}
public class class_name {
    /**
     * Decodes a map of type-prefix-encoded string properties back into a map
     * of typed values.
     * <p>
     * Each key in {@code encodedMap} has the form
     * {@code <PREFIX><PREFIX_SEPARATOR><name>}, where the prefix identifies the
     * original value type (null, String, Boolean, Integer, Byte, Short, Float,
     * Double, Long, or a routing path). Entries from {@code defaults} are
     * preloaded first and overridden by decoded values. Malformed keys, unknown
     * prefixes, and values that fail conversion are logged/FFDC'd and skipped
     * so that one bad property does not fail the whole decode.
     *
     * @param encodedMap map of encoded key to string value
     * @param defaults   fallback values for properties absent from the input
     * @return the decoded property map
     */
    public Map getStringDecodedMap(final Map encodedMap, final Map defaults) {
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.entry(this, TRACE, "getStringDecodedMap", new Object[] {encodedMap,defaults});
        }
        final Map<String,Object> decoded = new HashMap<String,Object>();
        // Preload with the defaults - if the property exists in the input
        // it will override this default
        decoded.putAll(defaults);
        // Look at each property in turn.
        final Iterator keyList = encodedMap.keySet().iterator();
        while (keyList.hasNext()) {
            // These variables will point to the info to be placed in the map.
            String propName = null;
            Object propVal = null;
            // Get the coded version of the name. This will start with one
            // of the prefix values. The codedName must have been non-null.
            String encodedKey = (String) keyList.next();
            String encodedVal = (String) encodedMap.get(encodedKey);
            // Extract the prefix.
            String prefix = null;
            int sepIndex = encodedKey.indexOf(PREFIX_SEPARATOR);
            if (sepIndex == -1) {
                // The separator was not found - this is really bad, and suggests
                // that the encoding step was flawed.
                if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled())
                    SibTr.debug(TRACE, "Ignoring malformed encoded name: " + encodedKey);
                continue;
            } else {
                // Extract the prefix and clean version of the name.
                prefix = encodedKey.substring(0, sepIndex);
                propName = encodedKey.substring(sepIndex + PREFIX_SEPARATOR.length());
            }//if
            // Catch any number conversion errors that arise while converting
            // the string to an object.
            try {
                // Decode the prefix to recreate the data type.
                if (PREFIX_NULL.equals(prefix)) {
                    // The value was null.
                    propVal = null;
                } else if (PREFIX_STRING.equals(prefix)) {
                    propVal = encodedVal;
                    // Because the value was not prefixed with PREFIX_NULL, we know that
                    // if this val is null, it was meant to be an empty string...
                    if (propVal == null) propVal = "";
                } else if (PREFIX_BOOLEAN.equals(prefix)) {
                    propVal = Boolean.valueOf(encodedVal);
                } else if (PREFIX_INT.equals(prefix)) {
                    propVal = Integer.valueOf(encodedVal);
                } else if (PREFIX_BYTE.equals(prefix)) {
                    propVal = Byte.valueOf(encodedVal);
                } else if (PREFIX_SHORT.equals(prefix)) {
                    propVal = Short.valueOf(encodedVal);
                } else if (PREFIX_FLOAT.equals(prefix)) {
                    propVal = Float.valueOf(encodedVal);
                } else if (PREFIX_DOUBLE.equals(prefix)) {
                    propVal = Double.valueOf(encodedVal);
                } else if (PREFIX_LONG.equals(prefix)) {
                    propVal = Long.valueOf(encodedVal);
                } else if (PREFIX_ROUTING_PATH.equals(prefix)) {
                    // encodedVal = array represented as one long string.
                    // This uses the Java 1.4 regex method on a string to split it into
                    // an array, with the individual strings being separated by
                    // the string passed in.
                    String[] array = encodedVal
                            .split(JmsraConstants.PATH_ELEMENT_SEPARATOR);
                    // propVal = what we want to return (a string array wrapper
                    // containing the string[])
                    String bigDestName = (String) encodedMap
                            .get(PREFIX_STRING + PREFIX_SEPARATOR + JmsInternalConstants.DEST_NAME);
                    propVal = StringArrayWrapper.create(array, bigDestName);
                } else {
                    // Did not match any of the known prefixes
                    if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) {
                        SibTr.debug(TRACE, "Ignoring unknown prefix: " + prefix);
                    }
                    continue;
                }// (if)switch on prefix type.
                // We have successfully decoded the property, so now add it to
                // the temporary map of properties.
                decoded.put(propName, propVal);
                if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) {
                    SibTr.debug(TRACE, "retrieved: " + propName + " = " + propVal);
                }
            } catch (final Exception exception) {
                FFDCFilter
                        .processException(
                                exception,
                                "com.ibm.ws.sib.api.jmsra.impl.JmsJcaReferenceUtilsImpl.getStringDecodedMap",
                                FFDC_PROBE_1, this);
                // Catch any NumberFormatException or similar thing that arises
                // from the attempt to convert the string to another data type.
                if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) {
                    SibTr.debug(TRACE, "Error decoding string to object. ", exception);
                }
                continue;
            }//try
        }//while
        if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
            SibTr.exit(this, TRACE, "getStringDecodedMap", decoded);
        }
        return decoded;
    }
}
public class class_name { public Map getStringDecodedMap(final Map encodedMap, final Map defaults) { if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) { SibTr.entry(this, TRACE, "getStringDecodedMap", new Object[] {encodedMap,defaults}); // depends on control dependency: [if], data = [none] } final Map<String,Object> decoded = new HashMap<String,Object>(); // Preload with the defaults - if the property exists in the input // it will override this default decoded.putAll(defaults); // Look at each property in turn. final Iterator keyList = encodedMap.keySet().iterator(); while (keyList.hasNext()) { // These variables will point to the info to be placed // in the map. String propName = null; Object propVal = null; // Get the coded version of the name. This will start with one // of the prefix values. The codedName must have been non-null. String encodedKey = (String) keyList.next(); String encodedVal = (String) encodedMap.get(encodedKey); // Extract the prefix. String prefix = null; int sepIndex = encodedKey.indexOf(PREFIX_SEPARATOR); if (sepIndex == -1) { // The separator was not found - this is really bad, and // suggests // that the encoding step was flawed. if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) SibTr.debug(TRACE, "Ignoring malformed encoded name: " + encodedKey); continue; } else { // Extract the prefix and clean version of the name. prefix = encodedKey.substring(0, sepIndex); // depends on control dependency: [if], data = [none] propName = encodedKey.substring(sepIndex + PREFIX_SEPARATOR.length()); // depends on control dependency: [if], data = [none] }//if // Catch any number conversion errors that arise while converting // the // string to an object. try { // Decode the prefix to recreate the data type. if (PREFIX_NULL.equals(prefix)) { // The value was null. 
propVal = null; // depends on control dependency: [if], data = [none] } else if (PREFIX_STRING.equals(prefix)) { propVal = encodedVal; // depends on control dependency: [if], data = [none] // Because the value was not prefixed with PREFIX_NULL, we // know that // if this val is null, it was meant to be an empty // string... if (propVal == null) propVal = ""; } else if (PREFIX_BOOLEAN.equals(prefix)) { propVal = Boolean.valueOf(encodedVal); // depends on control dependency: [if], data = [none] } else if (PREFIX_INT.equals(prefix)) { propVal = Integer.valueOf(encodedVal); // depends on control dependency: [if], data = [none] } else if (PREFIX_BYTE.equals(prefix)) { propVal = Byte.valueOf(encodedVal); // depends on control dependency: [if], data = [none] } else if (PREFIX_SHORT.equals(prefix)) { propVal = Short.valueOf(encodedVal); // depends on control dependency: [if], data = [none] } else if (PREFIX_FLOAT.equals(prefix)) { propVal = Float.valueOf(encodedVal); // depends on control dependency: [if], data = [none] } else if (PREFIX_DOUBLE.equals(prefix)) { propVal = Double.valueOf(encodedVal); // depends on control dependency: [if], data = [none] } else if (PREFIX_LONG.equals(prefix)) { propVal = Long.valueOf(encodedVal); // depends on control dependency: [if], data = [none] } else if (PREFIX_ROUTING_PATH.equals(prefix)) { // encodedVal = array represented as one long string. // This uses the Java 1.4 regex method on a string to split // it into // an array, with the individual strings being separated by // the string passed in. 
String[] array = encodedVal .split(JmsraConstants.PATH_ELEMENT_SEPARATOR); // propVal = what we want to return (a string array wrapper // containing the string[]) String bigDestName = (String) encodedMap .get(PREFIX_STRING + PREFIX_SEPARATOR + JmsInternalConstants.DEST_NAME); propVal = StringArrayWrapper.create(array, bigDestName); // depends on control dependency: [if], data = [none] } else { // Did not match any of the known prefixes if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) { SibTr .debug(TRACE, "Ignoring unknown prefix: " + prefix); // depends on control dependency: [if], data = [none] } continue; }// (if)switch on prefix type. // We have successfully decoded the property, so now add it to // the // temporary map of properties. decoded.put(propName, propVal); // depends on control dependency: [try], data = [none] if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) { SibTr.debug(TRACE, "retrieved: " + propName + " = " + propVal); // depends on control dependency: [if], data = [none] } } catch (final Exception exception) { FFDCFilter .processException( exception, "com.ibm.ws.sib.api.jmsra.impl.JmsJcaReferenceUtilsImpl.getStringDecodedMap", FFDC_PROBE_1, this); // Catch any NumberFormatException or similar thing that arises // from the attempt to convert the string to another data type. if (TraceComponent.isAnyTracingEnabled() && TRACE.isDebugEnabled()) { SibTr.debug(TRACE, "Error decoding string to object. ", exception); // depends on control dependency: [if], data = [none] } continue; }//try // depends on control dependency: [catch], data = [none] }//while if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) { SibTr.exit(this, TRACE, "getStringDecodedMap", decoded); // depends on control dependency: [if], data = [none] } return decoded; } }
public class class_name {
    /**
     * Reports whether the candidate class is recognised by any registered
     * tiny-type meta descriptor in {@code metas}.
     *
     * @param candidate the class to test
     * @return true if at least one meta descriptor matches the candidate
     */
    public static boolean isTinyType(Class<?> candidate) {
        boolean recognised = false;
        for (MetaTinyType meta : metas) {
            if (meta.isMetaOf(candidate)) {
                recognised = true;
                break;
            }
        }
        return recognised;
    }
}
public class class_name {
    /**
     * Reports whether the candidate class is recognised by any registered
     * tiny-type meta descriptor in {@code metas}.
     * NOTE: the trailing "// depends on ..." comment is a machine-generated
     * control/data-dependency label, not functional documentation.
     */
    public static boolean isTinyType(Class<?> candidate) {
        for (MetaTinyType meta : metas) {
            if (meta.isMetaOf(candidate)) {
                return true; // depends on control dependency: [if], data = [none]
            }
        }
        return false;
    }
}
public class class_name { public void getPersistentData(ObjectOutputStream dout) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "getPersistentData", dout); try { dout.writeUTF(remoteMEId.toString()); dout.writeUTF(streamId.toString()); String id = NULL; if (gatheringTargetDestUuid!=null) id = gatheringTargetDestUuid.toString(); dout.writeUTF(id); } catch (IOException e) { // No FFDC code needed SIErrorException e2 = new SIErrorException(e); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getPersistentData", e2); throw e2; } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "getPersistentData"); } }
public class class_name {
    /**
     * Writes this object's persistent identity (remote ME id, stream id, and
     * optional gathering-target destination uuid) to the stream as UTF strings,
     * wrapping any IOException in an SIErrorException.
     * NOTE: the trailing "// depends on ..." comments are machine-generated
     * control/data-dependency labels, not functional documentation.
     */
    public void getPersistentData(ObjectOutputStream dout) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "getPersistentData", dout);
        try {
            dout.writeUTF(remoteMEId.toString()); // depends on control dependency: [try], data = [none]
            dout.writeUTF(streamId.toString()); // depends on control dependency: [try], data = [none]
            String id = NULL;
            if (gatheringTargetDestUuid!=null)
                id = gatheringTargetDestUuid.toString();
            dout.writeUTF(id); // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
            // No FFDC code needed
            SIErrorException e2 = new SIErrorException(e);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "getPersistentData", e2);
            throw e2;
        } // depends on control dependency: [catch], data = [none]
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getPersistentData");
    }
}
public class class_name {
    /**
     * Recursively copies {@code source} into {@code destination}, creating each
     * destination directory and copying regular files via {@code copyFile}.
     * When a filter is supplied, only children accepted by it are descended into.
     *
     * @param source      file or directory to copy
     * @param destination target path (created as a directory when source is one)
     * @param filter      optional child filter; null means copy everything
     * @throws FileCreationFailedException if a destination directory cannot be created
     * @throws IllegalStateException      if the source directory cannot be listed
     */
    private void copyRecursive(File source, File destination, FileFilter filter) {
        if (source.isDirectory()) {
            boolean okay = destination.mkdir();
            if (!okay) {
                throw new FileCreationFailedException(destination.getAbsolutePath(), true);
            }
            // File.listFiles() returns null on an I/O error or if the directory
            // vanished; the original code would NPE in the loop below, so fail
            // with an explicit message instead.
            File[] children = (filter == null) ? source.listFiles() : source.listFiles(filter);
            if (children == null) {
                throw new IllegalStateException("Unable to list directory: " + source.getAbsolutePath());
            }
            for (File file : children) {
                copyRecursive(file, new File(destination, file.getName()), filter);
            }
        } else {
            copyFile(source, destination);
        }
    }
}
// Dependency-annotated duplicate of copyRecursive: the inline
// "// depends on control dependency: [...]" comments record each statement's
// controlling construct and data dependencies.
public class class_name {
    // Recursively copies source into destination, recreating directories and
    // delegating file copies to copyFile; filter (nullable) limits children.
    private void copyRecursive(File source, File destination, FileFilter filter) {
        if (source.isDirectory()) {
            boolean okay = destination.mkdir();
            if (!okay) {
                throw new FileCreationFailedException(destination.getAbsolutePath(), true);
            }
            File[] children;
            if (filter == null) {
                children = source.listFiles(); // depends on control dependency: [if], data = [none]
            } else {
                children = source.listFiles(filter); // depends on control dependency: [if], data = [(filter]
            }
            for (File file : children) {
                copyRecursive(file, new File(destination, file.getName()), filter); // depends on control dependency: [for], data = [file]
            }
        } else {
            copyFile(source, destination); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Zips a collection of streams element-wise: each output element is
     * produced by applying {@code zipFunction} to the list of corresponding
     * input elements. An empty or null input yields an empty stream. Closing
     * the returned stream closes the input streams via the registered handler.
     *
     * @param c           streams to zip together
     * @param zipFunction combiner applied to each aligned list of elements
     * @return the zipped stream
     */
    public static <T, R> Stream<R> zip(final Collection<? extends Stream<? extends T>> c, final Function<? super List<? extends T>, R> zipFunction) {
        if (N.isNullOrEmpty(c)) {
            return Stream.empty();
        }
        final List<Iterator<? extends T>> iterators = new ArrayList<>(c.size());
        for (final Stream<? extends T> stream : c) {
            iterators.add(stream.iterator());
        }
        return zipp(iterators, zipFunction).onClose(newCloseHandler(c));
    }
}
// Dependency-annotated duplicate of zip: the inline
// "// depends on control dependency: [...]" comment marks the statement's
// controlling construct.
public class class_name {
    // Zips a collection of streams element-wise via zipFunction; empty/null
    // input yields an empty stream; closing the result closes the inputs.
    public static <T, R> Stream<R> zip(final Collection<? extends Stream<? extends T>> c, final Function<? super List<? extends T>, R> zipFunction) {
        if (N.isNullOrEmpty(c)) {
            return Stream.empty(); // depends on control dependency: [if], data = [none]
        }
        final int len = c.size();
        final List<Iterator<? extends T>> iterList = new ArrayList<>(len);
        for (Stream<? extends T> e : c) {
            iterList.add(e.iterator());
        }
        return zipp(iterList, zipFunction).onClose(newCloseHandler(c));
    }
}
public class class_name {
    /**
     * Formats {@code date} with a {@link SimpleDateFormat}.
     * {@code args[0]} is the mandatory pattern; {@code args[1]} is an
     * optional locale language tag (defaults to the platform locale).
     *
     * @param date the date to format; {@code null} yields ""
     * @param args pattern and optional locale
     * @return the formatted date string
     * @throws JGivenWrongUsageException if the pattern is missing or invalid
     */
    @Override
    public String format( Date date, String... args ) {
        if( date == null ) {
            return "";
        }
        if( args.length == 0 ) {
            // the original wrapped this constant in a no-arg String.format()
            throw new JGivenWrongUsageException( "A SimpleDateFormat pattern is expected as first argument" );
        }
        String pattern = args[0];
        Locale locale = Locale.getDefault();
        if( args.length > 1 ) {
            locale = new Locale( args[1] );
        }
        SimpleDateFormat sdf;
        try {
            sdf = new SimpleDateFormat( pattern, locale );
        } catch( IllegalArgumentException e ) {
            throw new JGivenWrongUsageException( String.format( "A valid SimpleDateFormat pattern is expected (was '%s')", pattern ) );
        }
        return sdf.format( date );
    }
}
// Dependency-annotated duplicate of format: the inline
// "// depends on control dependency: [...]" comments record each statement's
// controlling construct.
public class class_name {
    // Formats date using args[0] as a SimpleDateFormat pattern and the
    // optional args[1] as a locale language; null date yields "".
    @Override
    public String format( Date date, String... args ) {
        if( date == null ) {
            return ""; // depends on control dependency: [if], data = [none]
        }
        if( args.length == 0 ) {
            throw new JGivenWrongUsageException( String.format( "A SimpleDateFormat pattern is expected as first argument" ) );
        }
        String pattern = args[0];
        Locale locale = Locale.getDefault();
        if( args.length > 1 ) {
            locale = new Locale( args[1] ); // depends on control dependency: [if], data = [none]
        }
        SimpleDateFormat sdf;
        try {
            sdf = new SimpleDateFormat( pattern, locale ); // depends on control dependency: [try], data = [none]
        } catch( IllegalArgumentException e ) {
            throw new JGivenWrongUsageException( String.format( "A valid SimpleDateFormat pattern is expected (was '%s')", pattern ) );
        } // depends on control dependency: [catch], data = [none]
        return sdf.format( date );
    }
}
public class class_name { @Override protected void onSelectionChanged(final int selStart, final int selEnd) { // Handle case where there is only one cursor (i.e. not selecting a range, just moving cursor) if (selStart == selEnd) { if (!onCursorChanged(selStart)) { super.onSelectionChanged(selStart, selEnd); } return; } else { updateSelectionIfRequired(selStart, selEnd); } super.onSelectionChanged(selStart, selEnd); } }
// Dependency-annotated duplicate of onSelectionChanged: the inline
// "// depends on control dependency: [...]" comments record each statement's
// controlling construct and data dependencies.
public class class_name {
    @Override
    protected void onSelectionChanged(final int selStart, final int selEnd) {
        // Handle case where there is only one cursor (i.e. not selecting a range, just moving cursor)
        if (selStart == selEnd) {
            if (!onCursorChanged(selStart)) {
                super.onSelectionChanged(selStart, selEnd); // depends on control dependency: [if], data = [none]
            }
            return; // depends on control dependency: [if], data = [none]
        } else {
            updateSelectionIfRequired(selStart, selEnd); // depends on control dependency: [if], data = [(selStart]
        }
        super.onSelectionChanged(selStart, selEnd);
    }
}
// ANTLR-generated lexer rule (from druidG.g) for the DELETE keyword; matches
// either all-uppercase 'DELETE' or all-lowercase 'delete'. Generated code —
// regenerate from the grammar rather than editing by hand.
public class class_name {
    public final void mDELETE() throws RecognitionException {
        try {
            int _type = DELETE;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // druidG.g:596:17: ( ( 'DELETE' | 'delete' ) )
            // druidG.g:596:18: ( 'DELETE' | 'delete' )
            {
            // druidG.g:596:18: ( 'DELETE' | 'delete' )
            int alt12=2;
            int LA12_0 = input.LA(1);
            if ( (LA12_0=='D') ) {
                alt12=1;
            }
            else if ( (LA12_0=='d') ) {
                alt12=2;
            }
            else {
                // lookahead is neither 'D' nor 'd': no viable alternative
                NoViableAltException nvae = new NoViableAltException("", 12, 0, input);
                throw nvae;
            }
            switch (alt12) {
                case 1 :
                    // druidG.g:596:19: 'DELETE'
                    {
                    match("DELETE");
                    }
                    break;
                case 2 :
                    // druidG.g:596:28: 'delete'
                    {
                    match("delete");
                    }
                    break;
            }
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
            // do for sure before leaving
        }
    }
}
// Dependency-annotated duplicate of the ANTLR-generated mDELETE lexer rule:
// the inline "// depends on control dependency: [...]" comments record each
// statement's controlling construct.
public class class_name {
    public final void mDELETE() throws RecognitionException {
        try {
            int _type = DELETE;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // druidG.g:596:17: ( ( 'DELETE' | 'delete' ) )
            // druidG.g:596:18: ( 'DELETE' | 'delete' )
            {
            // druidG.g:596:18: ( 'DELETE' | 'delete' )
            int alt12=2;
            int LA12_0 = input.LA(1);
            if ( (LA12_0=='D') ) {
                alt12=1; // depends on control dependency: [if], data = [none]
            }
            else if ( (LA12_0=='d') ) {
                alt12=2; // depends on control dependency: [if], data = [none]
            }
            else {
                NoViableAltException nvae = new NoViableAltException("", 12, 0, input);
                throw nvae;
            }
            switch (alt12) {
                case 1 :
                    // druidG.g:596:19: 'DELETE'
                    {
                    match("DELETE");
                    }
                    break;
                case 2 :
                    // druidG.g:596:28: 'delete'
                    {
                    match("delete");
                    }
                    break;
            }
            }
            state.type = _type;
            state.channel = _channel;
        }
        finally {
            // do for sure before leaving
        }
    }
}
public class class_name {
    /**
     * Resolves the control public interface for the annotated field. The field
     * may be declared either as the public interface type or as the generated
     * bean type; in the latter case this method reflects (via the APT mirror
     * API) to locate the interface the bean implements. Returns null (after
     * reporting an error on the field) when no control interface can be found.
     */
    protected AptControlInterface initControlInterface() {
        TypeMirror controlType = _fieldDecl.getType();
        if (! (controlType instanceof DeclaredType)) {
            _ap.printError( _fieldDecl, "control.field.bad.type" );
            return null;
        }
        //
        // The field can either be declared as the bean type or the public interface type.
        // If it is the bean type, then we need to reflect to find the public interface
        // type it implements.
        //
        TypeDeclaration typeDecl = ((DeclaredType)controlType).getDeclaration();
        InterfaceDeclaration controlIntf = null;
        //
        // It is possible that the declared type is associated with a to-be-generated
        // bean type. In this case, look for the associated control interface on the
        // processor input list.
        //
        if ( typeDecl == null ) {
            String className = controlType.toString();
            // Strips a 4-character suffix from the class name — presumably the
            // "Bean" suffix of the generated bean naming convention; TODO confirm.
            String intfName = className.substring(0, className.length() - 4);
            String interfaceHint = getControlInterfaceHint();
            controlIntf = (InterfaceDeclaration)_ap.getAnnotationProcessorEnvironment().getTypeDeclaration(intfName);
            if (controlIntf == null) {
                // The specified class name may not be fully qualified. In this case, the
                // best we can do is look for a best fit match against the input types
                for (TypeDeclaration td :_ap.getAnnotationProcessorEnvironment().getSpecifiedTypeDeclarations()) {
                    // if an interface hint was provided, use it to find the control interface,
                    // if not provided try to find the control interface by matching simple names.
                    if (interfaceHint != null) {
                        if (td instanceof InterfaceDeclaration && td.getQualifiedName().equals(interfaceHint)) {
                            controlIntf = (InterfaceDeclaration)td;
                            break;
                        }
                    }
                    else {
                        if (td instanceof InterfaceDeclaration && td.getSimpleName().equals(intfName)) {
                            controlIntf = (InterfaceDeclaration)td;
                            break;
                        }
                    }
                }
            }
        }
        else if (typeDecl instanceof ClassDeclaration) {
            // Bean class: scan its superinterfaces for one carrying a
            // @ControlInterface or @ControlExtension annotation.
            Collection<InterfaceType> implIntfs = ((ClassDeclaration)typeDecl).getSuperinterfaces();
            for (InterfaceType intfType : implIntfs) {
                InterfaceDeclaration intfDecl = intfType.getDeclaration();
                if ( intfDecl == null )
                    return null;
                if (intfDecl.getAnnotation(ControlInterface.class) != null|| intfDecl.getAnnotation(ControlExtension.class) != null) {
                    controlIntf = intfDecl;
                    break;
                }
            }
        }
        else if (typeDecl instanceof InterfaceDeclaration) {
            // Field is declared directly as the public interface type.
            controlIntf = (InterfaceDeclaration)typeDecl;
        }
        if (controlIntf == null) {
            _ap.printError( _fieldDecl, "control.field.bad.type.2" );
            return null;
        }
        return new AptControlInterface(controlIntf, _ap);
    }
}
// Dependency-annotated duplicate of initControlInterface: the inline
// "// depends on control dependency: [...]" comments record each statement's
// controlling construct.
public class class_name {
    // Resolves the control public interface for the annotated field (declared
    // either as the bean type or the interface type); returns null on error.
    protected AptControlInterface initControlInterface() {
        TypeMirror controlType = _fieldDecl.getType();
        if (! (controlType instanceof DeclaredType)) {
            _ap.printError( _fieldDecl, "control.field.bad.type" );
            return null; // depends on control dependency: [if], data = [none]
        }
        //
        // The field can either be declared as the bean type or the public interface type.
        // If it is the bean type, then we need to reflect to find the public interface
        // type it implements.
        //
        TypeDeclaration typeDecl = ((DeclaredType)controlType).getDeclaration();
        InterfaceDeclaration controlIntf = null;
        //
        // It is possible that the declared type is associated with a to-be-generated
        // bean type. In this case, look for the associated control interface on the
        // processor input list.
        //
        if ( typeDecl == null ) {
            String className = controlType.toString();
            String intfName = className.substring(0, className.length() - 4);
            String interfaceHint = getControlInterfaceHint();
            controlIntf = (InterfaceDeclaration)_ap.getAnnotationProcessorEnvironment().getTypeDeclaration(intfName); // depends on control dependency: [if], data = [none]
            if (controlIntf == null) {
                // The specified class name may not be fully qualified. In this case, the
                // best we can do is look for a best fit match against the input types
                for (TypeDeclaration td :_ap.getAnnotationProcessorEnvironment().getSpecifiedTypeDeclarations()) {
                    // if an interface hint was provided, use it to find the control interface,
                    // if not provided try to find the control interface by matching simple names.
                    if (interfaceHint != null) {
                        if (td instanceof InterfaceDeclaration && td.getQualifiedName().equals(interfaceHint)) {
                            controlIntf = (InterfaceDeclaration)td; // depends on control dependency: [if], data = [none]
                            break;
                        }
                    }
                    else {
                        if (td instanceof InterfaceDeclaration && td.getSimpleName().equals(intfName)) {
                            controlIntf = (InterfaceDeclaration)td; // depends on control dependency: [if], data = [none]
                            break;
                        }
                    }
                }
            }
        }
        else if (typeDecl instanceof ClassDeclaration) {
            Collection<InterfaceType> implIntfs = ((ClassDeclaration)typeDecl).getSuperinterfaces();
            for (InterfaceType intfType : implIntfs) {
                InterfaceDeclaration intfDecl = intfType.getDeclaration();
                if ( intfDecl == null )
                    return null;
                if (intfDecl.getAnnotation(ControlInterface.class) != null|| intfDecl.getAnnotation(ControlExtension.class) != null) {
                    controlIntf = intfDecl; // depends on control dependency: [if], data = [none]
                    break;
                }
            }
        }
        else if (typeDecl instanceof InterfaceDeclaration) {
            controlIntf = (InterfaceDeclaration)typeDecl; // depends on control dependency: [if], data = [none]
        }
        if (controlIntf == null) {
            _ap.printError( _fieldDecl, "control.field.bad.type.2" ); // depends on control dependency: [if], data = [none]
            return null; // depends on control dependency: [if], data = [none]
        }
        return new AptControlInterface(controlIntf, _ap);
    }
}
public class class_name {
    /**
     * Recursively lists the files under a classpath package directory.
     * Resolves {@code root} via the context class loader, reads the directory
     * listing line-by-line and recurses into subdirectories. Returns an empty
     * list when the package cannot be found. Exceptions are routed through
     * {@code JK.throww} (which presumably rethrows — TODO confirm), so the
     * trailing {@code return null} should be unreachable.
     *
     * @param root classpath-relative package path (e.g. "com/example/res")
     * @return the files found, or an empty list if the package is absent
     */
    public static List<File> getResourcesInnPackage(String root) {
        try {
            logger.debug("getResourcesInnPackage for package: " + root);
            ClassLoader loader = Thread.currentThread().getContextClassLoader();
            URL url = loader.getResource(root);
            if (url == null) {
                logger.debug("Package not found, return empty list");
                return Collections.emptyList();
            }
            logger.debug(" URL :" + url.toURI());
            List<File> fileNames = new ArrayList<>();
            // try-with-resources: the original leaked the reader and its
            // underlying resource stream on every call.
            try (BufferedReader br = new BufferedReader(new InputStreamReader(loader.getResourceAsStream(root)))) {
                String resource;
                while ((resource = br.readLine()) != null) {
                    if (!root.endsWith("/")) {
                        root += "/";
                    }
                    String resourcePath = root + resource;
                    logger.debug("Processing resource path:" + resourcePath);
                    File file = new File(loader.getResource(resourcePath).toURI());
                    if (file.isDirectory()) {
                        fileNames.addAll(getResourcesInnPackage(resourcePath + "/"));
                    } else {
                        logger.debug("Adding file :" + file.getName());
                        fileNames.add(file);
                    }
                }
            }
            return fileNames;
        } catch (Exception e) {
            JK.throww(e);
            return null;
        }
    }
}
// Dependency-annotated duplicate of getResourcesInnPackage: the inline
// "// depends on control dependency: [...]" comments record each statement's
// controlling construct.
public class class_name {
    // Recursively lists the files under a classpath package directory,
    // returning an empty list when the package cannot be resolved.
    public static List<File> getResourcesInnPackage(String root) {
        try {
            logger.debug("getResourcesInnPackage for package: " + root); // depends on control dependency: [try], data = [none]
            ClassLoader loader = Thread.currentThread().getContextClassLoader();
            URL url = loader.getResource(root);
            if (url != null) {
                logger.debug(" URL :" + url.toURI()); // depends on control dependency: [if], data = [none]
                InputStream in = loader.getResourceAsStream(root);
                BufferedReader br = new BufferedReader(new InputStreamReader(in));
                String resource;
                List<File> fileNames = new ArrayList<>();
                while ((resource = br.readLine()) != null) {
                    if (!root.endsWith("/")) {
                        root += "/"; // depends on control dependency: [if], data = [none]
                    }
                    String resourcePath = root + resource;
                    logger.debug("Processing resource path:" + resourcePath); // depends on control dependency: [while], data = [none]
                    File file = new File(loader.getResource(resourcePath).toURI());
                    if (file.isDirectory()) {
                        fileNames.addAll(getResourcesInnPackage(resourcePath + "/")); // depends on control dependency: [if], data = [none]
                    } else {
                        logger.debug("Adding file :" + file.getName()); // depends on control dependency: [if], data = [none]
                        fileNames.add(file); // depends on control dependency: [if], data = [none]
                    }
                }
                return fileNames; // depends on control dependency: [if], data = [none]
            } else {
                logger.debug("Package not found, return empty list"); // depends on control dependency: [if], data = [none]
                return Collections.emptyList(); // depends on control dependency: [if], data = [none]
            }
        } catch (Exception e) {
            JK.throww(e);
            return null;
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Dispatches to the type-specific decode overload for the given style.
     * Unrecognized subtypes and {@code null} yield the empty string.
     *
     * @param style the style to decode; may be null
     * @return the decoded representation, or "" when not decodable
     */
    public static String decode(Style style) {
        if (style == null) {
            return "";
        }
        if (style instanceof ShapeStyle) {
            return decode((ShapeStyle) style);
        }
        if (style instanceof FontStyle) {
            return decode((FontStyle) style);
        }
        if (style instanceof PictureStyle) {
            return decode((PictureStyle) style);
        }
        return "";
    }
}
// Dependency-annotated duplicate of decode: the inline
// "// depends on control dependency: [...]" comments record each return's
// controlling construct.
public class class_name {
    // Dispatches to the type-specific decode overload; null or an
    // unrecognized subtype yields "".
    public static String decode(Style style) {
        if (style != null) {
            if (style instanceof ShapeStyle) {
                return decode((ShapeStyle) style); // depends on control dependency: [if], data = [none]
            } else if (style instanceof FontStyle) {
                return decode((FontStyle) style); // depends on control dependency: [if], data = [none]
            } else if (style instanceof PictureStyle) {
                return decode((PictureStyle) style); // depends on control dependency: [if], data = [none]
            }
        }
        return "";
    }
}
public class class_name {
    /**
     * Reports whether {@code value} is a String containing the LIKE wildcard
     * character. Non-String values (including null) never contain it.
     *
     * @param value candidate value, possibly null
     * @return true iff value is a String containing the wildcard
     */
    protected boolean containWildcard(Object value) {
        return (value instanceof String)
                && ((String) value).contains(LIKE_WILDCARD.toString());
    }
}
// Dependency-annotated duplicate of containWildcard: the inline
// "// depends on control dependency: [...]" comment marks the early return's
// controlling construct.
public class class_name {
    // True iff value is a String containing the LIKE wildcard character.
    protected boolean containWildcard(Object value) {
        if (!(value instanceof String)) {
            return false; // depends on control dependency: [if], data = [none]
        }
        String casted = (String) value;
        return casted.contains(LIKE_WILDCARD.toString());
    }
}
public class class_name {
    /**
     * Clears the registered exception monitor, but only when the caller passes
     * the monitor that is currently registered. Synchronizes on {@code this}
     * to guard the {@code m_exceptionMonitor} field.
     *
     * @param monitor the monitor to unregister; ignored if null or not current
     */
    public void unregisterExceptionMonitor( ExceptionMonitor monitor ) {
        synchronized( this ) {
            if( monitor != null && monitor.equals( m_exceptionMonitor ) ) {
                m_exceptionMonitor = null;
            }
        }
    }
}
// Dependency-annotated duplicate of unregisterExceptionMonitor: the inline
// "// depends on control dependency: [...]" comment marks the early return's
// controlling construct.
public class class_name {
    // Clears m_exceptionMonitor only when the given monitor equals the one
    // currently registered; synchronized on this.
    public void unregisterExceptionMonitor( ExceptionMonitor monitor ) {
        synchronized( this ) {
            if( monitor == null || !monitor.equals( m_exceptionMonitor ) ) {
                return; // depends on control dependency: [if], data = [none]
            }
            m_exceptionMonitor = null;
        }
    }
}
public class class_name {
    /**
     * Applies the scale to the live transform, mirrors it into the transform
     * cache, and fires {@code onTransformChanged} only when the cached value
     * actually changed.
     *
     * @param x scale factor along x
     * @param y scale factor along y
     * @param z scale factor along z
     */
    public void setScale(float x, float y, float z) {
        getTransform().setScale(x, y, z);
        final boolean cacheChanged = mTransformCache.setScale(x, y, z);
        if (cacheChanged) {
            onTransformChanged();
        }
    }
}
// Dependency-annotated duplicate of setScale: the inline
// "// depends on control dependency: [...]" comment marks the callback's
// controlling construct.
public class class_name {
    // Sets the scale on the transform and its cache, notifying listeners only
    // when the cached value changed.
    public void setScale(float x, float y, float z) {
        getTransform().setScale(x, y, z);
        if (mTransformCache.setScale(x, y, z)) {
            onTransformChanged(); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Lazily resolves the EClass for IfcIonConcentrationMeasure from the
     * registered Ifc2x3tc1 EPackage (classifier index 690) and caches it.
     * NOTE(review): not thread-safe; concurrent first calls may both resolve —
     * harmless since both lookups return the same registry object.
     *
     * @return the IfcIonConcentrationMeasure EClass
     */
    public EClass getIfcIonConcentrationMeasure() {
        if (ifcIonConcentrationMeasureEClass != null) {
            return ifcIonConcentrationMeasureEClass;
        }
        ifcIonConcentrationMeasureEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI)
                .getEClassifiers().get(690);
        return ifcIonConcentrationMeasureEClass;
    }
}
// Dependency-annotated duplicate of getIfcIonConcentrationMeasure: the inline
// "// depends on control dependency: [...]" comment marks the lazy-init
// assignment's controlling construct.
public class class_name {
    // Lazily resolves and caches the IfcIonConcentrationMeasure EClass from
    // the registered Ifc2x3tc1 EPackage (classifier index 690).
    public EClass getIfcIonConcentrationMeasure() {
        if (ifcIonConcentrationMeasureEClass == null) {
            ifcIonConcentrationMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                    .getEClassifiers().get(690); // depends on control dependency: [if], data = [none]
        }
        return ifcIonConcentrationMeasureEClass;
    }
}