code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/*
 * Created on May 17, 2004
 *
 * Paros and its related class files.
 *
 * Paros is an HTTP/HTTPS proxy for assessing web application security.
 * Copyright (C) 2003-2004 Chinotec Technologies Company
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the Clarified Artistic License
 * as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * Clarified Artistic License for more details.
 *
 * You should have received a copy of the Clarified Artistic License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */
// ZAP: 2013/01/16 Minor fix to prevent NPE
// ZAP: 2014/10/17 Issue 1308: Updated for latest icons
// ZAP: 2015/02/10 Issue 1528: Support user defined font size
// ZAP: 2015/09/07 Move icon loading to a utility class
package org.parosproxy.paros.view;

import java.awt.Dimension;
import java.awt.Frame;
import java.awt.Image;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.WindowEvent;
import java.awt.event.WindowStateListener;
import java.util.ArrayList;
import java.util.List;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;

import javax.swing.JFrame;

import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.zaproxy.zap.utils.DisplayUtils;

/**
 * Generic Frame, which handles some basic properties.
 * <ul>
 * <li>Sets the icon(s) for the frame, which are the ZAP icons</li>
 * <li>Centers the frame on screen</li>
 * <li>Sets the frame to _not_ visible</li>
 * <li>Sets a common font for the frame</li>
 * <li>Sets a default title (ZAP application name)</li>
 * <li>Preserves window state, location and size correctly (will survive multiple session)</li>
 * </ul>
 * Hint for implementers: If you use this class,
 * don't use {@link #setSize(Dimension)}, but {@link #setPreferredSize(Dimension)}
 * instead. Also, don't use {@link #setLocation(Point)}. This abstract class
 * will automatically take care of size and position.
 */
public abstract class AbstractFrame extends JFrame {

    private static final long serialVersionUID = 6751593232255236597L;

    // Preference keys; they are always stored with a per-class prefix (see below).
    private static final String PREF_WINDOW_STATE = "window.state";
    private static final String PREF_WINDOW_SIZE = "window.size";
    private static final String PREF_WINDOW_POSITION = "window.position";

    private static final int WINDOW_DEFAULT_WIDTH = 800;
    private static final int WINDOW_DEFAULT_HEIGHT = 600;

    /**
     * Hint: Preferences are only saved by package.
     * We have to use a prefix for separation.
     */
    private final Preferences preferences;
    // Prefix keeps the preferences of different AbstractFrame subclasses apart,
    // because all of them share the same package node.
    private final String prefPrefix = this.getClass().getSimpleName() + ".";
    private final Logger logger = Logger.getLogger(AbstractFrame.class);

    /**
     * This is the default constructor.
     */
    public AbstractFrame() {
        super();
        this.preferences = Preferences.userNodeForPackage(getClass());
        initialize();
    }

    /**
     * This method initializes this frame: icons, title, restored size/position/state
     * and the listeners that persist future changes.
     */
    private void initialize() {
        // ZAP: Rebrand
        this.setIconImages(DisplayUtils.getZapIconImages());

        this.setVisible(false);
        this.setTitle(Constant.PROGRAM_NAME);

        final Dimension dim = restoreWindowSize();
        if (dim == null) {
            this.setSize(WINDOW_DEFAULT_WIDTH, WINDOW_DEFAULT_HEIGHT);
        }
        final Point point = restoreWindowLocation();
        if (point == null) {
            centerFrame();
        }
        restoreWindowState();
        this.addWindowStateListener(new FrameWindowStateListener());
        this.addComponentListener(new FrameResizedListener());
    }

    /**
     * Centre this frame on the (primary) screen, clamping the frame size to the
     * screen size first.
     */
    public void centerFrame() {
        final Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        final Dimension frameSize = this.getSize();
        if (frameSize.height > screenSize.height) {
            frameSize.height = screenSize.height;
        }
        if (frameSize.width > screenSize.width) {
            frameSize.width = screenSize.width;
        }
        this.setLocation((screenSize.width - frameSize.width) / 2,
                (screenSize.height - frameSize.height) / 2);
    }

    /**
     * Persists the simplified window state derived from the given extended state.
     *
     * @param windowstate integer value, see {@link JFrame#getExtendedState()}
     */
    private void saveWindowState(int windowstate) {
        // Note: SimpleWindowState.ICONFIED keeps its historical (misspelled) name,
        // because the enum name is what is stored in the user preferences.
        if ((windowstate & Frame.ICONIFIED) == Frame.ICONIFIED) {
            preferences.put(prefPrefix + PREF_WINDOW_STATE, SimpleWindowState.ICONFIED.toString());
            if (logger.isDebugEnabled()) logger.debug("Saving preference " + PREF_WINDOW_STATE + "=" + SimpleWindowState.ICONFIED);
        }
        if ((windowstate & Frame.MAXIMIZED_BOTH) == Frame.MAXIMIZED_BOTH) {
            preferences.put(prefPrefix + PREF_WINDOW_STATE, SimpleWindowState.MAXIMIZED.toString());
            if (logger.isDebugEnabled()) logger.debug("Saving preference " + PREF_WINDOW_STATE + "=" + SimpleWindowState.MAXIMIZED);
        }
        if (windowstate == Frame.NORMAL) { // hint: Frame.NORMAL = 0, thats why no masking
            preferences.put(prefPrefix + PREF_WINDOW_STATE, SimpleWindowState.NORMAL.toString());
            if (logger.isDebugEnabled()) logger.debug("Saving preference " + PREF_WINDOW_STATE + "=" + SimpleWindowState.NORMAL);
        }
    }

    /**
     * Loads and sets the last window state of the frame.
     * Additionally, the last state will be returned.
     *
     * @return last window state OR null
     */
    private SimpleWindowState restoreWindowState() {
        SimpleWindowState laststate = null;
        final String statestr = preferences.get(prefPrefix + PREF_WINDOW_STATE, null);
        if (logger.isDebugEnabled()) logger.debug("Restoring preference " + PREF_WINDOW_STATE + "=" + statestr);
        if (statestr != null) {
            SimpleWindowState state = null;
            try {
                state = SimpleWindowState.valueOf(statestr);
            } catch (final IllegalArgumentException e) {
                // unknown/corrupted value in the preferences; ignore it
                state = null;
            }
            if (state != null) {
                switch (state) {
                case ICONFIED:
                    this.setExtendedState(Frame.ICONIFIED);
                    break;
                case NORMAL:
                    this.setExtendedState(Frame.NORMAL);
                    break;
                case MAXIMIZED:
                    this.setExtendedState(Frame.MAXIMIZED_BOTH);
                    break;
                default:
                    logger.error("Invalid window state (nothing will changed): " + statestr);
                }
            }
            laststate = state;
        }
        return laststate;
    }

    /**
     * Saves the size of this frame, but only, if window state is 'normal'.
     * If window state is iconfied or maximized, the size is not saved!
     *
     * @param size the current frame size; ignored when {@code null}
     */
    private void saveWindowSize(Dimension size) {
        if (size != null) {
            if (getExtendedState() == Frame.NORMAL) {
                if (logger.isDebugEnabled()) logger.debug("Saving preference " + PREF_WINDOW_SIZE + "=" + size.width + "," + size.height);
                this.preferences.put(prefPrefix + PREF_WINDOW_SIZE, size.width + "," + size.height);
            } else {
                if (logger.isDebugEnabled()) logger.debug("Preference " + PREF_WINDOW_SIZE + " not saved, cause window state is not 'normal'.");
            }
        }
    }

    /**
     * Loads and set the saved size preferences for this frame.
     *
     * @return the size of the frame OR null, if there wasn't any preference.
     */
    private Dimension restoreWindowSize() {
        Dimension result = null;
        final String sizestr = preferences.get(prefPrefix + PREF_WINDOW_SIZE, null);
        if (sizestr != null) {
            int width = 0;
            int height = 0;
            final String[] sizes = sizestr.split("[,]");
            try {
                width = Integer.parseInt(sizes[0].trim());
                height = Integer.parseInt(sizes[1].trim());
            } catch (final Exception e) {
                // ignoring, cause is prevented by default values;
            }
            if (width > 0 && height > 0) {
                result = new Dimension(width, height);
                if (logger.isDebugEnabled()) logger.debug("Restoring preference " + PREF_WINDOW_SIZE + "=" + result.width + "," + result.height);
                this.setSize(result);
            }
        }
        return result;
    }

    /**
     * Saves the location of this frame, but only, if window state is 'normal'.
     * If window state is iconfied or maximized, the location is not saved!
     *
     * @param point the current frame location; ignored when {@code null}
     */
    private void saveWindowLocation(Point point) {
        if (point != null) {
            if (getExtendedState() == Frame.NORMAL) {
                if (logger.isDebugEnabled()) logger.debug("Saving preference " + PREF_WINDOW_POSITION + "=" + point.x + "," + point.y);
                this.preferences.put(prefPrefix + PREF_WINDOW_POSITION, point.x + "," + point.y);
            } else {
                if (logger.isDebugEnabled()) logger.debug("Preference " + PREF_WINDOW_POSITION + " not saved, cause window state is not 'normal'.");
            }
        }
    }

    /**
     * Loads and set the saved position preferences for this frame.
     *
     * @return the location of the frame OR null, if there wasn't any preference.
     */
    private Point restoreWindowLocation() {
        Point result = null;
        final String sizestr = preferences.get(prefPrefix + PREF_WINDOW_POSITION, null);
        if (sizestr != null) {
            int x = 0;
            int y = 0;
            final String[] sizes = sizestr.split("[,]");
            try {
                x = Integer.parseInt(sizes[0].trim());
                y = Integer.parseInt(sizes[1].trim());
            } catch (final Exception e) {
                // ignoring, cause is prevented by default values;
            }
            // Bugfix: (0, y) and (x, 0) are valid on-screen positions (top/left edge);
            // the previous "> 0" check silently discarded them.
            if (x >= 0 && y >= 0) {
                result = new Point(x, y);
                if (logger.isDebugEnabled()) logger.debug("Restoring preference " + PREF_WINDOW_POSITION + "=" + result.x + "," + result.y);
                this.setLocation(result);
            }
        }
        return result;
    }

    /**
     * @deprecated (2.4.2) Use {@link DisplayUtils#getZapIconImages()} instead. It will be removed in a future release.
     */
    @Deprecated
    @SuppressWarnings("javadoc")
    protected List<Image> loadIconImages() {
        return new ArrayList<>(DisplayUtils.getZapIconImages());
    }

    @Override
    public void dispose() {
        super.dispose();
        try {
            // Make sure the window preferences hit the backing store.
            this.preferences.flush();
        } catch (final BackingStoreException e) {
            logger.error("Error while saving the preferences", e);
        }
    }

    /*
     * ========================================================================
     */

    /** Persists the window state whenever it changes. */
    private final class FrameWindowStateListener implements WindowStateListener {
        @Override
        public void windowStateChanged(WindowEvent e) {
            saveWindowState(e.getNewState());
        }
    }

    /** Persists size and location whenever the frame is resized or moved. */
    private final class FrameResizedListener extends ComponentAdapter {

        @Override
        public void componentResized(ComponentEvent e) {
            if (e.getComponent() != null) {
                saveWindowSize(e.getComponent().getSize());
            }
        }

        @Override
        public void componentMoved(ComponentEvent e) {
            if (e.getComponent() != null) {
                saveWindowLocation(e.getComponent().getLocation());
            }
        }
    }

    /**
     * Simplified version for easier handling of the states ...
     * Note: ICONFIED is intentionally left misspelled; the name is persisted
     * in user preferences and renaming it would break restoring old values.
     */
    private enum SimpleWindowState {
        ICONFIED,
        NORMAL,
        MAXIMIZED;
    }
}  //  @jve:visual-info  decl-index=0 visual-constraint="31,17"
GillesMoris/OSS
src/org/parosproxy/paros/view/AbstractFrame.java
Java
apache-2.0
10,722
package info.novatec.testit.webtester.support.assertj; import static info.novatec.testit.webtester.support.assertj.WebTesterAssertions.assertThat; import static org.mockito.Mockito.doReturn; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import info.novatec.testit.webtester.pageobjects.RadioButton; @RunWith(MockitoJUnitRunner.class) public class RadioButtonAssertTest { @Mock RadioButton selectedRadioButton; @Mock RadioButton radioButton; @Before public void setUp() { doReturn(true).when(selectedRadioButton).isSelected(); } /* selected */ @Test public void selectedTrueTest() { assertThat(selectedRadioButton).isSelected(true); } @Test(expected = AssertionError.class) public void selectedFalseTest() { assertThat(radioButton).isSelected(true); } @Test public void notSelectedTrueTest() { assertThat(radioButton).isNotSelected(true); } @Test(expected = AssertionError.class) public void notSelectedFalseTest() { assertThat(selectedRadioButton).isNotSelected(true); } }
dbe-it/webtester-core
webtester-support-assertj/src/test/java/info/novatec/testit/webtester/support/assertj/RadioButtonAssertTest.java
Java
apache-2.0
1,213
package com.jt.test.sort; import java.util.Arrays; import java.util.Random; /** * since 2016/10/19. */ public class Select { public static void sort(Comparable[] data) { for (int i = 0; i < data.length; i++) { int min = i; for (int j = i+1; j < data.length; j++) { if (less(data, min, j)) { min = j; } } exch(data, i, min); } } private static boolean less(Comparable[] data, int min, int j) { return data[min].compareTo(data[j]) > 0; } private static void exch(Comparable[] data, int i, int min) { Comparable tmp = data[i]; data[i] = data[min]; data[min] = tmp; } public static boolean isSort(Comparable[] data) { for (int i = 0; i < data.length-1; i++) { if (less(data, i, i + 1)) { return false; } } return true; } public static void main(String[] args) throws Exception { Random random = new Random(); Integer[] datas = new Integer[10]; for (int i = 0; i < 10; i++) { datas[i] = random.nextInt(100); } sort(datas); if (!isSort(datas)) { System.err.println("not sort"); } System.out.println(Arrays.toString(datas)); } }
jt120/algorithm
new-man/src/test/java/com/jt/test/sort/Select.java
Java
apache-2.0
1,378
package com.canoo.ant.table;

import com.canoo.ant.filter.AllEqualsFilter;
import com.canoo.ant.filter.AllFilter;
import com.canoo.ant.filter.ITableFilter;
import org.apache.log4j.Logger;

import java.io.File;
import java.io.IOException;
import java.util.*;

/**
 * Base class for property tables: reads rows of {@link Properties} from some
 * container (file), expands columns that reference other tables (a JOIN-like
 * mechanism driven by a meta table), and filters/prefixes the result.
 * Subclasses only implement {@link #read(String)}.
 */
public abstract class APropertyTable implements IPropertyTable {

    private static final Logger LOG = Logger.getLogger(APropertyTable.class);
    private static final int MAX_DEPTH = 10; // max recursion depth
    // Per-thread recursion depth for the mutually recursive table expansion.
    // Typed ThreadLocal + Integer.valueOf replaces the deprecated `new Integer(..)`.
    private static final ThreadLocal<Integer> DEPTH = new ThreadLocal<Integer>();

    private File fContainer;
    private String fTable;
    private String fPrefix;
    private ITableFilter fFilter;
    private List fRawTable;   // lazily read + cached rows of this table
    private List fMetaTable;  // lazily read + cached rows of the JOIN table

    protected static final String EMPTY = "";
    protected static final String KEY_JOIN = "JOIN";

    protected APropertyTable() {
        fFilter = new AllFilter();
        if (DEPTH.get() == null) {
            setDepth(0);
        }
    }

    private static void setDepth(int depth) {
        DEPTH.set(Integer.valueOf(depth));
    }

    private static int getDepth() {
        return DEPTH.get().intValue();
    }

    /**
     * @return columnName -&gt; expander (Type IPropertyTable)
     */
    public Map getColumnInfo() {
        List meta = getMetaTable();
        Map result = new HashMap(meta.size()); // smaller is likely
        // find all properties for this table
        List tableSpecificColumnInfo = new AllEqualsFilter(TableFactory.KEY_TABLE).filter(meta, getTable());
        for (Iterator eachColumnInfo = tableSpecificColumnInfo.iterator(); eachColumnInfo.hasNext();) {
            Properties colInfo = (Properties) eachColumnInfo.next();
            try {
                // tableClass defaults to the current class
                IPropertyTable table = TableFactory.createTable(colInfo, getClass().getName());
                ITableFilter filter = TableFactory.createFilter(colInfo);
                final File container;
                if (colInfo.getProperty(TableFactory.KEY_CONTAINER, "").length() > 0) {
                    container = new File(getContainer().getParentFile(), colInfo.getProperty(TableFactory.KEY_CONTAINER));
                    colInfo.remove(TableFactory.KEY_CONTAINER); // to be sure that it doesn't get used with wrong path
                } else {
                    container = getContainer();
                }
                String key = colInfo.getProperty(TableFactory.KEY_NAME); // no default possible
                TableFactory.initOrDefault(table, filter, colInfo, container, key);
                result.put(key, table);
            } catch (Exception e) {
                LOG.error("cannot work with Property: " + colInfo.toString(), e);
                throw new RuntimeException("Cannot work with Property: " + colInfo.toString(), e);
            }
        }
        return result;
    }

    /**
     * Returns the (filtered, expanded, optionally prefixed) rows of this table.
     *
     * @param filterValue value the table filter matches rows against
     * @param prefix      if non-empty, every key is prefixed with "prefix."
     */
    public List getPropertiesList(final String filterValue, final String prefix) {
        // start with copy of initial table
        // if current filter concerns extension keys, filter before extending
        // filtering in advance also lowers memory consumption in the average
        List result = getFilter().filter(getRawTable(), filterValue);
        if (getDepth() > MAX_DEPTH) {
            LOG.error("processing grounded due to excessive recursion calls: " + getDepth());
            return result;
        }
        setDepth(getDepth() + 1);
        final Map colInfo = getColumnInfo();
        // only go over entries in the colInfo.
        // (property names without colInfo info are not expanded)
        for (Iterator eachExpandable = colInfo.keySet().iterator(); eachExpandable.hasNext();) {
            String expansionName = (String) eachExpandable.next();
            expandName(result, expansionName, colInfo);
        }
        setDepth(getDepth() - 1);
        // filter a second time to allow filters to work on expansions
        result = getFilter().filter(result, filterValue);
        // prefix is processed after filtering
        if (prefix != null && prefix.length() > 0) {
            result = mapPrefix(result, prefix);
        }
        return result;
    }

    // like a ruby map!
    private List mapPrefix(List result, final String prefix) {
        List collect = new ArrayList(result.size());
        for (Iterator eachProps = result.iterator(); eachProps.hasNext();) {
            Properties props = (Properties) eachProps.next();
            Properties mapped = new Properties();
            for (Iterator eachKey = props.keySet().iterator(); eachKey.hasNext();) {
                String key = (String) eachKey.next();
                String value = props.getProperty(key);
                mapped.setProperty(prefix + "." + key, value);
            }
            collect.add(mapped);
        }
        return collect;
    }

    /** Expands one joined column for every row in result (in place). */
    protected void expandName(List result, String expansionName, Map colInfo) {
        List expansions = new LinkedList(); // cannot add while iterating. store and add later
        for (Iterator eachProperties = result.iterator(); eachProperties.hasNext();) {
            Properties props = (Properties) eachProperties.next();
            List newExpansions = expandProps(props, expansionName, colInfo);
            // default behaviour: like OUTER join, we do not shrink if nothing found
            if (newExpansions.size() > 0) {
                eachProperties.remove();
                expansions.addAll(newExpansions);
            }
        }
        result.addAll(expansions);
    }

    /** Returns the join result of one row with the referenced expansion table. */
    protected List expandProps(Properties props, String expansionName, Map colInfo) {
        String value = props.getProperty(expansionName);
        List propExpansions = new LinkedList();
        IPropertyTable expansionTable = (IPropertyTable) colInfo.get(expansionName);
        // recursive call
        List expandWith = expansionTable.getPropertiesList(value, expansionTable.getPrefix());
        for (Iterator eachExpansion = expandWith.iterator(); eachExpansion.hasNext();) {
            Properties expandProps = (Properties) eachExpansion.next();
            // merge expansion with current line
            expandProps.putAll(props);
            // store for later adding
            propExpansions.add(expandProps);
        }
        return propExpansions;
    }

    //-------------- field accessors ------------------

    public File getContainer() {
        return fContainer;
    }

    public void setContainer(File container) {
        fContainer = container;
    }

    public String getTable() {
        return fTable;
    }

    public void setTable(String table) {
        fTable = table;
    }

    public ITableFilter getFilter() {
        return fFilter;
    }

    public void setFilter(ITableFilter filter) {
        fFilter = filter;
    }

    public String getPrefix() {
        return fPrefix;
    }

    public void setPrefix(String prefix) {
        fPrefix = prefix;
    }

    //-------------- how to read specifics ------------------

    /** lazy getter, cached */
    public List getRawTable() {
        fRawTable = getCachedTable(getTable(), fRawTable);
        return fRawTable;
    }

    /** lazy getter, cached */
    public List getMetaTable() {
        if (hasJoinTable()) {
            fMetaTable = getCachedTable(KEY_JOIN, fMetaTable);
        } else {
            fMetaTable = Collections.EMPTY_LIST;
        }
        return fMetaTable;
    }

    /**
     * Indicates if the table container has a JOIN table.
     * @return default is <code>true</code>
     */
    protected boolean hasJoinTable() {
        return true;
    }

    /** Reads a table once and reuses the cached result on later calls. */
    protected List getCachedTable(final String table, List tableCache) {
        if (tableCache != null) {
            return tableCache;
        }
        try {
            tableCache = read(table);
        } catch (final IOException e) {
            LOG.error("Cannot read " + getContainer() + " " + table, e);
            String message = "Cannot read container >" + getContainer() + "<";
            if (table != null) message += " (table " + table + ")";
            message += ": " + e.getMessage();
            throw new RuntimeException(message, e);
        }
        if (tableCache.isEmpty()) {
            LOG.debug("no entry in " + getContainer() + "/" + table);
        }
        LOG.debug(tableCache.size() + " entries in " + getContainer() + " " + table);
        return tableCache;
    }

    /** Reads all rows of the named table from the container. */
    protected abstract List read(String table) throws IOException;
}
lukecampbell/webtest
src/main/java/com/canoo/ant/table/APropertyTable.java
Java
apache-2.0
8,495
// For conditions of distribution and use, see copyright notice in LICENSE #include "StableHeaders.h" #include "ZipAssetBundle.h" #include "ZipHelpers.h" #include "ZipWorker.h" #include "CoreDefines.h" #include "Framework.h" #include "FrameAPI.h" #include "AssetAPI.h" #include "AssetCache.h" #include "LoggingFunctions.h" #include <Urho3D/IO/FileSystem.h> #include <zzip/zzip.h> namespace Tundra { ZipAssetBundle::ZipAssetBundle(AssetAPI *owner, const String &type, const String &name) : IAssetBundle(owner, type, name), worker_(0), archive_(0), fileCount_(-1), done_(false), success_(false) { } ZipAssetBundle::~ZipAssetBundle() { Unload(); } void ZipAssetBundle::DoUnload() { Close(); StopThread(); fileCount_ = -1; } void ZipAssetBundle::Close() { if (archive_) { zzip_dir_close(archive_); archive_ = 0; } } bool ZipAssetBundle::DeserializeFromDiskSource() { if (!assetAPI_->Cache()) { LogError("ZipAssetBundle::DeserializeFromDiskSource: Cannot process archive, AssetAPI cache is null."); return false; } else if (DiskSource().Empty()) { LogError("ZipAssetBundle::DeserializeFromDiskSource: Cannot process archive, no disk source for " + Name()); return false; } /* We want to detect if the extracted files are already up to date to save time. If the last modified date for the sub asset is the same as the parent zip file, we don't extract it. If the zip is re-downloaded from source everything will get unpacked even if only one file would have changed inside it. We could do uncompressed size comparisons but that is not a absolute guarantee that the file has not changed. We'll be on the safe side to unpack the whole zip file. Zip files are meant for deploying the scene and should be touched rather rarely. Note that local:// refs are unpacked to cache but the zips disk source is not in the cache. 
Meaning that local:// zip files will always be extracted fully even if the disk source was not changed, we don't have a mechanism to get the last modified date properly except from the asset cache. For local scenes this should be fine as there is no real need to zip the scene up as you already have the disk sources right there in the storage. The last modified query will fail if the file is open with zziplib, do it first. */ uint zipLastModified = assetAPI_->Cache()->LastModified(Name()); const String diskSourceInternal = Urho3D::GetInternalPath(DiskSource()); zzip_error_t error = ZZIP_NO_ERROR; archive_ = zzip_dir_open(diskSourceInternal.CString(), &error); if (CheckAndLogZzipError(error) || CheckAndLogArchiveError(archive_) || !archive_) { archive_ = 0; return false; } int uncompressing = 0; ZZIP_DIRENT archiveEntry; while(zzip_dir_read(archive_, &archiveEntry)) { String relativePath = Urho3D::GetInternalPath(archiveEntry.d_name); if (!relativePath.EndsWith("/")) { String subAssetRef = GetFullAssetReference(relativePath); ZipArchiveFile file; file.relativePath = relativePath; file.cachePath = Urho3D::GetInternalPath(assetAPI_->Cache()->DiskSourceByRef(subAssetRef)); file.lastModified = assetAPI_->Cache()->LastModified(subAssetRef); file.compressedSize = archiveEntry.d_csize; file.uncompressedSize = archiveEntry.st_size; /* Mark this file for extraction. If both cache files have valid dates and they differ extract. If they have the same date stamp skip extraction. Note that file.lastModified will be non-valid for non cached files so we will cover also missing files. */ file.doExtract = (zipLastModified > 0 && file.lastModified > 0) ? (zipLastModified != file.lastModified) : true; if (file.doExtract) uncompressing++; files_.Push(file); fileCount_++; } } // Close the zzip directory ptr Close(); // If the zip file was empty we don't want IsLoaded to fail on the files_ check. // The bundle loaded fine but there was no content, log a warning. 
if (files_.Empty()) { LogWarning("ZipAssetBundle: Bundle loaded but does not contain any files " + Name()); files_.Push(ZipArchiveFile()); Loaded.Emit(this); return true; } // Don't spin the worker if all sub assets are up to date in cache. if (uncompressing > 0) { // Now that the file info has been read, continue in a worker thread. LogDebug("ZipAssetBundle: File information read for " + Name() + ". File count: " + String(files_.Size()) + ". Starting worker thread to uncompress " + String(uncompressing) + " files."); // ZipWorker is a QRunnable we can pass to QThreadPool, it will handle scheduling it and deletes it when done. worker_ = new ZipWorker(this, zipLastModified, diskSourceInternal, files_); if (!worker_->Run()) { LogError("ZipAssetBundle: Failed to start worker thread for " + Name()); files_.Clear(); return false; } assetAPI_->GetFramework()->Frame()->Updated.Connect(this, &ZipAssetBundle::CheckDone); } else Loaded.Emit(this); return true; } bool ZipAssetBundle::DeserializeFromData(const u8* /*data*/, uint /*numBytes*/) { /** @note At this point it seems zzip needs a disk source to do processing so we require disk source for the archive. This might change in the future by changing the lib. */ return false; } Vector<u8> ZipAssetBundle::GetSubAssetData(const String &subAssetName) { /* Makes no sense to keep the whole zip file contents in memory as only few files could be wanted from a 100mb bundle. Additionally all asset would take 2x the memory. We could make this function also open the zip file and uncompress the data for every sub asset request. But that would be rather pointless, not to mention slower, as we already have the unpacked individual assets on disk. If the unpacking to disk changes we might need to rethink this. */ String filePath = GetSubAssetDiskSource(subAssetName); if (filePath.Empty()) return Vector<u8>(); Vector<u8> data; return LoadFileToVector(filePath, data) ? 
data : Vector<u8>(); } String ZipAssetBundle::GetSubAssetDiskSource(const String &subAssetName) { return assetAPI_->Cache()->FindInCache(GetFullAssetReference(subAssetName)); } String ZipAssetBundle::GetFullAssetReference(const String &subAssetName) { return Name() + "#" + subAssetName; } bool ZipAssetBundle::IsLoaded() const { return (archive_ != 0 || !files_.Empty()); } void ZipAssetBundle::CheckDone(float /*frametime*/) { // Invoked in main thread context { Urho3D::MutexLock m(mutexDone_); if (!done_) return; if (success_) Loaded.Emit(this); else Failed.Emit(this); } StopThread(); assetAPI_->GetFramework()->Frame()->Updated.Disconnect(this, &ZipAssetBundle::CheckDone); } void ZipAssetBundle::WorkerDone(bool successful) { // Invoked in worker thread context Urho3D::MutexLock m(mutexDone_); done_ = true; success_ = successful; } void ZipAssetBundle::StopThread() { if (worker_) worker_->Stop(); SAFE_DELETE(worker_); } Urho3D::Context *ZipAssetBundle::Context() const { return assetAPI_->GetContext(); } Urho3D::FileSystem *ZipAssetBundle::FileSystem() const { return assetAPI_->GetSubsystem<Urho3D::FileSystem>(); } }
realXtend/tundra-urho3d
src/Plugins/ZipPlugin/ZipAssetBundle.cpp
C++
apache-2.0
7,865
import { Injectable } from "@angular/core"; import { InjectionFactory } from "../../L0/L0.injection-factory/injection-factory"; import { createSelector } from "../../L4/L4.ngrx/create-selector"; import { StatementsSelector } from "./statements.selector"; import { ExportDeclaration, SyntaxKind } from "typescript"; @Injectable() export class ExportDeclarationsSelector implements InjectionFactory { constructor(private readonly statementsSelector: StatementsSelector) { return this.factory() as any; } factory() { return createSelector( this.statementsSelector, statements => statements .filter(({kind}) => kind === SyntaxKind.ExportDeclaration) .map(item => item as ExportDeclaration) ); } }
dvabuzyarov/moq.ts
projects/schematics/src/L2/L2.selectors/export-declarations.selector.ts
TypeScript
apache-2.0
791
using System; using System.Collections.Generic; using System.Configuration; using System.Linq; using System.Net; using System.Net.Mail; using System.Web; namespace FashionStones.Utils { public class EmailSettings { public string Link = "www.fashion-stones.com.ua"; public string MailFromAddress = "kapitoshka0777@gmail.com"; public string ServerName = "smtp.gmail.com"; public bool UseSsl = true; public int ServerPort = 587; //465; public string password = "8425999kapitoshka"; } //public class GMailer //{ // public static string GmailUsername { get { return "kapitoshka0777@gmail.com"; } } // public static string GmailPassword { get {return "8425999kapitoshka";} } // public static int GmailPort { get; set; } // public static bool GmailSSL { get; set; } // public string ToEmail { get; set; } // public string Subject { get; set; } // public string Body { get; set; } // public bool IsHtml { get; set; } // static GMailer() // { // GmailHost = "smtp.gmail.com"; // GmailPort = 587; // Gmail can use ports 25, 465 & 587; but must be 25 for medium trust environment. // GmailSSL = true; // } //public void Send() //{ // SmtpClient smtp = new SmtpClient(); // smtp.Host = GmailHost; // smtp.Port = GmailPort; // smtp.EnableSsl = GmailSSL; // smtp.DeliveryMethod = SmtpDeliveryMethod.Network; // smtp.UseDefaultCredentials = false; // smtp.Credentials = new NetworkCredential(GmailUsername, GmailPassword); // using (var message = new MailMessage(GmailUsername, ToEmail)) // { // message.Subject = Subject; // message.Body = Body; // message.IsBodyHtml = IsHtml; // smtp.Send(message); // } //} // } }
dimakaminskiy/FashionStones
FashionStones/Utils/EmailSettings.cs
C#
apache-2.0
1,994
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.spanner; import static com.google.cloud.spanner.SpannerExceptionFactory.newSpannerException; import static com.google.cloud.spanner.SpannerExceptionFactory.newSpannerExceptionForCancellation; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.api.client.util.BackOff; import com.google.api.client.util.ExponentialBackOff; import com.google.api.gax.retrying.RetrySettings; import com.google.cloud.ByteArray; import com.google.cloud.Date; import com.google.cloud.Timestamp; import com.google.cloud.spanner.Type.StructField; import com.google.cloud.spanner.spi.v1.SpannerRpc; import com.google.cloud.spanner.v1.stub.SpannerStubSettings; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.AbstractIterator; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.util.concurrent.Uninterruptibles; import com.google.protobuf.ByteString; import com.google.protobuf.ListValue; import com.google.protobuf.Value.KindCase; import com.google.spanner.v1.PartialResultSet; import com.google.spanner.v1.ResultSetMetadata; import com.google.spanner.v1.ResultSetStats; import com.google.spanner.v1.Transaction; import com.google.spanner.v1.TypeCode; import 
io.grpc.Context;
import io.opencensus.common.Scope;
import io.opencensus.trace.AttributeValue;
import io.opencensus.trace.Span;
import io.opencensus.trace.Tracer;
import io.opencensus.trace.Tracing;
import java.io.IOException;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;

/** Implementation of {@link ResultSet}. */
abstract class AbstractResultSet<R> extends AbstractStructReader implements ResultSet {
  private static final Tracer tracer = Tracing.getTracer();

  /** Callback interface used to report transaction metadata and stream completion or failure. */
  interface Listener {
    /**
     * Called when transaction metadata is seen. This method may be invoked at most once. If the
     * method is invoked, it will precede {@link #onError(SpannerException, boolean)} or
     * {@link #onDone(boolean)}.
     */
    void onTransactionMetadata(Transaction transaction, boolean shouldIncludeId)
        throws SpannerException;

    /** Called when the read finishes with an error. Returns the error that should be thrown. */
    SpannerException onError(SpannerException e, boolean withBeginTransaction);

    /** Called when the read finishes normally. */
    void onDone(boolean withBeginTransaction);
  }

  /**
   * {@link ResultSet} implementation backed by a gRPC stream of {@code PartialResultSet}
   * messages. Rows are assembled lazily, one per {@link #next()} call.
   */
  @VisibleForTesting
  static class GrpcResultSet extends AbstractResultSet<List<Object>> {
    private final GrpcValueIterator iterator;
    private final Listener listener;
    // Row most recently produced by next(); null until the first successful next() call.
    private GrpcStruct currRow;
    // NOTE(review): never assigned anywhere in the code visible here — presumably set elsewhere
    // or dead state; confirm before relying on the error-rethrow branch in next().
    private SpannerException error;
    // Query statistics; delivered with the last message, so only set once fully consumed.
    private ResultSetStats statistics;
    private boolean closed;

    GrpcResultSet(CloseableIterator<PartialResultSet> iterator, Listener listener) {
      this.iterator = new GrpcValueIterator(iterator);
      this.listener = listener;
    }

    /** Returns the current row; requires an open result set and a prior successful next(). */
    @Override
    protected GrpcStruct currRow() {
      checkState(!closed, "ResultSet is closed");
      checkState(currRow != null, "next() call required");
      return currRow;
    }

    /**
     * Advances to the next row, consuming values from the underlying stream.
     *
     * <p>On the first call this also surfaces the transaction metadata to the listener (or fails
     * if a transaction was requested but not returned). Any failure is routed through
     * {@link #yieldError} so the listener can translate it before it is thrown.
     */
    @Override
    public boolean next() throws SpannerException {
      if (error != null) {
        throw newSpannerException(error);
      }
      try {
        if (currRow == null) {
          // First call: metadata must be available before any row data can be decoded.
          ResultSetMetadata metadata = iterator.getMetadata();
          if (metadata.hasTransaction()) {
            listener.onTransactionMetadata(
                metadata.getTransaction(), iterator.isWithBeginTransaction());
          } else if (iterator.isWithBeginTransaction()) {
            // The query should have returned a transaction.
            throw SpannerExceptionFactory.newSpannerException(
                ErrorCode.FAILED_PRECONDITION, AbstractReadContext.NO_TRANSACTION_RETURNED_MSG);
          }
          currRow = new GrpcStruct(iterator.type(), new ArrayList<>());
        }
        boolean hasNext = currRow.consumeRow(iterator);
        if (!hasNext) {
          // Stats arrive with the final PartialResultSet, i.e. only when fully consumed.
          statistics = iterator.getStats();
        }
        return hasNext;
      } catch (Throwable t) {
        throw yieldError(
            SpannerExceptionFactory.asSpannerException(t),
            iterator.isWithBeginTransaction() && currRow == null);
      }
    }

    /** Returns query statistics, or null until the stream has been fully consumed. */
    @Override
    @Nullable
    public ResultSetStats getStats() {
      return statistics;
    }

    /** Notifies the listener that the read is done and closes the underlying stream. */
    @Override
    public void close() {
      listener.onDone(iterator.isWithBeginTransaction());
      iterator.close("ResultSet closed");
      closed = true;
    }

    @Override
    public Type getType() {
      checkState(currRow != null, "next() call required");
      return currRow.getType();
    }

    /** Reports {@code e} to the listener, closes this result set, and throws the result. */
    private SpannerException yieldError(SpannerException e, boolean beginTransaction) {
      SpannerException toThrow = listener.onError(e, beginTransaction);
      close();
      throw toThrow;
    }
  }

  /**
   * Adapts a stream of {@code PartialResultSet} messages into a stream of {@code Value} messages.
   */
  private static class GrpcValueIterator extends AbstractIterator<com.google.protobuf.Value> {
    private enum StreamValue {
      METADATA,
      RESULT,
    }

    private final CloseableIterator<PartialResultSet> stream;
    // Metadata from the first message on the stream; set lazily by ensureReady().
    private ResultSetMetadata metadata;
    // Row type derived from the metadata; set together with metadata.
    private Type type;
    // Message currently being consumed and the index of the next value within it.
    private PartialResultSet current;
    private int pos;
    // Stats from the final PartialResultSet, if any was received.
    private ResultSetStats statistics;

    GrpcValueIterator(CloseableIterator<PartialResultSet> stream) {
      this.stream = stream;
    }

    /**
     * Produces the next complete value, reassembling values the server split ("chunked") across
     * several {@code PartialResultSet} messages. Only string and list values can be chunked;
     * chunked pieces are merged until a message arrives whose last value is not marked chunked.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected com.google.protobuf.Value computeNext() {
      if (!ensureReady(StreamValue.RESULT)) {
        endOfData();
        return null;
      }
      com.google.protobuf.Value value = current.getValues(pos++);
      KindCase kind = value.getKindCase();

      if (!isMergeable(kind)) {
        if (pos == current.getValuesCount() && current.getChunkedValue()) {
          // Only strings and lists may be chunked; anything else is a protocol violation.
          throw newSpannerException(ErrorCode.INTERNAL, "Unexpected chunked PartialResultSet.");
        } else {
          return value;
        }
      }
      if (!current.getChunkedValue() || pos != current.getValuesCount()) {
        // Not the last value of a chunked message, so the value is already complete.
        return value;
      }

      Object merged =
          kind == KindCase.STRING_VALUE
              ? value.getStringValue()
              : new ArrayList<>(value.getListValue().getValuesList());
      // Keep pulling messages while the tail value remains chunked.
      while (current.getChunkedValue() && pos == current.getValuesCount()) {
        if (!ensureReady(StreamValue.RESULT)) {
          throw newSpannerException(
              ErrorCode.INTERNAL, "Stream closed in the middle of chunked value");
        }
        com.google.protobuf.Value newValue = current.getValues(pos++);
        if (newValue.getKindCase() != kind) {
          throw newSpannerException(
              ErrorCode.INTERNAL,
              "Unexpected type in middle of chunked value. Expected: "
                  + kind
                  + " but got: "
                  + newValue.getKindCase());
        }
        if (kind == KindCase.STRING_VALUE) {
          merged = merged + newValue.getStringValue();
        } else {
          concatLists(
              (List<com.google.protobuf.Value>) merged, newValue.getListValue().getValuesList());
        }
      }
      if (kind == KindCase.STRING_VALUE) {
        return com.google.protobuf.Value.newBuilder().setStringValue((String) merged).build();
      } else {
        return com.google.protobuf.Value.newBuilder()
            .setListValue(
                ListValue.newBuilder().addAllValues((List<com.google.protobuf.Value>) merged))
            .build();
      }
    }

    /** Returns the stream metadata, reading ahead to the first message if necessary. */
    ResultSetMetadata getMetadata() throws SpannerException {
      if (metadata == null) {
        if (!ensureReady(StreamValue.METADATA)) {
          throw newSpannerException(ErrorCode.INTERNAL, "Stream closed without sending metadata");
        }
      }
      return metadata;
    }

    /**
     * Get the query statistics. Query statistics are delivered with the last PartialResultSet in
     * the stream. Any attempt to call this method before the caller has finished consuming the
     * results will return null.
     */
    @Nullable
    ResultSetStats getStats() {
      return statistics;
    }

    /** Returns the row type; metadata must already have been received. */
    Type type() {
      checkState(type != null, "metadata has not been received");
      return type;
    }

    /**
     * Advances {@code current}/{@code pos} until a value is available — or, when
     * {@code requiredValue} is METADATA, until the first message has been read. Returns false
     * when the stream is exhausted. Also captures metadata/row type from the first message and
     * stats from any message that carries them.
     */
    private boolean ensureReady(StreamValue requiredValue) throws SpannerException {
      while (current == null || pos >= current.getValuesCount()) {
        if (!stream.hasNext()) {
          return false;
        }
        current = stream.next();
        pos = 0;
        if (type == null) {
          // This is the first message on the stream.
          if (!current.hasMetadata() || !current.getMetadata().hasRowType()) {
            throw newSpannerException(ErrorCode.INTERNAL, "Missing type metadata in first message");
          }
          metadata = current.getMetadata();
          com.google.spanner.v1.Type typeProto =
              com.google.spanner.v1.Type.newBuilder()
                  .setCode(TypeCode.STRUCT)
                  .setStructType(metadata.getRowType())
                  .build();
          try {
            type = Type.fromProto(typeProto);
          } catch (IllegalArgumentException e) {
            throw newSpannerException(
                ErrorCode.INTERNAL, "Invalid type metadata: " + e.getMessage(), e);
          }
        }
        if (current.hasStats()) {
          statistics = current.getStats();
        }
        if (requiredValue == StreamValue.METADATA) {
          return true;
        }
      }
      return true;
    }

    /** Closes the underlying stream, attaching {@code message} to the final RPC status. */
    void close(@Nullable String message) {
      stream.close(message);
    }

    boolean isWithBeginTransaction() {
      return stream.isWithBeginTransaction();
    }

    /** @param a is a mutable list and b will be concatenated into a. */
    private void concatLists(List<com.google.protobuf.Value> a, List<com.google.protobuf.Value> b) {
      if (a.size() == 0 || b.size() == 0) {
        a.addAll(b);
        return;
      } else {
        com.google.protobuf.Value last = a.get(a.size() - 1);
        com.google.protobuf.Value first = b.get(0);
        KindCase lastKind = last.getKindCase();
        KindCase firstKind = first.getKindCase();
        // If the boundary elements are mergeable and of the same kind, they are two halves of a
        // single chunked element and must themselves be merged (recursively for nested lists).
        if (isMergeable(lastKind) && lastKind == firstKind) {
          com.google.protobuf.Value merged;
          if (lastKind == KindCase.STRING_VALUE) {
            String lastStr = last.getStringValue();
            String firstStr = first.getStringValue();
            merged =
                com.google.protobuf.Value.newBuilder().setStringValue(lastStr + firstStr).build();
          } else { // List
            List<com.google.protobuf.Value> mergedList = new ArrayList<>();
            mergedList.addAll(last.getListValue().getValuesList());
            concatLists(mergedList, first.getListValue().getValuesList());
            merged =
                com.google.protobuf.Value.newBuilder()
                    .setListValue(ListValue.newBuilder().addAllValues(mergedList))
                    .build();
          }
          a.set(a.size() - 1, merged);
          a.addAll(b.subList(1, b.size()));
        } else {
          a.addAll(b);
        }
      }
    }

    /** Only strings and lists can be split across PartialResultSet messages. */
    private boolean isMergeable(KindCase kind) {
      return kind ==
KindCase.STRING_VALUE || kind == KindCase.LIST_VALUE;
    }
  }

  /**
   * Row implementation backed by the decoded values of a single result row. Serializable via a
   * {@link Struct} builder proxy (see {@link #writeReplace()}).
   */
  static class GrpcStruct extends Struct implements Serializable {
    // Declared row type (a STRUCT of the selected columns).
    private final Type type;
    // Decoded column values, in column order; refilled per row by consumeRow().
    private final List<Object> rowData;

    /**
     * Builds an immutable version of this struct using {@link Struct#newBuilder()} which is used
     * as a serialization proxy.
     */
    private Object writeReplace() {
      Builder builder = Struct.newBuilder();
      List<Type.StructField> structFields = getType().getStructFields();
      for (int i = 0; i < structFields.size(); i++) {
        Type.StructField field = structFields.get(i);
        String fieldName = field.getName();
        Object value = rowData.get(i);
        Type fieldType = field.getType();
        switch (fieldType.getCode()) {
          case BOOL:
            builder.set(fieldName).to((Boolean) value);
            break;
          case INT64:
            builder.set(fieldName).to((Long) value);
            break;
          case FLOAT64:
            builder.set(fieldName).to((Double) value);
            break;
          case NUMERIC:
            builder.set(fieldName).to((BigDecimal) value);
            break;
          case STRING:
            builder.set(fieldName).to((String) value);
            break;
          case JSON:
            builder.set(fieldName).to(Value.json((String) value));
            break;
          case BYTES:
            builder.set(fieldName).to((ByteArray) value);
            break;
          case TIMESTAMP:
            builder.set(fieldName).to((Timestamp) value);
            break;
          case DATE:
            builder.set(fieldName).to((Date) value);
            break;
          case ARRAY:
            // Array columns dispatch again on the element type.
            switch (fieldType.getArrayElementType().getCode()) {
              case BOOL:
                builder.set(fieldName).toBoolArray((Iterable<Boolean>) value);
                break;
              case INT64:
                builder.set(fieldName).toInt64Array((Iterable<Long>) value);
                break;
              case FLOAT64:
                builder.set(fieldName).toFloat64Array((Iterable<Double>) value);
                break;
              case NUMERIC:
                builder.set(fieldName).toNumericArray((Iterable<BigDecimal>) value);
                break;
              case STRING:
                builder.set(fieldName).toStringArray((Iterable<String>) value);
                break;
              case JSON:
                builder.set(fieldName).toJsonArray((Iterable<String>) value);
                break;
              case BYTES:
                builder.set(fieldName).toBytesArray((Iterable<ByteArray>) value);
                break;
              case TIMESTAMP:
                builder.set(fieldName).toTimestampArray((Iterable<Timestamp>) value);
break; case DATE: builder.set(fieldName).toDateArray((Iterable<Date>) value); break; case STRUCT: builder .set(fieldName) .toStructArray(fieldType.getArrayElementType(), (Iterable<Struct>) value); break; default: throw new AssertionError( "Unhandled array type code: " + fieldType.getArrayElementType()); } break; case STRUCT: if (value == null) { builder.set(fieldName).to(fieldType, null); } else { builder.set(fieldName).to((Struct) value); } break; default: throw new AssertionError("Unhandled type code: " + fieldType.getCode()); } } return builder.build(); } GrpcStruct(Type type, List<Object> rowData) { this.type = type; this.rowData = rowData; } @Override public String toString() { return this.rowData.toString(); } boolean consumeRow(Iterator<com.google.protobuf.Value> iterator) { rowData.clear(); if (!iterator.hasNext()) { return false; } for (Type.StructField fieldType : getType().getStructFields()) { if (!iterator.hasNext()) { throw newSpannerException( ErrorCode.INTERNAL, "Invalid value stream: end of stream reached before row is complete"); } com.google.protobuf.Value value = iterator.next(); rowData.add(decodeValue(fieldType.getType(), value)); } return true; } private static Object decodeValue(Type fieldType, com.google.protobuf.Value proto) { if (proto.getKindCase() == KindCase.NULL_VALUE) { return null; } switch (fieldType.getCode()) { case BOOL: checkType(fieldType, proto, KindCase.BOOL_VALUE); return proto.getBoolValue(); case INT64: checkType(fieldType, proto, KindCase.STRING_VALUE); return Long.parseLong(proto.getStringValue()); case FLOAT64: return valueProtoToFloat64(proto); case NUMERIC: return new BigDecimal(proto.getStringValue()); case STRING: case JSON: checkType(fieldType, proto, KindCase.STRING_VALUE); return proto.getStringValue(); case BYTES: checkType(fieldType, proto, KindCase.STRING_VALUE); return ByteArray.fromBase64(proto.getStringValue()); case TIMESTAMP: checkType(fieldType, proto, KindCase.STRING_VALUE); return 
Timestamp.parseTimestamp(proto.getStringValue());
        case DATE:
          checkType(fieldType, proto, KindCase.STRING_VALUE);
          return Date.parseDate(proto.getStringValue());
        case ARRAY:
          checkType(fieldType, proto, KindCase.LIST_VALUE);
          ListValue listValue = proto.getListValue();
          return decodeArrayValue(fieldType.getArrayElementType(), listValue);
        case STRUCT:
          checkType(fieldType, proto, KindCase.LIST_VALUE);
          ListValue structValue = proto.getListValue();
          return decodeStructValue(fieldType, structValue);
        default:
          throw new AssertionError("Unhandled type code: " + fieldType.getCode());
      }
    }

    /**
     * Decodes a proto ListValue into a {@link GrpcStruct} whose fields are described by
     * {@code structType}; the element count must match the number of declared fields.
     */
    private static Struct decodeStructValue(Type structType, ListValue structValue) {
      List<Type.StructField> fieldTypes = structType.getStructFields();
      checkArgument(
          structValue.getValuesCount() == fieldTypes.size(),
          "Size mismatch between type descriptor and actual values.");
      List<Object> fields = new ArrayList<>(fieldTypes.size());
      List<com.google.protobuf.Value> fieldValues = structValue.getValuesList();
      for (int i = 0; i < fieldTypes.size(); ++i) {
        fields.add(decodeValue(fieldTypes.get(i).getType(), fieldValues.get(i)));
      }
      return new GrpcStruct(structType, fields);
    }

    /**
     * Decodes a proto ListValue representing an ARRAY column. Depending on the element type the
     * result is a lazy transforming view, a primitive-backed container, or a materialized list.
     */
    static Object decodeArrayValue(Type elementType, ListValue listValue) {
      switch (elementType.getCode()) {
        case BOOL:
          // Use a view: element conversion is virtually free.
          return Lists.transform(
              listValue.getValuesList(),
              input -> input.getKindCase() == KindCase.NULL_VALUE ? null : input.getBoolValue());
        case INT64:
          // For int64/float64 types, use custom containers. These avoid wrapper object
          // creation for non-null arrays.
          return new Int64Array(listValue);
        case FLOAT64:
          return new Float64Array(listValue);
        case NUMERIC:
          {
            // Materialize list: element conversion is expensive and should happen only once.
            ArrayList<Object> list = new ArrayList<>(listValue.getValuesCount());
            for (com.google.protobuf.Value value : listValue.getValuesList()) {
              list.add(
                  value.getKindCase() == KindCase.NULL_VALUE ?
null : new BigDecimal(value.getStringValue()));
            }
            return list;
          }
        case STRING:
        case JSON:
          // Use a view: element conversion is virtually free.
          return Lists.transform(
              listValue.getValuesList(),
              input -> input.getKindCase() == KindCase.NULL_VALUE ? null : input.getStringValue());
        case BYTES:
          {
            // Materialize list: element conversion is expensive and should happen only once.
            ArrayList<Object> list = new ArrayList<>(listValue.getValuesCount());
            for (com.google.protobuf.Value value : listValue.getValuesList()) {
              list.add(
                  value.getKindCase() == KindCase.NULL_VALUE
                      ? null
                      : ByteArray.fromBase64(value.getStringValue()));
            }
            return list;
          }
        case TIMESTAMP:
          {
            // Materialize list: element conversion is expensive and should happen only once.
            ArrayList<Object> list = new ArrayList<>(listValue.getValuesCount());
            for (com.google.protobuf.Value value : listValue.getValuesList()) {
              list.add(
                  value.getKindCase() == KindCase.NULL_VALUE
                      ? null
                      : Timestamp.parseTimestamp(value.getStringValue()));
            }
            return list;
          }
        case DATE:
          {
            // Materialize list: element conversion is expensive and should happen only once.
            ArrayList<Object> list = new ArrayList<>(listValue.getValuesCount());
            for (com.google.protobuf.Value value : listValue.getValuesList()) {
              list.add(
                  value.getKindCase() == KindCase.NULL_VALUE ?
null : Date.parseDate(value.getStringValue()));
            }
            return list;
          }
        case STRUCT:
          {
            ArrayList<Struct> list = new ArrayList<>(listValue.getValuesCount());
            for (com.google.protobuf.Value value : listValue.getValuesList()) {
              if (value.getKindCase() == KindCase.NULL_VALUE) {
                list.add(null);
              } else {
                // Each non-null element is itself a ListValue of the struct's field values.
                ListValue structValue = value.getListValue();
                list.add(decodeStructValue(elementType, structValue));
              }
            }
            return list;
          }
        default:
          throw new AssertionError("Unhandled type code: " + elementType.getCode());
      }
    }

    /** Throws INTERNAL if the proto value's wire kind does not match the expected kind. */
    private static void checkType(
        Type fieldType, com.google.protobuf.Value proto, KindCase expected) {
      if (proto.getKindCase() != expected) {
        throw newSpannerException(
            ErrorCode.INTERNAL,
            "Invalid value for column type "
                + fieldType
                + " expected "
                + expected
                + " but was "
                + proto.getKindCase());
      }
    }

    /** Returns a copy whose row data is detached from this buffer (which consumeRow() reuses). */
    Struct immutableCopy() {
      return new GrpcStruct(type, new ArrayList<>(rowData));
    }

    @Override
    public Type getType() {
      return type;
    }

    @Override
    public boolean isNull(int columnIndex) {
      return rowData.get(columnIndex) == null;
    }

    // The getXxxInternal accessors below simply cast the decoded row value; the cast is safe
    // because decodeValue() was driven by the column's declared type.
    @Override
    protected boolean getBooleanInternal(int columnIndex) {
      return (Boolean) rowData.get(columnIndex);
    }

    @Override
    protected long getLongInternal(int columnIndex) {
      return (Long) rowData.get(columnIndex);
    }

    @Override
    protected double getDoubleInternal(int columnIndex) {
      return (Double) rowData.get(columnIndex);
    }

    @Override
    protected BigDecimal getBigDecimalInternal(int columnIndex) {
      return (BigDecimal) rowData.get(columnIndex);
    }

    @Override
    protected String getStringInternal(int columnIndex) {
      return (String) rowData.get(columnIndex);
    }

    @Override
    protected String getJsonInternal(int columnIndex) {
      return (String) rowData.get(columnIndex);
    }

    @Override
    protected ByteArray getBytesInternal(int columnIndex) {
      return (ByteArray) rowData.get(columnIndex);
    }

    @Override
    protected Timestamp getTimestampInternal(int columnIndex) {
      return (Timestamp) rowData.get(columnIndex);
    }

    @Override
    protected Date getDateInternal(int columnIndex) {
      return (Date)
rowData.get(columnIndex);
    }

    /**
     * Returns the value of the given column wrapped as a {@link Value}, dispatching on the
     * column's declared type. NULLs are represented as typed null Values.
     */
    @Override
    protected Value getValueInternal(int columnIndex) {
      final List<Type.StructField> structFields = getType().getStructFields();
      final StructField structField = structFields.get(columnIndex);
      final Type columnType = structField.getType();
      final boolean isNull = rowData.get(columnIndex) == null;
      // NOTE(review): JSON columns have no case here (unlike decodeValue/writeReplace) and would
      // fall through to the default throw — confirm whether Value.json support is intended.
      switch (columnType.getCode()) {
        case BOOL:
          return Value.bool(isNull ? null : getBooleanInternal(columnIndex));
        case INT64:
          return Value.int64(isNull ? null : getLongInternal(columnIndex));
        case NUMERIC:
          return Value.numeric(isNull ? null : getBigDecimalInternal(columnIndex));
        case FLOAT64:
          return Value.float64(isNull ? null : getDoubleInternal(columnIndex));
        case STRING:
          return Value.string(isNull ? null : getStringInternal(columnIndex));
        case BYTES:
          return Value.bytes(isNull ? null : getBytesInternal(columnIndex));
        case TIMESTAMP:
          return Value.timestamp(isNull ? null : getTimestampInternal(columnIndex));
        case DATE:
          return Value.date(isNull ? null : getDateInternal(columnIndex));
        case STRUCT:
          return Value.struct(isNull ? null : getStructInternal(columnIndex));
        case ARRAY:
          switch (columnType.getArrayElementType().getCode()) {
            case BOOL:
              return Value.boolArray(isNull ? null : getBooleanListInternal(columnIndex));
            case INT64:
              return Value.int64Array(isNull ? null : getLongListInternal(columnIndex));
            case NUMERIC:
              return Value.numericArray(isNull ? null : getBigDecimalListInternal(columnIndex));
            case FLOAT64:
              return Value.float64Array(isNull ? null : getDoubleListInternal(columnIndex));
            case STRING:
              return Value.stringArray(isNull ? null : getStringListInternal(columnIndex));
            case BYTES:
              return Value.bytesArray(isNull ? null : getBytesListInternal(columnIndex));
            case TIMESTAMP:
              return Value.timestampArray(isNull ? null : getTimestampListInternal(columnIndex));
            case DATE:
              return Value.dateArray(isNull ? null : getDateListInternal(columnIndex));
            case STRUCT:
              return Value.structArray(
                  columnType.getArrayElementType(), isNull ?
null : getStructListInternal(columnIndex)); default: throw new IllegalArgumentException( "Invalid array value type " + this.type.getArrayElementType()); } default: throw new IllegalArgumentException("Invalid value type " + this.type); } } @Override protected Struct getStructInternal(int columnIndex) { return (Struct) rowData.get(columnIndex); } @Override protected boolean[] getBooleanArrayInternal(int columnIndex) { @SuppressWarnings("unchecked") // We know ARRAY<BOOL> produces a List<Boolean>. List<Boolean> values = (List<Boolean>) rowData.get(columnIndex); boolean[] r = new boolean[values.size()]; for (int i = 0; i < values.size(); ++i) { if (values.get(i) == null) { throw throwNotNull(columnIndex); } r[i] = values.get(i); } return r; } @Override @SuppressWarnings("unchecked") // We know ARRAY<BOOL> produces a List<Boolean>. protected List<Boolean> getBooleanListInternal(int columnIndex) { return Collections.unmodifiableList((List<Boolean>) rowData.get(columnIndex)); } @Override protected long[] getLongArrayInternal(int columnIndex) { return getLongListInternal(columnIndex).toPrimitiveArray(columnIndex); } @Override protected Int64Array getLongListInternal(int columnIndex) { return (Int64Array) rowData.get(columnIndex); } @Override protected double[] getDoubleArrayInternal(int columnIndex) { return getDoubleListInternal(columnIndex).toPrimitiveArray(columnIndex); } @Override protected Float64Array getDoubleListInternal(int columnIndex) { return (Float64Array) rowData.get(columnIndex); } @Override @SuppressWarnings("unchecked") // We know ARRAY<NUMERIC> produces a List<BigDecimal>. protected List<BigDecimal> getBigDecimalListInternal(int columnIndex) { return (List<BigDecimal>) rowData.get(columnIndex); } @Override @SuppressWarnings("unchecked") // We know ARRAY<STRING> produces a List<String>. 
protected List<String> getStringListInternal(int columnIndex) { return Collections.unmodifiableList((List<String>) rowData.get(columnIndex)); } @Override @SuppressWarnings("unchecked") // We know ARRAY<String> produces a List<String>. protected List<String> getJsonListInternal(int columnIndex) { return Collections.unmodifiableList((List<String>) rowData.get(columnIndex)); } @Override @SuppressWarnings("unchecked") // We know ARRAY<BYTES> produces a List<ByteArray>. protected List<ByteArray> getBytesListInternal(int columnIndex) { return Collections.unmodifiableList((List<ByteArray>) rowData.get(columnIndex)); } @Override @SuppressWarnings("unchecked") // We know ARRAY<TIMESTAMP> produces a List<Timestamp>. protected List<Timestamp> getTimestampListInternal(int columnIndex) { return Collections.unmodifiableList((List<Timestamp>) rowData.get(columnIndex)); } @Override @SuppressWarnings("unchecked") // We know ARRAY<DATE> produces a List<Date>. protected List<Date> getDateListInternal(int columnIndex) { return Collections.unmodifiableList((List<Date>) rowData.get(columnIndex)); } @Override @SuppressWarnings("unchecked") // We know ARRAY<STRUCT<...>> produces a List<STRUCT>. protected List<Struct> getStructListInternal(int columnIndex) { return Collections.unmodifiableList((List<Struct>) rowData.get(columnIndex)); } } @VisibleForTesting interface CloseableIterator<T> extends Iterator<T> { /** * Closes the iterator, freeing any underlying resources. * * @param message a message to include in the final RPC status */ void close(@Nullable String message); boolean isWithBeginTransaction(); } /** Adapts a streaming read/query call into an iterator over partial result sets. 
*/ @VisibleForTesting static class GrpcStreamIterator extends AbstractIterator<PartialResultSet> implements CloseableIterator<PartialResultSet> { private static final Logger logger = Logger.getLogger(GrpcStreamIterator.class.getName()); private static final PartialResultSet END_OF_STREAM = PartialResultSet.newBuilder().build(); private final ConsumerImpl consumer = new ConsumerImpl(); private final BlockingQueue<PartialResultSet> stream; private final Statement statement; private SpannerRpc.StreamingCall call; private volatile boolean withBeginTransaction; private SpannerException error; @VisibleForTesting GrpcStreamIterator(int prefetchChunks) { this(null, prefetchChunks); } @VisibleForTesting GrpcStreamIterator(Statement statement, int prefetchChunks) { this.statement = statement; // One extra to allow for END_OF_STREAM message. this.stream = new LinkedBlockingQueue<>(prefetchChunks + 1); } protected final SpannerRpc.ResultStreamConsumer consumer() { return consumer; } public void setCall(SpannerRpc.StreamingCall call, boolean withBeginTransaction) { this.call = call; this.withBeginTransaction = withBeginTransaction; } @Override public void close(@Nullable String message) { if (call != null) { call.cancel(message); } } @Override public boolean isWithBeginTransaction() { return withBeginTransaction; } @Override protected final PartialResultSet computeNext() { PartialResultSet next; try { // TODO: Ideally honor io.grpc.Context while blocking here. In practice, // cancellation/deadline results in an error being delivered to "stream", which // should mean that we do not block significantly longer afterwards, but it would // be more robust to use poll() with a timeout. next = stream.take(); } catch (InterruptedException e) { // Treat interrupt as a request to cancel the read. throw SpannerExceptionFactory.propagateInterrupt(e); } if (next != END_OF_STREAM) { call.request(1); return next; } // All done - close() no longer needs to cancel the call. 
call = null; if (error != null) { throw SpannerExceptionFactory.newSpannerException(error); } endOfData(); return null; } private void addToStream(PartialResultSet results) { // We assume that nothing from the user will interrupt gRPC event threads. Uninterruptibles.putUninterruptibly(stream, results); } private class ConsumerImpl implements SpannerRpc.ResultStreamConsumer { @Override public void onPartialResultSet(PartialResultSet results) { addToStream(results); } @Override public void onCompleted() { addToStream(END_OF_STREAM); } @Override public void onError(SpannerException e) { if (statement != null) { if (logger.isLoggable(Level.FINEST)) { // Include parameter values if logging level is set to FINEST or higher. e = SpannerExceptionFactory.newSpannerExceptionPreformatted( e.getErrorCode(), String.format("%s - Statement: '%s'", e.getMessage(), statement.toString()), e); logger.log(Level.FINEST, "Error executing statement", e); } else { e = SpannerExceptionFactory.newSpannerExceptionPreformatted( e.getErrorCode(), String.format("%s - Statement: '%s'", e.getMessage(), statement.getSql()), e); } } error = e; addToStream(END_OF_STREAM); } } } /** * Wraps an iterator over partial result sets, supporting resuming RPCs on error. This class keeps * track of the most recent resume token seen, and will buffer partial result set chunks that do * not have a resume token until one is seen or buffer space is exceeded, which reduces the chance * of yielding data to the caller that cannot be resumed. 
*/ @VisibleForTesting abstract static class ResumableStreamIterator extends AbstractIterator<PartialResultSet> implements CloseableIterator<PartialResultSet> { private static final RetrySettings STREAMING_RETRY_SETTINGS = SpannerStubSettings.newBuilder().executeStreamingSqlSettings().getRetrySettings(); private static final Logger logger = Logger.getLogger(ResumableStreamIterator.class.getName()); private final BackOff backOff = newBackOff(); private final LinkedList<PartialResultSet> buffer = new LinkedList<>(); private final int maxBufferSize; private final Span span; private CloseableIterator<PartialResultSet> stream; private ByteString resumeToken; private boolean finished; /** * Indicates whether it is currently safe to retry RPCs. This will be {@code false} if we have * reached the maximum buffer size without seeing a restart token; in this case, we will drain * the buffer and remain in this state until we see a new restart token. */ private boolean safeToRetry = true; protected ResumableStreamIterator(int maxBufferSize, String streamName, Span parent) { checkArgument(maxBufferSize >= 0); this.maxBufferSize = maxBufferSize; this.span = tracer.spanBuilderWithExplicitParent(streamName, parent).startSpan(); } private static ExponentialBackOff newBackOff() { return new ExponentialBackOff.Builder() .setMultiplier(STREAMING_RETRY_SETTINGS.getRetryDelayMultiplier()) .setInitialIntervalMillis( Math.max(10, (int) STREAMING_RETRY_SETTINGS.getInitialRetryDelay().toMillis())) .setMaxIntervalMillis( Math.max(1000, (int) STREAMING_RETRY_SETTINGS.getMaxRetryDelay().toMillis())) .setMaxElapsedTimeMillis(Integer.MAX_VALUE) // Prevent Backoff.STOP from getting returned. 
.build(); } private static void backoffSleep(Context context, BackOff backoff) throws SpannerException { backoffSleep(context, nextBackOffMillis(backoff)); } private static long nextBackOffMillis(BackOff backoff) throws SpannerException { try { return backoff.nextBackOffMillis(); } catch (IOException e) { throw newSpannerException(ErrorCode.INTERNAL, e.getMessage(), e); } } private static void backoffSleep(Context context, long backoffMillis) throws SpannerException { tracer .getCurrentSpan() .addAnnotation( "Backing off", ImmutableMap.of("Delay", AttributeValue.longAttributeValue(backoffMillis))); final CountDownLatch latch = new CountDownLatch(1); final Context.CancellationListener listener = ignored -> { // Wakeup on cancellation / DEADLINE_EXCEEDED. latch.countDown(); }; context.addListener(listener, DirectExecutor.INSTANCE); try { if (backoffMillis == BackOff.STOP) { // Highly unlikely but we handle it just in case. backoffMillis = STREAMING_RETRY_SETTINGS.getMaxRetryDelay().toMillis(); } if (latch.await(backoffMillis, TimeUnit.MILLISECONDS)) { // Woken by context cancellation. throw newSpannerExceptionForCancellation(context, null); } } catch (InterruptedException interruptExcept) { throw newSpannerExceptionForCancellation(context, interruptExcept); } finally { context.removeListener(listener); } } private enum DirectExecutor implements Executor { INSTANCE; @Override public void execute(Runnable command) { command.run(); } } abstract CloseableIterator<PartialResultSet> startStream(@Nullable ByteString resumeToken); @Override public void close(@Nullable String message) { if (stream != null) { stream.close(message); span.end(TraceUtil.END_SPAN_OPTIONS); stream = null; } } @Override public boolean isWithBeginTransaction() { return stream != null && stream.isWithBeginTransaction(); } @Override protected PartialResultSet computeNext() { Context context = Context.current(); while (true) { // Eagerly start stream before consuming any buffered items. 
if (stream == null) { span.addAnnotation( "Starting/Resuming stream", ImmutableMap.of( "ResumeToken", AttributeValue.stringAttributeValue( resumeToken == null ? "null" : resumeToken.toStringUtf8()))); try (Scope s = tracer.withSpan(span)) { // When start a new stream set the Span as current to make the gRPC Span a child of // this Span. stream = checkNotNull(startStream(resumeToken)); } } // Buffer contains items up to a resume token or has reached capacity: flush. if (!buffer.isEmpty() && (finished || !safeToRetry || !buffer.getLast().getResumeToken().isEmpty())) { return buffer.pop(); } try { if (stream.hasNext()) { PartialResultSet next = stream.next(); boolean hasResumeToken = !next.getResumeToken().isEmpty(); if (hasResumeToken) { resumeToken = next.getResumeToken(); safeToRetry = true; } // If the buffer is empty and this chunk has a resume token or we cannot resume safely // anyway, we can yield it immediately rather than placing it in the buffer to be // returned on the next iteration. if ((hasResumeToken || !safeToRetry) && buffer.isEmpty()) { return next; } buffer.add(next); if (buffer.size() > maxBufferSize && buffer.getLast().getResumeToken().isEmpty()) { // We need to flush without a restart token. Errors encountered until we see // such a token will fail the read. safeToRetry = false; } } else { finished = true; if (buffer.isEmpty()) { endOfData(); return null; } } } catch (SpannerException e) { if (safeToRetry && e.isRetryable()) { span.addAnnotation( "Stream broken. Safe to retry", TraceUtil.getExceptionAnnotations(e)); logger.log(Level.FINE, "Retryable exception, will sleep and retry", e); // Truncate any items in the buffer before the last retry token. 
while (!buffer.isEmpty() && buffer.getLast().getResumeToken().isEmpty()) { buffer.removeLast(); } assert buffer.isEmpty() || buffer.getLast().getResumeToken().equals(resumeToken); stream = null; try (Scope s = tracer.withSpan(span)) { long delay = e.getRetryDelayInMillis(); if (delay != -1) { backoffSleep(context, delay); } else { backoffSleep(context, backOff); } } continue; } span.addAnnotation("Stream broken. Not safe to retry"); TraceUtil.setWithFailure(span, e); throw e; } catch (RuntimeException e) { span.addAnnotation("Stream broken. Not safe to retry"); TraceUtil.setWithFailure(span, e); throw e; } } } } static double valueProtoToFloat64(com.google.protobuf.Value proto) { if (proto.getKindCase() == KindCase.STRING_VALUE) { switch (proto.getStringValue()) { case "-Infinity": return Double.NEGATIVE_INFINITY; case "Infinity": return Double.POSITIVE_INFINITY; case "NaN": return Double.NaN; default: // Fall-through to handling below to produce an error. } } if (proto.getKindCase() != KindCase.NUMBER_VALUE) { throw newSpannerException( ErrorCode.INTERNAL, "Invalid value for column type " + Type.float64() + " expected NUMBER_VALUE or STRING_VALUE with value one of" + " \"Infinity\", \"-Infinity\", or \"NaN\" but was " + proto.getKindCase() + (proto.getKindCase() == KindCase.STRING_VALUE ? " with value \"" + proto.getStringValue() + "\"" : "")); } return proto.getNumberValue(); } static NullPointerException throwNotNull(int columnIndex) { throw new NullPointerException( "Cannot call array getter for column " + columnIndex + " with null elements"); } /** * Memory-optimized base class for {@code ARRAY<INT64>} and {@code ARRAY<FLOAT64>} types. Both of * these involve conversions from the type yielded by JSON parsing, which are {@code String} and * {@code BigDecimal} respectively. Rather than construct new wrapper objects for each array * element, we use primitive arrays and a {@code BitSet} to track nulls. 
*/ abstract static class PrimitiveArray<T, A> extends AbstractList<T> { private final A data; private final BitSet nulls; private final int size; PrimitiveArray(ListValue protoList) { this.size = protoList.getValuesCount(); A data = newArray(size); BitSet nulls = new BitSet(size); for (int i = 0; i < protoList.getValuesCount(); ++i) { if (protoList.getValues(i).getKindCase() == KindCase.NULL_VALUE) { nulls.set(i); } else { setProto(data, i, protoList.getValues(i)); } } this.data = data; this.nulls = nulls; } PrimitiveArray(A data, BitSet nulls, int size) { this.data = data; this.nulls = nulls; this.size = size; } abstract A newArray(int size); abstract void setProto(A array, int i, com.google.protobuf.Value protoValue); abstract T get(A array, int i); @Override public T get(int index) { if (index < 0 || index >= size) { throw new ArrayIndexOutOfBoundsException("index=" + index + " size=" + size); } return nulls.get(index) ? null : get(data, index); } @Override public int size() { return size; } A toPrimitiveArray(int columnIndex) { if (nulls.length() > 0) { throw throwNotNull(columnIndex); } A r = newArray(size); System.arraycopy(data, 0, r, 0, size); return r; } } static class Int64Array extends PrimitiveArray<Long, long[]> { Int64Array(ListValue protoList) { super(protoList); } Int64Array(long[] data, BitSet nulls) { super(data, nulls, data.length); } @Override long[] newArray(int size) { return new long[size]; } @Override void setProto(long[] array, int i, com.google.protobuf.Value protoValue) { array[i] = Long.parseLong(protoValue.getStringValue()); } @Override Long get(long[] array, int i) { return array[i]; } } static class Float64Array extends PrimitiveArray<Double, double[]> { Float64Array(ListValue protoList) { super(protoList); } Float64Array(double[] data, BitSet nulls) { super(data, nulls, data.length); } @Override double[] newArray(int size) { return new double[size]; } @Override void setProto(double[] array, int i, com.google.protobuf.Value 
protoValue) { array[i] = valueProtoToFloat64(protoValue); } @Override Double get(double[] array, int i) { return array[i]; } } protected abstract GrpcStruct currRow(); @Override public Struct getCurrentRowAsStruct() { return currRow().immutableCopy(); } @Override protected boolean getBooleanInternal(int columnIndex) { return currRow().getBooleanInternal(columnIndex); } @Override protected long getLongInternal(int columnIndex) { return currRow().getLongInternal(columnIndex); } @Override protected double getDoubleInternal(int columnIndex) { return currRow().getDoubleInternal(columnIndex); } @Override protected BigDecimal getBigDecimalInternal(int columnIndex) { return currRow().getBigDecimalInternal(columnIndex); } @Override protected String getStringInternal(int columnIndex) { return currRow().getStringInternal(columnIndex); } @Override protected String getJsonInternal(int columnIndex) { return currRow().getJsonInternal(columnIndex); } @Override protected ByteArray getBytesInternal(int columnIndex) { return currRow().getBytesInternal(columnIndex); } @Override protected Timestamp getTimestampInternal(int columnIndex) { return currRow().getTimestampInternal(columnIndex); } @Override protected Date getDateInternal(int columnIndex) { return currRow().getDateInternal(columnIndex); } @Override protected Value getValueInternal(int columnIndex) { return currRow().getValueInternal(columnIndex); } @Override protected boolean[] getBooleanArrayInternal(int columnIndex) { return currRow().getBooleanArrayInternal(columnIndex); } @Override protected List<Boolean> getBooleanListInternal(int columnIndex) { return currRow().getBooleanListInternal(columnIndex); } @Override protected long[] getLongArrayInternal(int columnIndex) { return currRow().getLongArrayInternal(columnIndex); } @Override protected List<Long> getLongListInternal(int columnIndex) { return currRow().getLongListInternal(columnIndex); } @Override protected double[] getDoubleArrayInternal(int columnIndex) { return 
currRow().getDoubleArrayInternal(columnIndex); } @Override protected List<Double> getDoubleListInternal(int columnIndex) { return currRow().getDoubleListInternal(columnIndex); } @Override protected List<BigDecimal> getBigDecimalListInternal(int columnIndex) { return currRow().getBigDecimalListInternal(columnIndex); } @Override protected List<String> getStringListInternal(int columnIndex) { return currRow().getStringListInternal(columnIndex); } @Override protected List<String> getJsonListInternal(int columnIndex) { return currRow().getJsonListInternal(columnIndex); } @Override protected List<ByteArray> getBytesListInternal(int columnIndex) { return currRow().getBytesListInternal(columnIndex); } @Override protected List<Timestamp> getTimestampListInternal(int columnIndex) { return currRow().getTimestampListInternal(columnIndex); } @Override protected List<Date> getDateListInternal(int columnIndex) { return currRow().getDateListInternal(columnIndex); } @Override protected List<Struct> getStructListInternal(int columnIndex) { return currRow().getStructListInternal(columnIndex); } @Override public boolean isNull(int columnIndex) { return currRow().isNull(columnIndex); } }
looker-open-source/java-spanner
google-cloud-spanner/src/main/java/com/google/cloud/spanner/AbstractResultSet.java
Java
apache-2.0
50,481
/* * Copyright 2010 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.collect.ImmutableSet; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.jscomp.SpecializeModule.SpecializationState; import com.google.javascript.rhino.Node; /** * Tests for {@link SpecializeModule}. * * @author dcc@google.com (Devin Coughlin) */ public class SpecializeModuleTest extends CompilerTestCase { private static final String SHARED_EXTERNS = "var alert = function() {}"; public SpecializeModuleTest() { super(SHARED_EXTERNS); } private PassFactory inlineFunctions = new PassFactory("inlineFunctions", true) { @Override protected CompilerPass createInternal(AbstractCompiler compiler) { return new InlineFunctions(compiler, compiler.getUniqueNameIdSupplier(), true, false, true); } }; private PassFactory removeUnusedPrototypeProperties = new PassFactory("removeUnusedPrototypeProperties", true) { @Override protected CompilerPass createInternal(AbstractCompiler compiler) { return new RemoveUnusedPrototypeProperties(compiler, false, false); } }; private PassFactory devirtualizePrototypeMethods = new PassFactory("devirtualizePrototypeMethods", true) { @Override protected CompilerPass createInternal(AbstractCompiler compiler) { return new DevirtualizePrototypeMethods(compiler); } }; @Override protected CompilerPass getProcessor(final Compiler compiler) { final 
SpecializeModule specializeModule = new SpecializeModule(compiler, devirtualizePrototypeMethods, inlineFunctions, removeUnusedPrototypeProperties); return new CompilerPass() { public void process(Node externs, Node root) { specializeModule.process(externs, root); /* Make sure variables are declared before used */ new VarCheck(compiler).process(externs, root); } }; } @Override public void setUp() throws Exception { super.setUp(); enableNormalize(); } public void testSpecializeInline() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var A = function() {alert(B());A()};" + "var B = function() {return 6};" + "A();", // m2 "A();" + "B();" + "B = function() {return 7};" + "A();" + "B();" ); test(modules, new String[] { // m1 "var A = function() {alert(6);A()};" + /* Specialized A */ "A();" + "var B;", // m2 "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "A();" + "B();" + "B = function() {return 7};" + "A();" + "B();" }); } public void testSpecializeCascadedInline() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var A = function() {alert(B());A()};" + "var B = function() {return C()};" + "var C = function() {return 6};" + "A();", // m2 "B = function() {return 7};" + "A();"); test(modules, new String[] { // m1 "var A = function() {alert(6);A()};" + /* Specialized A */ "A();" + "var B, C;", // m2 "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return C()};" + /* Removed from m1, so add to m2 */ "C = function() {return 6};" + /* Removed from m1, so add to m2 */ "B = function() {return 7};" + "A();" }); } public void testSpecializeInlineWithMultipleDependents() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var A = function() {alert(B());A()};" + "var B = function() {return 6};" + "A();", // m2 "B = function() {return 7};" + "A();", // m3 
"A();" ); test(modules, new String[] { // m1 "var A = function() {alert(6);A()};" + /* Specialized A */ "A();" + "var B;", // m2 "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "B = function() {return 7};" + "A();", "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "A();", }); } public void testSpecializeInlineWithNamespaces() { JSModule[] modules = createModuleStar( // m1 "var ns = {};" + /* Recursion in A() prevents inline of A*/ "ns.A = function() {alert(B());ns.A()};" + "var B = function() {return 6};" + "ns.A();", // m2 "B = function() {return 7};" + "ns.A();"); test(modules, new String[] { // m1 "var ns = {};" + "ns.A = function() {alert(6);ns.A()};" + /* Specialized A */ "ns.A();" + "var B;", // m2 "ns.A = function() {alert(B());ns.A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "B = function() {return 7};" + "ns.A();" }); } public void testSpecializeInlineWithRegularFunctions() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "function A() {alert(B());A()}" + "function B() {return 6}" + "A();", // m2 "B = function() {return 7};" + "A();"); test(modules, new String[] { // m1 "function A() {alert(6);A()}" + /* Specialized A */ "A();" + "var B;", // m2 "A = function() {alert(B());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ /* Start of original m2 */ "B = function() {return 7};" + "A();" }); } public void testDontSpecializeLocalNonAnonymousFunctions() { /* normalize result, but not expected */ enableNormalize(false); JSModule[] modules = createModuleStar( // m1 "(function(){var noSpecialize = " + "function() {alert(6)};noSpecialize()})()", // m2 ""); test(modules, new String[] { // m1 "(function(){var noSpecialize = " + "function() {alert(6)};noSpecialize()})()", // 
m2 "" }); } public void testAddDummyVarsForRemovedFunctions() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var A = function() {alert(B() + C());A()};" + "var B = function() {return 6};" + "var C = function() {return 8};" + "A();", // m2 "" + "A();"); test(modules, new String[] { // m1 "var A = function() {alert(6 + 8);A()};" + /* Specialized A */ "A();" + "var B, C;", // m2 "A = function() {alert(B() + C());A()};" + /* Unspecialized A */ "B = function() {return 6};" + /* Removed from m1, so add to m2 */ "C = function() {return 8};" + /* Removed from m1, so add to m2 */ "A();" }); } public void testSpecializeRemoveUnusedProperties() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a()};" + "Foo.prototype.b = function() {return 6};" + "Foo.prototype.c = function() {return 7};" + "var aliasA = Foo.prototype.a;" + // Prevents devirtualization of a "var x = new Foo();" + "x.a();", // m2 ""); test(modules, new String[] { // m1 "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a()};" + "var aliasA = Foo.prototype.a;" + "var x = new Foo();" + "x.a();", // m2 "Foo.prototype.b = function() {return 6};" + "Foo.prototype.c = function() {return 7};" }); } public void testDontSpecializeAliasedFunctions_inline() { JSModule[] modules = createModuleStar( // m1 /* Recursion in A() prevents inline of A*/ "function A() {alert(B());A()}" + "function B() {return 6}" + "var aliasA = A;" + "A();", // m2 "B = function() {return 7};" + "B();"); test(modules, new String[] { // m1 /* Recursion in A() prevents inline of A*/ "function A() {alert(B());A()}" + "function B() {return 6}" + "var aliasA = A;" + "A();", // m2 "B = function() {return 7};" + "B();" }); } public void testDontSpecializeAliasedFunctions_remove_unused_properties() { JSModule[] modules = createModuleStar( // m1 "var Foo = 
function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a()};" + "Foo.prototype.b = function() {return 6};" + "var aliasB = Foo.prototype.b;" + "Foo.prototype.c = function() {return 7};" + "Foo.prototype.d = function() {return 7};" + "var aliasA = Foo.prototype.a;" + // Prevents devirtualization of a "var x = new Foo();" + "x.a();" + "var aliasC = (new Foo).c", // m2 ""); test(modules, new String[] { // m1 "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a()};" + "Foo.prototype.b = function() {return 6};" + "var aliasB = Foo.prototype.b;" + "Foo.prototype.c = function() {return 7};" + "var aliasA = Foo.prototype.a;" + // Prevents devirtualization of a "var x = new Foo();" + "x.a();" + "var aliasC = (new Foo).c", // m2 "Foo.prototype.d = function() {return 7};" }); } public void testSpecializeDevirtualizePrototypeMethods() { JSModule[] modules = createModuleStar( // m1 "/** @constructor */" + "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {this.a();return 7};" + "Foo.prototype.b = function() {this.a()};" + "var x = new Foo();" + "x.a();", // m2 ""); test(modules, new String[] { // m1 "var Foo = function(){};" + /* constructor */ "var JSCompiler_StaticMethods_a =" + "function(JSCompiler_StaticMethods_a$self) {" + "JSCompiler_StaticMethods_a(JSCompiler_StaticMethods_a$self);" + "return 7" + "};" + "var x = new Foo();" + "JSCompiler_StaticMethods_a(x);", // m2 "Foo.prototype.a = function() {this.a();return 7};" + "Foo.prototype.b = function() {this.a()};" }); } public void testSpecializeDevirtualizePrototypeMethodsWithInline() { JSModule[] modules = createModuleStar( // m1 "/** @constructor */" + "var Foo = function(){};" + /* constructor */ "Foo.prototype.a = function() {return 7};" + "var x = new Foo();" + "var z = x.a();", // m2 ""); test(modules, new String[] { // m1 "var Foo = function(){};" + /* constructor */ "var x = new Foo();" + "var z = 7;", // m2 "Foo.prototype.a = function() 
{return 7};" }); } /** * Tests for {@link SpecializeModule.SpecializationState}. */ public static class SpecializeModuleSpecializationStateTest extends CompilerTestCase { Compiler lastCompiler; SpecializationState lastState; @Override public CompilerPass getProcessor(final Compiler compiler) { lastCompiler = compiler; return new CompilerPass() { public void process(Node externs, Node root) { SimpleDefinitionFinder defFinder = new SimpleDefinitionFinder(compiler); defFinder.process(externs, root); SimpleFunctionAliasAnalysis functionAliasAnalysis = new SimpleFunctionAliasAnalysis(); functionAliasAnalysis.analyze(defFinder); lastState = new SpecializationState(functionAliasAnalysis); } }; } public void testRemovedFunctions() { testSame("function F(){}\nvar G = function(a){};"); assertEquals(ImmutableSet.of(), lastState.getRemovedFunctions()); Node functionF = findFunction("F"); lastState.reportRemovedFunction(functionF, functionF.getParent()); assertEquals(ImmutableSet.of(functionF), lastState.getRemovedFunctions()); Node functionG = findFunction("F"); lastState.reportRemovedFunction(functionG, functionF.getParent()); assertEquals(ImmutableSet.of(functionF, functionG), lastState.getRemovedFunctions()); assertEquals(ImmutableSet.of(), lastState.getSpecializedFunctions()); } public void testSpecializedFunctions() { testSame("function F(){}\nvar G = function(a){};"); assertEquals(ImmutableSet.of(), lastState.getSpecializedFunctions()); Node functionF = findFunction("F"); lastState.reportSpecializedFunction(functionF); assertEquals(ImmutableSet.of(functionF), lastState.getSpecializedFunctions()); Node functionG = findFunction("F"); lastState.reportSpecializedFunction(functionG); assertEquals(ImmutableSet.of(functionF, functionG), lastState.getSpecializedFunctions()); assertEquals(ImmutableSet.of(), lastState.getRemovedFunctions()); } public void testCanFixupFunction() { testSame("function F(){}\n" + "var G = function(a){};\n" + "var ns = {};" + "ns.H = function(){};" + 
"var ns2 = {I : function anon1(){}};" + "(function anon2(){})();"); assertTrue(lastState.canFixupFunction(findFunction("F"))); assertTrue(lastState.canFixupFunction(findFunction("G"))); assertTrue(lastState.canFixupFunction(findFunction("ns.H"))); assertFalse(lastState.canFixupFunction(findFunction("anon1"))); assertFalse(lastState.canFixupFunction(findFunction("anon2"))); // Can't guarantee safe fixup for aliased functions testSame("function A(){}\n" + "var aliasA = A;\n"); assertFalse(lastState.canFixupFunction(findFunction("A"))); } private Node findFunction(String name) { FunctionFinder f = new FunctionFinder(name); new NodeTraversal(lastCompiler, f).traverse(lastCompiler.jsRoot); assertNotNull("Couldn't find " + name, f.found); return f.found; } /** * Quick Traversal to find a given function in the AST. */ private class FunctionFinder extends AbstractPostOrderCallback { Node found = null; final String target; FunctionFinder(String target) { this.target = target; } public void visit(NodeTraversal t, Node n, Node parent) { if (NodeUtil.isFunction(n) && target.equals(NodeUtil.getFunctionName(n))) { found = n; } } } } }
JonathanWalsh/Granule-Closure-Compiler
test/com/google/javascript/jscomp/SpecializeModuleTest.java
Java
apache-2.0
16,009
package semver import ( "fmt" "strings" "github.com/blang/semver" "github.com/pivotal-cf/go-pivnet/v7/logger" ) type SemverConverter struct { logger logger.Logger } func NewSemverConverter(logger logger.Logger) *SemverConverter { return &SemverConverter{logger} } // ToValidSemver attempts to return the input as valid semver. // If the input fails to parse as semver, it appends .0 or .0.0 to the input and retries // If this is still not valid semver, it returns an error func (s SemverConverter) ToValidSemver(input string) (semver.Version, error) { v, err := semver.Parse(input) if err == nil { return v, nil } s.logger.Info(fmt.Sprintf( "failed to parse semver: '%s', appending zeros and trying again", input, )) maybeSemver := input segs := strings.SplitN(maybeSemver, ".", 3) switch len(segs) { case 2: maybeSemver += ".0" case 1: maybeSemver += ".0.0" } v, err = semver.Parse(maybeSemver) if err == nil { return v, nil } s.logger.Info(fmt.Sprintf( "still failed to parse semver: '%s', giving up", maybeSemver, )) return semver.Version{}, err }
pivotal-cf-experimental/pivnet-resource
semver/semver.go
GO
apache-2.0
1,101
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import mock import six import yaml from heat.common import config from heat.common import exception from heat.common import template_format from heat.tests.common import HeatTestCase from heat.tests import utils class JsonToYamlTest(HeatTestCase): def setUp(self): super(JsonToYamlTest, self).setUp() self.expected_test_count = 2 self.longMessage = True self.maxDiff = None def test_convert_all_templates(self): path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'templates') template_test_count = 0 for (json_str, yml_str, file_name) in self.convert_all_json_to_yaml(path): self.compare_json_vs_yaml(json_str, yml_str, file_name) template_test_count += 1 if template_test_count >= self.expected_test_count: break self.assertTrue(template_test_count >= self.expected_test_count, 'Expected at least %d templates to be tested, not %d' % (self.expected_test_count, template_test_count)) def compare_json_vs_yaml(self, json_str, yml_str, file_name): yml = template_format.parse(yml_str) self.assertEqual(u'2012-12-12', yml[u'HeatTemplateFormatVersion'], file_name) self.assertFalse(u'AWSTemplateFormatVersion' in yml, file_name) del(yml[u'HeatTemplateFormatVersion']) jsn = template_format.parse(json_str) if u'AWSTemplateFormatVersion' in jsn: del(jsn[u'AWSTemplateFormatVersion']) self.assertEqual(yml, jsn, file_name) def convert_all_json_to_yaml(self, dirpath): for path in os.listdir(dirpath): if not path.endswith('.template') and 
not path.endswith('.json'): continue f = open(os.path.join(dirpath, path), 'r') json_str = f.read() yml_str = template_format.convert_json_to_yaml(json_str) yield (json_str, yml_str, f.name) class YamlMinimalTest(HeatTestCase): def _parse_template(self, tmpl_str, msg_str): parse_ex = self.assertRaises(ValueError, template_format.parse, tmpl_str) self.assertIn(msg_str, six.text_type(parse_ex)) def test_long_yaml(self): template = {'HeatTemplateFormatVersion': '2012-12-12'} config.cfg.CONF.set_override('max_template_size', 1024) template['Resources'] = ['a'] * (config.cfg.CONF.max_template_size / 3) limit = config.cfg.CONF.max_template_size long_yaml = yaml.safe_dump(template) self.assertTrue(len(long_yaml) > limit) ex = self.assertRaises(exception.RequestLimitExceeded, template_format.parse, long_yaml) msg = ('Request limit exceeded: Template exceeds maximum allowed size ' '(1024 bytes)') self.assertEqual(msg, six.text_type(ex)) def test_parse_no_version_format(self): yaml = '' self._parse_template(yaml, 'Template format version not found') yaml2 = '''Parameters: {} Mappings: {} Resources: {} Outputs: {} ''' self._parse_template(yaml2, 'Template format version not found') def test_parse_string_template(self): tmpl_str = 'just string' msg = 'The template is not a JSON object or YAML mapping.' self._parse_template(tmpl_str, msg) def test_parse_invalid_yaml_and_json_template(self): tmpl_str = '{test' msg = 'line 1, column 1' self._parse_template(tmpl_str, msg) def test_parse_json_document(self): tmpl_str = '["foo" , "bar"]' msg = 'The template is not a JSON object or YAML mapping.' 
self._parse_template(tmpl_str, msg) def test_parse_empty_json_template(self): tmpl_str = '{}' msg = 'Template format version not found' self._parse_template(tmpl_str, msg) def test_parse_yaml_template(self): tmpl_str = 'heat_template_version: 2013-05-23' expected = {'heat_template_version': '2013-05-23'} self.assertEqual(expected, template_format.parse(tmpl_str)) class YamlParseExceptions(HeatTestCase): scenarios = [ ('scanner', dict(raised_exception=yaml.scanner.ScannerError())), ('parser', dict(raised_exception=yaml.parser.ParserError())), ('reader', dict(raised_exception=yaml.reader.ReaderError('', '', '', '', ''))), ] def test_parse_to_value_exception(self): text = 'not important' with mock.patch.object(yaml, 'load') as yaml_loader: yaml_loader.side_effect = self.raised_exception self.assertRaises(ValueError, template_format.parse, text) class JsonYamlResolvedCompareTest(HeatTestCase): def setUp(self): super(JsonYamlResolvedCompareTest, self).setUp() self.longMessage = True self.maxDiff = None def load_template(self, file_name): filepath = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'templates', file_name) f = open(filepath) t = template_format.parse(f.read()) f.close() return t def compare_stacks(self, json_file, yaml_file, parameters): t1 = self.load_template(json_file) t2 = self.load_template(yaml_file) del(t1[u'AWSTemplateFormatVersion']) t1[u'HeatTemplateFormatVersion'] = t2[u'HeatTemplateFormatVersion'] stack1 = utils.parse_stack(t1, parameters) stack2 = utils.parse_stack(t2, parameters) # compare resources separately so that resolved static data # is compared t1nr = dict(stack1.t.t) del(t1nr['Resources']) t2nr = dict(stack2.t.t) del(t2nr['Resources']) self.assertEqual(t1nr, t2nr) self.assertEqual(set(stack1.keys()), set(stack2.keys())) for key in stack1: self.assertEqual(stack1[key].t, stack2[key].t) def test_neutron_resolved(self): self.compare_stacks('Neutron.template', 'Neutron.yaml', {}) def test_wordpress_resolved(self): 
self.compare_stacks('WordPress_Single_Instance.template', 'WordPress_Single_Instance.yaml', {'KeyName': 'test'})
redhat-openstack/heat
heat/tests/test_template_format.py
Python
apache-2.0
7,015
/** * jetbrick-template * http://subchen.github.io/jetbrick-template/ * * Copyright 2010-2014 Guoqiang Chen. All rights reserved. * Email: subchen@gmail.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jetbrick.template.resource; import java.io.IOException; import java.io.InputStream; import java.util.concurrent.atomic.AtomicLong; import jetbrick.template.utils.UnsafeByteArrayInputStream; /** * 以源码形式存在的资源. * * @since 1.1.3 * @author Guoqiang Chen */ public class SourceCodeResource extends Resource { private static final String ENCODING = "utf-8"; private static AtomicLong index = new AtomicLong(); private final String source; public SourceCodeResource(String source) { super("/unknown/file." + index.incrementAndGet(), ENCODING); this.source = source; } @Override public String getAbsolutePath() { return "(unknown)"; } @Override public long lastModified() { return 0; } @Override public InputStream getInputStream() throws IOException { return new UnsafeByteArrayInputStream(source.getBytes(ENCODING)); } @Override public char[] getSource() { return source.toCharArray(); } @Override public char[] getSource(String encoding) { return source.toCharArray(); } }
subchen/jetbrick-template-1x
src/main/java/jetbrick/template/resource/SourceCodeResource.java
Java
apache-2.0
1,866
# Copyright 2015 - StackStorm, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from oslo_config import cfg import requests from mistral.actions import std_actions from mistral.db.v2 import api as db_api from mistral.services import workflows as wf_service from mistral.tests.unit import base as test_base from mistral.tests.unit.engine import base from mistral.workflow import states # Use the set_default method to set value otherwise in certain test cases # the change in value is not permanent. 
cfg.CONF.set_default('auth_enable', False, group='pecan') ENV = { '__actions': { 'std.http': { 'auth': 'librarian:password123', 'timeout': 30, } } } EXPECTED_ENV_AUTH = ('librarian', 'password123') WORKFLOW1 = """ --- version: "2.0" wf1: type: direct tasks: task1: action: std.http url="https://api.library.org/books" publish: result: <% $ %> """ WORKFLOW2 = """ --- version: "2.0" wf2: type: direct tasks: task1: action: std.http url="https://api.library.org/books" timeout=60 publish: result: <% $ %> """ WORKFLOW1_WITH_ITEMS = """ --- version: "2.0" wf1_with_items: type: direct input: - links tasks: task1: with-items: link in <% $.links %> action: std.http url=<% $.link %> publish: result: <% $ %> """ WORKFLOW2_WITH_ITEMS = """ --- version: "2.0" wf2_with_items: type: direct input: - links tasks: task1: with-items: link in <% $.links %> action: std.http url=<% $.link %> timeout=60 publish: result: <% $ %> """ class ActionDefaultTest(base.EngineTestCase): @mock.patch.object( requests, 'request', mock.MagicMock(return_value=test_base.FakeHTTPResponse('', 200, 'OK'))) @mock.patch.object( std_actions.HTTPAction, 'is_sync', mock.MagicMock(return_value=True)) def test_action_defaults_from_env(self): wf_service.create_workflows(WORKFLOW1) wf_ex = self.engine.start_workflow('wf1', env=ENV) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) self.assertEqual(states.SUCCESS, wf_ex.state) self._assert_single_item(wf_ex.task_executions, name='task1') requests.request.assert_called_with( 'GET', 'https://api.library.org/books', params=None, data=None, headers=None, cookies=None, allow_redirects=None, proxies=None, verify=None, auth=EXPECTED_ENV_AUTH, timeout=ENV['__actions']['std.http']['timeout']) @mock.patch.object( requests, 'request', mock.MagicMock(return_value=test_base.FakeHTTPResponse('', 200, 'OK'))) @mock.patch.object( std_actions.HTTPAction, 'is_sync', mock.MagicMock(return_value=True)) def 
test_action_defaults_from_env_not_applied(self): wf_service.create_workflows(WORKFLOW2) wf_ex = self.engine.start_workflow('wf2', env=ENV) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) self.assertEqual(states.SUCCESS, wf_ex.state) self._assert_single_item(wf_ex.task_executions, name='task1') requests.request.assert_called_with( 'GET', 'https://api.library.org/books', params=None, data=None, headers=None, cookies=None, allow_redirects=None, proxies=None, verify=None, auth=EXPECTED_ENV_AUTH, timeout=60 ) @mock.patch.object( requests, 'request', mock.MagicMock(return_value=test_base.FakeHTTPResponse('', 200, 'OK'))) @mock.patch.object( std_actions.HTTPAction, 'is_sync', mock.MagicMock(return_value=True)) def test_with_items_action_defaults_from_env(self): wf_service.create_workflows(WORKFLOW1_WITH_ITEMS) wf_input = { 'links': [ 'https://api.library.org/books', 'https://api.library.org/authors' ] } wf_ex = self.engine.start_workflow( 'wf1_with_items', wf_input=wf_input, env=ENV ) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) self.assertEqual(states.SUCCESS, wf_ex.state) self._assert_single_item(wf_ex.task_executions, name='task1') calls = [mock.call('GET', url, params=None, data=None, headers=None, cookies=None, allow_redirects=None, proxies=None, auth=EXPECTED_ENV_AUTH, verify=None, timeout=ENV['__actions']['std.http']['timeout']) for url in wf_input['links']] requests.request.assert_has_calls(calls, any_order=True) @mock.patch.object( requests, 'request', mock.MagicMock(return_value=test_base.FakeHTTPResponse('', 200, 'OK'))) @mock.patch.object( std_actions.HTTPAction, 'is_sync', mock.MagicMock(return_value=True)) def test_with_items_action_defaults_from_env_not_applied(self): wf_service.create_workflows(WORKFLOW2_WITH_ITEMS) wf_input = { 'links': [ 'https://api.library.org/books', 'https://api.library.org/authors' ] } wf_ex = 
self.engine.start_workflow( 'wf2_with_items', wf_input=wf_input, env=ENV ) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) self.assertEqual(states.SUCCESS, wf_ex.state) self._assert_single_item(wf_ex.task_executions, name='task1') calls = [mock.call('GET', url, params=None, data=None, headers=None, cookies=None, allow_redirects=None, proxies=None, auth=EXPECTED_ENV_AUTH, verify=None, timeout=60) for url in wf_input['links']] requests.request.assert_has_calls(calls, any_order=True)
StackStorm/mistral
mistral/tests/unit/engine/test_action_defaults.py
Python
apache-2.0
6,915
<?php namespace Bigbank\Gcm; /** * Gcm response parser */ class Response { /** * Unique ID (number) identifying the multicast message. * * @var integer */ private $multicastId = null; /** * Unique id identifying the single message. * * Only have value if single or topic message is sent to google * * @var int */ private $messageId = null; /** * Number of messages that were processed without an error. * * @var integer */ private $success = null; /** * Number of messages that could not be processed. * * @var integer */ private $failure = null; /** * Number of results that contain a canonical registration ID. * * @var integer */ private $canonicalIds = null; /** * Holds single message error * * @var string */ private $error = null; /** * Array of objects representing the status of the messages processed. * The objects are listed in the same order as the request * (i.e., for each registration ID in the request, its result is listed in the same index in the response) * and they can have these fields: * message_id: String representing the message when it was successfully processed. * registration_id: If set, means that GCM processed the message but it has another canonical * registration ID for that device, so sender should replace the IDs on future requests * (otherwise they might be rejected). This field is never set if there is an error in the request. * error: String describing an error that occurred while processing the message for that recipient. * The possible values are the same as documented in the above table, plus "Unavailable" * (meaning GCM servers were busy and could not process the message for that particular recipient, * so it could be retried). 
* * @var array */ private $results = []; /** * @param Message $message * @param string $responseBody json string of google cloud message server response * * @throws Exception */ public function __construct(Message $message, $responseBody) { $data = \json_decode($responseBody, true); if ($data === null) { throw new Exception("Malformed response body. " . $responseBody, Exception::MALFORMED_RESPONSE); } if (!$data['error']) { $this->messageId = (isset($data['message_id'])) ? $data['message_id'] : null; $this->multicastId = $data['multicast_id']; $this->failure = $data['failure']; $this->success = (!$this->multicastId) ? 1 : $data['success']; $this->canonicalIds = $data['canonical_ids']; $this->results = []; $this->parseResults($message, $data); } else { $this->error = $data['error']; $this->messageId = (isset($data['message_id'])) ? $data['message_id'] : null; $this->failure = (!isset($data['failure'])) ? 1 : $data['failure']; } } /** * @return int */ public function getMulticastId() { return $this->multicastId; } /** * @return int|null */ public function getMessageId() { return $this->messageId; } /** * @return int */ public function getSuccessCount() { return $this->success; } /** * @return int */ public function getFailureCount() { return $this->failure; } /** * @return int */ public function getNewRegistrationIdsCount() { return $this->canonicalIds; } /** * @return array */ public function getResults() { return $this->results; } /** * @return string */ public function getError() { return $this->error; } /** * Return an array of expired registration ids linked to new id * All old registration ids must be updated to new ones in DB * * @return array oldRegistrationId => newRegistrationId */ public function getNewRegistrationIds() { if ($this->getNewRegistrationIdsCount() == 0) { return []; } $filteredResults = array_filter($this->results, function ($result) { return isset($result['registration_id']); }); $data = array_map(function ($result) { return 
$result['registration_id']; }, $filteredResults); return $data; } /** * Returns an array containing invalid registration ids * They must be removed from DB because the application was uninstalled from the device. * * @return array */ public function getInvalidRegistrationIds() { if ($this->getFailureCount() == 0) { return []; } $filteredResults = array_filter($this->results, function ($result) { return ( isset($result['error']) && ( ($result['error'] == "NotRegistered") || ($result['error'] == "InvalidRegistration") ) ); }); return array_keys($filteredResults); } /** * Returns an array of registration ids for which you must resend a message, * cause devices are not available now. * * @return array */ public function getUnavailableRegistrationIds() { if ($this->getFailureCount() == 0) { return []; } $filteredResults = array_filter($this->results, function ($result) { return ( isset($result['error']) && ($result['error'] == "Unavailable") ); }); return array_keys($filteredResults); } /** * Parse result array with correct data * * @param Message $message * @param array $response */ private function parseResults(Message $message, array $response) { if (is_array($message->getRecipients())) { foreach ($message->getRecipients() as $key => $registrationId) { $this->results[$registrationId] = $response['results'][$key]; } } else { $this->results[$message->getRecipients()] = $response['results']; } } }
bigbank-as/GCM
src/Bigbank/Gcm/Response.php
PHP
apache-2.0
6,729
package org.jboss.examples.ticketmonster.model; import static javax.persistence.CascadeType.ALL; import static javax.persistence.FetchType.EAGER; import static javax.persistence.GenerationType.IDENTITY; import java.io.Serializable; import java.util.HashSet; import java.util.Set; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import org.hibernate.validator.constraints.NotEmpty; /** * <p> * Represents a single venue * </p> * * @author Shane Bryzak * @author Pete Muir */ /* * We suppress the warning about not specifying a serialVersionUID, as we are still developing this app, and want the JVM to * generate the serialVersionUID for us. When we put this app into production, we'll generate and embed the serialVersionUID */ @SuppressWarnings("serial") @Entity public class Venue implements Serializable { /* Declaration of fields */ /** * The synthetic id of the object. */ @Id @GeneratedValue(strategy = IDENTITY) private Long id; /** * <p> * The name of the event. * </p> * * <p> * The name of the event forms it's natural identity and cannot be shared between events. * </p> * * <p> * The name must not be null and must be one or more characters, the Bean Validation constrain <code>@NotEmpty</code> * enforces this. * </p> */ @Column(unique = true) @NotEmpty private String name; /** * The address of the venue */ private Address address = new Address(); /** * A description of the venue */ private String description; /** * <p> * A set of sections in the venue * </p> * * <p> * The <code>@OneToMany<code> JPA mapping establishes this relationship. TODO Explain EAGER fetch. * This relationship is bi-directional (a section knows which venue it is part of), and the <code>mappedBy</code> * attribute establishes this. 
We cascade all persistence operations to the set of performances, so, for example if a venue * is removed, then all of it's sections will also be removed. * </p> */ @OneToMany(cascade = ALL, fetch = EAGER, mappedBy = "venue") private Set<Section> sections = new HashSet<Section>(); /** * The capacity of the venue */ private int capacity; /** * An optional media item to entice punters to the venue. The <code>@ManyToOne</code> establishes the relationship. */ @ManyToOne private MediaItem mediaItem; /* Boilerplate getters and setters */ public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public Address getAddress() { return address; } public void setAddress(Address address) { this.address = address; } public MediaItem getMediaItem() { return mediaItem; } public void setMediaItem(MediaItem description) { this.mediaItem = description; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public Set<Section> getSections() { return sections; } public void setSections(Set<Section> sections) { this.sections = sections; } public int getCapacity() { return capacity; } public void setCapacity(int capacity) { this.capacity = capacity; } /* toString(), equals() and hashCode() for Venue, using the natural identity of the object */ @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Venue venue = (Venue) o; if (address != null ? !address.equals(venue.address) : venue.address != null) return false; if (name != null ? !name.equals(venue.name) : venue.name != null) return false; return true; } @Override public int hashCode() { int result = name != null ? name.hashCode() : 0; result = 31 * result + (address != null ? 
address.hashCode() : 0); return result; } @Override public String toString() { return name; } }
jim-minter/ose3-demos
git/monster/src/main/java/org/jboss/examples/ticketmonster/model/Venue.java
Java
apache-2.0
4,603
import re import unicodedata from collections import defaultdict from typing import Any, Dict, List, Optional, Sequence, Union from django.conf import settings from django.core.exceptions import ValidationError from django.db.models.query import QuerySet from django.forms.models import model_to_dict from django.utils.translation import gettext as _ from typing_extensions import TypedDict from zulip_bots.custom_exceptions import ConfigValidationError from zerver.lib.avatar import avatar_url, get_avatar_field from zerver.lib.cache import ( bulk_cached_fetch, realm_user_dict_fields, user_profile_by_id_cache_key, user_profile_cache_key_id, ) from zerver.lib.exceptions import OrganizationAdministratorRequired from zerver.lib.request import JsonableError from zerver.lib.timezone import canonicalize_timezone from zerver.models import ( CustomProfileField, CustomProfileFieldValue, Realm, Service, UserProfile, get_realm_user_dicts, get_user_profile_by_id_in_realm, ) def check_full_name(full_name_raw: str) -> str: full_name = full_name_raw.strip() if len(full_name) > UserProfile.MAX_NAME_LENGTH: raise JsonableError(_("Name too long!")) if len(full_name) < UserProfile.MIN_NAME_LENGTH: raise JsonableError(_("Name too short!")) for character in full_name: if unicodedata.category(character)[0] == "C" or character in UserProfile.NAME_INVALID_CHARS: raise JsonableError(_("Invalid characters in name!")) # Names ending with e.g. `|15` could be ambiguous for # sloppily-written parsers of our Markdown syntax for mentioning # users with ambiguous names, and likely have no real use, so we # ban them. if re.search(r"\|\d+$", full_name_raw): raise JsonableError(_("Invalid format!")) return full_name # NOTE: We don't try to absolutely prevent 2 bots from having the same # name (e.g. you can get there by reactivating a deactivated bot after # making a new bot with the same name). This is just a check designed # to make it unlikely to happen by accident. 
def check_bot_name_available(realm_id: int, full_name: str) -> None: dup_exists = UserProfile.objects.filter( realm_id=realm_id, full_name=full_name.strip(), is_active=True, ).exists() if dup_exists: raise JsonableError(_("Name is already in use!")) def check_short_name(short_name_raw: str) -> str: short_name = short_name_raw.strip() if len(short_name) == 0: raise JsonableError(_("Bad name or username")) return short_name def check_valid_bot_config(bot_type: int, service_name: str, config_data: Dict[str, str]) -> None: if bot_type == UserProfile.INCOMING_WEBHOOK_BOT: from zerver.lib.integrations import WEBHOOK_INTEGRATIONS config_options = None for integration in WEBHOOK_INTEGRATIONS: if integration.name == service_name: # key: validator config_options = {c[1]: c[2] for c in integration.config_options} break if not config_options: raise JsonableError(_("Invalid integration '{}'.").format(service_name)) missing_keys = set(config_options.keys()) - set(config_data.keys()) if missing_keys: raise JsonableError( _("Missing configuration parameters: {}").format( missing_keys, ) ) for key, validator in config_options.items(): value = config_data[key] error = validator(key, value) if error: raise JsonableError(_("Invalid {} value {} ({})").format(key, value, error)) elif bot_type == UserProfile.EMBEDDED_BOT: try: from zerver.lib.bot_lib import get_bot_handler bot_handler = get_bot_handler(service_name) if hasattr(bot_handler, "validate_config"): bot_handler.validate_config(config_data) except ConfigValidationError: # The exception provides a specific error message, but that # message is not tagged translatable, because it is # triggered in the external zulip_bots package. # TODO: Think of some clever way to provide a more specific # error message. raise JsonableError(_("Invalid configuration data!")) # Adds an outgoing webhook or embedded bot service. 
def add_service(
    name: str,
    user_profile: UserProfile,
    base_url: Optional[str] = None,
    interface: Optional[int] = None,
    token: Optional[str] = None,
) -> None:
    """Create a Service row attached to the given (bot) user profile."""
    Service.objects.create(
        name=name, user_profile=user_profile, base_url=base_url, interface=interface, token=token
    )


def check_bot_creation_policy(user_profile: UserProfile, bot_type: int) -> None:
    """Raise OrganizationAdministratorRequired if the realm's
    bot_creation_policy forbids this user creating a bot of bot_type.
    """
    # Realm administrators can always add bot
    if user_profile.is_realm_admin:
        return

    if user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_EVERYONE:
        return
    if user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_ADMINS_ONLY:
        raise OrganizationAdministratorRequired()
    if (
        user_profile.realm.bot_creation_policy == Realm.BOT_CREATION_LIMIT_GENERIC_BOTS
        and bot_type == UserProfile.DEFAULT_BOT
    ):
        raise OrganizationAdministratorRequired()


def check_valid_bot_type(user_profile: UserProfile, bot_type: int) -> None:
    """Raise JsonableError unless bot_type is one this user may create."""
    if bot_type not in user_profile.allowed_bot_types:
        raise JsonableError(_("Invalid bot type"))


def check_valid_interface_type(interface_type: Optional[int]) -> None:
    """Raise JsonableError unless interface_type is a valid Service interface."""
    if interface_type not in Service.ALLOWED_INTERFACE_TYPES:
        raise JsonableError(_("Invalid interface type"))


def is_administrator_role(role: int) -> bool:
    """Whether the given role value carries realm-administrator powers
    (realm owners are administrators too)."""
    return role in {UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_REALM_OWNER}


def bulk_get_users(
    emails: List[str], realm: Optional[Realm], base_query: Optional["QuerySet[UserProfile]"] = None
) -> Dict[str, UserProfile]:
    """Fetch UserProfiles for the given emails (matched case-insensitively),
    going through a shared cache keyed on the lowercased email.

    If base_query is None, realm must be provided and the lookup is
    restricted to active users in that realm; otherwise base_query is used
    as the starting queryset (see the WARNING below).
    """
    if base_query is None:
        assert realm is not None
        query = UserProfile.objects.filter(realm=realm, is_active=True)
        realm_id = realm.id
    else:
        # WARNING: Currently, this code path only really supports one
        # version of `base_query` being used (because otherwise,
        # they'll share the cache, which can screw up the filtering).
        # If you're using this flow, you'll need to re-do any filters
        # in base_query in the code itself; base_query is just a perf
        # optimization.
        query = base_query
        realm_id = 0

    def fetch_users_by_email(emails: List[str]) -> List[UserProfile]:
        # This should be just
        #
        # UserProfile.objects.select_related("realm").filter(email__iexact__in=emails,
        #                                                    realm=realm)
        #
        # But chaining __in and __iexact doesn't work with Django's
        # ORM, so we have the following hack to construct the relevant where clause
        where_clause = "upper(zerver_userprofile.email::text) IN (SELECT upper(email) FROM unnest(%s) AS email)"
        return query.select_related("realm").extra(where=[where_clause], params=(emails,))

    def user_to_email(user_profile: UserProfile) -> str:
        # Cache/result keys are the lowercased email.
        return user_profile.email.lower()

    return bulk_cached_fetch(
        # Use a separate cache key to protect us from conflicts with
        # the get_user cache.
        lambda email: "bulk_get_users:" + user_profile_cache_key_id(email, realm_id),
        fetch_users_by_email,
        [email.lower() for email in emails],
        id_fetcher=user_to_email,
    )


def get_user_id(user: UserProfile) -> int:
    """id_fetcher helper for bulk_cached_fetch: key users by their ID."""
    return user.id


def user_ids_to_users(user_ids: Sequence[int], realm: Realm) -> List[UserProfile]:
    """Resolve user_ids to UserProfile objects (via the per-user cache),
    raising JsonableError if any ID is unknown or not in the given realm.
    """
    # TODO: Consider adding a flag to control whether deactivated
    # users should be included.

    def fetch_users_by_id(user_ids: List[int]) -> List[UserProfile]:
        return list(UserProfile.objects.filter(id__in=user_ids).select_related())

    user_profiles_by_id: Dict[int, UserProfile] = bulk_cached_fetch(
        cache_key_function=user_profile_by_id_cache_key,
        query_function=fetch_users_by_id,
        object_ids=user_ids,
        id_fetcher=get_user_id,
    )

    # Report the first missing ID, if any, before doing realm checks.
    found_user_ids = user_profiles_by_id.keys()
    missed_user_ids = [user_id for user_id in user_ids if user_id not in found_user_ids]
    if missed_user_ids:
        raise JsonableError(_("Invalid user ID: {}").format(missed_user_ids[0]))

    user_profiles = list(user_profiles_by_id.values())
    for user_profile in user_profiles:
        if user_profile.realm != realm:
            raise JsonableError(_("Invalid user ID: {}").format(user_profile.id))
    return user_profiles


def access_bot_by_id(user_profile: UserProfile, user_id: int) -> UserProfile:
    """Fetch a bot in user_profile's realm by ID, verifying that
    user_profile has administrative rights over it.
    """
    try:
        target = get_user_profile_by_id_in_realm(user_id, user_profile.realm)
    except UserProfile.DoesNotExist:
        raise JsonableError(_("No such bot"))
    if not target.is_bot:
        raise JsonableError(_("No such bot"))
    if not user_profile.can_admin_user(target):
        raise JsonableError(_("Insufficient permission"))
    return target


def access_user_by_id(
    user_profile: UserProfile,
    target_user_id: int,
    *,
    allow_deactivated: bool = False,
    allow_bots: bool = False,
    for_admin: bool,
) -> UserProfile:
    """Master function for accessing another user by ID in API code;
    verifies the user ID is in the same realm, and if requested checks
    for administrative privileges, with flags for various special cases.
    """
    try:
        target = get_user_profile_by_id_in_realm(target_user_id, user_profile.realm)
    except UserProfile.DoesNotExist:
        raise JsonableError(_("No such user"))
    if target.is_bot and not allow_bots:
        raise JsonableError(_("No such user"))
    if not target.is_active and not allow_deactivated:
        raise JsonableError(_("User is deactivated"))
    if not for_admin:
        # Administrative access is not required just to read a user.
        return target
    if not user_profile.can_admin_user(target):
        raise JsonableError(_("Insufficient permission"))
    return target


class Accounts(TypedDict):
    # One entry per realm in which a given delivery email has an
    # active account; see get_accounts_for_email.
    realm_name: str
    realm_id: int
    full_name: str
    avatar: Optional[str]


def get_accounts_for_email(email: str) -> List[Accounts]:
    """List active, non-bot accounts (across realms) whose delivery email
    matches `email` case-insensitively, oldest account first.
    """
    profiles = (
        UserProfile.objects.select_related("realm")
        .filter(
            delivery_email__iexact=email.strip(),
            is_active=True,
            realm__deactivated=False,
            is_bot=False,
        )
        .order_by("date_joined")
    )
    accounts: List[Accounts] = []
    for profile in profiles:
        accounts.append(
            dict(
                realm_name=profile.realm.name,
                realm_id=profile.realm.id,
                full_name=profile.full_name,
                avatar=avatar_url(profile),
            )
        )
    return accounts


def get_api_key(user_profile: UserProfile) -> str:
    """Return the user's (single) API key."""
    return user_profile.api_key


def get_all_api_keys(user_profile: UserProfile) -> List[str]:
    # Users can only have one API key for now
    return [user_profile.api_key]


def validate_user_custom_profile_field(
    realm_id: int, field: CustomProfileField, value: Union[int, str, List[int]]
) -> Union[int, str, List[int]]:
    """Validate (and possibly normalize) a single custom-profile-field
    value according to the field's type, dispatching to the validator
    registered for that type.  Raises on invalid input; AssertionError
    for an unknown field type.
    """
    validators = CustomProfileField.FIELD_VALIDATORS
    field_type = field.field_type
    var_name = f"{field.name}"
    if field_type in validators:
        validator = validators[field_type]
        return validator(var_name, value)
    elif field_type == CustomProfileField.SELECT:
        choice_field_validator = CustomProfileField.SELECT_FIELD_VALIDATORS[field_type]
        field_data = field.field_data
        # Put an assertion so that mypy doesn't complain.
        assert field_data is not None
        return choice_field_validator(var_name, field_data, value)
    elif field_type == CustomProfileField.USER:
        user_field_validator = CustomProfileField.USER_FIELD_VALIDATORS[field_type]
        return user_field_validator(realm_id, value, False)
    else:
        raise AssertionError("Invalid field type")


def validate_user_custom_profile_data(
    realm_id: int, profile_data: List[Dict[str, Union[int, str, List[int]]]]
) -> None:
    # This function validates all custom field values according to their field type.
    for item in profile_data:
        field_id = item["id"]
        try:
            field = CustomProfileField.objects.get(id=field_id)
        except CustomProfileField.DoesNotExist:
            raise JsonableError(_("Field id {id} not found.").format(id=field_id))

        try:
            validate_user_custom_profile_field(realm_id, field, item["value"])
        except ValidationError as error:
            raise JsonableError(error.message)


def can_access_delivery_email(user_profile: UserProfile) -> bool:
    """Whether this user may see other users' delivery (real) email
    addresses, per the realm's email_address_visibility policy.
    """
    realm = user_profile.realm
    if realm.email_address_visibility == Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS:
        return user_profile.is_realm_admin
    if realm.email_address_visibility == Realm.EMAIL_ADDRESS_VISIBILITY_MODERATORS:
        return user_profile.is_realm_admin or user_profile.is_moderator
    return False


def format_user_row(
    realm: Realm,
    acting_user: Optional[UserProfile],
    row: Dict[str, Any],
    client_gravatar: bool,
    user_avatar_url_field_optional: bool,
    custom_profile_field_data: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """Formats a user row returned by a database fetch using
    .values(*realm_user_dict_fields) into a dictionary representation
    of that user for API delivery to clients.  The acting_user
    argument is used for permissions checks.
    """
    is_admin = is_administrator_role(row["role"])
    is_owner = row["role"] == UserProfile.ROLE_REALM_OWNER
    is_guest = row["role"] == UserProfile.ROLE_GUEST
    is_bot = row["is_bot"]
    result = dict(
        email=row["email"],
        user_id=row["id"],
        avatar_version=row["avatar_version"],
        is_admin=is_admin,
        is_owner=is_owner,
        is_guest=is_guest,
        is_billing_admin=row["is_billing_admin"],
        role=row["role"],
        is_bot=is_bot,
        full_name=row["full_name"],
        timezone=canonicalize_timezone(row["timezone"]),
        is_active=row["is_active"],
        date_joined=row["date_joined"].isoformat(),
    )

    # Zulip clients that support using `GET /avatar/{user_id}` as a
    # fallback if we didn't send an avatar URL in the user object pass
    # user_avatar_url_field_optional in client_capabilities.
    #
    # This is a major network performance optimization for
    # organizations with 10,000s of users where we would otherwise
    # send avatar URLs in the payload (either because most users have
    # uploaded avatars or because EMAIL_ADDRESS_VISIBILITY_ADMINS
    # prevents the older client_gravatar optimization from helping).
    # The performance impact is large largely because the hashes in
    # avatar URLs structurally cannot compress well.
    #
    # The user_avatar_url_field_optional gives the server sole
    # discretion in deciding for which users we want to send the
    # avatar URL (Which saves clients an RTT at the cost of some
    # bandwidth).  At present, the server looks at `long_term_idle` to
    # decide which users to include avatars for, piggy-backing on a
    # different optimization for organizations with 10,000s of users.
    include_avatar_url = not user_avatar_url_field_optional or not row["long_term_idle"]
    if include_avatar_url:
        result["avatar_url"] = get_avatar_field(
            user_id=row["id"],
            realm_id=realm.id,
            email=row["delivery_email"],
            avatar_source=row["avatar_source"],
            avatar_version=row["avatar_version"],
            medium=False,
            client_gravatar=client_gravatar,
        )

    if acting_user is not None and can_access_delivery_email(acting_user):
        result["delivery_email"] = row["delivery_email"]

    if is_bot:
        result["bot_type"] = row["bot_type"]
        if row["email"] in settings.CROSS_REALM_BOT_EMAILS:
            result["is_cross_realm_bot"] = True

        # Note that bot_owner_id can be None with legacy data.
        result["bot_owner_id"] = row["bot_owner_id"]
    elif custom_profile_field_data is not None:
        result["profile_data"] = custom_profile_field_data
    return result


def user_profile_to_user_row(user_profile: UserProfile) -> Dict[str, Any]:
    # What we're trying to do is simulate the user_profile having been
    # fetched from a QuerySet using `.values(*realm_user_dict_fields)`
    # even though we fetched UserProfile objects.  This is messier
    # than it seems.
    #
    # What we'd like to do is just call model_to_dict(user,
    # fields=realm_user_dict_fields).  The problem with this is
    # that model_to_dict has a different convention than
    # `.values()` in its handling of foreign keys, naming them as
    # e.g. `bot_owner`, not `bot_owner_id`; we work around that
    # here.
    #
    # This could be potentially simplified in the future by
    # changing realm_user_dict_fields to name the bot owner with
    # the less readable `bot_owner` (instead of `bot_owner_id`).
    user_row = model_to_dict(user_profile, fields=[*realm_user_dict_fields, "bot_owner"])
    user_row["bot_owner_id"] = user_row["bot_owner"]
    del user_row["bot_owner"]
    return user_row


def get_cross_realm_dicts() -> List[Dict[str, Any]]:
    """Return API-format user rows for the system (cross-realm) bots."""
    users = bulk_get_users(
        list(settings.CROSS_REALM_BOT_EMAILS),
        None,
        base_query=UserProfile.objects.filter(realm__string_id=settings.SYSTEM_BOT_REALM),
    ).values()
    result = []
    for user in users:
        # Important: We filter here, in addition to in
        # `base_query`, because of how bulk_get_users shares its
        # cache with other UserProfile caches.
        if user.realm.string_id != settings.SYSTEM_BOT_REALM:  # nocoverage
            continue
        user_row = user_profile_to_user_row(user)
        # Because we want to avoid clients being exposed to the
        # implementation detail that these bots are self-owned, we
        # just set bot_owner_id=None.
        user_row["bot_owner_id"] = None

        result.append(
            format_user_row(
                user.realm,
                acting_user=user,
                row=user_row,
                client_gravatar=False,
                user_avatar_url_field_optional=False,
                custom_profile_field_data=None,
            )
        )

    return result


def get_custom_profile_field_values(
    custom_profile_field_values: List[CustomProfileFieldValue],
) -> Dict[int, Dict[str, Any]]:
    """Group custom-profile-field values by user ID, keyed by the field ID
    (as a string); renderable fields also carry their rendered value.
    """
    profiles_by_user_id: Dict[int, Dict[str, Any]] = defaultdict(dict)
    for profile_field in custom_profile_field_values:
        user_id = profile_field.user_profile_id
        if profile_field.field.is_renderable():
            profiles_by_user_id[user_id][str(profile_field.field_id)] = {
                "value": profile_field.value,
                "rendered_value": profile_field.rendered_value,
            }
        else:
            profiles_by_user_id[user_id][str(profile_field.field_id)] = {
                "value": profile_field.value,
            }
    return profiles_by_user_id


def get_raw_user_data(
    realm: Realm,
    acting_user: Optional[UserProfile],
    *,
    target_user: Optional[UserProfile] = None,
    client_gravatar: bool,
    user_avatar_url_field_optional: bool,
    include_custom_profile_fields: bool = True,
) -> Dict[int, Dict[str, Any]]:
    """Fetches data about the target user(s) appropriate for sending to
    acting_user via the standard format for the Zulip API.  If
    target_user is None, we fetch all users in the realm.
    """
    profiles_by_user_id = None
    custom_profile_field_data = None
    # target_user is an optional parameter which is passed when user data of a specific user
    # is required. It is 'None' otherwise.
    if target_user is not None:
        user_dicts = [user_profile_to_user_row(target_user)]
    else:
        user_dicts = get_realm_user_dicts(realm.id)

    if include_custom_profile_fields:
        base_query = CustomProfileFieldValue.objects.select_related("field")
        # TODO: Consider optimizing this query away with caching.
        if target_user is not None:
            custom_profile_field_values = base_query.filter(user_profile=target_user)
        else:
            custom_profile_field_values = base_query.filter(field__realm_id=realm.id)
        profiles_by_user_id = get_custom_profile_field_values(custom_profile_field_values)

    result = {}
    for row in user_dicts:
        if profiles_by_user_id is not None:
            custom_profile_field_data = profiles_by_user_id.get(row["id"], {})
        result[row["id"]] = format_user_row(
            realm,
            acting_user=acting_user,
            row=row,
            client_gravatar=client_gravatar,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            custom_profile_field_data=custom_profile_field_data,
        )
    return result
punchagan/zulip
zerver/lib/users.py
Python
apache-2.0
21,312
# Copyright 2013 Donald Stufft # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import, division, print_function from __future__ import unicode_literals import hashlib import os from elasticsearch import Elasticsearch, TransportError from elasticsearch.helpers import bulk_index from warehouse.utils import AttributeDict class Index(object): _index = "warehouse" def __init__(self, models, config): self.models = models self.config = config self.es = Elasticsearch( hosts=self.config.hosts, **self.config.get("client_options", {}) ) self.types = AttributeDict() def register(self, type_): obj = type_(self) self.types[obj._type] = obj def reindex(self, index=None, alias=True, keep_old=False): # Generate an Index Name for Warehouse index = "".join([ index if index is not None else self._index, hashlib.md5(os.urandom(16)).hexdigest()[:8], ]) # Create this index self.es.indices.create(index, { "mappings": { doc_type._type: doc_type.get_mapping() for doc_type in self.types.values() }, }) # Index everything into the new index for doc_type in self.types.values(): doc_type.index_all(index=index) # Update the alias unless we've been told not to if alias: self.update_alias(self._index, index, keep_old=keep_old) def update_alias(self, alias, index, keep_old=False): # Get the old index from ElasticSearch try: old_index = self.es.indices.get_alias(self._index).keys()[0] except TransportError as exc: if not exc.status_code == 404: raise old_index = None # Remove the 
alias to the old index if it exists if old_index is not None: actions = [{"remove": {"index": old_index, "alias": alias}}] else: actions = [] # Add the alias to the new index actions += [{"add": {"index": index, "alias": alias}}] # Update To the New Index self.es.indices.update_aliases({"actions": actions}) # Delete the old index if it exists and unless we're keeping it if not keep_old and old_index is not None: self.es.indices.delete(old_index) class BaseMapping(object): SEARCH_LIMIT = 25 def __init__(self, index): self.index = index def get_mapping(self): raise NotImplementedError def get_indexable(self): raise NotImplementedError def extract_id(self, item): raise NotImplementedError def extract_document(self, item): raise NotImplementedError def index_all(self, index=None): # Determine which index we are indexing into _index = index if index is not None else self.index._index # Bulk Index our documents bulk_index( self.index.es, [ { "_index": _index, "_type": self._type, "_id": self.extract_id(item), "_source": self.extract_document(item), } for item in self.get_indexable() ], ) def search(self, query): raise NotImplementedError
mattrobenolt/warehouse
warehouse/search/indexes.py
Python
apache-2.0
3,926
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui.components; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.wm.IdeGlassPane; import com.intellij.ui.IdeBorderFactory; import com.intellij.util.ArrayUtil; import com.intellij.util.ReflectionUtil; import com.intellij.util.ui.ButtonlessScrollBarUI; import com.intellij.util.ui.JBInsets; import com.intellij.util.ui.RegionPainter; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.Border; import javax.swing.border.LineBorder; import javax.swing.plaf.ScrollBarUI; import javax.swing.plaf.ScrollPaneUI; import javax.swing.plaf.UIResource; import javax.swing.plaf.basic.BasicScrollBarUI; import javax.swing.plaf.basic.BasicScrollPaneUI; import java.awt.*; import java.awt.event.InputEvent; import java.awt.event.MouseEvent; import java.awt.event.MouseWheelEvent; import java.awt.event.MouseWheelListener; import java.lang.reflect.Field; public class JBScrollPane extends JScrollPane { /** * This key is used to specify which colors should use the scroll bars on the pane. * If a client property is set to {@code true} the bar's brightness * will be modified according to the view's background. 
* * @see UIUtil#putClientProperty * @see UIUtil#isUnderDarcula */ public static final Key<Boolean> BRIGHTNESS_FROM_VIEW = Key.create("JB_SCROLL_PANE_BRIGHTNESS_FROM_VIEW"); @Deprecated public static final RegionPainter<Float> THUMB_PAINTER = ScrollPainter.EditorThumb.DEFAULT; @Deprecated public static final RegionPainter<Float> THUMB_DARK_PAINTER = ScrollPainter.EditorThumb.DARCULA; @Deprecated public static final RegionPainter<Float> MAC_THUMB_PAINTER = ScrollPainter.EditorThumb.Mac.DEFAULT; @Deprecated public static final RegionPainter<Float> MAC_THUMB_DARK_PAINTER = ScrollPainter.EditorThumb.Mac.DARCULA; private static final Logger LOG = Logger.getInstance(JBScrollPane.class); private int myViewportBorderWidth = -1; private boolean myHasOverlayScrollbars; private volatile boolean myBackgroundRequested; // avoid cyclic references public JBScrollPane(int viewportWidth) { init(false); myViewportBorderWidth = viewportWidth; updateViewportBorder(); } public JBScrollPane() { init(); } public JBScrollPane(Component view) { super(view); init(); } public JBScrollPane(int vsbPolicy, int hsbPolicy) { super(vsbPolicy, hsbPolicy); init(); } public JBScrollPane(Component view, int vsbPolicy, int hsbPolicy) { super(view, vsbPolicy, hsbPolicy); init(); } @Override public Color getBackground() { Color color = super.getBackground(); if (!myBackgroundRequested && EventQueue.isDispatchThread() && Registry.is("ide.scroll.background.auto")) { if (!isBackgroundSet() || color instanceof UIResource) { Component child = getViewport(); if (child != null) { try { myBackgroundRequested = true; return child.getBackground(); } finally { myBackgroundRequested = false; } } } } return color; } static Color getViewBackground(JScrollPane pane) { if (pane == null) return null; JViewport viewport = pane.getViewport(); if (viewport == null) return null; Component view = viewport.getView(); if (view == null) return null; return view.getBackground(); } public static JScrollPane findScrollPane(Component 
c) { if (c == null) return null; if (!(c instanceof JViewport)) { Container vp = c.getParent(); if (vp instanceof JViewport) c = vp; } c = c.getParent(); if (!(c instanceof JScrollPane)) return null; return (JScrollPane)c; } private void init() { init(true); } private void init(boolean setupCorners) { setLayout(Registry.is("ide.scroll.new.layout") ? new Layout() : new ScrollPaneLayout()); if (setupCorners) { setupCorners(); } } protected void setupCorners() { setBorder(IdeBorderFactory.createBorder()); setCorner(UPPER_RIGHT_CORNER, new Corner(UPPER_RIGHT_CORNER)); setCorner(UPPER_LEFT_CORNER, new Corner(UPPER_LEFT_CORNER)); setCorner(LOWER_RIGHT_CORNER, new Corner(LOWER_RIGHT_CORNER)); setCorner(LOWER_LEFT_CORNER, new Corner(LOWER_LEFT_CORNER)); } @Override public void setUI(ScrollPaneUI ui) { super.setUI(ui); updateViewportBorder(); if (ui instanceof BasicScrollPaneUI) { try { Field field = BasicScrollPaneUI.class.getDeclaredField("mouseScrollListener"); field.setAccessible(true); Object value = field.get(ui); if (value instanceof MouseWheelListener) { MouseWheelListener oldListener = (MouseWheelListener)value; MouseWheelListener newListener = event -> { if (isScrollEvent(event)) { Object source = event.getSource(); if (source instanceof JScrollPane) { JScrollPane pane = (JScrollPane)source; if (pane.isWheelScrollingEnabled()) { JScrollBar bar = event.isShiftDown() ? 
pane.getHorizontalScrollBar() : pane.getVerticalScrollBar(); if (bar != null && bar.isVisible()) oldListener.mouseWheelMoved(event); } } } }; field.set(ui, newListener); // replace listener if field updated successfully removeMouseWheelListener(oldListener); addMouseWheelListener(newListener); } } catch (Exception exception) { LOG.warn(exception); } } } @Override public boolean isOptimizedDrawingEnabled() { if (getLayout() instanceof Layout) { return isOptimizedDrawingEnabledFor(getVerticalScrollBar()) && isOptimizedDrawingEnabledFor(getHorizontalScrollBar()); } return !myHasOverlayScrollbars; } /** * Returns {@code false} for visible translucent scroll bars, or {@code true} otherwise. * It is needed to repaint translucent scroll bars on viewport repainting. */ private static boolean isOptimizedDrawingEnabledFor(JScrollBar bar) { return bar == null || bar.isOpaque() || !bar.isVisible(); } private void updateViewportBorder() { if (getViewportBorder() instanceof ViewportBorder) { setViewportBorder(new ViewportBorder(myViewportBorderWidth >= 0 ? myViewportBorderWidth : 1)); } } public static ViewportBorder createIndentBorder() { return new ViewportBorder(2); } @Override public JScrollBar createVerticalScrollBar() { return new MyScrollBar(Adjustable.VERTICAL); } @NotNull @Override public JScrollBar createHorizontalScrollBar() { return new MyScrollBar(Adjustable.HORIZONTAL); } @Override protected JViewport createViewport() { return new JBViewport(); } @SuppressWarnings("deprecation") @Override public void layout() { LayoutManager layout = getLayout(); ScrollPaneLayout scrollLayout = layout instanceof ScrollPaneLayout ? (ScrollPaneLayout)layout : null; // Now we let JScrollPane layout everything as necessary super.layout(); if (layout instanceof Layout) return; if (scrollLayout != null) { // Now it's time to jump in and expand the viewport so it fits the whole area // (taking into consideration corners, headers and other stuff). 
myHasOverlayScrollbars = relayoutScrollbars( this, scrollLayout, myHasOverlayScrollbars // If last time we did relayouting, we should restore it back. ); } else { myHasOverlayScrollbars = false; } } private boolean relayoutScrollbars(@NotNull JComponent container, @NotNull ScrollPaneLayout layout, boolean forceRelayout) { JViewport viewport = layout.getViewport(); if (viewport == null) return false; JScrollBar vsb = layout.getVerticalScrollBar(); JScrollBar hsb = layout.getHorizontalScrollBar(); JViewport colHead = layout.getColumnHeader(); JViewport rowHead = layout.getRowHeader(); Rectangle viewportBounds = viewport.getBounds(); boolean extendViewportUnderVScrollbar = vsb != null && shouldExtendViewportUnderScrollbar(vsb); boolean extendViewportUnderHScrollbar = hsb != null && shouldExtendViewportUnderScrollbar(hsb); boolean hasOverlayScrollbars = extendViewportUnderVScrollbar || extendViewportUnderHScrollbar; if (!hasOverlayScrollbars && !forceRelayout) return false; container.setComponentZOrder(viewport, container.getComponentCount() - 1); if (vsb != null) container.setComponentZOrder(vsb, 0); if (hsb != null) container.setComponentZOrder(hsb, 0); if (extendViewportUnderVScrollbar) { int x2 = Math.max(vsb.getX() + vsb.getWidth(), viewportBounds.x + viewportBounds.width); viewportBounds.x = Math.min(viewportBounds.x, vsb.getX()); viewportBounds.width = x2 - viewportBounds.x; } if (extendViewportUnderHScrollbar) { int y2 = Math.max(hsb.getY() + hsb.getHeight(), viewportBounds.y + viewportBounds.height); viewportBounds.y = Math.min(viewportBounds.y, hsb.getY()); viewportBounds.height = y2 - viewportBounds.y; } if (extendViewportUnderVScrollbar) { if (hsb != null) { Rectangle scrollbarBounds = hsb.getBounds(); scrollbarBounds.width = viewportBounds.x + viewportBounds.width - scrollbarBounds.x; hsb.setBounds(scrollbarBounds); } if (colHead != null) { Rectangle headerBounds = colHead.getBounds(); headerBounds.width = viewportBounds.width; 
colHead.setBounds(headerBounds); } hideFromView(layout.getCorner(UPPER_RIGHT_CORNER)); hideFromView(layout.getCorner(LOWER_RIGHT_CORNER)); } if (extendViewportUnderHScrollbar) { if (vsb != null) { Rectangle scrollbarBounds = vsb.getBounds(); scrollbarBounds.height = viewportBounds.y + viewportBounds.height - scrollbarBounds.y; vsb.setBounds(scrollbarBounds); } if (rowHead != null) { Rectangle headerBounds = rowHead.getBounds(); headerBounds.height = viewportBounds.height; rowHead.setBounds(headerBounds); } hideFromView(layout.getCorner(LOWER_LEFT_CORNER)); hideFromView(layout.getCorner(LOWER_RIGHT_CORNER)); } viewport.setBounds(viewportBounds); return hasOverlayScrollbars; } private boolean shouldExtendViewportUnderScrollbar(@Nullable JScrollBar scrollbar) { if (scrollbar == null || !scrollbar.isVisible()) return false; return isOverlaidScrollbar(scrollbar); } protected boolean isOverlaidScrollbar(@Nullable JScrollBar scrollbar) { if (!ButtonlessScrollBarUI.isMacOverlayScrollbarSupported()) return false; ScrollBarUI vsbUI = scrollbar == null ? 
null : scrollbar.getUI(); return vsbUI instanceof ButtonlessScrollBarUI && !((ButtonlessScrollBarUI)vsbUI).alwaysShowTrack(); } private static void hideFromView(Component component) { if (component == null) return; component.setBounds(-10, -10, 1, 1); } private class MyScrollBar extends ScrollBar implements IdeGlassPane.TopComponent { public MyScrollBar(int orientation) { super(orientation); } @Override public void updateUI() { ScrollBarUI ui = getUI(); if (ui instanceof DefaultScrollBarUI) return; setUI(JBScrollBar.createUI(this)); } @Override public boolean canBePreprocessed(MouseEvent e) { return JBScrollPane.canBePreprocessed(e, this); } } public static boolean canBePreprocessed(MouseEvent e, JScrollBar bar) { if (e.getID() == MouseEvent.MOUSE_MOVED || e.getID() == MouseEvent.MOUSE_PRESSED) { ScrollBarUI ui = bar.getUI(); if (ui instanceof BasicScrollBarUI) { BasicScrollBarUI bui = (BasicScrollBarUI)ui; try { Rectangle rect = (Rectangle)ReflectionUtil.getDeclaredMethod(BasicScrollBarUI.class, "getThumbBounds", ArrayUtil.EMPTY_CLASS_ARRAY).invoke(bui); Point point = SwingUtilities.convertPoint(e.getComponent(), e.getX(), e.getY(), bar); return !rect.contains(point); } catch (Exception e1) { return true; } } else if (ui instanceof DefaultScrollBarUI) { DefaultScrollBarUI dui = (DefaultScrollBarUI)ui; Point point = e.getLocationOnScreen(); SwingUtilities.convertPointFromScreen(point, bar); return !dui.isThumbContains(point.x, point.y); } } return true; } private static class Corner extends JPanel { private final String myPos; public Corner(String pos) { myPos = pos; ScrollColorProducer.setBackground(this); ScrollColorProducer.setForeground(this); } @Override protected void paintComponent(Graphics g) { g.setColor(getBackground()); g.fillRect(0, 0, getWidth(), getHeight()); if (SystemInfo.isMac || !Registry.is("ide.scroll.track.border.paint")) return; g.setColor(getForeground()); int x2 = getWidth() - 1; int y2 = getHeight() - 1; if (myPos == UPPER_LEFT_CORNER || 
myPos == UPPER_RIGHT_CORNER) { g.drawLine(0, y2, x2, y2); } if (myPos == LOWER_LEFT_CORNER || myPos == LOWER_RIGHT_CORNER) { g.drawLine(0, 0, x2, 0); } if (myPos == UPPER_LEFT_CORNER || myPos == LOWER_LEFT_CORNER) { g.drawLine(x2, 0, x2, y2); } if (myPos == UPPER_RIGHT_CORNER || myPos == LOWER_RIGHT_CORNER) { g.drawLine(0, 0, 0, y2); } } } private static class ViewportBorder extends LineBorder { public ViewportBorder(int thickness) { super(null, thickness); } @Override public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) { updateColor(c); super.paintBorder(c, g, x, y, width, height); } private void updateColor(Component c) { if (!(c instanceof JScrollPane)) return; lineColor = getViewBackground((JScrollPane)c); } } /** * These client properties modify a scroll pane layout. * Use the class object as a property key. * * @see #putClientProperty(Object, Object) */ public enum Flip { NONE, VERTICAL, HORIZONTAL, BOTH } /** * These client properties show a component position on a scroll pane. * It is set by internal layout manager of the scroll pane. */ public enum Alignment { TOP, LEFT, RIGHT, BOTTOM; public static Alignment get(JComponent component) { if (component != null) { Object property = component.getClientProperty(Alignment.class); if (property instanceof Alignment) return (Alignment)property; Container parent = component.getParent(); if (parent instanceof JScrollPane) { JScrollPane pane = (JScrollPane)parent; if (component == pane.getColumnHeader()) { return TOP; } if (component == pane.getHorizontalScrollBar()) { return BOTTOM; } boolean ltr = pane.getComponentOrientation().isLeftToRight(); if (component == pane.getVerticalScrollBar()) { return ltr ? RIGHT : LEFT; } if (component == pane.getRowHeader()) { return ltr ? 
LEFT : RIGHT; } } // assume alignment for a scroll bar, // which is not contained in a scroll pane if (component instanceof JScrollBar) { JScrollBar bar = (JScrollBar)component; switch (bar.getOrientation()) { case Adjustable.HORIZONTAL: return BOTTOM; case Adjustable.VERTICAL: return bar.getComponentOrientation().isLeftToRight() ? RIGHT : LEFT; } } } return null; } } /** * ScrollPaneLayout implementation that supports * ScrollBar flipping and non-opaque ScrollBars. */ private static class Layout extends ScrollPaneLayout { private static final Insets EMPTY_INSETS = new Insets(0, 0, 0, 0); @Override public void layoutContainer(Container parent) { JScrollPane pane = (JScrollPane)parent; // Calculate inner bounds of the scroll pane Rectangle bounds = new Rectangle(pane.getWidth(), pane.getHeight()); JBInsets.removeFrom(bounds, pane.getInsets()); // Determine positions of scroll bars on the scroll pane Object property = pane.getClientProperty(Flip.class); Flip flip = property instanceof Flip ? (Flip)property : Flip.NONE; boolean hsbOnTop = flip == Flip.BOTH || flip == Flip.VERTICAL; boolean vsbOnLeft = pane.getComponentOrientation().isLeftToRight() ? flip == Flip.BOTH || flip == Flip.HORIZONTAL : flip == Flip.NONE || flip == Flip.VERTICAL; // If there's a visible row header remove the space it needs. // The row header is treated as if it were fixed width, arbitrary height. Rectangle rowHeadBounds = new Rectangle(bounds.x, 0, 0, 0); if (rowHead != null && rowHead.isVisible()) { rowHeadBounds.width = min(bounds.width, rowHead.getPreferredSize().width); bounds.width -= rowHeadBounds.width; if (vsbOnLeft) { rowHeadBounds.x += bounds.width; } else { bounds.x += rowHeadBounds.width; } } // If there's a visible column header remove the space it needs. // The column header is treated as if it were fixed height, arbitrary width. 
Rectangle colHeadBounds = new Rectangle(0, bounds.y, 0, 0); if (colHead != null && colHead.isVisible()) { colHeadBounds.height = min(bounds.height, colHead.getPreferredSize().height); bounds.height -= colHeadBounds.height; if (hsbOnTop) { colHeadBounds.y += bounds.height; } else { bounds.y += colHeadBounds.height; } } // If there's a JScrollPane.viewportBorder, remove the space it occupies Border border = pane.getViewportBorder(); Insets insets = border == null ? null : border.getBorderInsets(parent); JBInsets.removeFrom(bounds, insets); if (insets == null) insets = EMPTY_INSETS; // At this point: // colHeadBounds is correct except for its width and x // rowHeadBounds is correct except for its height and y // bounds - the space available for the viewport and scroll bars // Once we're through computing the dimensions of these three parts // we can go back and set the bounds for the corners and the dimensions of // colHeadBounds.x, colHeadBounds.width, rowHeadBounds.y, rowHeadBounds.height. boolean isEmpty = bounds.width < 0 || bounds.height < 0; Component view = viewport == null ? null : viewport.getView(); Dimension viewPreferredSize = view == null ? new Dimension() : view.getPreferredSize(); if (view instanceof JComponent) JBViewport.fixPreferredSize(viewPreferredSize, (JComponent)view, vsb, hsb); Dimension viewportExtentSize = viewport == null ? new Dimension() : viewport.toViewCoordinates(bounds.getSize()); // If the view is tracking the viewports width we don't bother with a horizontal scrollbar. // If the view is tracking the viewports height we don't bother with a vertical scrollbar. Scrollable scrollable = null; boolean viewTracksViewportWidth = false; boolean viewTracksViewportHeight = false; // Don't bother checking the Scrollable methods if there is no room for the viewport, // we aren't going to show any scroll bars in this case anyway. 
if (!isEmpty && view instanceof Scrollable) { scrollable = (Scrollable)view; viewTracksViewportWidth = scrollable.getScrollableTracksViewportWidth(); viewTracksViewportHeight = scrollable.getScrollableTracksViewportHeight(); } // If there's a vertical scroll bar and we need one, allocate space for it. // A vertical scroll bar is considered to be fixed width, arbitrary height. boolean vsbOpaque = false; boolean vsbNeeded = false; int vsbPolicy = pane.getVerticalScrollBarPolicy(); if (!isEmpty && vsbPolicy != VERTICAL_SCROLLBAR_NEVER) { vsbNeeded = vsbPolicy == VERTICAL_SCROLLBAR_ALWAYS || !viewTracksViewportHeight && viewPreferredSize.height > viewportExtentSize.height; } Rectangle vsbBounds = new Rectangle(0, bounds.y - insets.top, 0, 0); if (vsb != null) { if (!SystemInfo.isMac && view instanceof JTable) vsb.setOpaque(true); vsbOpaque = vsb.isOpaque(); if (vsbNeeded) { adjustForVSB(bounds, insets, vsbBounds, vsbOpaque, vsbOnLeft); if (vsbOpaque && viewport != null) { viewportExtentSize = viewport.toViewCoordinates(bounds.getSize()); } } } // If there's a horizontal scroll bar and we need one, allocate space for it. // A horizontal scroll bar is considered to be fixed height, arbitrary width. boolean hsbOpaque = false; boolean hsbNeeded = false; int hsbPolicy = pane.getHorizontalScrollBarPolicy(); if (!isEmpty && hsbPolicy != HORIZONTAL_SCROLLBAR_NEVER) { hsbNeeded = hsbPolicy == HORIZONTAL_SCROLLBAR_ALWAYS || !viewTracksViewportWidth && viewPreferredSize.width > viewportExtentSize.width; } Rectangle hsbBounds = new Rectangle(bounds.x - insets.left, 0, 0, 0); if (hsb != null) { if (!SystemInfo.isMac && view instanceof JTable) hsb.setOpaque(true); hsbOpaque = hsb.isOpaque(); if (hsbNeeded) { adjustForHSB(bounds, insets, hsbBounds, hsbOpaque, hsbOnTop); if (hsbOpaque && viewport != null) { // If we added the horizontal scrollbar and reduced the vertical space // we may have to add the vertical scrollbar, if that hasn't been done so already. 
if (vsb != null && !vsbNeeded && vsbPolicy != VERTICAL_SCROLLBAR_NEVER) { viewportExtentSize = viewport.toViewCoordinates(bounds.getSize()); vsbNeeded = viewPreferredSize.height > viewportExtentSize.height; if (vsbNeeded) adjustForVSB(bounds, insets, vsbBounds, vsbOpaque, vsbOnLeft); } } } } // Set the size of the viewport first, and then recheck the Scrollable methods. // Some components base their return values for the Scrollable methods on the size of the viewport, // so that if we don't ask after resetting the bounds we may have gotten the wrong answer. if (viewport != null) { viewport.setBounds(bounds); if (scrollable != null && hsbOpaque && vsbOpaque) { viewTracksViewportWidth = scrollable.getScrollableTracksViewportWidth(); viewTracksViewportHeight = scrollable.getScrollableTracksViewportHeight(); viewportExtentSize = viewport.toViewCoordinates(bounds.getSize()); boolean vsbNeededOld = vsbNeeded; if (vsb != null && vsbPolicy == VERTICAL_SCROLLBAR_AS_NEEDED) { boolean vsbNeededNew = !viewTracksViewportHeight && viewPreferredSize.height > viewportExtentSize.height; if (vsbNeeded != vsbNeededNew) { vsbNeeded = vsbNeededNew; if (vsbNeeded) { adjustForVSB(bounds, insets, vsbBounds, vsbOpaque, vsbOnLeft); } else if (vsbOpaque) { bounds.width += vsbBounds.width; } if (vsbOpaque) viewportExtentSize = viewport.toViewCoordinates(bounds.getSize()); } } boolean hsbNeededOld = hsbNeeded; if (hsb != null && hsbPolicy == HORIZONTAL_SCROLLBAR_AS_NEEDED) { boolean hsbNeededNew = !viewTracksViewportWidth && viewPreferredSize.width > viewportExtentSize.width; if (hsbNeeded != hsbNeededNew) { hsbNeeded = hsbNeededNew; if (hsbNeeded) { adjustForHSB(bounds, insets, hsbBounds, hsbOpaque, hsbOnTop); } else if (hsbOpaque) { bounds.height += hsbBounds.height; } if (hsbOpaque && vsb != null && !vsbNeeded && vsbPolicy != VERTICAL_SCROLLBAR_NEVER) { viewportExtentSize = viewport.toViewCoordinates(bounds.getSize()); vsbNeeded = viewPreferredSize.height > viewportExtentSize.height; if 
(vsbNeeded) adjustForVSB(bounds, insets, vsbBounds, vsbOpaque, vsbOnLeft); } } } if (hsbNeededOld != hsbNeeded || vsbNeededOld != vsbNeeded) { viewport.setBounds(bounds); // You could argue that we should recheck the Scrollable methods again until they stop changing, // but they might never stop changing, so we stop here and don't do any additional checks. } } } // Set the bounds of the row header. rowHeadBounds.y = bounds.y - insets.top; rowHeadBounds.height = bounds.height + insets.top + insets.bottom; if (rowHead != null) { rowHead.setBounds(rowHeadBounds); rowHead.putClientProperty(Alignment.class, vsbOnLeft ? Alignment.RIGHT : Alignment.LEFT); } // Set the bounds of the column header. colHeadBounds.x = bounds.x - insets.left; colHeadBounds.width = bounds.width + insets.left + insets.right; if (colHead != null) { colHead.setBounds(colHeadBounds); colHead.putClientProperty(Alignment.class, hsbOnTop ? Alignment.BOTTOM : Alignment.TOP); } // Calculate overlaps for translucent scroll bars int overlapWidth = 0; int overlapHeight = 0; if (vsbNeeded && !vsbOpaque && hsbNeeded && !hsbOpaque) { overlapWidth = vsbBounds.width; // shrink horizontally //overlapHeight = hsbBounds.height; // shrink vertically } // Set the bounds of the vertical scroll bar. vsbBounds.y = bounds.y - insets.top; vsbBounds.height = bounds.height + insets.top + insets.bottom; if (vsb != null) { vsb.setVisible(vsbNeeded); if (vsbNeeded) { if (vsbOpaque && colHead != null && UIManager.getBoolean("ScrollPane.fillUpperCorner")) { if ((vsbOnLeft ? upperLeft : upperRight) == null) { // This is used primarily for GTK L&F, which needs to extend // the vertical scrollbar to fill the upper corner near the column header. // Note that we skip this step (and use the default behavior) // if the user has set a custom corner component. if (!hsbOnTop) vsbBounds.y -= colHeadBounds.height; vsbBounds.height += colHeadBounds.height; } } int overlapY = !hsbOnTop ? 
0 : overlapHeight; vsb.setBounds(vsbBounds.x, vsbBounds.y + overlapY, vsbBounds.width, vsbBounds.height - overlapHeight); vsb.putClientProperty(Alignment.class, vsbOnLeft ? Alignment.LEFT : Alignment.RIGHT); } // Modify the bounds of the translucent scroll bar. if (!vsbOpaque) { if (!vsbOnLeft) vsbBounds.x += vsbBounds.width; vsbBounds.width = 0; } } // Set the bounds of the horizontal scroll bar. hsbBounds.x = bounds.x - insets.left; hsbBounds.width = bounds.width + insets.left + insets.right; if (hsb != null) { hsb.setVisible(hsbNeeded); if (hsbNeeded) { if (hsbOpaque && rowHead != null && UIManager.getBoolean("ScrollPane.fillLowerCorner")) { if ((vsbOnLeft ? lowerRight : lowerLeft) == null) { // This is used primarily for GTK L&F, which needs to extend // the horizontal scrollbar to fill the lower corner near the row header. // Note that we skip this step (and use the default behavior) // if the user has set a custom corner component. if (!vsbOnLeft) hsbBounds.x -= rowHeadBounds.width; hsbBounds.width += rowHeadBounds.width; } } int overlapX = !vsbOnLeft ? 0 : overlapWidth; hsb.setBounds(hsbBounds.x + overlapX, hsbBounds.y, hsbBounds.width - overlapWidth, hsbBounds.height); hsb.putClientProperty(Alignment.class, hsbOnTop ? Alignment.TOP : Alignment.BOTTOM); } // Modify the bounds of the translucent scroll bar. if (!hsbOpaque) { if (!hsbOnTop) hsbBounds.y += hsbBounds.height; hsbBounds.height = 0; } } // Set the bounds of the corners. if (lowerLeft != null) { lowerLeft.setBounds(vsbOnLeft ? vsbBounds.x : rowHeadBounds.x, hsbOnTop ? colHeadBounds.y : hsbBounds.y, vsbOnLeft ? vsbBounds.width : rowHeadBounds.width, hsbOnTop ? colHeadBounds.height : hsbBounds.height); } if (lowerRight != null) { lowerRight.setBounds(vsbOnLeft ? rowHeadBounds.x : vsbBounds.x, hsbOnTop ? colHeadBounds.y : hsbBounds.y, vsbOnLeft ? rowHeadBounds.width : vsbBounds.width, hsbOnTop ? colHeadBounds.height : hsbBounds.height); } if (upperLeft != null) { upperLeft.setBounds(vsbOnLeft ? 
vsbBounds.x : rowHeadBounds.x, hsbOnTop ? hsbBounds.y : colHeadBounds.y, vsbOnLeft ? vsbBounds.width : rowHeadBounds.width, hsbOnTop ? hsbBounds.height : colHeadBounds.height); } if (upperRight != null) { upperRight.setBounds(vsbOnLeft ? rowHeadBounds.x : vsbBounds.x, hsbOnTop ? hsbBounds.y : colHeadBounds.y, vsbOnLeft ? rowHeadBounds.width : vsbBounds.width, hsbOnTop ? hsbBounds.height : colHeadBounds.height); } if (!vsbOpaque && vsbNeeded || !hsbOpaque && hsbNeeded) { fixComponentZOrder(vsb, 0); fixComponentZOrder(viewport, -1); } } private static void fixComponentZOrder(Component component, int index) { if (component != null) { Container parent = component.getParent(); synchronized (parent.getTreeLock()) { if (index < 0) index += parent.getComponentCount(); parent.setComponentZOrder(component, index); } } } private void adjustForVSB(Rectangle bounds, Insets insets, Rectangle vsbBounds, boolean vsbOpaque, boolean vsbOnLeft) { vsbBounds.width = !vsb.isEnabled() ? 0 : min(bounds.width, vsb.getPreferredSize().width); if (vsbOnLeft) { vsbBounds.x = bounds.x - insets.left/* + vsbBounds.width*/; if (vsbOpaque) bounds.x += vsbBounds.width; } else { vsbBounds.x = bounds.x + bounds.width + insets.right - vsbBounds.width; } if (vsbOpaque) bounds.width -= vsbBounds.width; } private void adjustForHSB(Rectangle bounds, Insets insets, Rectangle hsbBounds, boolean hsbOpaque, boolean hsbOnTop) { hsbBounds.height = !hsb.isEnabled() ? 0 : min(bounds.height, hsb.getPreferredSize().height); if (hsbOnTop) { hsbBounds.y = bounds.y - insets.top/* + hsbBounds.height*/; if (hsbOpaque) bounds.y += hsbBounds.height; } else { hsbBounds.y = bounds.y + bounds.height + insets.bottom - hsbBounds.height; } if (hsbOpaque) bounds.height -= hsbBounds.height; } private static int min(int one, int two) { return Math.max(0, Math.min(one, two)); } } /** * Indicates whether the specified event is not consumed and does not have unexpected modifiers. 
* * @param event a mouse wheel event to check for validity * @return {@code true} if the specified event is valid, {@code false} otherwise */ public static boolean isScrollEvent(@NotNull MouseWheelEvent event) { if (event.isConsumed()) return false; // event should not be consumed already if (event.getWheelRotation() == 0) return false; // any rotation expected (forward or backward) return 0 == (SCROLL_MODIFIERS & event.getModifiers()); } private static final int SCROLL_MODIFIERS = // event modifiers allowed during scrolling ~InputEvent.SHIFT_MASK & ~InputEvent.SHIFT_DOWN_MASK & // for horizontal scrolling ~InputEvent.BUTTON1_MASK & ~InputEvent.BUTTON1_DOWN_MASK; // for selection }
hurricup/intellij-community
platform/platform-api/src/com/intellij/ui/components/JBScrollPane.java
Java
apache-2.0
33,437
package cn.xishan.oftenporter.porter.core.init;

import cn.xishan.oftenporter.porter.core.advanced.IConfigData;
import com.alibaba.fastjson.JSON;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Expands {@code #{propertyName}} placeholders found in plain strings, in the values of a
 * {@link Map}, or in the entries of an {@link IConfigData}.
 *
 * @author Created by https://github.com/CLovinr on 2018-12-21.
 */
public class DealSharpProperties
{
    private static final Logger LOGGER = LoggerFactory.getLogger(DealSharpProperties.class);

    /** Matches the first {@code #{name}} placeholder; group 1 is the raw property name. */
    private static final Pattern PROPERTIES_PATTERN = Pattern.compile("#\\{([^{}]+)}");

    /**
     * One placeholder occurrence inside a string: the referenced property key plus the
     * character span of the placeholder, so that span can later be substituted with the
     * resolved value.
     */
    private static class PropOne
    {
        private final String propKey, originValue;
        private final int startIndex, endIndex;

        public PropOne(String propKey, String originValue, int startIndex, int endIndex)
        {
            this.propKey = propKey;
            this.originValue = originValue;
            this.startIndex = startIndex;
            this.endIndex = endIndex;
        }

        public String getPropKey()
        {
            return propKey;
        }

        /** Returns {@code originValue} with the placeholder span replaced by {@code propValue}. */
        public String replace(String propValue)
        {
            return originValue.substring(0, startIndex) + propValue + originValue.substring(endIndex);
        }
    }

    /**
     * Replaces every {@code #{propertyName}} placeholder in the given string.
     * <p>
     * {@link CharSequence} values are inserted verbatim, {@code null} becomes the empty
     * string, and any other value is serialized with fastjson.
     *
     * @param string     the text containing placeholders
     * @param properties property values to substitute
     * @param forEmpty   if not {@code null}, used to replace every placeholder whose key has
     *                   no entry in {@code properties}; if {@code null}, unresolved
     *                   placeholders are left in place
     * @return the string with placeholders substituted
     */
    public static String replaceSharpProperties(String string, Map<String, ?> properties, String forEmpty)
    {
        for (Map.Entry<String, ?> entry : properties.entrySet())
        {
            String placeholder = "#{" + entry.getKey() + "}";
            if (string.contains(placeholder))
            {
                Object value = entry.getValue();
                String rs;
                if (value instanceof CharSequence)
                {
                    rs = String.valueOf(value);
                } else if (value == null)
                {
                    rs = "";
                } else
                {
                    rs = JSON.toJSONString(value);
                }
                string = string.replace(placeholder, rs);
            }
        }
        if (forEmpty != null)
        {
            // Drop placeholders that were never resolved. quoteReplacement prevents '$'
            // or '\' characters inside forEmpty from being interpreted as a regex
            // replacement template by replaceAll.
            string = string.replaceAll("#\\{[^{}]+\\}", Matcher.quoteReplacement(forEmpty));
        }
        return string;
    }

    /**
     * Replaces {@code #{properName}} placeholders inside the values of {@code srcMap},
     * resolving them against {@code propertiesMap}. Unresolved keys are replaced with the
     * empty string and a warning is logged.
     *
     * @param srcMap        map whose {@link CharSequence} values are scanned for placeholders
     * @param propertiesMap map providing the replacement values
     */
    public static void dealSharpProperties(Map srcMap, Map propertiesMap)
    {
        dealSharpProperties(srcMap, propertiesMap, false);
    }

    /**
     * Replaces {@code #{properName}} placeholders inside the values of {@code srcMap},
     * resolving them against {@code propertiesMap}. The scan is repeated until a full pass
     * performs no substitution, so a replacement value may itself contain further
     * placeholders.
     *
     * @param srcMap        map whose {@link CharSequence} values are scanned for placeholders
     * @param propertiesMap map providing the replacement values
     * @param keepNotFound  if {@code true}, placeholders whose key is missing from
     *                      {@code propertiesMap} are kept as-is; otherwise they are replaced
     *                      with the empty string and a warning is logged
     * @throws RuntimeException if a value's placeholder references its own key
     *                          (a direct self-reference would never terminate)
     */
    public static void dealSharpProperties(Map srcMap, Map propertiesMap, boolean keepNotFound)
    {
        Set<String> containsVar = null;
        boolean isFirst = true;
        boolean hasSet = true;
        // Iterate to a fixpoint: each pass only revisits keys that still held a
        // placeholder after the previous pass.
        while (hasSet)
        {
            hasSet = false;
            Collection<String> nameCollection = isFirst ? srcMap.keySet() : containsVar;
            containsVar = new HashSet<>();
            for (String properName : nameCollection)
            {
                Object value = srcMap.get(properName);
                if (!(value instanceof CharSequence))
                {
                    continue;
                }
                String valueString = String.valueOf(value);
                PropOne propOne = getPropertiesKey(valueString);
                if (propOne != null && propOne.getPropKey().equals(properName))
                {
                    throw new RuntimeException(
                            "can not set property of \"" + properName + "\" with value \"" + valueString +
                                    "\",prop name eq value attr name");
                } else if (propOne != null)
                {
                    containsVar.add(properName);
                    if (LOGGER.isDebugEnabled())
                    {
                        LOGGER.debug("replace sharp property:key={},replace-attr={},origin-value={}", properName,
                                propOne.getPropKey(), valueString);
                    }
                    String replaceStr = null;
                    if (propertiesMap.containsKey(propOne.getPropKey()))
                    {
                        replaceStr = String.valueOf(propertiesMap.get(propOne.getPropKey()));
                    } else
                    {
                        if (keepNotFound)
                        {
                            // Leave the placeholder untouched and stop revisiting this key.
                            containsVar.remove(properName);
                        } else
                        {
                            replaceStr = "";
                            LOGGER.warn("proper value with key '{}' is empty", propOne.getPropKey());
                        }
                    }
                    if (replaceStr != null)
                    {
                        String newValue = propOne.replace(replaceStr);
                        srcMap.put(properName, newValue);
                        if (LOGGER.isDebugEnabled())
                        {
                            LOGGER.debug("replace sharp property:key={},new-value={}", properName, newValue);
                        }
                    }
                    hasSet = true;
                }
            }
            isFirst = false;
        }
    }

    /**
     * Replaces {@code #{properName}} placeholders inside the values of {@code configData},
     * resolving them against {@code configData} itself. Unresolved keys are replaced with
     * the empty string and a warning is logged. Repeats until a full pass performs no
     * substitution.
     *
     * @param configData configuration whose {@link CharSequence} values are scanned
     * @throws RuntimeException if a value's placeholder references its own key
     */
    static void dealProperties(IConfigData configData)
    {
        Set<String> containsVar = null;
        boolean isFirst = true;
        boolean hasSet = true;
        // Same fixpoint strategy as dealSharpProperties, but reading/writing
        // through the IConfigData interface.
        while (hasSet)
        {
            hasSet = false;
            Collection<String> nameCollection = isFirst ? configData.propertyNames() : containsVar;
            containsVar = new HashSet<>();
            for (String properName : nameCollection)
            {
                Object value = configData.get(properName);
                if (!(value instanceof CharSequence))
                {
                    continue;
                }
                String valueString = String.valueOf(value);
                PropOne propOne = getPropertiesKey(valueString);
                if (propOne != null && propOne.getPropKey().equals(properName))
                {
                    throw new RuntimeException(
                            "can not set property of " + properName + " with value \"" + valueString + "\"");
                } else if (propOne != null)
                {
                    containsVar.add(properName);
                    if (LOGGER.isDebugEnabled())
                    {
                        LOGGER.debug("replace sharp property:key={},replace-attr={},origin-value={}", properName,
                                propOne.getPropKey(), valueString);
                    }
                    String replaceStr;
                    if (configData.contains(propOne.getPropKey()))
                    {
                        replaceStr = configData.getString(propOne.getPropKey());
                    } else
                    {
                        replaceStr = "";
                        LOGGER.warn("proper value with key '{}' is empty", propOne.getPropKey());
                    }
                    String newValue = propOne.replace(replaceStr);
                    configData.set(properName, newValue);
                    if (LOGGER.isDebugEnabled())
                    {
                        LOGGER.debug("replace sharp property:key={},new-value={}", properName, newValue);
                    }
                    hasSet = true;
                }
            }
            isFirst = false;
        }
    }

    /**
     * Finds the first {@code #{name}} placeholder in the given value.
     *
     * @param value the string to scan
     * @return the occurrence (key trimmed of surrounding whitespace), or {@code null}
     *         when the value contains no placeholder
     */
    private static PropOne getPropertiesKey(String value)
    {
        Matcher matcher = PROPERTIES_PATTERN.matcher(value);
        if (matcher.find())
        {
            return new PropOne(matcher.group(1).trim(), value, matcher.start(), matcher.end());
        } else
        {
            return null;
        }
    }
}
gzxishan/OftenPorter
Porter-Core/src/main/java/cn/xishan/oftenporter/porter/core/init/DealSharpProperties.java
Java
apache-2.0
8,870
package dk.dbc.kafka.dispatch.sources;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Optional;

/**
 * Source for reading InputStreams line-by-line.
 * <p>
 * The stream is decoded as UTF-8; each call to {@link #next()} yields one line until the
 * stream is exhausted.
 *
 * @author Adam Tulinius
 */
public class InputStreamSource extends Source<String> {
    // Assigned once in the constructor and never reassigned.
    private final BufferedReader reader;

    /**
     * Creates a source that reads the given stream line-by-line as UTF-8 text.
     *
     * @param inputStream stream to read from; this source takes over reading it
     */
    public InputStreamSource(InputStream inputStream) {
        this.reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
    }

    /**
     * Returns the next line of the stream.
     *
     * @return the next line, or {@link Optional#empty()} once end-of-stream is reached
     * @throws IOException if reading from the underlying stream fails
     */
    @Override
    public Optional<String> next() throws IOException {
        // readLine returns null at end-of-stream; map that to an empty Optional.
        return Optional.ofNullable(reader.readLine());
    }
}
DBCDK/kafka-dispatch
src/main/java/dk/dbc/kafka/dispatch/sources/InputStreamSource.java
Java
apache-2.0
818
/// <reference path="browser/ambient/angular-protractor/index.d.ts" /> /// <reference path="browser/ambient/angular/index.d.ts" /> /// <reference path="browser/ambient/assertion-error/index.d.ts" /> /// <reference path="browser/ambient/chai/index.d.ts" /> /// <reference path="browser/ambient/jquery/index.d.ts" /> /// <reference path="browser/ambient/lodash/index.d.ts" /> /// <reference path="browser/ambient/mocha/index.d.ts" /> /// <reference path="browser/ambient/selenium-webdriver/index.d.ts" /> /// <reference path="browser/ambient/sinon-chai/index.d.ts" /> /// <reference path="browser/ambient/sinon/index.d.ts" /> /// <reference path="browser/ambient/socket.io-client/index.d.ts" />
jensim/myStream.js
typings/browser.d.ts
TypeScript
apache-2.0
693
/* Copyright 2022 Rancher Labs, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by main. DO NOT EDIT. package v3 import ( "context" "time" "github.com/rancher/lasso/pkg/client" "github.com/rancher/lasso/pkg/controller" v3 "github.com/rancher/rancher/pkg/apis/management.cattle.io/v3" "github.com/rancher/wrangler/pkg/generic" "k8s.io/apimachinery/pkg/api/equality" "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/labels" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/types" utilruntime "k8s.io/apimachinery/pkg/util/runtime" "k8s.io/apimachinery/pkg/watch" "k8s.io/client-go/tools/cache" ) type PodSecurityPolicyTemplateHandler func(string, *v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error) type PodSecurityPolicyTemplateController interface { generic.ControllerMeta PodSecurityPolicyTemplateClient OnChange(ctx context.Context, name string, sync PodSecurityPolicyTemplateHandler) OnRemove(ctx context.Context, name string, sync PodSecurityPolicyTemplateHandler) Enqueue(name string) EnqueueAfter(name string, duration time.Duration) Cache() PodSecurityPolicyTemplateCache } type PodSecurityPolicyTemplateClient interface { Create(*v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error) Update(*v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error) Delete(name string, options *metav1.DeleteOptions) error Get(name string, options 
metav1.GetOptions) (*v3.PodSecurityPolicyTemplate, error) List(opts metav1.ListOptions) (*v3.PodSecurityPolicyTemplateList, error) Watch(opts metav1.ListOptions) (watch.Interface, error) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v3.PodSecurityPolicyTemplate, err error) } type PodSecurityPolicyTemplateCache interface { Get(name string) (*v3.PodSecurityPolicyTemplate, error) List(selector labels.Selector) ([]*v3.PodSecurityPolicyTemplate, error) AddIndexer(indexName string, indexer PodSecurityPolicyTemplateIndexer) GetByIndex(indexName, key string) ([]*v3.PodSecurityPolicyTemplate, error) } type PodSecurityPolicyTemplateIndexer func(obj *v3.PodSecurityPolicyTemplate) ([]string, error) type podSecurityPolicyTemplateController struct { controller controller.SharedController client *client.Client gvk schema.GroupVersionKind groupResource schema.GroupResource } func NewPodSecurityPolicyTemplateController(gvk schema.GroupVersionKind, resource string, namespaced bool, controller controller.SharedControllerFactory) PodSecurityPolicyTemplateController { c := controller.ForResourceKind(gvk.GroupVersion().WithResource(resource), gvk.Kind, namespaced) return &podSecurityPolicyTemplateController{ controller: c, client: c.Client(), gvk: gvk, groupResource: schema.GroupResource{ Group: gvk.Group, Resource: resource, }, } } func FromPodSecurityPolicyTemplateHandlerToHandler(sync PodSecurityPolicyTemplateHandler) generic.Handler { return func(key string, obj runtime.Object) (ret runtime.Object, err error) { var v *v3.PodSecurityPolicyTemplate if obj == nil { v, err = sync(key, nil) } else { v, err = sync(key, obj.(*v3.PodSecurityPolicyTemplate)) } if v == nil { return nil, err } return v, err } } func (c *podSecurityPolicyTemplateController) Updater() generic.Updater { return func(obj runtime.Object) (runtime.Object, error) { newObj, err := c.Update(obj.(*v3.PodSecurityPolicyTemplate)) if newObj == nil { return nil, err } return newObj, err 
} } func UpdatePodSecurityPolicyTemplateDeepCopyOnChange(client PodSecurityPolicyTemplateClient, obj *v3.PodSecurityPolicyTemplate, handler func(obj *v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error)) (*v3.PodSecurityPolicyTemplate, error) { if obj == nil { return obj, nil } copyObj := obj.DeepCopy() newObj, err := handler(copyObj) if newObj != nil { copyObj = newObj } if obj.ResourceVersion == copyObj.ResourceVersion && !equality.Semantic.DeepEqual(obj, copyObj) { return client.Update(copyObj) } return copyObj, err } func (c *podSecurityPolicyTemplateController) AddGenericHandler(ctx context.Context, name string, handler generic.Handler) { c.controller.RegisterHandler(ctx, name, controller.SharedControllerHandlerFunc(handler)) } func (c *podSecurityPolicyTemplateController) AddGenericRemoveHandler(ctx context.Context, name string, handler generic.Handler) { c.AddGenericHandler(ctx, name, generic.NewRemoveHandler(name, c.Updater(), handler)) } func (c *podSecurityPolicyTemplateController) OnChange(ctx context.Context, name string, sync PodSecurityPolicyTemplateHandler) { c.AddGenericHandler(ctx, name, FromPodSecurityPolicyTemplateHandlerToHandler(sync)) } func (c *podSecurityPolicyTemplateController) OnRemove(ctx context.Context, name string, sync PodSecurityPolicyTemplateHandler) { c.AddGenericHandler(ctx, name, generic.NewRemoveHandler(name, c.Updater(), FromPodSecurityPolicyTemplateHandlerToHandler(sync))) } func (c *podSecurityPolicyTemplateController) Enqueue(name string) { c.controller.Enqueue("", name) } func (c *podSecurityPolicyTemplateController) EnqueueAfter(name string, duration time.Duration) { c.controller.EnqueueAfter("", name, duration) } func (c *podSecurityPolicyTemplateController) Informer() cache.SharedIndexInformer { return c.controller.Informer() } func (c *podSecurityPolicyTemplateController) GroupVersionKind() schema.GroupVersionKind { return c.gvk } func (c *podSecurityPolicyTemplateController) Cache() 
PodSecurityPolicyTemplateCache { return &podSecurityPolicyTemplateCache{ indexer: c.Informer().GetIndexer(), resource: c.groupResource, } } func (c *podSecurityPolicyTemplateController) Create(obj *v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error) { result := &v3.PodSecurityPolicyTemplate{} return result, c.client.Create(context.TODO(), "", obj, result, metav1.CreateOptions{}) } func (c *podSecurityPolicyTemplateController) Update(obj *v3.PodSecurityPolicyTemplate) (*v3.PodSecurityPolicyTemplate, error) { result := &v3.PodSecurityPolicyTemplate{} return result, c.client.Update(context.TODO(), "", obj, result, metav1.UpdateOptions{}) } func (c *podSecurityPolicyTemplateController) Delete(name string, options *metav1.DeleteOptions) error { if options == nil { options = &metav1.DeleteOptions{} } return c.client.Delete(context.TODO(), "", name, *options) } func (c *podSecurityPolicyTemplateController) Get(name string, options metav1.GetOptions) (*v3.PodSecurityPolicyTemplate, error) { result := &v3.PodSecurityPolicyTemplate{} return result, c.client.Get(context.TODO(), "", name, result, options) } func (c *podSecurityPolicyTemplateController) List(opts metav1.ListOptions) (*v3.PodSecurityPolicyTemplateList, error) { result := &v3.PodSecurityPolicyTemplateList{} return result, c.client.List(context.TODO(), "", result, opts) } func (c *podSecurityPolicyTemplateController) Watch(opts metav1.ListOptions) (watch.Interface, error) { return c.client.Watch(context.TODO(), "", opts) } func (c *podSecurityPolicyTemplateController) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (*v3.PodSecurityPolicyTemplate, error) { result := &v3.PodSecurityPolicyTemplate{} return result, c.client.Patch(context.TODO(), "", name, pt, data, result, metav1.PatchOptions{}, subresources...) 
} type podSecurityPolicyTemplateCache struct { indexer cache.Indexer resource schema.GroupResource } func (c *podSecurityPolicyTemplateCache) Get(name string) (*v3.PodSecurityPolicyTemplate, error) { obj, exists, err := c.indexer.GetByKey(name) if err != nil { return nil, err } if !exists { return nil, errors.NewNotFound(c.resource, name) } return obj.(*v3.PodSecurityPolicyTemplate), nil } func (c *podSecurityPolicyTemplateCache) List(selector labels.Selector) (ret []*v3.PodSecurityPolicyTemplate, err error) { err = cache.ListAll(c.indexer, selector, func(m interface{}) { ret = append(ret, m.(*v3.PodSecurityPolicyTemplate)) }) return ret, err } func (c *podSecurityPolicyTemplateCache) AddIndexer(indexName string, indexer PodSecurityPolicyTemplateIndexer) { utilruntime.Must(c.indexer.AddIndexers(map[string]cache.IndexFunc{ indexName: func(obj interface{}) (strings []string, e error) { return indexer(obj.(*v3.PodSecurityPolicyTemplate)) }, })) } func (c *podSecurityPolicyTemplateCache) GetByIndex(indexName, key string) (result []*v3.PodSecurityPolicyTemplate, err error) { objs, err := c.indexer.ByIndex(indexName, key) if err != nil { return nil, err } result = make([]*v3.PodSecurityPolicyTemplate, 0, len(objs)) for _, obj := range objs { result = append(result, obj.(*v3.PodSecurityPolicyTemplate)) } return result, nil }
rancher/rancher
pkg/generated/controllers/management.cattle.io/v3/podsecuritypolicytemplate.go
GO
apache-2.0
9,408
/*
 * Go / WALK command.
 *
 * Moves a MOB one room in a given direction, generating the LEAVE/FLEE and
 * ENTER messages, charging movement points and hygiene for players, and
 * dragging along everything attached to the mover: things it is riding
 * (ridersAhead), things riding it (ridersBehind), and its followers.
 *
 * NOTE(review): this file reached us with its original line breaks collapsed;
 * code below is byte-identical to the original, only comments were added.
 */
package com.planet_ink.coffee_mud.Commands; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2010 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/ @SuppressWarnings("unchecked") public class Go extends StdCommand { public Go(){} private String[] access={"GO","WALK"}; public String[] getAccessWords(){return access;} public int energyExpenseFactor(){return 1;} public void ridersBehind(Vector riders, Room sourceRoom, Room destRoom, int directionCode, boolean flee) { if(riders!=null) for(int r=0;r<riders.size();r++) { Rider rider=(Rider)riders.elementAt(r); if(rider instanceof MOB) { MOB rMOB=(MOB)rider; if((rMOB.location()==sourceRoom) ||(rMOB.location()==destRoom)) { boolean fallOff=false; if(rMOB.location()==sourceRoom) { if(rMOB.riding()!=null) rMOB.tell("You ride "+rMOB.riding().name()+" "+Directions.getDirectionName(directionCode)+"."); if(!move(rMOB,directionCode,flee,false,true,false)) fallOff=true; } if(fallOff) { if(rMOB.riding()!=null) rMOB.tell("You fall off "+rMOB.riding().name()+"!"); rMOB.setRiding(null); } } else rMOB.setRiding(null); } else if(rider instanceof Item) { Item rItem=(Item)rider; if((rItem.owner()==sourceRoom) ||(rItem.owner()==destRoom)) destRoom.bringItemHere(rItem,-1,false); else rItem.setRiding(null); } } } public static Vector addRiders(Rider theRider, Rideable riding, Vector riders) { if((riding!=null)&&(riding.mobileRideBasis())) for(int r=0;r<riding.numRiders();r++) { Rider rider=riding.fetchRider(r); if((rider!=null) &&(rider!=theRider) &&(!riders.contains(rider))) { riders.addElement(rider); if(rider instanceof Rideable) addRiders(theRider,(Rideable)rider,riders); } } return riders; } public Vector ridersAhead(Rider theRider, Room sourceRoom, Room destRoom, int directionCode, boolean flee) { Vector riders=new Vector(); Rideable riding=theRider.riding(); Vector rideables=new Vector(); while((riding!=null)&&(riding.mobileRideBasis())) { rideables.addElement(riding); addRiders(theRider,riding,riders); if((riding instanceof Rider)&&((Rider)riding).riding()!=theRider.riding()) riding=((Rider)riding).riding(); else riding=null; } if(theRider instanceof Rideable)
// the mover itself may also be carrying riders; gather them too
addRiders(theRider,(Rideable)theRider,riders); for(int r=riders.size()-1;r>=0;r--) { Rider R=(Rider)riders.elementAt(r); if((R instanceof Rideable)&&(((Rideable)R).numRiders()>0)) { if(!rideables.contains(R)) rideables.addElement(R); riders.removeElement(R); } } for(int r=0;r<rideables.size();r++) { riding=(Rideable)rideables.elementAt(r); if((riding instanceof Item) &&((sourceRoom).isContent((Item)riding))) destRoom.bringItemHere((Item)riding,-1,false); else if((riding instanceof MOB) &&((sourceRoom).isInhabitant((MOB)riding))) { ((MOB)riding).tell("You are ridden "+Directions.getDirectionName(directionCode)+"."); if(!move(((MOB)riding),directionCode,false,false,true,false)) { if(theRider instanceof MOB) ((MOB)theRider).tell(((MOB)riding).name()+" won't seem to let you go that way."); r=r-1; for(;r>=0;r--) { riding=(Rideable)rideables.elementAt(r); if((riding instanceof Item) &&((destRoom).isContent((Item)riding))) sourceRoom.bringItemHere((Item)riding,-1,false); else if((riding instanceof MOB) &&(((MOB)riding).isMonster()) &&((destRoom).isInhabitant((MOB)riding))) sourceRoom.bringMobHere((MOB)riding,false); } return null; } } } return riders; } public boolean move(MOB mob, int directionCode, boolean flee, boolean nolook, boolean noriders) { return move(mob,directionCode,flee,nolook,noriders,false); } public boolean move(MOB mob, int directionCode, boolean flee, boolean nolook, boolean noriders, boolean always) { if(directionCode<0) return false; if(mob==null) return false; Room thisRoom=mob.location(); if(thisRoom==null) return false; Room destRoom=thisRoom.getRoomInDir(directionCode); Exit exit=thisRoom.getExitInDir(directionCode); if(destRoom==null) { mob.tell("You can't go that way."); return false; } Exit opExit=thisRoom.getReverseExit(directionCode); String directionName=(directionCode==Directions.GATE)&&(exit!=null)?"through "+exit.name():Directions.getDirectionName(directionCode); String
// arrival-direction label as seen from the destination room (the exit's name for GATE moves)
otherDirectionName=(Directions.getOpDirectionCode(directionCode)==Directions.GATE)&&(exit!=null)?exit.name():Directions.getFromDirectionName(Directions.getOpDirectionCode(directionCode)); int generalMask=always?CMMsg.MASK_ALWAYS:0; int leaveCode=generalMask|CMMsg.MSG_LEAVE; if(flee) leaveCode=generalMask|CMMsg.MSG_FLEE; CMMsg enterMsg=null; CMMsg leaveMsg=null; if((mob.riding()!=null)&&(mob.riding().mobileRideBasis())) { enterMsg=CMClass.getMsg(mob,destRoom,exit,generalMask|CMMsg.MSG_ENTER,null,CMMsg.MSG_ENTER,null,CMMsg.MSG_ENTER,"<S-NAME> ride(s) "+mob.riding().name()+" in from "+otherDirectionName+"."); leaveMsg=CMClass.getMsg(mob,thisRoom,opExit,leaveCode,((flee)?"You flee "+directionName+".":null),leaveCode,null,leaveCode,((flee)?"<S-NAME> flee(s) with "+mob.riding().name()+" "+directionName+".":"<S-NAME> ride(s) "+mob.riding().name()+" "+directionName+".")); } else { enterMsg=CMClass.getMsg(mob,destRoom,exit,generalMask|CMMsg.MSG_ENTER,null,CMMsg.MSG_ENTER,null,CMMsg.MSG_ENTER,"<S-NAME> "+CMLib.flags().dispositionString(mob,CMFlagLibrary.flag_arrives)+" from "+otherDirectionName+"."); leaveMsg=CMClass.getMsg(mob,thisRoom,opExit,leaveCode,((flee)?"You flee "+directionName+".":null),leaveCode,null,leaveCode,((flee)?"<S-NAME> flee(s) "+directionName+".":"<S-NAME> "+CMLib.flags().dispositionString(mob,CMFlagLibrary.flag_leaves)+" "+directionName+".")); } boolean gotoAllowed=CMSecurity.isAllowed(mob,destRoom,"GOTO"); if((exit==null)&&(!gotoAllowed)) { mob.tell("You can't go that way."); return false; } else if(exit==null) thisRoom.showHappens(CMMsg.MSG_OK_VISUAL,"The area to the "+directionName+" shimmers and becomes transparent."); else if((!exit.okMessage(mob,enterMsg))&&(!gotoAllowed)) return false; else if(!leaveMsg.target().okMessage(mob,leaveMsg)&&(!gotoAllowed)) return false; else if((opExit!=null)&&(!opExit.okMessage(mob,leaveMsg))&&(!gotoAllowed)) return false; else if(!enterMsg.target().okMessage(mob,enterMsg)&&(!gotoAllowed)) return false; else
// last veto: the moving MOB's own message filter (bypassed for GOTO-privileged users)
if(!mob.okMessage(mob,enterMsg)&&(!gotoAllowed)) return false; if(mob.riding()!=null) { if((!mob.riding().okMessage(mob,enterMsg))&&(!gotoAllowed)) return false; } else { if(!mob.isMonster()) for(int i=0;i<energyExpenseFactor();i++) mob.curState().expendEnergy(mob,mob.maxState(),true); if((!flee)&&(!mob.curState().adjMovement(-1,mob.maxState()))&&(!gotoAllowed)) { mob.tell("You are too tired."); return false; } if((mob.soulMate()==null)&&(mob.playerStats()!=null)&&(mob.riding()==null)&&(mob.location()!=null)) mob.playerStats().adjHygiene(mob.location().pointsPerMove(mob)); } Vector riders=null; if(!noriders) { riders=ridersAhead(mob,(Room)leaveMsg.target(),(Room)enterMsg.target(),directionCode,flee); if(riders==null) return false; } Vector enterTrailersSoFar=null; Vector leaveTrailersSoFar=null; if((leaveMsg.trailerMsgs()!=null)&&(leaveMsg.trailerMsgs().size()>0)) { leaveTrailersSoFar=new Vector(); leaveTrailersSoFar.addAll(leaveMsg.trailerMsgs()); leaveMsg.trailerMsgs().clear(); } if((enterMsg.trailerMsgs()!=null)&&(enterMsg.trailerMsgs().size()>0)) { enterTrailersSoFar=new Vector(); enterTrailersSoFar.addAll(enterMsg.trailerMsgs()); enterMsg.trailerMsgs().clear(); } if(exit!=null) exit.executeMsg(mob,enterMsg); if(mob.location()!=null) mob.location().delInhabitant(mob); ((Room)leaveMsg.target()).send(mob,leaveMsg); if(enterMsg.target()==null) { ((Room)leaveMsg.target()).bringMobHere(mob,false); mob.tell("You can't go that way."); return false; } mob.setLocation((Room)enterMsg.target()); ((Room)enterMsg.target()).addInhabitant(mob); ((Room)enterMsg.target()).send(mob,enterMsg); if(opExit!=null) opExit.executeMsg(mob,leaveMsg); if(!nolook) { CMLib.commands().postLook(mob,true); if((!mob.isMonster()) &&(CMath.bset(mob.getBitmap(),MOB.ATT_AUTOWEATHER)) &&(((Room)enterMsg.target())!=null) &&((thisRoom.domainType()&Room.INDOORS)>0) &&((((Room)enterMsg.target()).domainType()&Room.INDOORS)==0)
// AUTOWEATHER: stepping from indoors into non-clear outdoor weather prints the weather description
&&(((Room)enterMsg.target()).getArea().getClimateObj().weatherType(((Room)enterMsg.target()))!=Climate.WEATHER_CLEAR) &&(((Room)enterMsg.target()).isInhabitant(mob))) mob.tell("\n\r"+((Room)enterMsg.target()).getArea().getClimateObj().weatherDescription(((Room)enterMsg.target()))); } if(!noriders) ridersBehind(riders,(Room)leaveMsg.target(),(Room)enterMsg.target(),directionCode,flee); if(!flee) for(int f=0;f<mob.numFollowers();f++) { MOB follower=mob.fetchFollower(f); if(follower!=null) { if((follower.amFollowing()==mob) &&((follower.location()==thisRoom)||(follower.location()==destRoom))) { if((follower.location()==thisRoom)&&(CMLib.flags().aliveAwakeMobile(follower,true))) { if(CMath.bset(follower.getBitmap(),MOB.ATT_AUTOGUARD)) thisRoom.show(follower,null,null,CMMsg.MSG_OK_ACTION,"<S-NAME> remain(s) on guard here."); else { follower.tell("You follow "+mob.name()+" "+Directions.getDirectionName(directionCode)+"."); if(!move(follower,directionCode,false,false,false,false)) { //follower.setFollowing(null); } } } } //else // follower.setFollowing(null); } } if((leaveTrailersSoFar!=null)&&(leaveMsg.target() instanceof Room)) for(int t=0;t<leaveTrailersSoFar.size();t++) ((Room)leaveMsg.target()).send(mob,(CMMsg)leaveTrailersSoFar.elementAt(t)); if((enterTrailersSoFar!=null)&&(enterMsg.target() instanceof Room)) for(int t=0;t<enterTrailersSoFar.size();t++) ((Room)enterMsg.target()).send(mob,(CMMsg)enterTrailersSoFar.elementAt(t)); return true; } protected Command stander=null; protected Vector ifneccvec=null; public void standIfNecessary(MOB mob, int metaFlags) throws java.io.IOException { if((ifneccvec==null)||(ifneccvec.size()!=2)) { ifneccvec=new Vector(); ifneccvec.addElement("STAND"); ifneccvec.addElement("IFNECESSARY"); } if(stander==null) stander=CMClass.getCommand("Stand"); if((stander!=null)&&(ifneccvec!=null)) stander.execute(mob,ifneccvec,metaFlags); } public boolean execute(MOB mob, Vector commands, int metaFlags) throws java.io.IOException {
// Entry point. Accepts either pre-parsed args [direction, flee, nolook, noriders] or user text such as "GO OUT", "GO N", "GO 2 WEST".
standIfNecessary(mob,metaFlags); if((commands.size()>3) &&(commands.firstElement() instanceof Integer)) { return move(mob, ((Integer)commands.elementAt(0)).intValue(), ((Boolean)commands.elementAt(1)).booleanValue(), ((Boolean)commands.elementAt(2)).booleanValue(), ((Boolean)commands.elementAt(3)).booleanValue(),false); } String whereStr=CMParms.combine(commands,1); Room R=mob.location(); int direction=-1; if(whereStr.equalsIgnoreCase("OUT")) { if(!CMath.bset(R.domainType(),Room.INDOORS)) { mob.tell("You aren't indoors."); return false; } for(int d=Directions.NUM_DIRECTIONS()-1;d>=0;d--) if((R.getExitInDir(d)!=null) &&(R.getRoomInDir(d)!=null) &&(!CMath.bset(R.getRoomInDir(d).domainType(),Room.INDOORS))) { if(direction>=0) { mob.tell("Which way out? Try North, South, East, etc.."); return false; } direction=d; } if(direction<0) { mob.tell("There is no direct way out of this place. Try a direction."); return false; } } if(direction<0) direction=Directions.getGoodDirectionCode(whereStr); if(direction<0) { Environmental E=null; if(R!=null) E=R.fetchFromRoomFavorItems(null,whereStr,Item.WORNREQ_UNWORNONLY); if(E instanceof Rideable) { Command C=CMClass.getCommand("Enter"); return C.execute(mob,commands,metaFlags); } if((E instanceof Exit)&&(R!=null)) { for(int d=Directions.NUM_DIRECTIONS()-1;d>=0;d--) if(R.getExitInDir(d)==E) { direction=d; break;} } } String doing=(String)commands.elementAt(0); if(direction>=0) move(mob,direction,false,false,false,false); else { boolean doneAnything=false; if(commands.size()>2) for(int v=1;v<commands.size();v++) { int num=1; String s=(String)commands.elementAt(v); if(CMath.s_int(s)>0) { num=CMath.s_int(s); v++; if(v<commands.size()) s=(String)commands.elementAt(v); } else if(("NSEWUDnsewud".indexOf(s.charAt(s.length()-1))>=0) &&(CMath.s_int(s.substring(0,s.length()-1))>0)) { num=CMath.s_int(s.substring(0,s.length()-1)); s=s.substring(s.length()-1); } direction=Directions.getGoodDirectionCode(s); if(direction>=0) { doneAnything=true;
// repeat <num> single-step moves; non-monster players enqueue each step as its own command
for(int i=0;i<num;i++) { if(mob.isMonster()) { if(!move(mob,direction,false,false,false,false)) return false; } else { Vector V=new Vector(); V.addElement(doing); V.addElement(Directions.getDirectionName(direction)); mob.enqueCommand(V,metaFlags,0); } } } else break; } if(!doneAnything) mob.tell(CMStrings.capitalizeAndLower(doing)+" which direction?\n\rTry north, south, east, west, up, or down."); } return false; } public double actionsCost(MOB mob, Vector cmds){ double cost=CMath.div(CMProps.getIntVar(CMProps.SYSTEMI_DEFCMDTIME),100.0); if((mob!=null)&&(CMath.bset(mob.getBitmap(),MOB.ATT_AUTORUN))) cost /= 4.0; return cost; } public boolean canBeOrdered(){return true;} }
robjcaskey/Unofficial-Coffee-Mud-Upstream
com/planet_ink/coffee_mud/Commands/Go.java
Java
apache-2.0
16,256
#!/usr/bin/python2.7
from __future__ import print_function
# -*- coding: utf-8 -*-
import wx
import threading
import lcm
import random
import Forseti
import configurator

# Alliance colours used as panel backgrounds.
BLUE = (24, 25, 141)
GOLD = (241, 169, 50)


class TeamPanel(wx.Panel):
    """One team's row: letter label, editable number, 'Get' lookup button, editable name."""

    def __init__(self, remote, letter, number, name, colour, *args, **kwargs):
        super(TeamPanel, self).__init__(*args, **kwargs)
        self.remote = remote
        self.InitUI(letter, number, name, colour)

    def InitUI(self, letter, number, name, colour=None):
        if colour is not None:
            self.SetBackgroundColour(colour)
        screen_dc = wx.ScreenDC()
        char_w = screen_dc.GetCharWidth()
        char_h = screen_dc.GetCharHeight()
        self.num_ctrl = wx.TextCtrl(self, size=(char_w * 2, char_h))
        self.num_ctrl.AppendText(str(number))
        self.get_button = wx.Button(self, label='Get', size=(char_w * 2, char_h))
        self.get_button.Bind(wx.EVT_BUTTON, self.do_get_name)
        self.name_ctrl = wx.TextCtrl(self, size=(char_w * 16, char_h))
        self.name_ctrl.AppendText(name)

        row = wx.BoxSizer(wx.HORIZONTAL)
        row.Add(wx.StaticText(self, label=letter, size=(char_w * 0.6, char_h)))
        row.Add(self.num_ctrl)
        row.Add(self.get_button)
        row.Add(self.name_ctrl)

        self.vbox = wx.BoxSizer(wx.VERTICAL)
        self.vbox.Add(row, flag=wx.CENTER)
        self.SetSizer(self.vbox)
        self.Show(True)

    def do_get_name(self, event):
        # Look the team name up from its number via the configurator service.
        self.name = configurator.get_team_name(self.number)

    @property
    def name(self):
        return self.name_ctrl.GetValue()

    @name.setter
    def name(self, val):
        self.name_ctrl.SetValue(val)

    @property
    def number(self):
        # Non-numeric input is treated as team 0.
        try:
            return int(self.num_ctrl.GetValue())
        except ValueError:
            return 0

    @number.setter
    def number(self, val):
        self.num_ctrl.SetValue(str(val))


class MatchControl(wx.Panel):
    """Match editor: match number, four team panels, and timer control buttons."""

    def __init__(self, remote, *args, **kwargs):
        super(MatchControl, self).__init__(*args, **kwargs)
        self.remote = remote
        self.InitUI()

    def InitUI(self):
        root = wx.BoxSizer(wx.VERTICAL)
        screen_dc = wx.ScreenDC()

        match_row = wx.BoxSizer(wx.HORIZONTAL)
        match_row.Add(wx.StaticText(self, label='Match #'.format(1)))
        self.match_num_ctrl = wx.TextCtrl(
            self, size=(screen_dc.GetCharWidth() * 2, screen_dc.GetCharHeight()))
        match_row.Add(self.match_num_ctrl)
        root.Add(match_row, flag=wx.CENTER)

        # Panels are laid out column-major: A/B blue on the left, C/D gold on the right.
        grid = wx.GridSizer(3, 2)
        self.team_panels = [
            TeamPanel(self.remote, 'A', 0, 'Unknown Team', BLUE, self),
            TeamPanel(self.remote, 'C', 0, 'Unknown Team', GOLD, self),
            TeamPanel(self.remote, 'B', 0, 'Unknown Team', BLUE, self),
            TeamPanel(self.remote, 'D', 0, 'Unknown Team', GOLD, self),
        ]
        grid.AddMany(
            [wx.StaticText(self, label='Blue Team'),
             wx.StaticText(self, label='Gold Team')]
            + [(panel, 0) for panel in self.team_panels])
        root.Add(grid, flag=wx.CENTER)

        controls = wx.BoxSizer(wx.HORIZONTAL)
        self.init_button = wx.Button(self, label='Init')
        self.init_button.Bind(wx.EVT_BUTTON, self.do_init)
        self.go_button = wx.Button(self, label='GO!')
        self.go_button.Bind(wx.EVT_BUTTON, self.do_go)
        self.pause_button = wx.Button(self, label='Pause')
        self.pause_button.Bind(wx.EVT_BUTTON, self.do_pause)
        self.time_text = wx.StaticText(self, label='0:00')
        self.stage_text = wx.StaticText(self, label='Unknown')
        # Remote pushes timer updates into this label.
        self.remote.time_text = self.time_text
        controls.Add(self.init_button)
        controls.Add(self.go_button)
        controls.Add(self.pause_button)
        controls.Add(self.time_text)
        controls.Add(self.stage_text)
        root.Add(controls, flag=wx.CENTER)

        self.SetSizer(root)
        self.Show(True)

    def do_go(self, e):
        self.remote.do_go()

    def do_pause(self, e):
        self.remote.do_pause()

    def do_save(self, e):
        self.remote.do_save(self.get_match())

    def do_init(self, e):
        self.remote.do_init(self.get_match())

    def _set_match_panel(self, match, team_idx, panel_idx):
        match.team_numbers[team_idx] = self.team_panels[panel_idx].number
        match.team_names[team_idx] = self.team_panels[panel_idx].name

    def _set_panel_match(self, match, team_idx, panel_idx):
        self.team_panels[panel_idx].number = match.team_numbers[team_idx]
        self.team_panels[panel_idx].name = match.team_names[team_idx]

    def get_match(self):
        """Build a Forseti.Match from the panel contents (team index <-> panel index is remapped)."""
        match = Forseti.Match()
        self._set_match_panel(match, 0, 0)
        self._set_match_panel(match, 1, 2)
        self._set_match_panel(match, 2, 1)
        self._set_match_panel(match, 3, 3)
        try:
            match.match_number = int(self.match_num_ctrl.GetValue())
        except ValueError:
            # No usable number typed; tag with a random id instead.
            match.match_number = random.getrandbits(31)
        return match

    def set_match(self, match):
        """Populate the panels from a Forseti.Match (inverse of get_match)."""
        self._set_panel_match(match, 0, 0)
        self._set_panel_match(match, 1, 2)
        self._set_panel_match(match, 2, 1)
        self._set_panel_match(match, 3, 3)
        self.match_num_ctrl.SetValue(str(match.match_number))

    def set_time(self, match):
        self.time_text.SetLabel(format_time(match.game_time_so_far))
        self.stage_text.SetLabel(match.stage_name)


class ScheduleControl(wx.Panel):
    """Schedule list plus a 'Load All' button; selecting an entry edits it in MatchControl."""

    def __init__(self, remote, match_control, *args, **kwargs):
        self.remote = remote
        super(ScheduleControl, self).__init__(*args, **kwargs)
        self.InitUI()
        self.remote.match_list_box = self.match_list
        self.match_control = match_control

    def InitUI(self):
        self.match_list = wx.ListBox(self)
        self.match_list.Bind(wx.EVT_LISTBOX, self.choose_match)

        load_row = wx.BoxSizer(wx.HORIZONTAL)
        self.load_button = wx.Button(self, label='Load All')
        self.load_button.Bind(wx.EVT_BUTTON, self.do_load)
        load_row.Add(self.load_button)
        self.clear_first = wx.CheckBox(self, label='Clear first')
        self.clear_first.SetValue(True)
        load_row.Add(self.clear_first)

        layout = wx.BoxSizer(wx.VERTICAL)
        layout.Add(self.match_list, 1, wx.EXPAND)
        layout.Add(load_row)
        self.SetSizer(layout)
        self.Show(True)

    def do_load(self, e):
        self.remote.do_load(self.clear_first.GetValue())

    def choose_match(self, event):
        self.match_control.set_match(event.GetClientData())


class MainWindow(wx.Frame):
    """Top-level dashboard frame wiring MatchControl and ScheduleControl together."""

    def __init__(self, remote, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)
        self.remote = remote
        self.InitUI()

    def InitUI(self):
        menubar = wx.MenuBar()
        file_menu = wx.Menu()
        quit_item = file_menu.Append(wx.ID_EXIT, 'Quit', 'Quit application')
        menubar.Append(file_menu, '&File')
        self.SetMenuBar(menubar)

        match_control = MatchControl(self.remote, self)
        schedule_control = ScheduleControl(self.remote, match_control, self)
        self.remote.match_control = match_control

        layout = wx.BoxSizer(wx.VERTICAL)
        layout.Add(match_control, 0, wx.ALIGN_CENTER | wx.ALIGN_TOP, 8)
        layout.Add(schedule_control, 1, wx.EXPAND | wx.ALIGN_CENTER | wx.ALL, 8)

        self.Bind(wx.EVT_MENU, self.OnQuit, quit_item)
        self.SetSize((800, 600))
        self.SetSizer(layout)
        self.SetTitle('Forseti Dashboard')
        self.Centre()
        self.Show(True)

    def OnQuit(self, e):
        self.Close()


def format_match(match):
    """Render one schedule entry for the list box (also traces it to stdout)."""
    print(match.match_number)
    print(match.team_names)
    print(match.team_numbers)
    return '{}: {} ({}) & {} ({}) vs. {} ({}) & {} ({})'.format(
        match.match_number,
        match.team_names[0], match.team_numbers[0],
        match.team_names[1], match.team_numbers[1],
        match.team_names[2], match.team_numbers[2],
        match.team_names[3], match.team_numbers[3],
    )


class Remote(object):
    """LCM bridge: publishes UI commands and feeds schedule/timer messages back to the widgets."""

    def __init__(self):
        self.lc = lcm.LCM('udpm://239.255.76.67:7667?ttl=1')
        self.lc.subscribe('Schedule/Schedule', self.handle_schedule)
        self.lc.subscribe('Timer/Time', self.handle_time)
        self.match_list_box = None
        self.match_control = None
        self.thread = threading.Thread(target=self._loop)
        self.thread.daemon = True

    def start(self):
        self.thread.start()

    def _loop(self):
        # Background receive loop; errors are logged and swallowed so the UI survives.
        while True:
            try:
                self.lc.handle()
            except Exception as exc:
                print('Got exception while handling lcm message', exc)

    def handle_schedule(self, channel, data):
        msg = Forseti.Schedule.decode(data)
        for idx in range(msg.num_matches):
            self.match_list_box.Insert(format_match(msg.matches[idx]), idx,
                                       msg.matches[idx])

    def handle_time(self, channel, data):
        msg = Forseti.Time.decode(data)
        # Marshal back onto the wx main thread.
        wx.CallAfter(self.match_control.set_time, msg)

    def do_load(self, clear_first):
        if clear_first:
            self.match_list_box.Clear()
        msg = Forseti.ScheduleLoadCommand()
        msg.clear_first = clear_first
        print('Requesting load')
        self.lc.publish('Schedule/Load', msg.encode())

    def do_save(self, match):
        self.lc.publish('Match/Save', match.encode())

    def do_init(self, match):
        self.lc.publish('Match/Init', match.encode())

    def do_time_ctrl(self, command):
        msg = Forseti.TimeControl()
        msg.command_name = command
        self.lc.publish('Timer/Control', msg.encode())

    def do_go(self):
        self.do_time_ctrl('start')

    def do_pause(self):
        self.do_time_ctrl('pause')


def format_time(seconds):
    return '{}:{:02}'.format(seconds // 60, seconds % 60)


def main():
    app = wx.App()
    remote = Remote()
    MainWindow(remote, None)
    remote.start()
    remote.do_load(False)
    app.MainLoop()


if __name__ == '__main__':
    main()
pioneers/forseti
wxdash.py
Python
apache-2.0
10,763
/*
 * Copyright 2013-2020 consulo.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.plugins;

import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.ui.ClickListener;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import consulo.awt.TargetAWT;
import consulo.localize.LocalizeValue;

import javax.annotation.Nonnull;
import javax.swing.*;
import java.awt.event.MouseEvent;
import java.util.function.Function;

/**
 * A label styled as a drop-down control: "&lt;prefix&gt; &lt;value&gt; ▾".
 * Clicking it builds an {@link ActionGroup} via the supplied factory and shows
 * it as a popup directly underneath the label.
 *
 * @author VISTALL
 * @since 03/12/2020
 */
public class LabelPopup extends JLabel {
  /** Static caption shown before the selected value, also used as the popup title. */
  private final LocalizeValue myPrefix;

  /**
   * @param prefix       caption/popup title
   * @param groupBuilder called on every click to (re)build the popup's actions,
   *                     receiving this label so actions can update it
   */
  public LabelPopup(LocalizeValue prefix, Function<LabelPopup, ? extends ActionGroup> groupBuilder) {
    myPrefix = prefix;
    setForeground(UIUtil.getLabelDisabledForeground());
    setBorder(JBUI.Borders.empty(1, 1, 1, 5));
    setIcon(TargetAWT.to(AllIcons.General.ComboArrow));
    // LEADING puts the text before the combo-arrow icon, mimicking a combo box.
    setHorizontalTextPosition(SwingConstants.LEADING);
    new ClickListener() {
      @Override
      public boolean onClick(@Nonnull MouseEvent event, int clickCount) {
        LabelPopup component = LabelPopup.this;
        JBPopupFactory.getInstance()
                .createActionGroupPopup(myPrefix.get(),
                                        groupBuilder.apply(component),
                                        DataManager.getInstance().getDataContext(component),
                                        JBPopupFactory.ActionSelectionAid.SPEEDSEARCH,
                                        true)
                .showUnderneathOf(component);
        return true;
      }
    }.installOn(this);
  }

  /** Sets the label text to "&lt;prefix&gt; &lt;tagValue&gt;". */
  public void setPrefixedText(LocalizeValue tagValue) {
    setText(LocalizeValue.join(myPrefix, LocalizeValue.space(), tagValue).get());
  }
}
consulo/consulo
modules/base/platform-impl/src/main/java/com/intellij/ide/plugins/LabelPopup.java
Java
apache-2.0
2,225
package yaputil

import (
	"io/ioutil"
	"net"
	"regexp"
)

// nsRegex captures the address token of every "nameserver <addr>" line
// in a resolv.conf-style file; the character class admits IPv4 and IPv6.
var nsRegex = regexp.MustCompile(`(?m)^nameserver\s+([0-9a-fA-F\.:]+)`)

// LookupIP resolves host using Go's default resolver.
func LookupIP(host string) (ips []net.IP, err error) {
	return net.LookupIP(host)
}

// GetLocalNameServers reads /etc/resolv.conf and returns every configured
// nameserver address, in file order.
func GetLocalNameServers() ([]string, error) {
	contents, err := ioutil.ReadFile("/etc/resolv.conf")
	if err != nil {
		return nil, err
	}
	servers := make([]string, 0, 4)
	for _, match := range nsRegex.FindAllStringSubmatch(string(contents), -1) {
		servers = append(servers, match[1])
	}
	return servers, nil
}
yaproxy/yap
yaputil/lookup.go
GO
apache-2.0
529
// Copyright (C) 2014 Space Monkey, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build cgo package openssl /* #include <openssl/crypto.h> #include <openssl/ssl.h> #include <openssl/err.h> #include <openssl/conf.h> static long SSL_set_options_not_a_macro(SSL* ssl, long options) { return SSL_set_options(ssl, options); } static long SSL_get_options_not_a_macro(SSL* ssl) { return SSL_get_options(ssl); } static long SSL_clear_options_not_a_macro(SSL* ssl, long options) { return SSL_clear_options(ssl, options); } extern int verify_ssl_cb(int ok, X509_STORE_CTX* store); */ import "C" import ( "os" "unsafe" ) type SSLTLSExtErr int const ( SSLTLSExtErrOK SSLTLSExtErr = C.SSL_TLSEXT_ERR_OK SSLTLSExtErrAlertWarning SSLTLSExtErr = C.SSL_TLSEXT_ERR_ALERT_WARNING SSLTLSEXTErrAlertFatal SSLTLSExtErr = C.SSL_TLSEXT_ERR_ALERT_FATAL SSLTLSEXTErrNoAck SSLTLSExtErr = C.SSL_TLSEXT_ERR_NOACK ) var ( ssl_idx = C.SSL_get_ex_new_index(0, nil, nil, nil, nil) ) //export get_ssl_idx func get_ssl_idx() C.int { return ssl_idx } type SSL struct { ssl *C.SSL verify_cb VerifyCallback } //export verify_ssl_cb_thunk func verify_ssl_cb_thunk(p unsafe.Pointer, ok C.int, ctx *C.X509_STORE_CTX) C.int { defer func() { if err := recover(); err != nil { logger.Critf("openssl: verify callback panic'd: %v", err) os.Exit(1) } }() verify_cb := (*SSL)(p).verify_cb // set up defaults just in case verify_cb is nil if verify_cb != nil { store := &CertificateStoreCtx{ctx: ctx} if verify_cb(ok == 1, store) { 
ok = 1 } else { ok = 0 } } return ok } // Wrapper around SSL_get_servername. Returns server name according to rfc6066 // http://tools.ietf.org/html/rfc6066. func (s *SSL) GetServername() string { return C.GoString(C.SSL_get_servername(s.ssl, C.TLSEXT_NAMETYPE_host_name)) } // GetOptions returns SSL options. See // https://www.openssl.org/docs/ssl/SSL_CTX_set_options.html func (s *SSL) GetOptions() Options { return Options(C.SSL_get_options_not_a_macro(s.ssl)) } // SetOptions sets SSL options. See // https://www.openssl.org/docs/ssl/SSL_CTX_set_options.html func (s *SSL) SetOptions(options Options) Options { return Options(C.SSL_set_options_not_a_macro(s.ssl, C.long(options))) } // ClearOptions clear SSL options. See // https://www.openssl.org/docs/ssl/SSL_CTX_set_options.html func (s *SSL) ClearOptions(options Options) Options { return Options(C.SSL_clear_options_not_a_macro(s.ssl, C.long(options))) } // SetVerify controls peer verification settings. See // http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html func (s *SSL) SetVerify(options VerifyOptions, verify_cb VerifyCallback) { s.verify_cb = verify_cb if verify_cb != nil { C.SSL_set_verify(s.ssl, C.int(options), (*[0]byte)(C.verify_ssl_cb)) } else { C.SSL_set_verify(s.ssl, C.int(options), nil) } } // SetVerifyMode controls peer verification setting. See // http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html func (s *SSL) SetVerifyMode(options VerifyOptions) { s.SetVerify(options, s.verify_cb) } // SetVerifyCallback controls peer verification setting. See // http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html func (s *SSL) SetVerifyCallback(verify_cb VerifyCallback) { s.SetVerify(s.VerifyMode(), verify_cb) } // GetVerifyCallback returns callback function. See // http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html func (s *SSL) GetVerifyCallback() VerifyCallback { return s.verify_cb } // VerifyMode returns peer verification setting. 
See // http://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html func (s *SSL) VerifyMode() VerifyOptions { return VerifyOptions(C.SSL_get_verify_mode(s.ssl)) } // SetVerifyDepth controls how many certificates deep the certificate // verification logic is willing to follow a certificate chain. See // https://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html func (s *SSL) SetVerifyDepth(depth int) { C.SSL_set_verify_depth(s.ssl, C.int(depth)) } // GetVerifyDepth controls how many certificates deep the certificate // verification logic is willing to follow a certificate chain. See // https://www.openssl.org/docs/ssl/SSL_CTX_set_verify.html func (s *SSL) GetVerifyDepth() int { return int(C.SSL_get_verify_depth(s.ssl)) } // SetSSLCtx changes context to new one. Useful for Server Name Indication (SNI) // rfc6066 http://tools.ietf.org/html/rfc6066. See // http://stackoverflow.com/questions/22373332/serving-multiple-domains-in-one-box-with-sni func (s *SSL) SetSSLCtx(ctx *Ctx) { /* * SSL_set_SSL_CTX() only changes certs as of 1.0.0d * adjust other things we care about */ C.SSL_set_SSL_CTX(s.ssl, ctx.ctx) } //export sni_cb_thunk func sni_cb_thunk(p unsafe.Pointer, con *C.SSL, ad unsafe.Pointer, arg unsafe.Pointer) C.int { defer func() { if err := recover(); err != nil { logger.Critf("openssl: verify callback sni panic'd: %v", err) os.Exit(1) } }() sni_cb := (*Ctx)(p).sni_cb s := &SSL{ssl: con} // This attaches a pointer to our SSL struct into the SNI callback. C.SSL_set_ex_data(s.ssl, get_ssl_idx(), unsafe.Pointer(s)) // Note: this is ctx.sni_cb, not C.sni_cb return C.int(sni_cb(s)) }
xakep666/openssl
ssl.go
GO
apache-2.0
5,686
package org.support.project.knowledge.vo.notification.webhook;

/**
 * Minimal value object holding a single numeric identifier.
 * Presumably mapped directly to/from a JSON body for webhook
 * notifications (per the package and "Json" suffix) — confirm
 * against the serializer that consumes it.
 */
public class WebhookLongIdJson {
    /** The referenced entity's numeric id. */
    public long id;
}
support-project/knowledge
src/main/java/org/support/project/knowledge/vo/notification/webhook/WebhookLongIdJson.java
Java
apache-2.0
119
/** * @license Apache-2.0 * * Copyright (c) 2018 The Stdlib Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; // MODULES // var tape = require( 'tape' ); var Readable = require( 'readable-stream' ).Readable; var now = require( '@stdlib/time/now' ); var arcsine = require( '@stdlib/random/base/arcsine' ).factory; var isBuffer = require( '@stdlib/assert/is-buffer' ); var isnan = require( '@stdlib/math/base/assert/is-nan' ); var isUint32Array = require( '@stdlib/assert/is-uint32array' ); var UINT32_MAX = require( '@stdlib/constants/uint32/max' ); var Uint32Array = require( '@stdlib/array/uint32' ); var minstd = require( '@stdlib/random/base/minstd' ); var inspectStream = require( '@stdlib/streams/node/inspect-sink' ); var randomStream = require( './../lib/main.js' ); // TESTS // tape( 'main export is a function', function test( t ) { t.ok( true, __filename ); t.equal( typeof randomStream, 'function', 'main export is a function' ); t.end(); }); tape( 'the function throws an error if minimum support `a` is not a number primitive', function test( t ) { var values; var i; values = [ '5', null, true, false, void 0, NaN, [], {}, function noop() {} ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), TypeError, 'throws an error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( value, 2.0 ); }; } }); tape( 'the function throws an error if maximum support `b` is not a number primitive', 
function test( t ) { var values; var i; values = [ '5', null, true, false, void 0, NaN, [], {}, function noop() {} ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), TypeError, 'throws an error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( 2.0, value ); }; } }); tape( 'the function throws an error if minimum support `a` is greater than or equal to maximum support `b`', function test( t ) { var values; var i; values = [ [ 0.0, 0.0 ], [ -2.0, -4.0 ], [ 2.0, 1.0 ] ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), RangeError, 'throws an error when provided '+values[i] ); } t.end(); function badValue( arr ) { return function badValue() { randomStream( arr[0], arr[1] ); }; } }); tape( 'the function throws an error if provided an options argument which is not an object', function test( t ) { var values; var i; values = [ 'abc', 5, null, true, false, void 0, NaN, [], function noop() {} ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), TypeError, 'throws an error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( 2.0, 5.0, value ); }; } }); tape( 'the function throws an error if provided an invalid `iter` option', function test( t ) { var values; var i; values = [ 'abc', -5, 3.14, null, true, false, void 0, NaN, [], {}, function noop() {} ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), TypeError, 'throws an error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( 2.0, 5.0, { 'iter': value }); }; } }); tape( 'if provided a `prng` option which is not a function, the function throws an error', function test( t ) { var values; var i; values = [ '5', 3.14, NaN, true, false, null, void 0, [], {} ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), TypeError, 'throws a 
type error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( 2.0, 5.0, { 'prng': value }); }; } }); tape( 'if provided a `copy` option which is not a boolean, the function throws an error', function test( t ) { var values; var i; values = [ '5', 5, NaN, null, void 0, {}, [], function noop() {} ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), TypeError, 'throws a type error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( 2.0, 5.0, { 'copy': value }); }; } }); tape( 'if provided a `seed` which is not a positive integer or a non-empty array-like object, the function throws an error', function test( t ) { var values; var i; values = [ '5', 3.14, 0.0, -5.0, NaN, true, false, null, void 0, {}, [], function noop() {} ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), TypeError, 'throws a type error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( 2.0, 5.0, { 'seed': value }); }; } }); tape( 'the function throws a range error if provided a `seed` which is an integer greater than the maximum unsigned 32-bit integer', function test( t ) { var values; var i; values = [ UINT32_MAX + 1, UINT32_MAX + 2, UINT32_MAX + 3 ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), RangeError, 'throws a range error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( 2.0, 5.0, { 'seed': value }); }; } }); tape( 'if provided a `state` option which is not a Uint32Array, the function throws an error', function test( t ) { var values; var i; values = [ '5', 5, NaN, true, false, null, void 0, {}, [], function noop() {} ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), TypeError, 'throws a type error when provided '+values[i] ); } t.end(); function 
badValue( value ) { return function badValue() { randomStream( 2.0, 5.0, { 'state': value }); }; } }); tape( 'if provided an invalid `state` option, the function throws an error', function test( t ) { var values; var i; values = [ new Uint32Array( 0 ), new Uint32Array( 10 ), new Uint32Array( 100 ) ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), RangeError, 'throws an error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( 2.0, 5.0, { 'state': value }); }; } }); tape( 'if provided an invalid readable stream option, the function throws an error', function test( t ) { var values; var i; values = [ '5', 5, NaN, null, void 0, {}, [], function noop() {} ]; for ( i = 0; i < values.length; i++ ) { t.throws( badValue( values[i] ), TypeError, 'throws a type error when provided '+values[i] ); } t.end(); function badValue( value ) { return function badValue() { randomStream( 2.0, 5.0, { 'objectMode': value }); }; } }); tape( 'the function is a constructor which returns a readable stream', function test( t ) { var RandomStream = randomStream; var s; s = new RandomStream( 2.0, 5.0 ); t.equal( s instanceof Readable, true, 'returns expected value' ); t.end(); }); tape( 'the constructor does not require the `new` operator', function test( t ) { var RandomStream = randomStream; var s; s = randomStream( 2.0, 5.0 ); t.equal( s instanceof RandomStream, true, 'returns expected value' ); t.end(); }); tape( 'the constructor returns a readable stream (no new)', function test( t ) { var s = randomStream( 2.0, 5.0 ); t.equal( s instanceof Readable, true, 'returns expected value' ); t.end(); }); tape( 'the returned stream provides a method to destroy a stream (object)', function test( t ) { var count = 0; var s; s = randomStream( 2.0, 5.0 ); t.equal( typeof s.destroy, 'function', 'has destroy method' ); s.on( 'error', onError ); s.on( 'close', onClose ); s.destroy({ 'message': 'beep' }); function 
onError( err ) { count += 1; if ( err ) { t.ok( true, err.message ); } else { t.ok( false, 'does not error' ); } if ( count === 2 ) { t.end(); } } function onClose() { count += 1; t.ok( true, 'stream closes' ); if ( count === 2 ) { t.end(); } } }); tape( 'the returned stream provides a method to destroy a stream (error object)', function test( t ) { var count = 0; var s; s = randomStream( 2.0, 5.0 ); t.equal( typeof s.destroy, 'function', 'has destroy method' ); s.on( 'error', onError ); s.on( 'close', onClose ); s.destroy( new Error( 'beep' ) ); function onError( err ) { count += 1; if ( err ) { t.ok( true, err.message ); } else { t.ok( false, 'does not error' ); } if ( count === 2 ) { t.end(); } } function onClose() { count += 1; t.ok( true, 'stream closes' ); if ( count === 2 ) { t.end(); } } }); tape( 'the returned stream does not allow itself to be destroyed more than once', function test( t ) { var s; s = randomStream( 2.0, 5.0 ); s.on( 'error', onError ); s.on( 'close', onClose ); // If the stream is closed twice, the test will error... 
s.destroy(); s.destroy(); function onClose() { t.ok( true, 'stream closes' ); t.end(); } function onError( err ) { t.ok( false, err.message ); } }); tape( 'attached to the returned stream is the underlying PRNG', function test( t ) { var s = randomStream( 2.0, 5.0 ); t.equal( typeof s.PRNG, 'function', 'has property' ); s = randomStream( 2.0, 5.0, { 'prng': minstd.normalized }); t.equal( s.PRNG, minstd.normalized, 'has property' ); t.end(); }); tape( 'attached to the returned stream is the generator seed', function test( t ) { var s = randomStream( 2.0, 5.0, { 'seed': 12345 }); t.equal( isUint32Array( s.seed ), true, 'has property' ); t.equal( s.seed[ 0 ], 12345, 'equal to provided seed' ); s = randomStream( 2.0, 5.0, { 'seed': 12345, 'prng': minstd.normalized }); t.equal( s.seed, null, 'equal to `null`' ); t.end(); }); tape( 'attached to the returned stream is the generator seed (array seed)', function test( t ) { var actual; var seed; var s; var i; seed = [ 1234, 5678 ]; s = randomStream( 2.0, 5.0, { 'seed': seed }); actual = s.seed; t.equal( isUint32Array( actual ), true, 'has property' ); for ( i = 0; i < seed.length; i++ ) { t.equal( actual[ i ], seed[ i ], 'returns expected value for word '+i ); } t.end(); }); tape( 'attached to the returned stream is the generator seed length', function test( t ) { var s = randomStream( 2.0, 5.0 ); t.equal( typeof s.seedLength, 'number', 'has property' ); s = randomStream( 2.0, 5.0, { 'prng': minstd.normalized }); t.equal( s.seedLength, null, 'equal to `null`' ); t.end(); }); tape( 'attached to the returned stream is the generator state', function test( t ) { var s = randomStream( 2.0, 5.0 ); t.equal( isUint32Array( s.state ), true, 'has property' ); s = randomStream( 2.0, 5.0, { 'prng': minstd.normalized }); t.equal( s.state, null, 'equal to `null`' ); t.end(); }); tape( 'attached to the returned stream is the generator state length', function test( t ) { var s = randomStream( 2.0, 5.0 ); t.equal( typeof s.stateLength, 
'number', 'has property' ); s = randomStream( 2.0, 5.0, { 'prng': minstd.normalized }); t.equal( s.stateLength, null, 'equal to `null`' ); t.end(); }); tape( 'attached to the returned stream is the generator state size', function test( t ) { var s = randomStream( 2.0, 5.0 ); t.equal( typeof s.byteLength, 'number', 'has property' ); s = randomStream( 2.0, 5.0, { 'prng': minstd.normalized }); t.equal( s.byteLength, null, 'equal to `null`' ); t.end(); }); tape( 'the constructor returns a stream for generating pseudorandom numbers from an arcsine distribution', function test( t ) { var iStream; var result; var rand; var opts; var s; // Note: we assume that the underlying generator is the following PRNG... rand = arcsine( 2.0, 5.0, { 'seed': 12345 }); opts = { 'seed': 12345, 'iter': 10, 'sep': '\n' }; s = randomStream( 2.0, 5.0, opts ); s.on( 'end', onEnd ); opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect ); result = ''; s.pipe( iStream ); function inspect( chunk ) { t.equal( isBuffer( chunk ), true, 'returns a buffer' ); result += chunk.toString(); } function onEnd() { var i; t.pass( 'stream ended' ); result = result.split( '\n' ); t.equal( result.length, 10, 'has expected length' ); for ( i = 0; i < result.length; i++ ) { t.equal( parseFloat( result[ i ] ), rand(), 'returns expected value. i: ' + i + '.' ); } t.end(); } }); tape( 'the constructor returns a stream for generating pseudorandom numbers from an arcsine distribution (object mode)', function test( t ) { var iStream; var count; var rand; var opts; var s; // Note: we assume that the underlying generator is the following PRNG... rand = arcsine( 2.0, 5.0, { 'seed': 12345 }); opts = { 'seed': 12345, 'objectMode': true }; s = randomStream( 2.0, 5.0, opts ); s.on( 'close', onClose ); opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect ); count = 0; s.pipe( iStream ); function inspect( v ) { count += 1; t.equal( rand(), v, 'returns expected value. i: '+count+'.' 
); if ( count >= 10 ) { s.destroy(); } } function onClose() { t.pass( 'stream closed' ); t.end(); } }); tape( 'the constructor supports limiting the number of iterations', function test( t ) { var iStream; var count; var niter; var opts; var s; niter = 10; count = 0; opts = { 'iter': niter, 'objectMode': true }; s = randomStream( 2.0, 5.0, opts ); s.on( 'end', onEnd ); opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect ); s.pipe( iStream ); function inspect( v ) { count += 1; t.equal( typeof v, 'number', 'returns expected value' ); } function onEnd() { t.equal( count === niter, true, 'performs expected number of iterations' ); t.end(); } }); tape( 'by default, the constructor generates newline-delimited pseudorandom numbers', function test( t ) { var iStream; var result; var opts; var s; opts = { 'iter': 10 }; s = randomStream( 2.0, 5.0, opts ); s.on( 'end', onEnd ); iStream = inspectStream( inspect ); result = ''; s.pipe( iStream ); function inspect( chunk ) { result += chunk.toString(); } function onEnd() { var v; var i; result = result.split( '\n' ); t.equal( result.length, opts.iter, 'has expected length' ); for ( i = 0; i < result.length; i++ ) { v = parseFloat( result[ i ] ); t.equal( typeof v, 'number', 'returns expected value' ); t.equal( isnan( v ), false, 'is not NaN' ); } t.end(); } }); tape( 'the constructor supports providing a custom separator for streamed values', function test( t ) { var iStream; var result; var opts; var s; opts = { 'iter': 10, 'sep': '--++--' }; s = randomStream( 2.0, 5.0, opts ); s.on( 'end', onEnd ); iStream = inspectStream( inspect ); result = ''; s.pipe( iStream ); function inspect( chunk ) { result += chunk.toString(); } function onEnd() { var v; var i; result = result.split( opts.sep ); t.equal( result.length, opts.iter, 'has expected length' ); for ( i = 0; i < result.length; i++ ) { v = parseFloat( result[ i ] ); t.equal( typeof v, 'number', 'returns expected value' ); t.equal( isnan( v ), false, 'is not 
NaN' ); } t.end(); } }); tape( 'the constructor supports returning a seeded readable stream', function test( t ) { var iStream; var opts; var seed; var arr; var s1; var s2; var i; seed = now(); opts = { 'objectMode': true, 'seed': seed, 'iter': 10 }; s1 = randomStream( 2.0, 5.0, opts ); s1.on( 'end', onEnd1 ); s2 = randomStream( 2.0, 5.0, opts ); s2.on( 'end', onEnd2 ); t.notEqual( s1, s2, 'separate streams' ); opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect1 ); arr = []; i = 0; s1.pipe( iStream ); function inspect1( v ) { arr.push( v ); } function onEnd1() { var iStream; var opts; opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect2 ); s2.pipe( iStream ); } function inspect2( v ) { t.equal( v, arr[ i ], 'returns expected value' ); i += 1; } function onEnd2() { t.end(); } }); tape( 'the constructor supports specifying the underlying PRNG', function test( t ) { var iStream; var opts; var s; opts = { 'prng': minstd.normalized, 'objectMode': true, 'iter': 10 }; s = randomStream( 2.0, 5.0, opts ); s.on( 'end', onEnd ); opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect ); s.pipe( iStream ); function inspect( v ) { t.equal( typeof v, 'number', 'returns a number' ); } function onEnd() { t.end(); } }); tape( 'the constructor supports providing a seeded underlying PRNG', function test( t ) { var iStream1; var iStream2; var randu; var seed; var opts; var FLG; var s1; var s2; var r1; var r2; seed = now(); randu = minstd.factory({ 'seed': seed }); opts = { 'prng': randu.normalized, 'objectMode': true, 'iter': 10 }; s1 = randomStream( 2.0, 5.0, opts ); s1.on( 'end', onEnd ); randu = minstd.factory({ 'seed': seed }); opts = { 'prng': randu.normalized, 'objectMode': true, 'iter': 10 }; s2 = randomStream( 2.0, 5.0, opts ); s2.on( 'end', onEnd ); t.notEqual( s1, s2, 'separate streams' ); opts = { 'objectMode': true }; iStream1 = inspectStream( opts, inspect1 ); iStream2 = inspectStream( opts, inspect2 ); r1 = []; r2 = []; 
s1.pipe( iStream1 ); s2.pipe( iStream2 ); function inspect1( v ) { r1.push( v ); } function inspect2( v ) { r2.push( v ); } function onEnd() { if ( FLG ) { t.deepEqual( r1, r2, 'streams expected values' ); return t.end(); } FLG = true; } }); tape( 'the constructor supports specifying the underlying generator state', function test( t ) { var iStream; var state; var count; var opts; var arr; var s; opts = { 'objectMode': true, 'iter': 10, 'siter': 5 }; s = randomStream( 2.0, 5.0, opts ); s.on( 'state', onState ); s.on( 'end', onEnd1 ); opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect1 ); count = 0; arr = []; // Move to a future state... s.pipe( iStream ); function onState( s ) { // Only capture the first emitted state... if ( !state ) { state = s; } } function inspect1( v ) { count += 1; if ( count > 5 ) { arr.push( v ); } } function onEnd1() { var iStream; var opts; var s; t.pass( 'first stream ended' ); // Create another stream using the captured state: opts = { 'objectMode': true, 'iter': 5, 'state': state }; s = randomStream( 2.0, 5.0, opts ); s.on( 'end', onEnd2 ); t.deepEqual( state, s.state, 'same state' ); // Create a new inspect stream: opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect2 ); // Replay previously generated values... count = 0; s.pipe( iStream ); } function inspect2( v ) { count += 1; t.equal( v, arr[ count-1 ], 'returns expected value. i: '+(count-1)+'.' ); } function onEnd2() { t.pass( 'second stream ended' ); t.end(); } }); tape( 'the constructor supports specifying a shared underlying generator state', function test( t ) { var iStream; var shared; var state; var count; var opts; var arr; var s; opts = { 'objectMode': true, 'iter': 10, 'siter': 4 }; s = randomStream( 2.0, 5.0, opts ); s.on( 'state', onState ); s.on( 'end', onEnd1 ); opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect1 ); count = 0; arr = []; // Move to a future state... 
s.pipe( iStream ); function onState( s ) { // Only capture the first emitted state... if ( !state ) { state = s; // Create a copy of the state (to prevent mutation) which will be shared by more than one PRNG: shared = new Uint32Array( state ); } } function inspect1( v ) { count += 1; if ( count > 4 ) { arr.push( v ); } } function onEnd1() { var iStream; var opts; var s; t.pass( 'first stream ended' ); // Create another stream using the captured state: opts = { 'objectMode': true, 'iter': 3, 'state': shared, 'copy': false }; s = randomStream( 2.0, 5.0, opts ); s.on( 'end', onEnd2 ); t.deepEqual( state, s.state, 'same state' ); // Create a new inspect stream: opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect2 ); // Replay previously generated values... count = 0; s.pipe( iStream ); } function inspect2( v ) { count += 1; t.equal( v, arr[ count-1 ], 'returns expected value. i: '+(count-1)+'.' ); } function onEnd2() { var iStream; var opts; var s; t.pass( 'second stream ended' ); // Create another stream using the captured state: opts = { 'objectMode': true, 'iter': 3, 'state': shared, 'copy': false }; s = randomStream( 2.0, 5.0, opts ); s.on( 'end', onEnd3 ); t.notDeepEqual( state, s.state, 'different state' ); // Create a new inspect stream: opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect3 ); // Continue replaying previously generated values... s.pipe( iStream ); } function inspect3( v ) { count += 1; t.equal( v, arr[ count-1 ], 'returns expected value. i: '+(count-1)+'.' ); } function onEnd3() { t.pass( 'third stream ended' ); t.end(); } }); tape( 'the returned stream supports setting the underlying generator state', function test( t ) { var iStream; var state; var rand; var opts; var arr; var s; var i; rand = arcsine( 2.0, 5.0 ); // Move to a future state... for ( i = 0; i < 5; i++ ) { rand(); } // Capture the current state: state = rand.state; // Move to a future state... 
arr = []; for ( i = 0; i < 5; i++ ) { arr.push( rand() ); } // Create a random stream: opts = { 'objectMode': true, 'iter': 5 }; s = randomStream( 2.0, 5.0, opts ); s.on( 'end', onEnd ); // Set the PRNG state: s.state = state; // Create a new inspect stream: opts = { 'objectMode': true }; iStream = inspectStream( opts, inspect ); // Replay previously generated values: i = 0; s.pipe( iStream ); function inspect( v ) { t.equal( v, arr[ i ], 'returns expected value. i: ' + i + '.' ); i += 1; } function onEnd() { t.end(); } });
stdlib-js/stdlib
lib/node_modules/@stdlib/random/streams/arcsine/test/test.main.js
JavaScript
apache-2.0
23,031
module ZendeskAPI # Creates put, post, delete class methods for custom resource methods. module Verbs class << self private # @macro [attach] container.create_verb # @method $1(method) # Executes a $1 using the passed in method as a path. # Reloads the resource's attributes if any are in the response body. # # Created method takes an optional options hash. Valid options to be passed in to the created method: reload (for caching, default: false) def create_verb(method_verb) define_method method_verb do |method| define_method "#{method}!" do |*method_args| opts = method_args.last.is_a?(Hash) ? method_args.pop : {} if method_verb == :any verb = opts.delete(:verb) raise(ArgumentError, ":verb required for method defined as :any") unless verb else verb = method_verb end @response = @client.connection.send(verb, "#{path}/#{method}") do |req| req.body = opts end return false unless @response.success? return false unless @response.body resource = nil if @response.body.is_a?(Hash) resource = @response.body[self.class.singular_resource_name] resource ||= @response.body.fetch(self.class.resource_name, []).detect { |res| res["id"] == id } end @attributes.replace @attributes.deep_merge(resource || {}) @attributes.clear_changes clear_associations true end define_method method do |*method_args| begin send("#{method}!", *method_args) rescue ZendeskAPI::Error::RecordInvalid => e @errors = e.errors false rescue ZendeskAPI::Error::ClientError false end end end end end create_verb :put create_verb :post create_verb :delete create_verb :any end end
zendesk/zendesk_api_client_rb
lib/zendesk_api/verbs.rb
Ruby
apache-2.0
2,079
/* * Hibernate, Relational Persistence for Idiomatic Java * * License: GNU Lesser General Public License (LGPL), version 2.1 or later * See the lgpl.txt file in the root directory or http://www.gnu.org/licenses/lgpl-2.1.html */ package org.hibernate.orm.type.descriptor.sql.internal; import java.time.temporal.TemporalAccessor; import javax.persistence.TemporalType; import org.hibernate.dialect.Dialect; import org.hibernate.orm.type.descriptor.internal.DateTimeUtils; import org.hibernate.orm.type.descriptor.java.spi.TemporalJavaTypeDescriptor; import org.hibernate.orm.type.descriptor.spi.WrapperOptions; /** * @author Steve Ebersole */ public class JdbcLiteralFormatterTemporal extends BasicJdbcLiteralFormatter { private final TemporalType precision; public JdbcLiteralFormatterTemporal(TemporalJavaTypeDescriptor javaTypeDescriptor, TemporalType precision) { super( javaTypeDescriptor ); this.precision = precision; // todo : add some validation of combos between javaTypeDescrptor#getPrecision and precision - log warnings } @Override protected TemporalJavaTypeDescriptor getJavaTypeDescriptor() { return (TemporalJavaTypeDescriptor) super.getJavaTypeDescriptor(); } @Override public String toJdbcLiteral(Object value, Dialect dialect, WrapperOptions wrapperOptions) { // for performance reasons, avoid conversions if we can if ( value instanceof java.util.Date ) { return DateTimeUtils.formatJdbcLiteralUsingPrecision( (java.util.Date) value, precision ); } else if ( value instanceof java.util.Calendar ) { return DateTimeUtils.formatJdbcLiteralUsingPrecision( (java.util.Calendar) value, precision ); } else if ( value instanceof TemporalAccessor ) { return DateTimeUtils.formatJdbcLiteralUsingPrecision( (TemporalAccessor) value, precision ); } switch ( getJavaTypeDescriptor().getPrecision() ) { case DATE: { return DateTimeUtils.formatJdbcLiteralUsingPrecision( unwrap( value, java.sql.Date.class, wrapperOptions ), precision ); } case TIME: { return 
DateTimeUtils.formatJdbcLiteralUsingPrecision( unwrap( value, java.sql.Time.class, wrapperOptions ), precision ); } default: { return DateTimeUtils.formatJdbcLiteralUsingPrecision( unwrap( value, java.util.Date.class, wrapperOptions ), precision ); } } } }
hibernate/hibernate-semantic-query
src/test/java/org/hibernate/orm/type/descriptor/sql/internal/JdbcLiteralFormatterTemporal.java
Java
apache-2.0
2,399
# -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.aiplatform.v1", manifest={"SpecialistPool",}, ) class SpecialistPool(proto.Message): r"""SpecialistPool represents customers' own workforce to work on their data labeling jobs. It includes a group of specialist managers and workers. Managers are responsible for managing the workers in this pool as well as customers' data labeling jobs associated with this pool. Customers create specialist pool as well as start data labeling jobs on Cloud, managers and workers handle the jobs using CrowdCompute console. Attributes: name (str): Required. The resource name of the SpecialistPool. display_name (str): Required. The user-defined name of the SpecialistPool. The name can be up to 128 characters long and can be consist of any UTF-8 characters. This field should be unique on project-level. specialist_managers_count (int): Output only. The number of managers in this SpecialistPool. specialist_manager_emails (Sequence[str]): The email addresses of the managers in the SpecialistPool. pending_data_labeling_jobs (Sequence[str]): Output only. The resource name of the pending data labeling jobs. specialist_worker_emails (Sequence[str]): The email addresses of workers in the SpecialistPool. 
""" name = proto.Field(proto.STRING, number=1,) display_name = proto.Field(proto.STRING, number=2,) specialist_managers_count = proto.Field(proto.INT32, number=3,) specialist_manager_emails = proto.RepeatedField(proto.STRING, number=4,) pending_data_labeling_jobs = proto.RepeatedField(proto.STRING, number=5,) specialist_worker_emails = proto.RepeatedField(proto.STRING, number=7,) __all__ = tuple(sorted(__protobuf__.manifest))
googleapis/python-aiplatform
google/cloud/aiplatform_v1/types/specialist_pool.py
Python
apache-2.0
2,601
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. namespace Microsoft.AspNetCore.Routing { /// <summary> /// Indicates whether ASP.NET routing is processing a URL from an HTTP request or generating a URL. /// </summary> public enum RouteDirection { /// <summary> /// A URL from a client is being processed. /// </summary> IncomingRequest, /// <summary> /// A URL is being created based on the route definition. /// </summary> UrlGeneration, } }
aspnet/AspNetCore
src/Http/Routing.Abstractions/src/RouteDirection.cs
C#
apache-2.0
622
package com.etiennelawlor.loop.network.models.response; import android.os.Parcel; import android.os.Parcelable; import com.google.gson.annotations.SerializedName; /** * Created by etiennelawlor on 5/23/15. */ public class Tag implements Parcelable { // region Fields @SerializedName("uri") private String uri; @SerializedName("name") private String name; @SerializedName("tag") private String tag; @SerializedName("canonical") private String canonical; // endregion // region Constructors public Tag() { } protected Tag(Parcel in) { this.uri = in.readString(); this.name = in.readString(); this.tag = in.readString(); this.canonical = in.readString(); } // endregion // region Getters public String getUri() { return uri; } public String getName() { return name; } public String getTag() { return tag; } public String getCanonical() { return canonical; } // endregion // region Setters public void setUri(String uri) { this.uri = uri; } public void setName(String name) { this.name = name; } public void setTag(String tag) { this.tag = tag; } public void setCanonical(String canonical) { this.canonical = canonical; } // endregion // region Parcelable Methods @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { dest.writeString(this.uri); dest.writeString(this.name); dest.writeString(this.tag); dest.writeString(this.canonical); } // endregion public static final Parcelable.Creator<Tag> CREATOR = new Parcelable.Creator<Tag>() { @Override public Tag createFromParcel(Parcel source) { return new Tag(source); } @Override public Tag[] newArray(int size) { return new Tag[size]; } }; }
lawloretienne/Loop
app/src/main/java/com/etiennelawlor/loop/network/models/response/Tag.java
Java
apache-2.0
2,066
# -*- coding: utf-8 -*- # Copyright 2020 The Pigweed Authors # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. """Prints the env_setup banner for cmd.exe. This is done from Python as activating colors and printing ASCII art are not easy to do in cmd.exe. Activated colors also don't persist in the parent process. """ from __future__ import print_function import argparse import os import sys from .colors import Color, enable_colors # type: ignore _PIGWEED_BANNER = u''' ▒█████▄ █▓ ▄███▒ ▒█ ▒█ ░▓████▒ ░▓████▒ ▒▓████▄ ▒█░ █░ ░█▒ ██▒ ▀█▒ ▒█░ █ ▒█ ▒█ ▀ ▒█ ▀ ▒█ ▀█▌ ▒█▄▄▄█░ ░█▒ █▓░ ▄▄░ ▒█░ █ ▒█ ▒███ ▒███ ░█ █▌ ▒█▀ ░█░ ▓█ █▓ ░█░ █ ▒█ ▒█ ▄ ▒█ ▄ ░█ ▄█▌ ▒█ ░█░ ░▓███▀ ▒█▓▀▓█░ ░▓████▒ ░▓████▒ ▒▓████▀ ''' def print_banner(bootstrap, no_shell_file): """Print the Pigweed or project-specific banner""" enable_colors() print(Color.green('\n WELCOME TO...')) print(Color.magenta(_PIGWEED_BANNER)) if bootstrap: print( Color.green('\n BOOTSTRAP! Bootstrap may take a few minutes; ' 'please be patient')) print( Color.green( ' On Windows, this stage is extremely slow (~10 minutes).\n')) else: print( Color.green( '\n ACTIVATOR! 
This sets your console environment variables.\n' )) if no_shell_file: print(Color.bold_red('Error!\n')) print( Color.red(' Your Pigweed environment does not seem to be' ' configured.')) print(Color.red(' Run bootstrap.bat to perform initial setup.')) return 0 def parse(): """Parse command-line arguments.""" parser = argparse.ArgumentParser() parser.add_argument('--bootstrap', action='store_true') parser.add_argument('--no-shell-file', action='store_true') return parser.parse_args() def main(): """Script entry point.""" if os.name != 'nt': return 1 return print_banner(**vars(parse())) if __name__ == '__main__': sys.exit(main())
google/pigweed
pw_env_setup/py/pw_env_setup/windows_env_start.py
Python
apache-2.0
2,955
package rvc.ann; import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.METHOD; import static java.lang.annotation.RetentionPolicy.RUNTIME; /** * @author nurmuhammad */ @Retention(RUNTIME) @Target(METHOD) public @interface OPTIONS { String value() default Constants.NULL_VALUE; boolean absolutePath() default false; }
nurmuhammad/rvc
src/main/java/rvc/ann/OPTIONS.java
Java
apache-2.0
399
package com.xiaojinzi.component.bean; import javax.lang.model.element.Element; /** * time : 2018/07/26 * * @author : xiaojinzi */ public class RouterDegradeAnnoBean { /** * 优先级 */ private int priority; /** * 是一个类实现了 RouterDegrade 接口 */ private Element rawType; public int getPriority() { return priority; } public void setPriority(int priority) { this.priority = priority; } public Element getRawType() { return rawType; } public void setRawType(Element rawType) { this.rawType = rawType; } }
xiaojinzi123/Component
ComponentCompiler/src/main/java/com/xiaojinzi/component/bean/RouterDegradeAnnoBean.java
Java
apache-2.0
632
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.executiongraph; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.Archiveable; import org.apache.flink.api.common.InputDependencyConstraint; import org.apache.flink.api.common.accumulators.Accumulator; import org.apache.flink.api.common.time.Time; import org.apache.flink.core.io.InputSplit; import org.apache.flink.runtime.JobException; import org.apache.flink.runtime.accumulators.StringifiedAccumulatorResult; import org.apache.flink.runtime.checkpoint.CheckpointOptions; import org.apache.flink.runtime.checkpoint.CheckpointType; import org.apache.flink.runtime.checkpoint.JobManagerTaskRestore; import org.apache.flink.runtime.clusterframework.types.AllocationID; import org.apache.flink.runtime.clusterframework.types.ResourceID; import org.apache.flink.runtime.clusterframework.types.ResourceProfile; import org.apache.flink.runtime.clusterframework.types.SlotProfile; import org.apache.flink.runtime.concurrent.ComponentMainThreadExecutor; import org.apache.flink.runtime.concurrent.FutureUtils; import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor; import 
org.apache.flink.runtime.deployment.TaskDeploymentDescriptor; import org.apache.flink.runtime.deployment.TaskDeploymentDescriptorFactory; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.instance.SlotSharingGroupId; import org.apache.flink.runtime.io.network.partition.PartitionTracker; import org.apache.flink.runtime.io.network.partition.ResultPartitionID; import org.apache.flink.runtime.jobgraph.IntermediateDataSetID; import org.apache.flink.runtime.jobgraph.IntermediateResultPartitionID; import org.apache.flink.runtime.jobmanager.scheduler.CoLocationConstraint; import org.apache.flink.runtime.jobmanager.scheduler.LocationPreferenceConstraint; import org.apache.flink.runtime.jobmanager.scheduler.NoResourceAvailableException; import org.apache.flink.runtime.jobmanager.scheduler.ScheduledUnit; import org.apache.flink.runtime.jobmanager.scheduler.SlotSharingGroup; import org.apache.flink.runtime.jobmanager.slots.TaskManagerGateway; import org.apache.flink.runtime.jobmaster.LogicalSlot; import org.apache.flink.runtime.jobmaster.SlotRequestId; import org.apache.flink.runtime.jobmaster.slotpool.SlotProvider; import org.apache.flink.runtime.messages.Acknowledge; import org.apache.flink.runtime.messages.StackTraceSampleResponse; import org.apache.flink.runtime.shuffle.PartitionDescriptor; import org.apache.flink.runtime.shuffle.ProducerDescriptor; import org.apache.flink.runtime.shuffle.ShuffleDescriptor; import org.apache.flink.runtime.state.KeyGroupRangeAssignment; import org.apache.flink.runtime.taskmanager.TaskManagerLocation; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.FlinkException; import org.apache.flink.util.FlinkRuntimeException; import org.apache.flink.util.OptionalFailure; import org.apache.flink.util.function.ThrowingRunnable; import org.slf4j.Logger; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import 
java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; import java.util.concurrent.Executor; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; import java.util.function.Function; import java.util.stream.Collectors; import static org.apache.flink.runtime.deployment.TaskDeploymentDescriptorFactory.getConsumedPartitionShuffleDescriptor; import static org.apache.flink.runtime.execution.ExecutionState.CANCELED; import static org.apache.flink.runtime.execution.ExecutionState.CANCELING; import static org.apache.flink.runtime.execution.ExecutionState.CREATED; import static org.apache.flink.runtime.execution.ExecutionState.DEPLOYING; import static org.apache.flink.runtime.execution.ExecutionState.FAILED; import static org.apache.flink.runtime.execution.ExecutionState.FINISHED; import static org.apache.flink.runtime.execution.ExecutionState.RUNNING; import static org.apache.flink.runtime.execution.ExecutionState.SCHEDULED; import static org.apache.flink.util.Preconditions.checkNotNull; /** * A single execution of a vertex. While an {@link ExecutionVertex} can be executed multiple times * (for recovery, re-computation, re-configuration), this class tracks the state of a single execution * of that vertex and the resources. * * <h2>Lock free state transitions</h2> * * <p>In several points of the code, we need to deal with possible concurrent state changes and actions. * For example, while the call to deploy a task (send it to the TaskManager) happens, the task gets cancelled. 
* * <p>We could lock the entire portion of the code (decision to deploy, deploy, set state to running) such that * it is guaranteed that any "cancel command" will only pick up after deployment is done and that the "cancel * command" call will never overtake the deploying call. * * <p>This blocks the threads big time, because the remote calls may take long. Depending of their locking behavior, it * may even result in distributed deadlocks (unless carefully avoided). We therefore use atomic state updates and * occasional double-checking to ensure that the state after a completed call is as expected, and trigger correcting * actions if it is not. Many actions are also idempotent (like canceling). */ public class Execution implements AccessExecution, Archiveable<ArchivedExecution>, LogicalSlot.Payload { private static final AtomicReferenceFieldUpdater<Execution, ExecutionState> STATE_UPDATER = AtomicReferenceFieldUpdater.newUpdater(Execution.class, ExecutionState.class, "state"); private static final AtomicReferenceFieldUpdater<Execution, LogicalSlot> ASSIGNED_SLOT_UPDATER = AtomicReferenceFieldUpdater.newUpdater( Execution.class, LogicalSlot.class, "assignedResource"); private static final Logger LOG = ExecutionGraph.LOG; private static final int NUM_CANCEL_CALL_TRIES = 3; private static final int NUM_STOP_CALL_TRIES = 3; // -------------------------------------------------------------------------------------------- /** The executor which is used to execute futures. */ private final Executor executor; /** The execution vertex whose task this execution executes. */ private final ExecutionVertex vertex; /** The unique ID marking the specific execution instant of the task. */ private final ExecutionAttemptID attemptId; /** Gets the global modification version of the execution graph when this execution was created. * This version is bumped in the ExecutionGraph whenever a global failover happens. 
It is used * to resolve conflicts between concurrent modification by global and local failover actions. */ private final long globalModVersion; /** The timestamps when state transitions occurred, indexed by {@link ExecutionState#ordinal()}. */ private final long[] stateTimestamps; private final int attemptNumber; private final Time rpcTimeout; private final Collection<PartitionInfo> partitionInfos; /** A future that completes once the Execution reaches a terminal ExecutionState. */ private final CompletableFuture<ExecutionState> terminalStateFuture; private final CompletableFuture<?> releaseFuture; private final CompletableFuture<TaskManagerLocation> taskManagerLocationFuture; private volatile ExecutionState state = CREATED; private volatile LogicalSlot assignedResource; private volatile Throwable failureCause; // once assigned, never changes /** Information to restore the task on recovery, such as checkpoint id and task state snapshot. */ @Nullable private volatile JobManagerTaskRestore taskRestore; /** This field holds the allocation id once it was assigned successfully. */ @Nullable private volatile AllocationID assignedAllocationID; // ------------------------ Accumulators & Metrics ------------------------ /** Lock for updating the accumulators atomically. * Prevents final accumulators to be overwritten by partial accumulators on a late heartbeat. */ private final Object accumulatorLock = new Object(); /* Continuously updated map of user-defined accumulators */ private volatile Map<String, Accumulator<?, ?>> userAccumulators; private volatile IOMetrics ioMetrics; private Map<IntermediateResultPartitionID, ResultPartitionDeploymentDescriptor> producedPartitions; // -------------------------------------------------------------------------------------------- /** * Creates a new Execution attempt. * * @param executor * The executor used to dispatch callbacks from futures and asynchronous RPC calls. 
* @param vertex * The execution vertex to which this Execution belongs * @param attemptNumber * The execution attempt number. * @param globalModVersion * The global modification version of the execution graph when this execution was created * @param startTimestamp * The timestamp that marks the creation of this Execution * @param rpcTimeout * The rpcTimeout for RPC calls like deploy/cancel/stop. */ public Execution( Executor executor, ExecutionVertex vertex, int attemptNumber, long globalModVersion, long startTimestamp, Time rpcTimeout) { this.executor = checkNotNull(executor); this.vertex = checkNotNull(vertex); this.attemptId = new ExecutionAttemptID(); this.rpcTimeout = checkNotNull(rpcTimeout); this.globalModVersion = globalModVersion; this.attemptNumber = attemptNumber; this.stateTimestamps = new long[ExecutionState.values().length]; markTimestamp(CREATED, startTimestamp); this.partitionInfos = new ArrayList<>(16); this.producedPartitions = Collections.emptyMap(); this.terminalStateFuture = new CompletableFuture<>(); this.releaseFuture = new CompletableFuture<>(); this.taskManagerLocationFuture = new CompletableFuture<>(); this.assignedResource = null; } // -------------------------------------------------------------------------------------------- // Properties // -------------------------------------------------------------------------------------------- public ExecutionVertex getVertex() { return vertex; } @Override public ExecutionAttemptID getAttemptId() { return attemptId; } @Override public int getAttemptNumber() { return attemptNumber; } @Override public ExecutionState getState() { return state; } @Nullable public AllocationID getAssignedAllocationID() { return assignedAllocationID; } /** * Gets the global modification version of the execution graph when this execution was created. * * <p>This version is bumped in the ExecutionGraph whenever a global failover happens. 
It is used * to resolve conflicts between concurrent modification by global and local failover actions. */ public long getGlobalModVersion() { return globalModVersion; } public CompletableFuture<TaskManagerLocation> getTaskManagerLocationFuture() { return taskManagerLocationFuture; } public LogicalSlot getAssignedResource() { return assignedResource; } public Optional<ResultPartitionDeploymentDescriptor> getResultPartitionDeploymentDescriptor( IntermediateResultPartitionID id) { return Optional.ofNullable(producedPartitions.get(id)); } /** * Tries to assign the given slot to the execution. The assignment works only if the * Execution is in state SCHEDULED. Returns true, if the resource could be assigned. * * @param logicalSlot to assign to this execution * @return true if the slot could be assigned to the execution, otherwise false */ @VisibleForTesting boolean tryAssignResource(final LogicalSlot logicalSlot) { assertRunningInJobMasterMainThread(); checkNotNull(logicalSlot); // only allow to set the assigned resource in state SCHEDULED or CREATED // note: we also accept resource assignment when being in state CREATED for testing purposes if (state == SCHEDULED || state == CREATED) { if (ASSIGNED_SLOT_UPDATER.compareAndSet(this, null, logicalSlot)) { if (logicalSlot.tryAssignPayload(this)) { // check for concurrent modification (e.g. 
cancelling call) if ((state == SCHEDULED || state == CREATED) && !taskManagerLocationFuture.isDone()) { taskManagerLocationFuture.complete(logicalSlot.getTaskManagerLocation()); assignedAllocationID = logicalSlot.getAllocationId(); return true; } else { // free assigned resource and return false ASSIGNED_SLOT_UPDATER.set(this, null); return false; } } else { ASSIGNED_SLOT_UPDATER.set(this, null); return false; } } else { // the slot already has another slot assigned return false; } } else { // do not allow resource assignment if we are not in state SCHEDULED return false; } } public InputSplit getNextInputSplit() { final LogicalSlot slot = this.getAssignedResource(); final String host = slot != null ? slot.getTaskManagerLocation().getHostname() : null; return this.vertex.getNextInputSplit(host); } @Override public TaskManagerLocation getAssignedResourceLocation() { // returns non-null only when a location is already assigned final LogicalSlot currentAssignedResource = assignedResource; return currentAssignedResource != null ? currentAssignedResource.getTaskManagerLocation() : null; } public Throwable getFailureCause() { return failureCause; } @Override public String getFailureCauseAsString() { return ExceptionUtils.stringifyException(getFailureCause()); } @Override public long[] getStateTimestamps() { return stateTimestamps; } @Override public long getStateTimestamp(ExecutionState state) { return this.stateTimestamps[state.ordinal()]; } public boolean isFinished() { return state.isTerminal(); } @Nullable public JobManagerTaskRestore getTaskRestore() { return taskRestore; } /** * Sets the initial state for the execution. The serialized state is then shipped via the * {@link TaskDeploymentDescriptor} to the TaskManagers. 
* * @param taskRestore information to restore the state */ public void setInitialState(@Nullable JobManagerTaskRestore taskRestore) { this.taskRestore = taskRestore; } /** * Gets a future that completes once the task execution reaches a terminal state. * The future will be completed with specific state that the execution reached. * This future is always completed from the job master's main thread. * * @return A future which is completed once the execution reaches a terminal state */ @Override public CompletableFuture<ExecutionState> getTerminalStateFuture() { return terminalStateFuture; } /** * Gets the release future which is completed once the execution reaches a terminal * state and the assigned resource has been released. * This future is always completed from the job master's main thread. * * @return A future which is completed once the assigned resource has been released */ public CompletableFuture<?> getReleaseFuture() { return releaseFuture; } // -------------------------------------------------------------------------------------------- // Actions // -------------------------------------------------------------------------------------------- public CompletableFuture<Void> scheduleForExecution() { final ExecutionGraph executionGraph = getVertex().getExecutionGraph(); final SlotProvider resourceProvider = executionGraph.getSlotProvider(); final boolean allowQueued = executionGraph.isQueuedSchedulingAllowed(); return scheduleForExecution( resourceProvider, allowQueued, LocationPreferenceConstraint.ANY, Collections.emptySet()); } /** * NOTE: This method only throws exceptions if it is in an illegal state to be scheduled, or if the tasks needs * to be scheduled immediately and no resource is available. If the task is accepted by the schedule, any * error sets the vertex state to failed and triggers the recovery logic. * * @param slotProvider The slot provider to use to allocate slot for this execution attempt. 
* @param queued Flag to indicate whether the scheduler may queue this task if it cannot * immediately deploy it. * @param locationPreferenceConstraint constraint for the location preferences * @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph. * Can be empty if the allocation ids are not required for scheduling. * @return Future which is completed once the Execution has been deployed */ public CompletableFuture<Void> scheduleForExecution( SlotProvider slotProvider, boolean queued, LocationPreferenceConstraint locationPreferenceConstraint, @Nonnull Set<AllocationID> allPreviousExecutionGraphAllocationIds) { assertRunningInJobMasterMainThread(); final ExecutionGraph executionGraph = vertex.getExecutionGraph(); final Time allocationTimeout = executionGraph.getAllocationTimeout(); try { final CompletableFuture<Execution> allocationFuture = allocateResourcesForExecution( slotProvider, queued, locationPreferenceConstraint, allPreviousExecutionGraphAllocationIds, allocationTimeout); final CompletableFuture<Void> deploymentFuture; if (allocationFuture.isDone() || queued) { deploymentFuture = allocationFuture.thenRun(ThrowingRunnable.unchecked(this::deploy)); } else { deploymentFuture = FutureUtils.completedExceptionally( new IllegalArgumentException("The slot allocation future has not been completed yet.")); } deploymentFuture.whenComplete( (Void ignored, Throwable failure) -> { if (failure != null) { final Throwable stripCompletionException = ExceptionUtils.stripCompletionException(failure); final Throwable schedulingFailureCause; if (stripCompletionException instanceof TimeoutException) { schedulingFailureCause = new NoResourceAvailableException( "Could not allocate enough slots within timeout of " + allocationTimeout + " to run the job. 
" + "Please make sure that the cluster has enough resources."); } else { schedulingFailureCause = stripCompletionException; } markFailed(schedulingFailureCause); } }); return deploymentFuture; } catch (IllegalExecutionStateException e) { return FutureUtils.completedExceptionally(e); } } /** * Allocates resources for the execution. * * <p>Allocates following resources: * <ol> * <li>slot obtained from the slot provider</li> * <li>registers produced partitions with the {@link org.apache.flink.runtime.shuffle.ShuffleMaster}</li> * </ol> * * @param slotProvider to obtain a new slot from * @param queued if the allocation can be queued * @param locationPreferenceConstraint constraint for the location preferences * @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph. * Can be empty if the allocation ids are not required for scheduling. * @param allocationTimeout rpcTimeout for allocating a new slot * @return Future which is completed with this execution once the slot has been assigned * or with an exception if an error occurred. */ CompletableFuture<Execution> allocateResourcesForExecution( SlotProvider slotProvider, boolean queued, LocationPreferenceConstraint locationPreferenceConstraint, @Nonnull Set<AllocationID> allPreviousExecutionGraphAllocationIds, Time allocationTimeout) { return allocateAndAssignSlotForExecution( slotProvider, queued, locationPreferenceConstraint, allPreviousExecutionGraphAllocationIds, allocationTimeout) .thenCompose(slot -> registerProducedPartitions(slot.getTaskManagerLocation())); } /** * Allocates and assigns a slot obtained from the slot provider to the execution. * * @param slotProvider to obtain a new slot from * @param queued if the allocation can be queued * @param locationPreferenceConstraint constraint for the location preferences * @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph. 
* Can be empty if the allocation ids are not required for scheduling. * @param allocationTimeout rpcTimeout for allocating a new slot * @return Future which is completed with the allocated slot once it has been assigned * or with an exception if an error occurred. */ private CompletableFuture<LogicalSlot> allocateAndAssignSlotForExecution( SlotProvider slotProvider, boolean queued, LocationPreferenceConstraint locationPreferenceConstraint, @Nonnull Set<AllocationID> allPreviousExecutionGraphAllocationIds, Time allocationTimeout) { checkNotNull(slotProvider); assertRunningInJobMasterMainThread(); final SlotSharingGroup sharingGroup = vertex.getJobVertex().getSlotSharingGroup(); final CoLocationConstraint locationConstraint = vertex.getLocationConstraint(); // sanity check if (locationConstraint != null && sharingGroup == null) { throw new IllegalStateException( "Trying to schedule with co-location constraint but without slot sharing allowed."); } // this method only works if the execution is in the state 'CREATED' if (transitionState(CREATED, SCHEDULED)) { final SlotSharingGroupId slotSharingGroupId = sharingGroup != null ? sharingGroup.getSlotSharingGroupId() : null; ScheduledUnit toSchedule = locationConstraint == null ? new ScheduledUnit(this, slotSharingGroupId) : new ScheduledUnit(this, slotSharingGroupId, locationConstraint); // try to extract previous allocation ids, if applicable, so that we can reschedule to the same slot ExecutionVertex executionVertex = getVertex(); AllocationID lastAllocation = executionVertex.getLatestPriorAllocation(); Collection<AllocationID> previousAllocationIDs = lastAllocation != null ? 
Collections.singletonList(lastAllocation) : Collections.emptyList(); // calculate the preferred locations final CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture = calculatePreferredLocations(locationPreferenceConstraint); final SlotRequestId slotRequestId = new SlotRequestId(); final CompletableFuture<LogicalSlot> logicalSlotFuture = preferredLocationsFuture.thenCompose( (Collection<TaskManagerLocation> preferredLocations) -> slotProvider.allocateSlot( slotRequestId, toSchedule, new SlotProfile( ResourceProfile.UNKNOWN, preferredLocations, previousAllocationIDs, allPreviousExecutionGraphAllocationIds), queued, allocationTimeout)); // register call back to cancel slot request in case that the execution gets canceled releaseFuture.whenComplete( (Object ignored, Throwable throwable) -> { if (logicalSlotFuture.cancel(false)) { slotProvider.cancelSlotRequest( slotRequestId, slotSharingGroupId, new FlinkException("Execution " + this + " was released.")); } }); // This forces calls to the slot pool back into the main thread, for normal and exceptional completion return logicalSlotFuture.handle( (LogicalSlot logicalSlot, Throwable failure) -> { if (failure != null) { throw new CompletionException(failure); } if (tryAssignResource(logicalSlot)) { return logicalSlot; } else { // release the slot logicalSlot.releaseSlot(new FlinkException("Could not assign logical slot to execution " + this + '.')); throw new CompletionException( new FlinkException( "Could not assign slot " + logicalSlot + " to execution " + this + " because it has already been assigned ")); } }); } else { // call race, already deployed, or already done throw new IllegalExecutionStateException(this, CREATED, state); } } @VisibleForTesting CompletableFuture<Execution> registerProducedPartitions(TaskManagerLocation location) { assertRunningInJobMasterMainThread(); return FutureUtils.thenApplyAsyncIfNotDone( registerProducedPartitions(vertex, location, attemptId), 
vertex.getExecutionGraph().getJobMasterMainThreadExecutor(), producedPartitionsCache -> { producedPartitions = producedPartitionsCache; startTrackingPartitions(location.getResourceID(), producedPartitionsCache.values()); return this; }); } @VisibleForTesting static CompletableFuture<Map<IntermediateResultPartitionID, ResultPartitionDeploymentDescriptor>> registerProducedPartitions( ExecutionVertex vertex, TaskManagerLocation location, ExecutionAttemptID attemptId) { ProducerDescriptor producerDescriptor = ProducerDescriptor.create(location, attemptId); boolean lazyScheduling = vertex.getExecutionGraph().getScheduleMode().allowLazyDeployment(); Collection<IntermediateResultPartition> partitions = vertex.getProducedPartitions().values(); Collection<CompletableFuture<ResultPartitionDeploymentDescriptor>> partitionRegistrations = new ArrayList<>(partitions.size()); for (IntermediateResultPartition partition : partitions) { PartitionDescriptor partitionDescriptor = PartitionDescriptor.from(partition); int maxParallelism = getPartitionMaxParallelism(partition); CompletableFuture<? extends ShuffleDescriptor> shuffleDescriptorFuture = vertex .getExecutionGraph() .getShuffleMaster() .registerPartitionWithProducer(partitionDescriptor, producerDescriptor); final boolean releasePartitionOnConsumption = vertex.getExecutionGraph().isForcePartitionReleaseOnConsumption() || !partitionDescriptor.getPartitionType().isBlocking(); CompletableFuture<ResultPartitionDeploymentDescriptor> partitionRegistration = shuffleDescriptorFuture .thenApply(shuffleDescriptor -> new ResultPartitionDeploymentDescriptor( partitionDescriptor, shuffleDescriptor, maxParallelism, lazyScheduling, releasePartitionOnConsumption ? 
ShuffleDescriptor.ReleaseType.AUTO : ShuffleDescriptor.ReleaseType.MANUAL)); partitionRegistrations.add(partitionRegistration); } return FutureUtils.combineAll(partitionRegistrations).thenApply(rpdds -> { Map<IntermediateResultPartitionID, ResultPartitionDeploymentDescriptor> producedPartitions = new LinkedHashMap<>(partitions.size()); rpdds.forEach(rpdd -> producedPartitions.put(rpdd.getPartitionId(), rpdd)); return producedPartitions; }); } private static int getPartitionMaxParallelism(IntermediateResultPartition partition) { // TODO consumers.isEmpty() only exists for test, currently there has to be exactly one consumer in real jobs! final List<List<ExecutionEdge>> consumers = partition.getConsumers(); int maxParallelism = KeyGroupRangeAssignment.UPPER_BOUND_MAX_PARALLELISM; if (!consumers.isEmpty()) { List<ExecutionEdge> consumer = consumers.get(0); ExecutionJobVertex consumerVertex = consumer.get(0).getTarget().getJobVertex(); maxParallelism = consumerVertex.getMaxParallelism(); } return maxParallelism; } /** * Deploys the execution to the previously assigned resource. * * @throws JobException if the execution cannot be deployed to the assigned resource */ public void deploy() throws JobException { assertRunningInJobMasterMainThread(); final LogicalSlot slot = assignedResource; checkNotNull(slot, "In order to deploy the execution we first have to assign a resource via tryAssignResource."); // Check if the TaskManager died in the meantime // This only speeds up the response to TaskManagers failing concurrently to deployments. 
// The more general check is the rpcTimeout of the deployment call if (!slot.isAlive()) { throw new JobException("Target slot (TaskManager) for deployment is no longer alive."); } // make sure exactly one deployment call happens from the correct state // note: the transition from CREATED to DEPLOYING is for testing purposes only ExecutionState previous = this.state; if (previous == SCHEDULED || previous == CREATED) { if (!transitionState(previous, DEPLOYING)) { // race condition, someone else beat us to the deploying call. // this should actually not happen and indicates a race somewhere else throw new IllegalStateException("Cannot deploy task: Concurrent deployment call race."); } } else { // vertex may have been cancelled, or it was already scheduled throw new IllegalStateException("The vertex must be in CREATED or SCHEDULED state to be deployed. Found state " + previous); } if (this != slot.getPayload()) { throw new IllegalStateException( String.format("The execution %s has not been assigned to the assigned slot.", this)); } try { // race double check, did we fail/cancel and do we need to release the slot? 
if (this.state != DEPLOYING) { slot.releaseSlot(new FlinkException("Actual state of execution " + this + " (" + state + ") does not match expected state DEPLOYING.")); return; } if (LOG.isInfoEnabled()) { LOG.info(String.format("Deploying %s (attempt #%d) to %s", vertex.getTaskNameWithSubtaskIndex(), attemptNumber, getAssignedResourceLocation())); } final TaskDeploymentDescriptor deployment = TaskDeploymentDescriptorFactory .fromExecutionVertex(vertex, attemptNumber) .createDeploymentDescriptor( slot.getAllocationId(), slot.getPhysicalSlotNumber(), taskRestore, producedPartitions.values()); // null taskRestore to let it be GC'ed taskRestore = null; final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway(); final ComponentMainThreadExecutor jobMasterMainThreadExecutor = vertex.getExecutionGraph().getJobMasterMainThreadExecutor(); // We run the submission in the future executor so that the serialization of large TDDs does not block // the main thread and sync back to the main thread once submission is completed. 
CompletableFuture.supplyAsync(() -> taskManagerGateway.submitTask(deployment, rpcTimeout), executor) .thenCompose(Function.identity()) .whenCompleteAsync( (ack, failure) -> { // only respond to the failure case if (failure != null) { if (failure instanceof TimeoutException) { String taskname = vertex.getTaskNameWithSubtaskIndex() + " (" + attemptId + ')'; markFailed(new Exception( "Cannot deploy task " + taskname + " - TaskManager (" + getAssignedResourceLocation() + ") not responding after a rpcTimeout of " + rpcTimeout, failure)); } else { markFailed(failure); } } }, jobMasterMainThreadExecutor); } catch (Throwable t) { markFailed(t); ExceptionUtils.rethrow(t); } } public void cancel() { // depending on the previous state, we go directly to cancelled (no cancel call necessary) // -- or to canceling (cancel call needs to be sent to the task manager) // because of several possibly previous states, we need to again loop until we make a // successful atomic state transition assertRunningInJobMasterMainThread(); while (true) { ExecutionState current = this.state; if (current == CANCELING || current == CANCELED) { // already taken care of, no need to cancel again return; } // these two are the common cases where we need to send a cancel call else if (current == RUNNING || current == DEPLOYING) { // try to transition to canceling, if successful, send the cancel call if (startCancelling(NUM_CANCEL_CALL_TRIES)) { return; } // else: fall through the loop } else if (current == FINISHED || current == FAILED) { // nothing to do any more. finished/failed before it could be cancelled. 
// in any case, the task is removed from the TaskManager already return; } else if (current == CREATED || current == SCHEDULED) { // from here, we can directly switch to cancelled, because no task has been deployed if (cancelAtomically()) { return; } // else: fall through the loop } else { throw new IllegalStateException(current.name()); } } } public CompletableFuture<?> suspend() { switch(state) { case RUNNING: case DEPLOYING: case CREATED: case SCHEDULED: if (!cancelAtomically()) { throw new IllegalStateException( String.format("Could not directly go to %s from %s.", CANCELED.name(), state.name())); } break; case CANCELING: completeCancelling(); break; case FINISHED: case FAILED: case CANCELED: break; default: throw new IllegalStateException(state.name()); } return releaseFuture; } private void scheduleConsumer(ExecutionVertex consumerVertex) { try { final ExecutionGraph executionGraph = consumerVertex.getExecutionGraph(); consumerVertex.scheduleForExecution( executionGraph.getSlotProvider(), executionGraph.isQueuedSchedulingAllowed(), LocationPreferenceConstraint.ANY, // there must be at least one known location Collections.emptySet()); } catch (Throwable t) { consumerVertex.fail(new IllegalStateException("Could not schedule consumer " + "vertex " + consumerVertex, t)); } } void scheduleOrUpdateConsumers(List<List<ExecutionEdge>> allConsumers) { assertRunningInJobMasterMainThread(); final int numConsumers = allConsumers.size(); if (numConsumers > 1) { fail(new IllegalStateException("Currently, only a single consumer group per partition is supported.")); } else if (numConsumers == 0) { return; } for (ExecutionEdge edge : allConsumers.get(0)) { final ExecutionVertex consumerVertex = edge.getTarget(); final Execution consumer = consumerVertex.getCurrentExecutionAttempt(); final ExecutionState consumerState = consumer.getState(); // ---------------------------------------------------------------- // Consumer is created => try to schedule it and the partition info // 
is known during deployment // ---------------------------------------------------------------- if (consumerState == CREATED) { // Schedule the consumer vertex if its inputs constraint is satisfied, otherwise skip the scheduling. // A shortcut of input constraint check is added for InputDependencyConstraint.ANY since // at least one of the consumer vertex's inputs is consumable here. This is to avoid the // O(N) complexity introduced by input constraint check for InputDependencyConstraint.ANY, // as we do not want the default scheduling performance to be affected. if (consumerVertex.getInputDependencyConstraint() == InputDependencyConstraint.ANY || consumerVertex.checkInputDependencyConstraints()) { scheduleConsumer(consumerVertex); } } // ---------------------------------------------------------------- // Consumer is running => send update message now // Consumer is deploying => cache the partition info which would be // sent after switching to running // ---------------------------------------------------------------- else if (consumerState == DEPLOYING || consumerState == RUNNING) { final PartitionInfo partitionInfo = createPartitionInfo(edge); if (consumerState == DEPLOYING) { consumerVertex.cachePartitionInfo(partitionInfo); } else { consumer.sendUpdatePartitionInfoRpcCall(Collections.singleton(partitionInfo)); } } } } private static PartitionInfo createPartitionInfo(ExecutionEdge executionEdge) { IntermediateDataSetID intermediateDataSetID = executionEdge.getSource().getIntermediateResult().getId(); ShuffleDescriptor shuffleDescriptor = getConsumedPartitionShuffleDescriptor(executionEdge, false); return new PartitionInfo(intermediateDataSetID, shuffleDescriptor); } /** * This method fails the vertex due to an external condition. The task will move to state FAILED. * If the task was in state RUNNING or DEPLOYING before, it will send a cancel call to the TaskManager. * * @param t The exception that caused the task to fail. 
 */
@Override
public void fail(Throwable t) {
	// isCallback == false: if the task is still running on a TaskManager,
	// processFail will additionally send a cancel RPC to remove it there.
	processFail(t, false);
}

/**
 * Request a stack trace sample from the task of this execution.
 *
 * @param sampleId of the stack trace sample
 * @param numSamples the sample should contain
 * @param delayBetweenSamples to wait
 * @param maxStackTraceDepth of the samples
 * @param timeout until the request times out
 * @return Future stack trace sample response
 */
public CompletableFuture<StackTraceSampleResponse> requestStackTraceSample(
		int sampleId,
		int numSamples,
		Time delayBetweenSamples,
		int maxStackTraceDepth,
		Time timeout) {

	// snapshot the field once so the null check and the use below see the same value
	final LogicalSlot slot = assignedResource;

	if (slot != null) {
		final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();

		return taskManagerGateway.requestStackTraceSample(
			attemptId,
			sampleId,
			numSamples,
			delayBetweenSamples,
			maxStackTraceDepth,
			timeout);
	} else {
		// no slot assigned -> the task is not running anywhere; report via the future
		return FutureUtils.completedExceptionally(new Exception("The execution has no slot assigned."));
	}
}

/**
 * Notify the task of this execution about a completed checkpoint.
 *
 * @param checkpointId of the completed checkpoint
 * @param timestamp of the completed checkpoint
 */
public void notifyCheckpointComplete(long checkpointId, long timestamp) {
	// snapshot the field once so the null check and the use below see the same value
	final LogicalSlot slot = assignedResource;

	if (slot != null) {
		final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();

		taskManagerGateway.notifyCheckpointComplete(attemptId, getVertex().getJobId(), checkpointId, timestamp);
	} else {
		// best-effort fire-and-forget notification; a missing slot means the
		// execution is no longer running, so there is nothing to notify
		LOG.debug("The execution has no slot assigned. This indicates that the execution is " +
			"no longer running.");
	}
}

/**
 * Trigger a new checkpoint on the task of this execution.
* * @param checkpointId of th checkpoint to trigger * @param timestamp of the checkpoint to trigger * @param checkpointOptions of the checkpoint to trigger */ public void triggerCheckpoint(long checkpointId, long timestamp, CheckpointOptions checkpointOptions) { triggerCheckpointHelper(checkpointId, timestamp, checkpointOptions, false); } /** * Trigger a new checkpoint on the task of this execution. * * @param checkpointId of th checkpoint to trigger * @param timestamp of the checkpoint to trigger * @param checkpointOptions of the checkpoint to trigger * @param advanceToEndOfEventTime Flag indicating if the source should inject a {@code MAX_WATERMARK} in the pipeline * to fire any registered event-time timers */ public void triggerSynchronousSavepoint(long checkpointId, long timestamp, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) { triggerCheckpointHelper(checkpointId, timestamp, checkpointOptions, advanceToEndOfEventTime); } private void triggerCheckpointHelper(long checkpointId, long timestamp, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) { final CheckpointType checkpointType = checkpointOptions.getCheckpointType(); if (advanceToEndOfEventTime && !(checkpointType.isSynchronous() && checkpointType.isSavepoint())) { throw new IllegalArgumentException("Only synchronous savepoints are allowed to advance the watermark to MAX."); } final LogicalSlot slot = assignedResource; if (slot != null) { final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway(); taskManagerGateway.triggerCheckpoint(attemptId, getVertex().getJobId(), checkpointId, timestamp, checkpointOptions, advanceToEndOfEventTime); } else { LOG.debug("The execution has no slot assigned. 
This indicates that the execution is no longer running."); } } // -------------------------------------------------------------------------------------------- // Callbacks // -------------------------------------------------------------------------------------------- /** * This method marks the task as failed, but will make no attempt to remove task execution from the task manager. * It is intended for cases where the task is known not to be running, or then the TaskManager reports failure * (in which case it has already removed the task). * * @param t The exception that caused the task to fail. */ void markFailed(Throwable t) { processFail(t, true); } void markFailed(Throwable t, Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) { processFail(t, true, userAccumulators, metrics); } @VisibleForTesting void markFinished() { markFinished(null, null); } void markFinished(Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) { assertRunningInJobMasterMainThread(); // this call usually comes during RUNNING, but may also come while still in deploying (very fast tasks!) while (true) { ExecutionState current = this.state; if (current == RUNNING || current == DEPLOYING) { if (transitionState(current, FINISHED)) { try { for (IntermediateResultPartition finishedPartition : getVertex().finishAllBlockingPartitions()) { IntermediateResultPartition[] allPartitions = finishedPartition .getIntermediateResult().getPartitions(); for (IntermediateResultPartition partition : allPartitions) { scheduleOrUpdateConsumers(partition.getConsumers()); } } updateAccumulatorsAndMetrics(userAccumulators, metrics); releaseAssignedResource(null); vertex.getExecutionGraph().deregisterExecution(this); } finally { vertex.executionFinished(this); } return; } } else if (current == CANCELING) { // we sent a cancel call, and the task manager finished before it arrived. 
We // will never get a CANCELED call back from the job manager completeCancelling(userAccumulators, metrics); return; } else if (current == CANCELED || current == FAILED) { if (LOG.isDebugEnabled()) { LOG.debug("Task FINISHED, but concurrently went to state " + state); } return; } else { // this should not happen, we need to fail this markFailed(new Exception("Vertex received FINISHED message while being in state " + state)); return; } } } private boolean cancelAtomically() { if (startCancelling(0)) { completeCancelling(); return true; } else { return false; } } private boolean startCancelling(int numberCancelRetries) { if (transitionState(state, CANCELING)) { taskManagerLocationFuture.cancel(false); sendCancelRpcCall(numberCancelRetries); return true; } else { return false; } } void completeCancelling() { completeCancelling(null, null); } void completeCancelling(Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) { // the taskmanagers can themselves cancel tasks without an external trigger, if they find that the // network stack is canceled (for example by a failing / canceling receiver or sender // this is an artifact of the old network runtime, but for now we need to support task transitions // from running directly to canceled while (true) { ExecutionState current = this.state; if (current == CANCELED) { return; } else if (current == CANCELING || current == RUNNING || current == DEPLOYING) { updateAccumulatorsAndMetrics(userAccumulators, metrics); if (transitionState(current, CANCELED)) { finishCancellation(); return; } // else fall through the loop } else { // failing in the meantime may happen and is no problem. // anything else is a serious problem !!! 
if (current != FAILED) { String message = String.format("Asynchronous race: Found %s in state %s after successful cancel call.", vertex.getTaskNameWithSubtaskIndex(), state); LOG.error(message); vertex.getExecutionGraph().failGlobal(new Exception(message)); } return; } } } private void finishCancellation() { releaseAssignedResource(new FlinkException("Execution " + this + " was cancelled.")); vertex.getExecutionGraph().deregisterExecution(this); // release partitions on TM in case the Task finished while we where already CANCELING stopTrackingAndReleasePartitions(); } void cachePartitionInfo(PartitionInfo partitionInfo) { partitionInfos.add(partitionInfo); } private void sendPartitionInfos() { if (!partitionInfos.isEmpty()) { sendUpdatePartitionInfoRpcCall(new ArrayList<>(partitionInfos)); partitionInfos.clear(); } } // -------------------------------------------------------------------------------------------- // Internal Actions // -------------------------------------------------------------------------------------------- private boolean processFail(Throwable t, boolean isCallback) { return processFail(t, isCallback, null, null); } private boolean processFail(Throwable t, boolean isCallback, Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) { // damn, we failed. This means only that we keep our books and notify our parent JobExecutionVertex // the actual computation on the task manager is cleaned up by the TaskManager that noticed the failure // we may need to loop multiple times (in the presence of concurrent calls) in order to // atomically switch to failed assertRunningInJobMasterMainThread(); while (true) { ExecutionState current = this.state; if (current == FAILED) { // already failed. 
It is enough to remember once that we failed (its sad enough) return false; } if (current == CANCELED || current == FINISHED) { // we are already aborting or are already aborted or we are already finished if (LOG.isDebugEnabled()) { LOG.debug("Ignoring transition of vertex {} to {} while being {}.", getVertexWithAttempt(), FAILED, current); } return false; } if (current == CANCELING) { completeCancelling(userAccumulators, metrics); return false; } if (transitionState(current, FAILED, t)) { // success (in a manner of speaking) this.failureCause = t; updateAccumulatorsAndMetrics(userAccumulators, metrics); releaseAssignedResource(t); vertex.getExecutionGraph().deregisterExecution(this); stopTrackingAndReleasePartitions(); if (!isCallback && (current == RUNNING || current == DEPLOYING)) { if (LOG.isDebugEnabled()) { LOG.debug("Sending out cancel request, to remove task execution from TaskManager."); } try { if (assignedResource != null) { sendCancelRpcCall(NUM_CANCEL_CALL_TRIES); } } catch (Throwable tt) { // no reason this should ever happen, but log it to be safe LOG.error("Error triggering cancel call while marking task {} as failed.", getVertex().getTaskNameWithSubtaskIndex(), tt); } } // leave the loop return true; } } } boolean switchToRunning() { if (transitionState(DEPLOYING, RUNNING)) { sendPartitionInfos(); return true; } else { // something happened while the call was in progress. // it can mean: // - canceling, while deployment was in progress. 
			// state is now canceling, or canceled, if the response overtook
			// - finishing (execution and finished call overtook the deployment answer, which is possible and happens for fast tasks)
			// - failed (execution, failure, and failure message overtook the deployment answer)

			// re-read the volatile state once and branch on the snapshot
			ExecutionState currentState = this.state;

			if (currentState == FINISHED || currentState == CANCELED) {
				// do nothing, the task was really fast (nice)
				// or it was canceled really fast
			}
			else if (currentState == CANCELING || currentState == FAILED) {
				if (LOG.isDebugEnabled()) {
					// this log statement is guarded because the 'getVertexWithAttempt()' method
					// performs string concatenations
					LOG.debug("Concurrent canceling/failing of {} while deployment was in progress.", getVertexWithAttempt());
				}

				// make sure the task on the TaskManager is torn down again
				sendCancelRpcCall(NUM_CANCEL_CALL_TRIES);
			}
			else {
				String message = String.format("Concurrent unexpected state transition of task %s to %s while deployment was in progress.",
					getVertexWithAttempt(), currentState);

				if (LOG.isDebugEnabled()) {
					LOG.debug(message);
				}

				// undo the deployment
				sendCancelRpcCall(NUM_CANCEL_CALL_TRIES);

				// record the failure
				markFailed(new Exception(message));
			}

			// the DEPLOYING -> RUNNING transition did not happen
			return false;
		}
	}

	/**
	 * This method sends a CancelTask message to the instance of the assigned slot.
	 *
	 * <p>The sending is tried up to NUM_CANCEL_CALL_TRIES times.
	 */
	private void sendCancelRpcCall(int numberRetries) {
		// snapshot the field once so the null check and the use below see the same value
		final LogicalSlot slot = assignedResource;

		if (slot != null) {
			final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
			final ComponentMainThreadExecutor jobMasterMainThreadExecutor =
				getVertex().getExecutionGraph().getJobMasterMainThreadExecutor();

			// retry the cancel RPC up to 'numberRetries' times on the job master main thread
			CompletableFuture<Acknowledge> cancelResultFuture = FutureUtils.retry(
				() -> taskManagerGateway.cancelTask(attemptId, rpcTimeout),
				numberRetries,
				jobMasterMainThreadExecutor);

			cancelResultFuture.whenComplete(
				(ack, failure) -> {
					// escalate to a failure of this execution if the task could not be canceled
					if (failure != null) {
						fail(new Exception("Task could not be canceled.", failure));
					}
				});
		}
		// no slot assigned -> nothing deployed anywhere, nothing to cancel
	}

	// Registers every produced partition of this execution with the partition tracker,
	// attributed to the given task executor.
	private void startTrackingPartitions(final ResourceID taskExecutorId, final Collection<ResultPartitionDeploymentDescriptor> partitions) {
		PartitionTracker partitionTracker = vertex.getExecutionGraph().getPartitionTracker();
		for (ResultPartitionDeploymentDescriptor partition : partitions) {
			partitionTracker.startTrackingPartition(
				taskExecutorId,
				partition);
		}
	}

	// Stops tracking all partitions produced by this execution and asks the tracker
	// to release them. Safe to call when nothing was produced (null/empty map).
	void stopTrackingAndReleasePartitions() {
		LOG.info("Discarding the results produced by task execution {}.", attemptId);
		if (producedPartitions != null && producedPartitions.size() > 0) {
			final PartitionTracker partitionTracker = getVertex().getExecutionGraph().getPartitionTracker();
			final List<ResultPartitionID> producedPartitionIds = producedPartitions.values().stream()
				.map(ResultPartitionDeploymentDescriptor::getShuffleDescriptor)
				.map(ShuffleDescriptor::getResultPartitionID)
				.collect(Collectors.toList());

			partitionTracker.stopTrackingAndReleasePartitions(producedPartitionIds);
		}
	}

	/**
	 * Update the partition infos on the assigned resource.
	 *
	 * @param partitionInfos for the remote task
	 */
	private void sendUpdatePartitionInfoRpcCall(
			final Iterable<PartitionInfo> partitionInfos) {

		// snapshot the field once so the null check and the use below see the same value
		final LogicalSlot slot = assignedResource;

		if (slot != null) {
			final TaskManagerGateway taskManagerGateway = slot.getTaskManagerGateway();
			final TaskManagerLocation taskManagerLocation = slot.getTaskManagerLocation();

			CompletableFuture<Acknowledge> updatePartitionsResultFuture = taskManagerGateway.updatePartitions(attemptId, partitionInfos, rpcTimeout);

			updatePartitionsResultFuture.whenCompleteAsync(
				(ack, failure) -> {
					// fail if there was a failure
					if (failure != null) {
						fail(new IllegalStateException("Update task on TaskManager " + taskManagerLocation +
							" failed due to:", failure));
					}
				}, getVertex().getExecutionGraph().getJobMasterMainThreadExecutor());
		}
		// no slot assigned -> the task is gone; silently dropping the update is fine
	}

	/**
	 * Releases the assigned resource and completes the release future
	 * once the assigned resource has been successfully released.
	 *
	 * @param cause for the resource release, null if none
	 */
	private void releaseAssignedResource(@Nullable Throwable cause) {

		assertRunningInJobMasterMainThread();

		// snapshot the field once so the null check and the use below see the same value
		final LogicalSlot slot = assignedResource;

		if (slot != null) {
			ComponentMainThreadExecutor jobMasterMainThreadExecutor =
				getVertex().getExecutionGraph().getJobMasterMainThreadExecutor();

			// complete releaseFuture only after the slot release itself completed,
			// propagating a release failure exceptionally
			slot.releaseSlot(cause)
				.whenComplete((Object ignored, Throwable throwable) -> {
					jobMasterMainThreadExecutor.assertRunningInMainThread();
					if (throwable != null) {
						releaseFuture.completeExceptionally(throwable);
					} else {
						releaseFuture.complete(null);
					}
				});
		} else {
			// no assigned resource --> we can directly complete the release future
			releaseFuture.complete(null);
		}
	}

	// --------------------------------------------------------------------------------------------
	//  Miscellaneous
	// --------------------------------------------------------------------------------------------

	/**
	 * Calculates the preferred locations based on the location preference constraint.
* * @param locationPreferenceConstraint constraint for the location preference * @return Future containing the collection of preferred locations. This might not be completed if not all inputs * have been a resource assigned. */ @VisibleForTesting public CompletableFuture<Collection<TaskManagerLocation>> calculatePreferredLocations(LocationPreferenceConstraint locationPreferenceConstraint) { final Collection<CompletableFuture<TaskManagerLocation>> preferredLocationFutures = getVertex().getPreferredLocations(); final CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture; switch(locationPreferenceConstraint) { case ALL: preferredLocationsFuture = FutureUtils.combineAll(preferredLocationFutures); break; case ANY: final ArrayList<TaskManagerLocation> completedTaskManagerLocations = new ArrayList<>(preferredLocationFutures.size()); for (CompletableFuture<TaskManagerLocation> preferredLocationFuture : preferredLocationFutures) { if (preferredLocationFuture.isDone() && !preferredLocationFuture.isCompletedExceptionally()) { final TaskManagerLocation taskManagerLocation = preferredLocationFuture.getNow(null); if (taskManagerLocation == null) { throw new FlinkRuntimeException("TaskManagerLocationFuture was completed with null. 
This indicates a programming bug."); } completedTaskManagerLocations.add(taskManagerLocation); } } preferredLocationsFuture = CompletableFuture.completedFuture(completedTaskManagerLocations); break; default: throw new RuntimeException("Unknown LocationPreferenceConstraint " + locationPreferenceConstraint + '.'); } return preferredLocationsFuture; } private boolean transitionState(ExecutionState currentState, ExecutionState targetState) { return transitionState(currentState, targetState, null); } private boolean transitionState(ExecutionState currentState, ExecutionState targetState, Throwable error) { // sanity check if (currentState.isTerminal()) { throw new IllegalStateException("Cannot leave terminal state " + currentState + " to transition to " + targetState + '.'); } if (STATE_UPDATER.compareAndSet(this, currentState, targetState)) { markTimestamp(targetState); if (error == null) { LOG.info("{} ({}) switched from {} to {}.", getVertex().getTaskNameWithSubtaskIndex(), getAttemptId(), currentState, targetState); } else { LOG.info("{} ({}) switched from {} to {}.", getVertex().getTaskNameWithSubtaskIndex(), getAttemptId(), currentState, targetState, error); } if (targetState.isTerminal()) { // complete the terminal state future terminalStateFuture.complete(targetState); } // make sure that the state transition completes normally. 
// potential errors (in listeners may not affect the main logic) try { vertex.notifyStateTransition(this, targetState, error); } catch (Throwable t) { LOG.error("Error while notifying execution graph of execution state transition.", t); } return true; } else { return false; } } private void markTimestamp(ExecutionState state) { markTimestamp(state, System.currentTimeMillis()); } private void markTimestamp(ExecutionState state, long timestamp) { this.stateTimestamps[state.ordinal()] = timestamp; } public String getVertexWithAttempt() { return vertex.getTaskNameWithSubtaskIndex() + " - execution #" + attemptNumber; } // ------------------------------------------------------------------------ // Accumulators // ------------------------------------------------------------------------ /** * Update accumulators (discarded when the Execution has already been terminated). * @param userAccumulators the user accumulators */ public void setAccumulators(Map<String, Accumulator<?, ?>> userAccumulators) { synchronized (accumulatorLock) { if (!state.isTerminal()) { this.userAccumulators = userAccumulators; } } } public Map<String, Accumulator<?, ?>> getUserAccumulators() { return userAccumulators; } @Override public StringifiedAccumulatorResult[] getUserAccumulatorsStringified() { Map<String, OptionalFailure<Accumulator<?, ?>>> accumulators = userAccumulators == null ? 
null : userAccumulators.entrySet() .stream() .collect(Collectors.toMap(Map.Entry::getKey, entry -> OptionalFailure.of(entry.getValue()))); return StringifiedAccumulatorResult.stringifyAccumulatorResults(accumulators); } @Override public int getParallelSubtaskIndex() { return getVertex().getParallelSubtaskIndex(); } @Override public IOMetrics getIOMetrics() { return ioMetrics; } private void updateAccumulatorsAndMetrics(Map<String, Accumulator<?, ?>> userAccumulators, IOMetrics metrics) { if (userAccumulators != null) { synchronized (accumulatorLock) { this.userAccumulators = userAccumulators; } } if (metrics != null) { this.ioMetrics = metrics; } } // ------------------------------------------------------------------------ // Standard utilities // ------------------------------------------------------------------------ @Override public String toString() { final LogicalSlot slot = assignedResource; return String.format("Attempt #%d (%s) @ %s - [%s]", attemptNumber, vertex.getTaskNameWithSubtaskIndex(), (slot == null ? "(unassigned)" : slot), state); } @Override public ArchivedExecution archive() { return new ArchivedExecution(this); } private void assertRunningInJobMasterMainThread() { vertex.getExecutionGraph().assertRunningInJobMasterMainThread(); } }
shaoxuan-wang/flink
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
Java
apache-2.0
59,232
package water; import java.io.*; import java.lang.reflect.Array; import java.net.*; import java.nio.*; import java.nio.channels.*; import java.util.ArrayList; import java.util.Random; import water.network.SocketChannelUtils; import water.util.Log; import water.util.StringUtils; import water.util.TwoDimTable; /** A ByteBuffer backed mixed Input/Output streaming class, using Iced serialization. * * Reads/writes empty/fill the ByteBuffer as needed. When it is empty/full it * we go to the ByteChannel for more/less. Because DirectByteBuffers are * expensive to make, we keep a few pooled. * * When talking to a remote H2O node, switches between UDP and TCP transport * protocols depending on the message size. The TypeMap is not included, and * is assumed to exist on the remote H2O node. * * Supports direct NIO FileChannel read/write to disk, used during user-mode * swapping. The TypeMap is not included on write, and is assumed to be the * current map on read. * * Support read/write from byte[] - and this defeats the purpose of a * Streaming protocol, but is frequently handy for small structures. The * TypeMap is not included, and is assumed to be the current map on read. * * Supports read/write from a standard Stream, which by default assumes it is * NOT going in and out of the same Cloud, so the TypeMap IS included. The * serialized object can only be read back into the same minor version of H2O. * * @author <a href="mailto:cliffc@h2o.ai"></a> */ public final class AutoBuffer { // Maximum size of an array we allow to allocate (the value is designed // to mimic the behavior of OpenJDK libraries) private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; // The direct ByteBuffer for schlorping data about. // Set to null to indicate the AutoBuffer is closed. ByteBuffer _bb; public String sourceName = "???"; public boolean isClosed() { return _bb == null ; } // The ByteChannel for moving data in or out. 
Could be a SocketChannel (for // a TCP connection) or a FileChannel (spill-to-disk) or a DatagramChannel // (for a UDP connection). Null on closed AutoBuffers. Null on initial // remote-writing AutoBuffers which are still deciding UDP vs TCP. Not-null // for open AutoBuffers doing file i/o or reading any TCP/UDP or having // written at least one buffer to TCP/UDP. private Channel _chan; // A Stream for moving data in. Null unless this AutoBuffer is // stream-based, in which case _chan field is null. This path supports // persistance: reading and writing objects from different H2O cluster // instances (but exactly the same H2O revision). The only required // similarity is same-classes-same-fields; changes here will probably // silently crash. If the fields are named the same but the semantics // differ, then again the behavior is probably silent crash. private InputStream _is; private short[] _typeMap; // Mapping from input stream map to current map, or null // If we need a SocketChannel, raise the priority so we get the I/O over // with. Do not want to have some TCP socket open, blocking the TCP channel // and then have the thread stalled out. If we raise the priority - be sure // to lower it again. Note this is for TCP channels ONLY, and only because // we are blocking another Node with I/O. private int _oldPrior = -1; // Where to send or receive data via TCP or UDP (choice made as we discover // how big the message is); used to lazily create a Channel. If NULL, then // _chan should be a pre-existing Channel, such as a FileChannel. final H2ONode _h2o; // TRUE for read-mode. FALSE for write-mode. Can be flipped for rapid turnaround. private boolean _read; // TRUE if this AutoBuffer has never advanced past the first "page" of data. // The UDP-flavor, port# and task fields are only valid until we read over // them when flipping the ByteBuffer to the next chunk of data. Used in // asserts all over the place. 
private boolean _firstPage; // Total size written out from 'new' to 'close'. Only updated when actually // reading or writing data, or after close(). For profiling only. int _size; //int _zeros, _arys; // More profiling: start->close msec, plus nano's spent in blocking I/O // calls. The difference between (close-start) and i/o msec is the time the // i/o thread spends doing other stuff (e.g. allocating Java objects or // (de)serializing). long _time_start_ms, _time_close_ms, _time_io_ns; // I/O persistence flavor: Value.ICE, NFS, HDFS, S3, TCP. Used to record I/O time. final byte _persist; // The assumed max UDP packetsize static final int MTU = 1500-8/*UDP packet header size*/; // Enable this to test random TCP fails on open or write static final Random RANDOM_TCP_DROP = null; //new Random(); static final java.nio.charset.Charset UTF_8 = java.nio.charset.Charset.forName("UTF-8"); /** Incoming UDP request. Make a read-mode AutoBuffer from the open Channel, * figure the originating H2ONode from the first few bytes read. */ AutoBuffer( DatagramChannel sock ) throws IOException { _chan = null; _bb = BBP_SML.make(); // Get a small / UDP-sized ByteBuffer _read = true; // Reading by default _firstPage = true; // Read a packet; can get H2ONode from 'sad'? Inet4Address addr = null; SocketAddress sad = sock.receive(_bb); if( sad instanceof InetSocketAddress ) { InetAddress address = ((InetSocketAddress) sad).getAddress(); if( address instanceof Inet4Address ) { addr = (Inet4Address) address; } } _size = _bb.position(); _bb.flip(); // Set limit=amount read, and position==0 if( addr == null ) throw new RuntimeException("Unhandled socket type: " + sad); // Read Inet from socket, port from the stream, figure out H2ONode _h2o = H2ONode.intern(addr, getPort()); _firstPage = true; assert _h2o != null; _persist = 0; // No persistance } /** Incoming TCP request. Make a read-mode AutoBuffer from the open Channel, * figure the originating H2ONode from the first few bytes read. 
   *
   *  remoteAddress set to null means that the communication is originating from non-h2o node, non-null value
   *  represents the case where the communication is coming from h2o node.
   */
  AutoBuffer( ByteChannel sock, InetAddress remoteAddress ) throws IOException {
    _chan = sock;
    raisePriority();            // Make TCP priority high
    _bb = BBP_BIG.make();       // Get a big / TPC-sized ByteBuffer
    _bb.flip();                 // Prep for reading; no bytes available yet
    _read = true;               // Reading by default
    _firstPage = true;
    // Read Inet from socket, port from the stream, figure out H2ONode
    if(remoteAddress!=null) {
      _h2o = H2ONode.intern(remoteAddress, getPort());
    }else{
      // In case the communication originates from non-h2o node, we set _h2o node to null.
      // It is done for 2 reasons:
      //  - H2ONode.intern creates a new thread and if there's a lot of connections
      //    from non-h2o environment, it could end up with too many open files exception.
      //  - H2OIntern also reads port (getPort()) and additional information which we do not send
      //    in communication originating from non-h2o nodes
      _h2o = null;
    }
    _firstPage = true;          // Yes, must reset this.
    _time_start_ms = System.currentTimeMillis();
    _persist = Value.TCP;
  }

  /** Make an AutoBuffer to write to an H2ONode.  Requests for full buffer will
   *  open a TCP socket and roll through writing to the target.  Smaller
   *  requests will send via UDP.  Small requests get ordered by priority, so
   *  that e.g. NACK and ACKACK messages have priority over most anything else.
   *  This helps in UDP floods to shut down flooding senders. */
  private byte _msg_priority;
  AutoBuffer( H2ONode h2o, byte priority ) {
    // If UDP goes via UDP, we write into a DBB up front - because we plan on
    // sending it out via a Datagram socket send call.  If UDP goes via batched
    // TCP, we write into a HBB up front, because this will be copied again
    // into a large outgoing buffer.
    _bb = H2O.ARGS.useUDP       // Actually use UDP?
      ? BBP_SML.make()          // Make DirectByteBuffers to start with
      : ByteBuffer.wrap(new byte[16]).order(ByteOrder.nativeOrder());
    _chan = null;               // Channel made lazily only if we write alot
    _h2o = h2o;
    _read = false;              // Writing by default
    _firstPage = true;          // Filling first page
    assert _h2o != null;
    _time_start_ms = System.currentTimeMillis();
    _persist = Value.TCP;
    _msg_priority = priority;
  }

  /** Spill-to/from-disk request. */
  public AutoBuffer( FileChannel fc, boolean read, byte persist ) {
    _bb = BBP_BIG.make();       // Get a big / TPC-sized ByteBuffer
    _chan = fc;                 // Write to read/write
    _h2o = null;                // File Channels never have an _h2o
    _read = read;               // Mostly assert reading vs writing
    if( read ) _bb.flip();
    _time_start_ms = System.currentTimeMillis();
    _persist = persist;         // One of Value.ICE, NFS, S3, HDFS
  }

  /** Read from UDP multicast.  Same as the byte[]-read variant, except there is an H2O. */
  AutoBuffer( DatagramPacket pack ) {
    _size = pack.getLength();
    _bb = ByteBuffer.wrap(pack.getData(), 0, pack.getLength()).order(ByteOrder.nativeOrder());
    _bb.position(0);
    _read = true;
    _firstPage = true;
    _chan = null;
    _h2o = H2ONode.intern(pack.getAddress(), getPort());
    _persist = 0;               // No persistance
  }

  /** Read from a UDP_TCP buffer; could be in the middle of a large buffer */
  AutoBuffer( H2ONode h2o, byte[] buf, int off, int len ) {
    assert buf != null : "null fed to ByteBuffer.wrap";
    _h2o = h2o;
    _bb = ByteBuffer.wrap(buf,off,len).order(ByteOrder.nativeOrder());
    _chan = null;
    _read = true;
    _firstPage = true;
    _persist = 0;               // No persistance
    _size = len;
  }

  /** Read from a fixed byte[]; should not be closed. */
  public AutoBuffer( byte[] buf ) { this(null,buf,0, buf.length); }

  /** Write to an ever-expanding byte[].  Instead of calling {@link #close()},
   *  call {@link #buf()} to retrieve the final byte[].
   */
  public AutoBuffer( ) {
    _bb = ByteBuffer.wrap(new byte[16]).order(ByteOrder.nativeOrder());
    _chan = null;
    _h2o = null;
    _read = false;
    _firstPage = true;
    _persist = 0;               // No persistance
  }

  /** Write to a known sized byte[].  Instead of calling close(), call
   *  {@link #bufClose()} to retrieve the final byte[]. */
  public AutoBuffer( int len ) {
    _bb = ByteBuffer.wrap(MemoryManager.malloc1(len)).order(ByteOrder.nativeOrder());
    _chan = null;
    _h2o = null;
    _read = false;
    _firstPage = true;
    _persist = 0;               // No persistance
  }

  /** Write to a persistent Stream, including all TypeMap info to allow later
   *  reloading (by the same exact rev of H2O). */
  public AutoBuffer( OutputStream os, boolean persist ) {
    _bb = ByteBuffer.wrap(MemoryManager.malloc1(BBP_BIG._size)).order(ByteOrder.nativeOrder());
    _read = false;
    _chan = Channels.newChannel(os);
    _h2o = null;
    _firstPage = true;
    _persist = 0;
    // Header: magic bytes 0x1C,0xED + project version + TypeMap class names,
    // so the stream can be sanity-checked and remapped on read.  A single 0
    // byte means "no persistence info follows".
    if( persist ) put1(0x1C).put1(0xED).putStr(H2O.ABV.projectVersion()).putAStr(TypeMap.CLAZZES);
    else put1(0);
  }

  /** Read from a persistent Stream (including all TypeMap info) into same
   *  exact rev of H2O). */
  public AutoBuffer( InputStream is ) {
    _chan = null;
    _h2o = null;
    _firstPage = true;
    _persist = 0;
    _read = true;
    _bb = ByteBuffer.wrap(MemoryManager.malloc1(BBP_BIG._size)).order(ByteOrder.nativeOrder());
    _bb.flip();
    _is = is;
    int b = get1U();
    if( b==0 ) return;          // No persistence info
    int magic = get1U();
    if( b!=0x1C || magic != 0xED ) throw new IllegalArgumentException("Missing magic number 0x1CED at stream start");
    String version = getStr();
    if( !version.equals(H2O.ABV.projectVersion()) )
      throw new IllegalArgumentException("Found version "+version+", but running version "+H2O.ABV.projectVersion());
    // Build the remapping table from the stream's TypeMap ids to this JVM's ids.
    String[] typeMap = getAStr();
    _typeMap = new short[typeMap.length];
    for( int i=0; i<typeMap.length; i++ )
      _typeMap[i] = (short)(typeMap[i]==null ? 0 : TypeMap.onIce(typeMap[i]));
  }

  @Override public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("[AB ").append(_read ?
                "read " : "write ");
    sb.append(_firstPage?"first ":"2nd ").append(_h2o);
    sb.append(" ").append(Value.nameOfPersist(_persist));
    if( _bb != null ) sb.append(" 0 <= ").append(_bb.position()).append(" <= ").append(_bb.limit());
    if( _bb != null ) sb.append(" <= ").append(_bb.capacity());
    return sb.append("]").toString();
  }

  // Fetch a DBB from an object pool... they are fairly expensive to make
  // because a native call is required to get the backing memory.  I've
  // included BB count tracking code to help track leaks.  As of 12/17/2012 the
  // leaks are under control, but figure this may happen again so keeping these
  // counters around.
  //
  // We use 2 pool sizes: lots of small UDP packet-sized buffers and fewer
  // larger TCP-sized buffers.
  private static final boolean DEBUG = Boolean.getBoolean("h2o.find-ByteBuffer-leaks");
  private static long HWM=0;

  /** Pool of DirectByteBuffers of one fixed size, with leak-tracking counters. */
  static class BBPool {
    long _made, _cached, _freed;
    long _numer, _denom, _goal=4*H2O.NUMCPUS, _lastGoal;
    final ArrayList<ByteBuffer> _bbs = new ArrayList<>();
    final int _size;            // Big or small size of ByteBuffers

    BBPool( int sz) { _size=sz; }

    // Leak-debug printing, heavily throttled: 1-in-256 events, at most once per second.
    private ByteBuffer stats( ByteBuffer bb ) {
      if( !DEBUG ) return bb;
      if( ((_made+_cached)&255)!=255 ) return bb; // Filter printing to 1 in 256
      long now = System.currentTimeMillis();
      if( now < HWM ) return bb;
      HWM = now+1000;
      water.util.SB sb = new water.util.SB();
      sb.p("BB").p(this==BBP_BIG?1:0).p(" made=").p(_made).p(" -freed=").p(_freed).p(", cache hit=").p(_cached).p(" ratio=").p(_numer/_denom).p(", goal=").p(_goal).p(" cache size=").p(_bbs.size()).nl();
      for( int i=0; i<H2O.MAX_PRIORITY; i++ ) {
        int x = H2O.getWrkQueueSize(i);
        if( x > 0 ) sb.p('Q').p(i).p('=').p(x).p(' ');
      }
      Log.warn(sb.nl().toString());
      return bb;
    }

    // Hand out a pooled buffer, or allocate a fresh DirectByteBuffer.
    // Retries forever on direct-memory OOM, sleeping between attempts.
    ByteBuffer make() {
      while( true ) {           // Repeat loop for DBB OutOfMemory errors
        ByteBuffer bb=null;
        synchronized(_bbs) {
          int sz = _bbs.size();
          if( sz > 0 ) { bb = _bbs.remove(sz-1); _cached++; _numer++; }
        }
        if( bb != null ) return stats(bb);
        // Cache empty; go get one from C/Native memory
        try {
          bb = ByteBuffer.allocateDirect(_size).order(ByteOrder.nativeOrder());
          synchronized(this) {
            _made++; _denom++; _goal = Math.max(_goal,_made-_freed);
            _lastGoal=System.nanoTime(); // Goal was too low, raise it
          }
          return stats(bb);
        } catch( OutOfMemoryError oome ) {
          // java.lang.OutOfMemoryError: Direct buffer memory
          if( !"Direct buffer memory".equals(oome.getMessage()) ) throw oome;
          System.out.println("OOM DBB - Sleeping & retrying");
          try { Thread.sleep(100); } catch( InterruptedException ignore ) { }
        }
      }
    }

    void free(ByteBuffer bb) {
      // Heuristic: keep the ratio of BB's made to cache-hits at a fixed level.
      // Free to GC if ratio is high, free to internal cache if low.
      long ratio = _numer/(_denom+1);
      synchronized(_bbs) {
        if( ratio < 100 || _bbs.size() < _goal ) { // low hit/miss ratio or below goal
          bb.clear();           // Clear-before-add
          _bbs.add(bb);
        } else _freed++;        // Toss the extras (above goal & ratio)

        long now = System.nanoTime();
        if( now-_lastGoal > 1000000000L ) { // Once/sec, drop goal by 10%
          _lastGoal = now;
          if( ratio > 110 )     // If ratio is really high, lower goal
            _goal=Math.max(4*H2O.NUMCPUS,(long)(_goal*0.99));
          // Once/sec, lower numer/denom... means more recent activity outweighs really old stuff
          long denom = (long) (0.99 * _denom); // Proposed reduction
          if( denom > 10 ) {    // Keep a little precision
            _numer = (long) (0.99 * _numer); // Keep ratio between made & cached the same
            _denom = denom;     // ... by lowering both by 10%
          }
        }
      }
    }

    // Return any direct ByteBuffer to its matching pool; heap buffers are just GC'd.
    static int FREE( ByteBuffer bb ) {
      if(bb.isDirect())
        (bb.capacity()==BBP_BIG._size ? BBP_BIG : BBP_SML).free(bb);
      return 0;                 // Flow coding
    }
  }
  static BBPool BBP_SML = new BBPool( 2*1024); // Bytebuffer "common small size", for UDP
  static BBPool BBP_BIG = new BBPool(64*1024); // Bytebuffer "common big size", for TCP
  public static int TCP_BUF_SIZ = BBP_BIG._size;

  // Release the backing ByteBuffer (to the pool if direct) and mark this AutoBuffer closed.
  private int bbFree() {
    if(_bb != null && _bb.isDirect())
      BBPool.FREE(_bb);
    _bb = null;
    return 0;                   // Flow-coding
  }

  // You thought TCP was a reliable protocol, right?  WRONG!  Fails 100% of the
  // time under heavy network load.  Connection-reset-by-peer & connection
  // timeouts abound, even after a socket open and after a 1st successful
  // ByteBuffer write.  It *appears* that the reader is unaware that a writer
  // was told "go ahead and write" by the TCP stack, so all these fails are
  // only on the writer-side.
  public static class AutoBufferException extends RuntimeException {
    public final IOException _ioe;
    AutoBufferException( IOException ioe ) { _ioe = ioe; }
  }

  // For reads, just assert all was read and close and release resources.
  // (release ByteBuffer back to the common pool).  For writes, force any final
  // bytes out.  If the write is to an H2ONode and is short, send via UDP.
  // AutoBuffer close calls order; i.e. a reader close() will block until the
  // writer does a close().
  public final int close() {
    //if( _size > 2048 ) System.out.println("Z="+_zeros+" / "+_size+", A="+_arys);
    if( isClosed() ) return 0;  // Already closed
    assert _h2o != null || _chan != null || _is != null; // Byte-array backed should not be closed
    try {
      if( _chan == null ) {     // No channel?
        if( _read ) {
          if( _is != null ) _is.close();
          return 0;
        } else {                // Write
          // For small-packet write, send via UDP.  Since nothing is sent until
          // now, this close() call trivially orders - since the reader will not
          // even start (much less close()) until this packet is sent.
          if( _bb.position() < MTU) return udpSend();
          // oops - Big Write, switch to TCP and finish out there
        }
      }
      // Force AutoBuffer 'close' calls to order; i.e.
      // ... block readers until writers do a 'close' - by writing 1 more byte
      // in the close-call which the reader will have to wait for.
      if( hasTCP()) {           // TCP connection?
        try {
          if( _read ) {         // Reader?
            int x = get1U();    // Read 1 more byte
            assert x == 0xab : "AB.close instead of 0xab sentinel got "+x+", "+this;
            assert _chan != null; // chan set by incoming reader, since we KNOW it is a TCP
            // Write the reader-handshake-byte.
            SocketChannelUtils.underlyingSocketChannel(_chan).socket().getOutputStream().write(0xcd);
            // do not close actually reader socket; recycle it in TCPReader thread
          } else {              // Writer?
            put1(0xab);         // Write one-more byte ; might set _chan from null to not-null
            sendPartial();      // Finish partial writes; might set _chan from null to not-null
            assert _chan != null; // _chan is set not-null now!
            // Read the writer-handshake-byte.
            int x = SocketChannelUtils.underlyingSocketChannel(_chan).socket().getInputStream().read();
            // either TCP con was dropped or other side closed connection without reading/confirming (e.g. task was cancelled).
            if( x == -1 ) throw new IOException("Other side closed connection before handshake byte read");
            assert x == 0xcd : "Handshake; writer expected a 0xcd from reader but got "+x;
          }
        } catch( IOException ioe ) {
          try { _chan.close(); } catch( IOException ignore ) {} // Silently close
          _chan = null;         // No channel now, since i/o error
          throw ioe;            // Rethrow after close
        } finally {
          if( !_read ) _h2o.freeTCPSocket((ByteChannel) _chan); // Recycle writable TCP channel
          restorePriority();    // And if we raised priority, lower it back
        }
      } else {                  // FileChannel
        if( !_read ) sendPartial(); // Finish partial file-system writes
        _chan.close();
        _chan = null;           // Closed file channel
      }
    } catch( IOException e ) {  // Dunno how to handle so crash-n-burn
      throw new AutoBufferException(e);
    } finally {
      bbFree();
      _time_close_ms = System.currentTimeMillis();
//      TimeLine.record_IOclose(this,_persist); // Profile AutoBuffer connections
      assert isClosed();
    }
    return 0;
  }

  // Need a sock for a big read or write operation.
  // See if we got one already, else open a new socket.
  private void tcpOpen() throws IOException {
    assert _firstPage && _bb.limit() >= 1+2+4; // At least something written
    assert _chan == null;
//    assert _bb.position()==0;
    _chan = _h2o.getTCPSocket();
    raisePriority();
  }

  // Just close the channel here without reading anything.  Without the task
  // object at hand we do not know what (how many bytes) should we read from
  // the channel.  And since the other side will try to read confirmation from
  // us before closing the channel, we can not read till the end.  So we just
  // close the channel and let the other side to deal with it and figure out
  // the task has been cancelled (still sending ack ack back).
  void drainClose() {
    if( isClosed() ) return;    // Already closed
    final Channel chan = _chan; // Read before closing
    assert _h2o != null || chan != null; // Byte-array backed should not be closed
    if( chan != null ) {        // Channel assumed sick from prior IOException
      try { chan.close(); } catch( IOException ignore ) {} // Silently close
      _chan = null;             // No channel now!
      if( !_read && SocketChannelUtils.isSocketChannel(chan)) _h2o.freeTCPSocket((ByteChannel) chan); // Recycle writable TCP channel
    }
    restorePriority();          // And if we raised priority, lower it back
    bbFree();
    _time_close_ms = System.currentTimeMillis();
//    TimeLine.record_IOclose(this,_persist); // Profile AutoBuffer connections
    assert isClosed();
  }

  // True if we opened a TCP channel, or will open one to close-and-send
  boolean hasTCP() { assert !isClosed(); return SocketChannelUtils.isSocketChannel(_chan) || (_h2o!=null && _bb.position() >= MTU); }

  // Size in bytes sent, after a close()
  int size() { return _size; }
  //int zeros() { return _zeros; }

  public int position () { return _bb.position(); }
  public AutoBuffer position(int p) {_bb.position(p); return this;}

  /** Skip over some bytes in the byte buffer.  Caller is responsible for not
   *  reading off end of the bytebuffer; generally this is easy for
   *  array-backed autobuffers and difficult for i/o-backed bytebuffers. */
  public void skip(int skip) { _bb.position(_bb.position()+skip); }

  // Return byte[] from a writable AutoBuffer
  public final byte[] buf() {
    assert _h2o==null && _chan==null && !_read && !_bb.isDirect();
    return MemoryManager.arrayCopyOfRange(_bb.array(), _bb.arrayOffset(), _bb.position());
  }
  // Return the backing byte[] and release the ByteBuffer; for known-size writers.
  public final byte[] bufClose() {
    byte[] res = _bb.array();
    bbFree();
    return res;
  }

  // For TCP sockets ONLY, raise the thread priority.  We assume we are
  // blocking other Nodes with our network I/O, so try to get the I/O
  // over with.
  private void raisePriority() {
    if(_oldPrior == -1){
      assert SocketChannelUtils.isSocketChannel(_chan);
      _oldPrior = Thread.currentThread().getPriority();
      Thread.currentThread().setPriority(Thread.MAX_PRIORITY-1);
    }
  }
  private void restorePriority() {
    if( _oldPrior == -1 ) return;
    Thread.currentThread().setPriority(_oldPrior);
    _oldPrior = -1;
  }

  // Send via UDP socket.  Unlike eg TCP sockets, we only need one for sending
  // so we keep a global one.  Also, we do not close it when done, and we do
  // not connect it up-front to a target - but send the entire packet right now.
  private int udpSend() throws IOException {
    assert _chan == null;
    TimeLine.record_send(this,false);
    _size = _bb.position();
    assert _size < AutoBuffer.BBP_SML._size;
    _bb.flip();                 // Flip for sending
    if( _h2o==H2O.SELF ) {      // SELF-send is the multi-cast signal
      water.init.NetworkInit.multicast(_bb, _msg_priority);
    } else {                    // Else single-cast send
      if(H2O.ARGS.useUDP)       // Send via UDP directly
        water.init.NetworkInit.CLOUD_DGRAM.send(_bb, _h2o._key);
      else                      // Send via bulk TCP
        _h2o.sendMessage(_bb, _msg_priority);
    }
    return 0;                   // Flow-coding
  }

  // Flip to write-mode
  AutoBuffer clearForWriting(byte priority) {
    assert _read;
    _read = false;
    _msg_priority = priority;
    _bb.clear();
    _firstPage = true;
    return this;
  }
  // Flip to read-mode
  public AutoBuffer flipForReading() {
    assert !_read;
    _read = true;
    _bb.flip();
    _firstPage = true;
    return this;
  }

  /** Ensure the buffer has space for sz more bytes */
  private ByteBuffer getSp( int sz ) { return sz > _bb.remaining() ? getImpl(sz) : _bb; }

  /** Ensure buffer has at least sz bytes in it.
   *  - Also, set position just past this limit for future reading.
*/ private ByteBuffer getSz(int sz) { assert _firstPage : "getSz() is only valid for early UDP bytes"; if( sz > _bb.limit() ) getImpl(sz); _bb.position(sz); return _bb; } private ByteBuffer getImpl( int sz ) { assert _read : "Reading from a buffer in write mode"; _bb.compact(); // Move remaining unread bytes to start of buffer; prep for reading // Its got to fit or we asked for too much assert _bb.position()+sz <= _bb.capacity() : "("+_bb.position()+"+"+sz+" <= "+_bb.capacity()+")"; long ns = System.nanoTime(); while( _bb.position() < sz ) { // Read until we got enuf try { int res = readAnInt(); // Read more // Readers are supposed to be strongly typed and read the exact expected bytes. // However, if a TCP connection fails mid-read we'll get a short-read. // This is indistinguishable from a mis-alignment between the writer and reader! if( res <= 0 ) throw new AutoBufferException(new EOFException("Reading "+sz+" bytes, AB="+this)); if( _is != null ) _bb.position(_bb.position()+res); // Advance BB for Streams manually _size += res; // What we read } catch( IOException e ) { // Dunno how to handle so crash-n-burn // Linux/Ubuntu message for a reset-channel if( e.getMessage().equals("An existing connection was forcibly closed by the remote host") ) throw new AutoBufferException(e); // Windows message for a reset-channel if( e.getMessage().equals("An established connection was aborted by the software in your host machine") ) throw new AutoBufferException(e); throw Log.throwErr(e); } } _time_io_ns += (System.nanoTime()-ns); _bb.flip(); // Prep for handing out bytes //for( int i=0; i < _bb.limit(); i++ ) if( _bb.get(i)==0 ) _zeros++; _firstPage = false; // First page of data is gone gone gone return _bb; } private int readAnInt() throws IOException { if (_is == null) return ((ReadableByteChannel) _chan).read(_bb); final byte[] array = _bb.array(); final int position = _bb.position(); final int remaining = _bb.remaining(); try { return _is.read(array, position, 
remaining); } catch (IOException ioe) { throw new IOException("Failed reading " + remaining + " bytes into buffer[" + array.length + "] at " + position + " from " + sourceName + " " + _is, ioe); } } /** Put as needed to keep from overflowing the ByteBuffer. */ private ByteBuffer putSp( int sz ) { assert !_read; if (sz > _bb.remaining()) { if ((_h2o == null && _chan == null) || (_bb.hasArray() && _bb.capacity() < BBP_BIG._size)) expandByteBuffer(sz); else sendPartial(); assert sz <= _bb.remaining(); } return _bb; } // Do something with partial results, because the ByteBuffer is full. // If we are doing I/O, ship the bytes we have now and flip the ByteBuffer. private ByteBuffer sendPartial() { // Doing I/O with the full ByteBuffer - ship partial results _size += _bb.position(); if( _chan == null ) TimeLine.record_send(this, true); _bb.flip(); // Prep for writing. try { if( _chan == null ) tcpOpen(); // This is a big operation. Open a TCP socket as-needed. //for( int i=0; i < _bb.limit(); i++ ) if( _bb.get(i)==0 ) _zeros++; long ns = System.nanoTime(); while( _bb.hasRemaining() ) { ((WritableByteChannel) _chan).write(_bb); if( RANDOM_TCP_DROP != null && SocketChannelUtils.isSocketChannel(_chan) && RANDOM_TCP_DROP.nextInt(100) == 0 ) throw new IOException("Random TCP Write Fail"); } _time_io_ns += (System.nanoTime()-ns); } catch( IOException e ) { // Some kind of TCP fail? // Change to an unchecked exception (so we don't have to annotate every // frick'n put1/put2/put4/read/write call). Retry & recovery happens at // a higher level. AutoBuffers are used for many things including e.g. // disk i/o & UDP writes; this exception only happens on a failed TCP // write - and we don't want to make the other AutoBuffer users have to // declare (and then ignore) this exception. 
      throw new AutoBufferException(e);
    }
    _firstPage = false;
    _bb.clear();
    return _bb;
  }

  // Called when the byte buffer doesn't have enough room
  // If buffer is array backed, and the needed room is small,
  // increase the size of the backing array,
  // otherwise dump into a large direct buffer
  private ByteBuffer expandByteBuffer(int sizeHint) {
    final long needed = (long) sizeHint - _bb.remaining() + _bb.capacity(); // Max needed is 2G
    if ((_h2o==null && _chan == null) || (_bb.hasArray() && needed < MTU)) {
      if (needed > MAX_ARRAY_SIZE) {
        throw new IllegalArgumentException("Cannot allocate more than 2GB array: sizeHint="+sizeHint+", "
                + "needed="+needed
                + ", bb.remaining()=" + _bb.remaining() + ", bb.capacity()="+_bb.capacity());
      }
      byte[] ary = _bb.array();
      // just get twice what is currently needed but not more then max array size (2G)
      // Be careful not to overflow because of integer math!
      int newLen = (int) Math.min(1L << (water.util.MathUtils.log2(needed)+1), MAX_ARRAY_SIZE);
      int oldpos = _bb.position();
      _bb = ByteBuffer.wrap(MemoryManager.arrayCopyOfRange(ary,0,newLen),oldpos,newLen-oldpos)
              .order(ByteOrder.nativeOrder());
    } else if (_bb.capacity() != BBP_BIG._size) { //avoid expanding existing BBP items
      int oldPos = _bb.position();
      _bb.flip();
      _bb = BBP_BIG.make().put(_bb);
      _bb.position(oldPos);
    }
    return _bb;
  }

  @SuppressWarnings("unused")  public String getStr(int off, int len) {
    return new String(_bb.array(), _bb.arrayOffset()+off, len, UTF_8);
  }

  // -----------------------------------------------
  // Utility functions to get various Java primitives
  @SuppressWarnings("unused")  public boolean getZ() { return get1()!=0; }
  @SuppressWarnings("unused")  public byte get1 () { return getSp(1).get (); }
  @SuppressWarnings("unused")  public int get1U() { return get1() & 0xFF; }
  @SuppressWarnings("unused")  public char get2 () { return getSp(2).getChar (); }
  @SuppressWarnings("unused")  public short get2s () { return getSp(2).getShort (); }
  @SuppressWarnings("unused")  public int get3 () { getSp(3); return get1U() | get1U() << 8 | get1U() << 16; }
  @SuppressWarnings("unused")  public int get4 () { return getSp(4).getInt (); }
  @SuppressWarnings("unused")  public float get4f() { return getSp(4).getFloat (); }
  @SuppressWarnings("unused")  public long get8 () { return getSp(8).getLong (); }
  @SuppressWarnings("unused")  public double get8d() { return getSp(8).getDouble(); }

  // Absolute-offset peeks; these do not advance the buffer position.
  int get1U(int off) { return _bb.get (off)&0xFF; }
  int get4 (int off) { return _bb.getInt (off); }
  long get8 (int off) { return _bb.getLong(off); }

  @SuppressWarnings("unused")  public AutoBuffer putZ (boolean b){ return put1(b?1:0); }
  @SuppressWarnings("unused")  public AutoBuffer put1 ( int b) { assert b >= -128 && b <= 255 : ""+b+" is not a byte";
    putSp(1).put((byte)b); return this; }
  @SuppressWarnings("unused")  public AutoBuffer put2 ( char c) { putSp(2).putChar (c); return this; }
  @SuppressWarnings("unused")  public AutoBuffer put2 ( short s) { putSp(2).putShort (s); return this; }
  @SuppressWarnings("unused")  public AutoBuffer put2s ( short s) { return put2(s); }
  @SuppressWarnings("unused")  public AutoBuffer put3( int x ) { assert (-1<<24) <= x && x < (1<<24);
    return put1((x)&0xFF).put1((x >> 8)&0xFF).put1(x >> 16); }
  @SuppressWarnings("unused")  public AutoBuffer put4 ( int i) { putSp(4).putInt (i); return this; }
  @SuppressWarnings("unused")  public AutoBuffer put4f( float f) { putSp(4).putFloat (f); return this; }
  @SuppressWarnings("unused")  public AutoBuffer put8 ( long l) { putSp(8).putLong (l); return this; }
  @SuppressWarnings("unused")  public AutoBuffer put8d(double d) { putSp(8).putDouble(d); return this; }

  // Write a Freezable as its TypeMap id followed by its fields; null encoded as TypeMap.NULL.
  public AutoBuffer put(Freezable f) {
    if( f == null ) return putInt(TypeMap.NULL);
    assert f.frozenType() > 0 : "No TypeMap for "+f.getClass().getName();
    putInt(f.frozenType());
    return f.write(this);
  }

  // Read a Freezable: TypeMap id (remapped via _typeMap for cross-cluster streams), then fields.
  public <T extends Freezable> T get() {
    int id = getInt();
    if( id == TypeMap.NULL ) return null;
    if( _is!=null ) id = _typeMap[id];
    return (T)TypeMap.newFreezable(id).read(this);
  }
  public <T extends Freezable> T get(Class<T> tc) {
    int id = getInt();
    if( id == TypeMap.NULL ) return null;
    if( _is!=null ) id = _typeMap[id];
    assert tc.isInstance(TypeMap.theFreezable(id)):tc.getName() + " != " + TypeMap.theFreezable(id).getClass().getName() + ", id = " + id;
    return (T)TypeMap.newFreezable(id).read(this);
  }

  // Write Key's target IFF the Key is not null; target can be null.
  public AutoBuffer putKey(Key k) {
    if( k==null ) return this;  // Key is null ==> write nothing
    Keyed kd = DKV.getGet(k);
    put(kd);
    return kd == null ? this : kd.writeAll_impl(this);
  }
  public Keyed getKey(Key k, Futures fs) {
    return k==null ? null : getKey(fs); // Key is null ==> read nothing
  }
  public Keyed getKey(Futures fs) {
    Keyed kd = get(Keyed.class);
    if( kd == null ) return null;
    DKV.put(kd,fs);
    return kd.readAll_impl(this,fs);
  }

  // Put a (compressed) integer.  Specifically values in the range -1 to ~250
  // will take 1 byte, values near a Short will take 1+2 bytes, values near an
  // Int will take 1+4 bytes, and bigger values 1+8 bytes.  This compression is
  // optimized for small integers (including -1 which is often used as a "array
  // is null" flag when passing the array length).
  public AutoBuffer putInt(int x) {
    if( 0 <= (x+1)&& (x+1) <= 253 ) return put1(x+1);
    if( Short.MIN_VALUE <= x && x <= Short.MAX_VALUE ) return put1(255).put2((short)x);
    return put1(254).put4(x);
  }
  // Get a (compressed) integer.  See above for the compression strategy and reasoning.
  int getInt( ) {
    int x = get1U();
    if( x <= 253 ) return x-1;
    if( x==255 ) return (short)get2();
    assert x==254;
    return get4();
  }

  // Put a zero-compressed array.
  // Compression is:
  //  If null : putInt(-1)
  //  Else
  //    putInt(# of leading nulls)
  //    putInt(# of non-nulls)
  //    If # of non-nulls is > 0, putInt( # of trailing nulls)
  long putZA( Object[] A ) {
    if( A==null ) { putInt(-1); return 0; }
    int x=0; for( ; x<A.length; x++ ) if( A[x  ]!=null ) break;
    int y=A.length; for( ; y>x; y-- ) if( A[y-1]!=null ) break;
    putInt(x);                  // Leading zeros to skip
    putInt(y-x);                // Mixed non-zero guts in middle
    if( y > x )                 // If any trailing nulls
      putInt(A.length-y);       // Trailing zeros
    return ((long)x<<32)|(y-x); // Return both leading zeros, and middle non-zeros
  }

  // Get the lengths of a zero-compressed array.
  // Returns -1 if null.
  // Returns a long of (leading zeros | middle non-zeros).
  // If there are non-zeros, caller has to read the trailing zero-length.
  long getZA( ) {
    int x=getInt();             // Length of leading zeros
    if( x == -1 ) return -1;    // or a null
    int nz=getInt();            // Non-zero in the middle
    return ((long)x<<32)|(long)nz; // Return both ints
  }

  // TODO: untested. . .
  @SuppressWarnings("unused")
  public AutoBuffer putAEnum(Enum[] enums) {
    //_arys++;
    long xy = putZA(enums);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putEnum(enums[i]);
    return this;
  }

  @SuppressWarnings("unused")
  public <E extends Enum> E[] getAEnum(E[] values) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32);         // Leading nulls
    int y=(int)xy;               // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    E[] ts = (E[]) Array.newInstance(values.getClass().getComponentType(), x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getEnum(values);
    return ts;
  }

  // Zero-compressed Freezable arrays (1-D, 2-D, 3-D): only the non-null
  // middle run is serialized element-by-element.
  @SuppressWarnings("unused")
  public AutoBuffer putA(Freezable[] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) put(fs[i]);
    return this;
  }
  public AutoBuffer putAA(Freezable[][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putA(fs[i]);
    return this;
  }
  @SuppressWarnings("unused") public AutoBuffer putAAA(Freezable[][][] fs) {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAA(fs[i]);
    return this;
  }

  public <T extends Freezable> T[] getA(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32);         // Leading nulls
    int y=(int)xy;               // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    T[] ts = (T[]) Array.newInstance(tc, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = get(tc);
    return ts;
  }
  public <T extends Freezable> T[][] getAA(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32);         // Leading nulls
    int y=(int)xy;               // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    Class<T[]> tcA = (Class<T[]>) Array.newInstance(tc, 0).getClass();
    T[][] ts = (T[][]) Array.newInstance(tcA, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getA(tc);
    return ts;
  }
  @SuppressWarnings("unused") public <T extends Freezable> T[][][] getAAA(Class<T> tc) {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32);         // Leading nulls
    int y=(int)xy;               // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    Class<T[]  > tcA  = (Class<T[]  >) Array.newInstance(tc , 0).getClass();
    Class<T[][]> tcAA = (Class<T[][]>) Array.newInstance(tcA, 0).getClass();
    T[][][] ts = (T[][][]) Array.newInstance(tcAA, x+y+z);
    for( int i = x; i < x+y; ++i ) ts[i] = getAA(tc);
    return ts;
  }

  public AutoBuffer putAStr(String[] fs)    {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putStr(fs[i]);
    return this;
  }
  public String[] getAStr() {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32);         // Leading nulls
    int y=(int)xy;               // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    String[] ts = new String[x+y+z];
    for( int i = x; i < x+y; ++i ) ts[i] = getStr();
    return ts;
  }

  @SuppressWarnings("unused")  public AutoBuffer putAAStr(String[][] fs)    {
    //_arys++;
    long xy = putZA(fs);
    if( xy == -1 ) return this;
    int x=(int)(xy>>32);
    int y=(int)xy;
    for( int i=x; i<x+y; i++ ) putAStr(fs[i]);
    return this;
  }
  @SuppressWarnings("unused")  public String[][] getAAStr() {
    //_arys++;
    long xy = getZA();
    if( xy == -1 ) return null;
    int x=(int)(xy>>32);         // Leading nulls
    int y=(int)xy;               // Middle non-zeros
    int z = y==0 ? 0 : getInt(); // Trailing nulls
    String[][] ts = new String[x+y+z][];
    for( int i = x; i < x+y; ++i ) ts[i] = getAStr();
    return ts;
  }

  // Read the smaller of _bb.remaining() and len into buf.
  // Return bytes read, which could be zero.
  int read( byte[] buf, int off, int len ) {
    int sz = Math.min(_bb.remaining(),len);
    _bb.get(buf,off,sz);
    return sz;
  }

  // -----------------------------------------------
  // Utility functions to handle common UDP packet tasks.
// Get the 1st control byte int getCtrl( ) { return getSz(1).get(0)&0xFF; } // Get the port in next 2 bytes int getPort( ) { return getSz(1+2).getChar(1); } // Get the task# in the next 4 bytes int getTask( ) { return getSz(1+2+4).getInt(1+2); } // Get the flag in the next 1 byte int getFlag( ) { return getSz(1+2+4+1).get(1+2+4); } // Set the ctrl, port, task. Ready to write more bytes afterwards AutoBuffer putUdp (UDP.udp type) { assert _bb.position() == 0; putSp(_bb.position()+1+2); _bb.put ((byte)type.ordinal()); _bb.putChar((char)H2O.H2O_PORT ); // Outgoing port is always the sender's (me) port return this; } AutoBuffer putTask(UDP.udp type, int tasknum) { return putUdp(type).put4(tasknum); } AutoBuffer putTask(int ctrl, int tasknum) { assert _bb.position() == 0; putSp(_bb.position()+1+2+4); _bb.put((byte)ctrl).putChar((char)H2O.H2O_PORT).putInt(tasknum); return this; } // ----------------------------------------------- // Utility functions to read & write arrays public boolean[] getAZ() { int len = getInt(); if (len == -1) return null; boolean[] r = new boolean[len]; for (int i=0;i<len;++i) r[i] = getZ(); return r; } public byte[] getA1( ) { //_arys++; int len = getInt(); return len == -1 ? 
null : getA1(len); } public byte[] getA1( int len ) { byte[] buf = MemoryManager.malloc1(len); int sofar = 0; while( sofar < len ) { int more = Math.min(_bb.remaining(), len - sofar); _bb.get(buf, sofar, more); sofar += more; if( sofar < len ) getSp(Math.min(_bb.capacity(), len-sofar)); } return buf; } public short[] getA2( ) { //_arys++; int len = getInt(); if( len == -1 ) return null; short[] buf = MemoryManager.malloc2(len); int sofar = 0; while( sofar < buf.length ) { ShortBuffer as = _bb.asShortBuffer(); int more = Math.min(as.remaining(), len - sofar); as.get(buf, sofar, more); sofar += more; _bb.position(_bb.position() + as.position()*2); if( sofar < len ) getSp(Math.min(_bb.capacity()-1, (len-sofar)*2)); } return buf; } public int[] getA4( ) { //_arys++; int len = getInt(); if( len == -1 ) return null; int[] buf = MemoryManager.malloc4(len); int sofar = 0; while( sofar < buf.length ) { IntBuffer as = _bb.asIntBuffer(); int more = Math.min(as.remaining(), len - sofar); as.get(buf, sofar, more); sofar += more; _bb.position(_bb.position() + as.position()*4); if( sofar < len ) getSp(Math.min(_bb.capacity()-3, (len-sofar)*4)); } return buf; } public float[] getA4f( ) { //_arys++; int len = getInt(); if( len == -1 ) return null; float[] buf = MemoryManager.malloc4f(len); int sofar = 0; while( sofar < buf.length ) { FloatBuffer as = _bb.asFloatBuffer(); int more = Math.min(as.remaining(), len - sofar); as.get(buf, sofar, more); sofar += more; _bb.position(_bb.position() + as.position()*4); if( sofar < len ) getSp(Math.min(_bb.capacity()-3, (len-sofar)*4)); } return buf; } public long[] getA8( ) { //_arys++; // Get the lengths of lead & trailing zero sections, and the non-zero // middle section. int x = getInt(); if( x == -1 ) return null; int y = getInt(); // Non-zero in the middle int z = y==0 ? 
0 : getInt();// Trailing zeros long[] buf = MemoryManager.malloc8(x+y+z); switch( get1U() ) { // 1,2,4 or 8 for how the middle section is passed case 1: for( int i=x; i<x+y; i++ ) buf[i] = get1U(); return buf; case 2: for( int i=x; i<x+y; i++ ) buf[i] = (short)get2(); return buf; case 4: for( int i=x; i<x+y; i++ ) buf[i] = get4(); return buf; case 8: break; default: throw H2O.fail(); } int sofar = x; while( sofar < x+y ) { LongBuffer as = _bb.asLongBuffer(); int more = Math.min(as.remaining(), x+y - sofar); as.get(buf, sofar, more); sofar += more; _bb.position(_bb.position() + as.position()*8); if( sofar < x+y ) getSp(Math.min(_bb.capacity()-7, (x+y-sofar)*8)); } return buf; } public double[] getA8d( ) { //_arys++; int len = getInt(); if( len == -1 ) return null; double[] buf = MemoryManager.malloc8d(len); int sofar = 0; while( sofar < len ) { DoubleBuffer as = _bb.asDoubleBuffer(); int more = Math.min(as.remaining(), len - sofar); as.get(buf, sofar, more); sofar += more; _bb.position(_bb.position() + as.position()*8); if( sofar < len ) getSp(Math.min(_bb.capacity()-7, (len-sofar)*8)); } return buf; } @SuppressWarnings("unused") public byte[][] getAA1( ) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 0 : getInt(); // Trailing nulls byte[][] ary = new byte[x+y+z][]; for( int i=x; i<x+y; i++ ) ary[i] = getA1(); return ary; } @SuppressWarnings("unused") public short[][] getAA2( ) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 0 : getInt(); // Trailing nulls short[][] ary = new short[x+y+z][]; for( int i=x; i<x+y; i++ ) ary[i] = getA2(); return ary; } public int[][] getAA4( ) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 
0 : getInt(); // Trailing nulls int[][] ary = new int[x+y+z][]; for( int i=x; i<x+y; i++ ) ary[i] = getA4(); return ary; } @SuppressWarnings("unused") public float[][] getAA4f( ) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 0 : getInt(); // Trailing nulls float[][] ary = new float[x+y+z][]; for( int i=x; i<x+y; i++ ) ary[i] = getA4f(); return ary; } public long[][] getAA8( ) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 0 : getInt(); // Trailing nulls long[][] ary = new long[x+y+z][]; for( int i=x; i<x+y; i++ ) ary[i] = getA8(); return ary; } @SuppressWarnings("unused") public double[][] getAA8d( ) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 0 : getInt(); // Trailing nulls double[][] ary = new double[x+y+z][]; for( int i=x; i<x+y; i++ ) ary[i] = getA8d(); return ary; } @SuppressWarnings("unused") public int[][][] getAAA4( ) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 0 : getInt(); // Trailing nulls int[][][] ary = new int[x+y+z][][]; for( int i=x; i<x+y; i++ ) ary[i] = getAA4(); return ary; } @SuppressWarnings("unused") public long[][][] getAAA8( ) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 0 : getInt(); // Trailing nulls long[][][] ary = new long[x+y+z][][]; for( int i=x; i<x+y; i++ ) ary[i] = getAA8(); return ary; } public double[][][] getAAA8d( ) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 
0 : getInt(); // Trailing nulls double[][][] ary = new double[x+y+z][][]; for( int i=x; i<x+y; i++ ) ary[i] = getAA8d(); return ary; } public String getStr( ) { int len = getInt(); return len == -1 ? null : new String(getA1(len), UTF_8); } public <E extends Enum> E getEnum(E[] values ) { int idx = get1(); return idx == -1 ? null : values[idx]; } public AutoBuffer putAZ( boolean[] ary ) { if( ary == null ) return putInt(-1); putInt(ary.length); for (boolean anAry : ary) putZ(anAry); return this; } public AutoBuffer putA1( byte[] ary ) { //_arys++; if( ary == null ) return putInt(-1); putInt(ary.length); return putA1(ary,ary.length); } public AutoBuffer putA1( byte[] ary, int length ) { return putA1(ary,0,length); } public AutoBuffer putA1( byte[] ary, int sofar, int length ) { if (length - sofar > _bb.remaining()) expandByteBuffer(length-sofar); while( sofar < length ) { int len = Math.min(length - sofar, _bb.remaining()); _bb.put(ary, sofar, len); sofar += len; if( sofar < length ) sendPartial(); } return this; } AutoBuffer putA2( short[] ary ) { //_arys++; if( ary == null ) return putInt(-1); putInt(ary.length); if (ary.length*2 > _bb.remaining()) expandByteBuffer(ary.length*2); int sofar = 0; while( sofar < ary.length ) { ShortBuffer sb = _bb.asShortBuffer(); int len = Math.min(ary.length - sofar, sb.remaining()); sb.put(ary, sofar, len); sofar += len; _bb.position(_bb.position() + sb.position()*2); if( sofar < ary.length ) sendPartial(); } return this; } public AutoBuffer putA4( int[] ary ) { //_arys++; if( ary == null ) return putInt(-1); putInt(ary.length); // Note: based on Brandon commit this should improve performance during parse (7d950d622ee3037555ecbab0e39404f8f0917652) if (ary.length*4 > _bb.remaining()) { expandByteBuffer(ary.length*4); // Try to expand BB buffer to fit input array } int sofar = 0; while( sofar < ary.length ) { IntBuffer ib = _bb.asIntBuffer(); int len = Math.min(ary.length - sofar, ib.remaining()); ib.put(ary, sofar, len); sofar += 
len; _bb.position(_bb.position() + ib.position()*4); if( sofar < ary.length ) sendPartial(); } return this; } public AutoBuffer putA8( long[] ary ) { //_arys++; if( ary == null ) return putInt(-1); // Trim leading & trailing zeros. Pass along the length of leading & // trailing zero sections, and the non-zero section in the middle. int x=0; for( ; x<ary.length; x++ ) if( ary[x ]!=0 ) break; int y=ary.length; for( ; y>x; y-- ) if( ary[y-1]!=0 ) break; int nzlen = y-x; putInt(x); putInt(nzlen); if( nzlen > 0 ) // If any trailing nulls putInt(ary.length-y); // Trailing zeros // Size trim the NZ section: pass as bytes or shorts if possible. long min=Long.MAX_VALUE, max=Long.MIN_VALUE; for( int i=x; i<y; i++ ) { if( ary[i]<min ) min=ary[i]; if( ary[i]>max ) max=ary[i]; } if( 0 <= min && max < 256 ) { // Ship as unsigned bytes put1(1); for( int i=x; i<y; i++ ) put1((int)ary[i]); return this; } if( Short.MIN_VALUE <= min && max < Short.MAX_VALUE ) { // Ship as shorts put1(2); for( int i=x; i<y; i++ ) put2((short)ary[i]); return this; } if( Integer.MIN_VALUE <= min && max < Integer.MAX_VALUE ) { // Ship as ints put1(4); for( int i=x; i<y; i++ ) put4((int)ary[i]); return this; } put1(8); // Ship as full longs int sofar = x; if ((y-sofar)*8 > _bb.remaining()) expandByteBuffer(ary.length*8); while( sofar < y ) { LongBuffer lb = _bb.asLongBuffer(); int len = Math.min(y - sofar, lb.remaining()); lb.put(ary, sofar, len); sofar += len; _bb.position(_bb.position() + lb.position() * 8); if( sofar < y ) sendPartial(); } return this; } public AutoBuffer putA4f( float[] ary ) { //_arys++; if( ary == null ) return putInt(-1); putInt(ary.length); if (ary.length*4 > _bb.remaining()) expandByteBuffer(ary.length*4); int sofar = 0; while( sofar < ary.length ) { FloatBuffer fb = _bb.asFloatBuffer(); int len = Math.min(ary.length - sofar, fb.remaining()); fb.put(ary, sofar, len); sofar += len; _bb.position(_bb.position() + fb.position()*4); if( sofar < ary.length ) sendPartial(); } return 
this; } public AutoBuffer putA8d( double[] ary ) { //_arys++; if( ary == null ) return putInt(-1); putInt(ary.length); if (ary.length*8 > _bb.remaining()) expandByteBuffer(ary.length*8); int sofar = 0; while( sofar < ary.length ) { DoubleBuffer db = _bb.asDoubleBuffer(); int len = Math.min(ary.length - sofar, db.remaining()); db.put(ary, sofar, len); sofar += len; _bb.position(_bb.position() + db.position()*8); if( sofar < ary.length ) sendPartial(); } return this; } public AutoBuffer putAA1( byte[][] ary ) { //_arys++; long xy = putZA(ary); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putA1(ary[i]); return this; } @SuppressWarnings("unused") AutoBuffer putAA2( short[][] ary ) { //_arys++; long xy = putZA(ary); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putA2(ary[i]); return this; } public AutoBuffer putAA4( int[][] ary ) { //_arys++; long xy = putZA(ary); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putA4(ary[i]); return this; } @SuppressWarnings("unused") public AutoBuffer putAA4f( float[][] ary ) { //_arys++; long xy = putZA(ary); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putA4f(ary[i]); return this; } public AutoBuffer putAA8( long[][] ary ) { //_arys++; long xy = putZA(ary); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putA8(ary[i]); return this; } @SuppressWarnings("unused") public AutoBuffer putAA8d( double[][] ary ) { //_arys++; long xy = putZA(ary); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putA8d(ary[i]); return this; } public AutoBuffer putAAA4( int[][][] ary ) { //_arys++; long xy = putZA(ary); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putAA4(ary[i]); return this; } public AutoBuffer putAAA8( long[][][] ary ) { //_arys++; long 
xy = putZA(ary); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putAA8(ary[i]); return this; } public AutoBuffer putAAA8d( double[][][] ary ) { //_arys++; long xy = putZA(ary); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putAA8d(ary[i]); return this; } // Put a String as bytes (not chars!) public AutoBuffer putStr( String s ) { if( s==null ) return putInt(-1); return putA1(StringUtils.bytesOf(s)); } @SuppressWarnings("unused") public AutoBuffer putEnum( Enum x ) { return put1(x==null ? -1 : x.ordinal()); } public static byte[] javaSerializeWritePojo(Object o) { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream out = null; try { out = new ObjectOutputStream(bos); out.writeObject(o); out.close(); return bos.toByteArray(); } catch (IOException e) { throw Log.throwErr(e); } } public static Object javaSerializeReadPojo(byte [] bytes) { try { final ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes)); Object o = ois.readObject(); return o; } catch (IOException e) { String className = nameOfClass(bytes); throw Log.throwErr(new RuntimeException("Failed to deserialize " + className, e)); } catch (ClassNotFoundException e) { throw Log.throwErr(e); } } static String nameOfClass(byte[] bytes) { if (bytes == null) return "(null)"; if (bytes.length < 11) return "(no name)"; int nameSize = Math.min(40, Math.max(3, bytes[7])); return new String(bytes, 8, Math.min(nameSize, bytes.length - 8)); } // ========================================================================== // Java Serializable objects // Note: These are heck-a-lot more expensive than their Freezable equivalents. 
@SuppressWarnings("unused") public AutoBuffer putSer( Object obj ) { if (obj == null) return putA1(null); return putA1(javaSerializeWritePojo(obj)); } @SuppressWarnings("unused") public AutoBuffer putASer(Object[] fs) { //_arys++; long xy = putZA(fs); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putSer(fs[i]); return this; } @SuppressWarnings("unused") public AutoBuffer putAASer(Object[][] fs) { //_arys++; long xy = putZA(fs); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putASer(fs[i]); return this; } @SuppressWarnings("unused") public AutoBuffer putAAASer(Object[][][] fs) { //_arys++; long xy = putZA(fs); if( xy == -1 ) return this; int x=(int)(xy>>32); int y=(int)xy; for( int i=x; i<x+y; i++ ) putAASer(fs[i]); return this; } @SuppressWarnings("unused") public Object getSer() { byte[] ba = getA1(); return ba == null ? null : javaSerializeReadPojo(ba); } @SuppressWarnings("unused") public <T> T getSer(Class<T> tc) { return (T)getSer(); } @SuppressWarnings("unused") public <T> T[] getASer(Class<T> tc) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 0 : getInt(); // Trailing nulls T[] ts = (T[]) Array.newInstance(tc, x+y+z); for( int i = x; i < x+y; ++i ) ts[i] = getSer(tc); return ts; } @SuppressWarnings("unused") public <T> T[][] getAASer(Class<T> tc) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 0 : getInt(); // Trailing nulls T[][] ts = (T[][]) Array.newInstance(tc, x+y+z); for( int i = x; i < x+y; ++i ) ts[i] = getASer(tc); return ts; } @SuppressWarnings("unused") public <T> T[][][] getAAASer(Class<T> tc) { //_arys++; long xy = getZA(); if( xy == -1 ) return null; int x=(int)(xy>>32); // Leading nulls int y=(int)xy; // Middle non-zeros int z = y==0 ? 
0 : getInt(); // Trailing nulls T[][][] ts = (T[][][]) Array.newInstance(tc, x+y+z); for( int i = x; i < x+y; ++i ) ts[i] = getAASer(tc); return ts; } // ========================================================================== // JSON AutoBuffer printers public AutoBuffer putJNULL( ) { return put1('n').put1('u').put1('l').put1('l'); } // Escaped JSON string private AutoBuffer putJStr( String s ) { byte[] b = StringUtils.bytesOf(s); int off=0; for( int i=0; i<b.length; i++ ) { if( b[i] == '\\' || b[i] == '"') { // Double up backslashes, escape quotes putA1(b,off,i); // Everything so far (no backslashes) put1('\\'); // The extra backslash off=i; // Advance the "so far" variable } // Handle remaining special cases in JSON // if( b[i] == '/' ) { putA1(b,off,i); put1('\\'); put1('/'); off=i+1; continue;} if( b[i] == '\b' ) { putA1(b,off,i); put1('\\'); put1('b'); off=i+1; continue;} if( b[i] == '\f' ) { putA1(b,off,i); put1('\\'); put1('f'); off=i+1; continue;} if( b[i] == '\n' ) { putA1(b,off,i); put1('\\'); put1('n'); off=i+1; continue;} if( b[i] == '\r' ) { putA1(b,off,i); put1('\\'); put1('r'); off=i+1; continue;} if( b[i] == '\t' ) { putA1(b,off,i); put1('\\'); put1('t'); off=i+1; continue;} // ASCII Control characters if( b[i] == 127 ) { putA1(b,off,i); put1('\\'); put1('u'); put1('0'); put1('0'); put1('7'); put1('f'); off=i+1; continue;} if( b[i] >= 0 && b[i] < 32 ) { String hexStr = Integer.toHexString(b[i]); putA1(b, off, i); put1('\\'); put1('u'); for (int j = 0; j < 4 - hexStr.length(); j++) put1('0'); for (int j = 0; j < hexStr.length(); j++) put1(hexStr.charAt(hexStr.length()-j-1)); off=i+1; } } return putA1(b,off,b.length); } public AutoBuffer putJSONStrUnquoted ( String s ) { return s==null ? putJNULL() : putJStr(s); } public AutoBuffer putJSONStrUnquoted ( String name, String s ) { return s==null ? 
putJSONStr(name).put1(':').putJNULL() : putJSONStr(name).put1(':').putJStr(s); } public AutoBuffer putJSONName( String s ) { return put1('"').putJStr(s).put1('"'); } public AutoBuffer putJSONStr ( String s ) { return s==null ? putJNULL() : putJSONName(s); } public AutoBuffer putJSONAStr(String[] ss) { if( ss == null ) return putJNULL(); put1('['); for( int i=0; i<ss.length; i++ ) { if( i>0 ) put1(','); putJSONStr(ss[i]); } return put1(']'); } private AutoBuffer putJSONAAStr( String[][] sss) { if( sss == null ) return putJNULL(); put1('['); for( int i=0; i<sss.length; i++ ) { if( i>0 ) put1(','); putJSONAStr(sss[i]); } return put1(']'); } @SuppressWarnings("unused") public AutoBuffer putJSONStr (String name, String s ) { return putJSONStr(name).put1(':').putJSONStr(s); } @SuppressWarnings("unused") public AutoBuffer putJSONAStr (String name, String[] ss ) { return putJSONStr(name).put1(':').putJSONAStr(ss); } @SuppressWarnings("unused") public AutoBuffer putJSONAAStr(String name, String[][]sss) { return putJSONStr(name).put1(':').putJSONAAStr(sss); } @SuppressWarnings("unused") public AutoBuffer putJSONSer (String name, Object o ) { return putJSONStr(name).put1(':').putJNULL(); } @SuppressWarnings("unused") public AutoBuffer putJSONASer (String name, Object[] oo ) { return putJSONStr(name).put1(':').putJNULL(); } @SuppressWarnings("unused") public AutoBuffer putJSONAASer (String name, Object[][] ooo ) { return putJSONStr(name).put1(':').putJNULL(); } @SuppressWarnings("unused") public AutoBuffer putJSONAAASer(String name, Object[][][] oooo) { return putJSONStr(name).put1(':').putJNULL(); } public AutoBuffer putJSONAZ( String name, boolean[] f) { return putJSONStr(name).put1(':').putJSONAZ(f); } public AutoBuffer putJSON(Freezable ice) { return ice == null ? 
putJNULL() : ice.writeJSON(this); } public AutoBuffer putJSONA( Freezable fs[] ) { if( fs == null ) return putJNULL(); put1('['); for( int i=0; i<fs.length; i++ ) { if( i>0 ) put1(','); putJSON(fs[i]); } return put1(']'); } public AutoBuffer putJSONAA( Freezable fs[][]) { if( fs == null ) return putJNULL(); put1('['); for( int i=0; i<fs.length; i++ ) { if( i>0 ) put1(','); putJSONA(fs[i]); } return put1(']'); } public AutoBuffer putJSONAAA( Freezable fs[][][]) { if( fs == null ) return putJNULL(); put1('['); for( int i=0; i<fs.length; i++ ) { if( i>0 ) put1(','); putJSONAA(fs[i]); } return put1(']'); } @SuppressWarnings("unused") public AutoBuffer putJSON ( String name, Freezable f ) { return putJSONStr(name).put1(':').putJSON (f); } public AutoBuffer putJSONA ( String name, Freezable f[] ) { return putJSONStr(name).put1(':').putJSONA (f); } @SuppressWarnings("unused") public AutoBuffer putJSONAA( String name, Freezable f[][]){ return putJSONStr(name).put1(':').putJSONAA(f); } @SuppressWarnings("unused") public AutoBuffer putJSONAAA( String name, Freezable f[][][]){ return putJSONStr(name).put1(':').putJSONAAA(f); } @SuppressWarnings("unused") public AutoBuffer putJSONZ( String name, boolean value ) { return putJSONStr(name).put1(':').putJStr("" + value); } private AutoBuffer putJSONAZ(boolean [] b) { if (b == null) return putJNULL(); put1('['); for( int i = 0; i < b.length; ++i) { if (i > 0) put1(','); putJStr(""+b[i]); } return put1(']'); } // Most simple integers private AutoBuffer putJInt( int i ) { byte b[] = StringUtils.toBytes(i); return putA1(b,b.length); } public AutoBuffer putJSON1( byte b ) { return putJInt(b); } public AutoBuffer putJSONA1( byte ary[] ) { if( ary == null ) return putJNULL(); put1('['); for( int i=0; i<ary.length; i++ ) { if( i>0 ) put1(','); putJSON1(ary[i]); } return put1(']'); } private AutoBuffer putJSONAA1(byte ary[][]) { if( ary == null ) return putJNULL(); put1('['); for( int i=0; i<ary.length; i++ ) { if( i>0 ) put1(','); 
putJSONA1(ary[i]); } return put1(']'); } @SuppressWarnings("unused") public AutoBuffer putJSON1 (String name, byte b ) { return putJSONStr(name).put1(':').putJSON1(b); } @SuppressWarnings("unused") public AutoBuffer putJSONA1 (String name, byte b[] ) { return putJSONStr(name).put1(':').putJSONA1(b); } @SuppressWarnings("unused") public AutoBuffer putJSONAA1(String name, byte b[][]) { return putJSONStr(name).put1(':').putJSONAA1(b); } public AutoBuffer putJSONAEnum(String name, Enum[] enums) { return putJSONStr(name).put1(':').putJSONAEnum(enums); } public AutoBuffer putJSONAEnum( Enum[] enums ) { if( enums == null ) return putJNULL(); put1('['); for( int i=0; i<enums.length; i++ ) { if( i>0 ) put1(','); putJSONEnum(enums[i]); } return put1(']'); } AutoBuffer putJSON2( char c ) { return putJSON4(c); } AutoBuffer putJSON2( String name, char c ) { return putJSONStr(name).put1(':').putJSON2(c); } AutoBuffer putJSON2( short c ) { return putJSON4(c); } AutoBuffer putJSON2( String name, short c ) { return putJSONStr(name).put1(':').putJSON2(c); } public AutoBuffer putJSONA2( String name, short ary[] ) { return putJSONStr(name).put1(':').putJSONA2(ary); } AutoBuffer putJSONA2( short ary[] ) { if( ary == null ) return putJNULL(); put1('['); for( int i=0; i<ary.length; i++ ) { if( i>0 ) put1(','); putJSON2(ary[i]); } return put1(']'); } AutoBuffer putJSON8 ( long l ) { return putJStr(Long.toString(l)); } AutoBuffer putJSONA8( long ary[] ) { if( ary == null ) return putJNULL(); put1('['); for( int i=0; i<ary.length; i++ ) { if( i>0 ) put1(','); putJSON8(ary[i]); } return put1(']'); } AutoBuffer putJSONAA8( long ary[][] ) { if( ary == null ) return putJNULL(); put1('['); for( int i=0; i<ary.length; i++ ) { if( i>0 ) put1(','); putJSONA8(ary[i]); } return put1(']'); } AutoBuffer putJSONAAA8( long ary[][][] ) { if( ary == null ) return putJNULL(); put1('['); for( int i=0; i<ary.length; i++ ) { if( i>0 ) put1(','); putJSONAA8(ary[i]); } return put1(']'); } AutoBuffer putJSONEnum( 
Enum e ) { return e==null ? putJNULL() : put1('"').putJStr(e.toString()).put1('"'); } public AutoBuffer putJSON8 ( String name, long l ) { return putJSONStr(name).put1(':').putJSON8(l); } public AutoBuffer putJSONEnum( String name, Enum e ) { return putJSONStr(name).put1(':').putJSONEnum(e); } public AutoBuffer putJSONA8( String name, long ary[] ) { return putJSONStr(name).put1(':').putJSONA8(ary); } public AutoBuffer putJSONAA8( String name, long ary[][] ) { return putJSONStr(name).put1(':').putJSONAA8(ary); } public AutoBuffer putJSONAAA8( String name, long ary[][][] ) { return putJSONStr(name).put1(':').putJSONAAA8(ary); } public AutoBuffer putJSON4(int i) { return putJStr(Integer.toString(i)); } AutoBuffer putJSONA4( int[] a) { if( a == null ) return putJNULL(); put1('['); for( int i=0; i<a.length; i++ ) { if( i>0 ) put1(','); putJSON4(a[i]); } return put1(']'); } AutoBuffer putJSONAA4( int[][] a ) { if( a == null ) return putJNULL(); put1('['); for( int i=0; i<a.length; i++ ) { if( i>0 ) put1(','); putJSONA4(a[i]); } return put1(']'); } AutoBuffer putJSONAAA4( int[][][] a ) { if( a == null ) return putJNULL(); put1('['); for( int i=0; i<a.length; i++ ) { if( i>0 ) put1(','); putJSONAA4(a[i]); } return put1(']'); } public AutoBuffer putJSON4 ( String name, int i ) { return putJSONStr(name).put1(':').putJSON4(i); } public AutoBuffer putJSONA4( String name, int[] a) { return putJSONStr(name).put1(':').putJSONA4(a); } public AutoBuffer putJSONAA4( String name, int[][] a ) { return putJSONStr(name).put1(':').putJSONAA4(a); } public AutoBuffer putJSONAAA4( String name, int[][][] a ) { return putJSONStr(name).put1(':').putJSONAAA4(a); } AutoBuffer putJSON4f ( float f ) { return f==Float.POSITIVE_INFINITY?putJSONStr(JSON_POS_INF):(f==Float.NEGATIVE_INFINITY?putJSONStr(JSON_NEG_INF):(Float.isNaN(f)?putJSONStr(JSON_NAN):putJStr(Float .toString(f)))); } public AutoBuffer putJSON4f ( String name, float f ) { return putJSONStr(name).put1(':').putJSON4f(f); } AutoBuffer 
putJSONA4f( float[] a ) { if( a == null ) return putJNULL(); put1('['); for( int i=0; i<a.length; i++ ) { if( i>0 ) put1(','); putJSON4f(a[i]); } return put1(']'); } public AutoBuffer putJSONA4f(String name, float[] a) { putJSONStr(name).put1(':'); return putJSONA4f(a); } AutoBuffer putJSONAA4f(String name, float[][] a) { putJSONStr(name).put1(':'); if( a == null ) return putJNULL(); put1('['); for( int i=0; i<a.length; i++ ) { if( i>0 ) put1(','); putJSONA4f(a[i]); } return put1(']'); } AutoBuffer putJSON8d( double d ) { if (TwoDimTable.isEmpty(d)) return putJNULL(); return d==Double.POSITIVE_INFINITY?putJSONStr(JSON_POS_INF):(d==Double.NEGATIVE_INFINITY?putJSONStr(JSON_NEG_INF):(Double.isNaN(d)?putJSONStr(JSON_NAN):putJStr(Double.toString(d)))); } public AutoBuffer putJSON8d( String name, double d ) { return putJSONStr(name).put1(':').putJSON8d(d); } public AutoBuffer putJSONA8d( String name, double[] a ) { return putJSONStr(name).put1(':').putJSONA8d(a); } public AutoBuffer putJSONAA8d( String name, double[][] a) { return putJSONStr(name).put1(':').putJSONAA8d(a); } public AutoBuffer putJSONAAA8d( String name, double[][][] a) { return putJSONStr(name).put1(':').putJSONAAA8d(a); } public AutoBuffer putJSONA8d( double[] a ) { if( a == null ) return putJNULL(); put1('['); for( int i=0; i<a.length; i++ ) { if( i>0 ) put1(','); putJSON8d(a[i]); } return put1(']'); } public AutoBuffer putJSONAA8d( double[][] a ) { if( a == null ) return putJNULL(); put1('['); for( int i=0; i<a.length; i++ ) { if( i>0 ) put1(','); putJSONA8d(a[i]); } return put1(']'); } AutoBuffer putJSONAAA8d( double ary[][][] ) { if( ary == null ) return putJNULL(); put1('['); for( int i=0; i<ary.length; i++ ) { if( i>0 ) put1(','); putJSONAA8d(ary[i]); } return put1(']'); } static final String JSON_NAN = "NaN"; static final String JSON_POS_INF = "Infinity"; static final String JSON_NEG_INF = "-Infinity"; }
mathemage/h2o-3
h2o-core/src/main/java/water/AutoBuffer.java
Java
apache-2.0
74,621
"use strict"; import chai from "chai"; import chaiAsPromised from "chai-as-promised"; import sinon from "sinon"; import BusinessElementsClient from "../src"; import uuid from "uuid"; import * as requests from "../src/requests"; chai.use(chaiAsPromised); chai.should(); chai.config.includeStack = true; const FAKE_SERVER_URL = "http://api.fake-server"; /** @test {Attribute} */ describe("Attribute", () => { let sandbox, client, attributeId, attribute; beforeEach(() => { sandbox = sinon.sandbox.create(); client = new BusinessElementsClient(FAKE_SERVER_URL); attributeId = uuid.v4(); attribute = client.tenant("example.com").attributes().attribute(attributeId); }); afterEach(() => { sandbox.restore(); }); /** @test {Attribute#get} */ describe("#get()", () => { const data = {id: attributeId}; beforeEach(() => { sandbox.stub(client, "execute").returns(Promise.resolve(data)); }); it("should get capture", () => { attribute.get(); sinon.assert.calledWithMatch(client.execute, { path: `/attributes/${attributeId}` }); }); it("should return attribute data", () => { return attribute.get().should.become(data); }); }); /** @test {Attribute#edit} */ describe("#edit()", () => { const response = {status: "Ok"}; const schema = { "type": "object", "properties": { "type": { "title": "type", "type": "string" } } }; beforeEach(() => { sandbox.stub(client, "execute").returns(Promise.resolve(response)); sandbox.spy(requests, "updateAttribute"); }); it("should edit the attribute", () => { attribute.edit(schema, {}); sinon.assert.calledWithMatch(requests.updateAttribute, attributeId, schema); }); it("should return success", () => { return attribute.edit(schema, {}).should.eventually.become(response); }); }); /** @test {Attribute#remove} */ describe("#remove()", () => { const response = {status: "Ok"}; beforeEach(() => { sandbox.stub(client, "execute").returns(Promise.resolve(response)); sandbox.spy(requests, "deleteAttribute"); }); it("should delete the attribute", () => { attribute.remove({}); 
sinon.assert.calledWithMatch(requests.deleteAttribute, attributeId); }); it("should return success", () => { return attribute.remove({}).should.eventually.become(response); }); }); });
Product-Foundry/business-elements-client-js
test/attribute_test.js
JavaScript
apache-2.0
2,495
/* * Copyright (c) 2010-2013 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.model.impl.lens; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import javax.xml.namespace.QName; import com.evolveum.midpoint.prism.*; import com.evolveum.midpoint.schema.DeltaConvertor; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.util.exception.*; import com.evolveum.midpoint.xml.ns._public.model.model_context_3.LensProjectionContextType; import org.apache.commons.lang.StringUtils; import org.jvnet.jaxb2_commons.lang.Validate; import com.evolveum.midpoint.common.crypto.CryptoUtil; import com.evolveum.midpoint.common.refinery.RefinedObjectClassDefinition; import com.evolveum.midpoint.common.refinery.RefinedResourceSchema; import com.evolveum.midpoint.common.refinery.ResourceShadowDiscriminator; import com.evolveum.midpoint.model.api.context.ModelProjectionContext; import com.evolveum.midpoint.model.api.context.SynchronizationPolicyDecision; import com.evolveum.midpoint.prism.delta.ChangeType; import com.evolveum.midpoint.prism.delta.DeltaSetTriple; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.delta.PrismValueDeltaSetTriple; import com.evolveum.midpoint.prism.delta.ReferenceDelta; import com.evolveum.midpoint.prism.path.ItemPath; import 
com.evolveum.midpoint.schema.processor.ResourceAttribute; import com.evolveum.midpoint.schema.processor.ResourceSchema; import com.evolveum.midpoint.schema.util.MiscSchemaUtil; import com.evolveum.midpoint.schema.util.ShadowUtil; import com.evolveum.midpoint.schema.util.ResourceTypeUtil; import com.evolveum.midpoint.schema.util.SchemaDebugUtil; import com.evolveum.midpoint.util.Cloner; import com.evolveum.midpoint.util.DebugUtil; import com.evolveum.midpoint.xml.ns._public.common.common_3.AssignmentPolicyEnforcementType; import com.evolveum.midpoint.xml.ns._public.common.common_3.FocusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.LayerType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType; import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultStatusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ProjectionPolicyType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceObjectTypeDefinitionType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceObjectTypeDependencyType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowAssociationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowDiscriminatorType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import com.evolveum.midpoint.xml.ns._public.common.common_3.SynchronizationSituationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ValuePolicyType; /** * @author semancik * */ public class LensProjectionContext extends LensElementContext<ShadowType> implements ModelProjectionContext { private ObjectDelta<ShadowType> syncDelta; /** * If set to true: absolute state of this projection was detected by the 
synchronization. * This is mostly for debugging and visibility. It is not used by projection logic. */ private boolean syncAbsoluteTrigger = false; /** * The wave in which this resource should be processed. Initial value of -1 means "undetermined". */ private int wave = -1; /** * Indicates that the wave computation is still in progress. */ private transient boolean waveIncomplete = false; /** * Definition of account type. */ private ResourceShadowDiscriminator resourceShadowDiscriminator; private boolean fullShadow = false; /** * True if the account is "legal" (assigned to the user). It may be false for accounts that are either * found to be illegal by live sync, were unassigned from user, etc. * If set to null the situation is not yet known. Null is a typical value when the context is constructed. */ private boolean isAssigned; /** * True if the account should be part of the synchronization. E.g. outbound expression should be applied to it. */ private boolean isActive; /** * True if there is a valid assignment for this projection and/or the policy allows such project to exist. */ private Boolean isLegal = null; private Boolean isLegalOld = null; private boolean isExists; /** * Decision regarding the account. It indicated what the engine has DECIDED TO DO with the context. * If set to null no decision was made yet. Null is also a typical value when the context is created. */ private SynchronizationPolicyDecision synchronizationPolicyDecision; /** * True if we want to reconcile account in this context. */ private boolean doReconciliation; /** * Synchronization situation as it was originally detected by the synchronization code (SynchronizationService). * This is mostly for debug purposes. Projector and clockwork do not need to care about this. * The synchronization intent is used instead. 
*/ private SynchronizationSituationType synchronizationSituationDetected = null; /** * Synchronization situation which was the result of synchronization reaction (projector and clockwork run). * This is mostly for debug purposes. Projector and clockwork do not care about this (except for setting it). * The synchronization decision is used instead. */ private SynchronizationSituationType synchronizationSituationResolved = null; /** * Delta set triple for accounts. Specifies which accounts should be added, removed or stay as they are. * It tells almost nothing about attributes directly although the information about attributes are inside * each account construction (in a form of ValueConstruction that contains attribute delta triples). * * Intermediary computation result. It is stored to allow re-computing of account constructions during * iterative computations. */ private transient PrismValueDeltaSetTriple<PrismPropertyValue<Construction>> constructionDeltaSetTriple; private transient Construction outboundConstruction; private transient Collection<ResourceObjectTypeDependencyType> dependencies = null; private transient Map<QName, DeltaSetTriple<ItemValueWithOrigin<PrismPropertyValue<?>>>> squeezedAttributes; private transient Map<QName, DeltaSetTriple<ItemValueWithOrigin<PrismContainerValue<ShadowAssociationType>>>> squeezedAssociations; private ValuePolicyType accountPasswordPolicy; /** * Resource that hosts this projection. */ transient private ResourceType resource; LensProjectionContext(LensContext<? 
extends ObjectType> lensContext, ResourceShadowDiscriminator resourceAccountType) { super(ShadowType.class, lensContext); this.resourceShadowDiscriminator = resourceAccountType; this.isAssigned = false; } public ObjectDelta<ShadowType> getSyncDelta() { return syncDelta; } public void setSyncDelta(ObjectDelta<ShadowType> syncDelta) { this.syncDelta = syncDelta; } public boolean isSyncAbsoluteTrigger() { return syncAbsoluteTrigger; } public void setSyncAbsoluteTrigger(boolean syncAbsoluteTrigger) { this.syncAbsoluteTrigger = syncAbsoluteTrigger; } public int getWave() { return wave; } public void setWave(int wave) { this.wave = wave; } public boolean isWaveIncomplete() { return waveIncomplete; } public void setWaveIncomplete(boolean waveIncomplete) { this.waveIncomplete = waveIncomplete; } public boolean isDoReconciliation() { return doReconciliation; } public void setDoReconciliation(boolean doReconciliation) { this.doReconciliation = doReconciliation; } public ResourceShadowDiscriminator getResourceShadowDiscriminator() { return resourceShadowDiscriminator; } public void setResourceShadowDiscriminator(ResourceShadowDiscriminator resourceShadowDiscriminator) { this.resourceShadowDiscriminator = resourceShadowDiscriminator; } public boolean compareResourceShadowDiscriminator(ResourceShadowDiscriminator rsd, boolean compareOrder) { Validate.notNull(rsd.getResourceOid()); if (resourceShadowDiscriminator == null) { // This may be valid case e.g. 
in case of broken contexts or if a context is just loading return false; } if (!rsd.getResourceOid().equals(resourceShadowDiscriminator.getResourceOid())) { return false; } if (!rsd.getKind().equals(resourceShadowDiscriminator.getKind())) { return false; } if (rsd.isThombstone() != resourceShadowDiscriminator.isThombstone()) { return false; } if (rsd.getIntent() == null) { try { if (!getRefinedAccountDefinition().isDefaultInAKind()) { return false; } } catch (SchemaException e) { throw new SystemException("Internal error: "+e.getMessage(), e); } } else if (!rsd.getIntent().equals(resourceShadowDiscriminator.getIntent())) { return false; } if (compareOrder && rsd.getOrder() != resourceShadowDiscriminator.getOrder()) { return false; } return true; } public boolean isThombstone() { if (resourceShadowDiscriminator == null) { return false; } return resourceShadowDiscriminator.isThombstone(); } public void addAccountSyncDelta(ObjectDelta<ShadowType> delta) throws SchemaException { if (syncDelta == null) { syncDelta = delta; } else { syncDelta.merge(delta); } } public boolean isAdd() { if (synchronizationPolicyDecision == SynchronizationPolicyDecision.ADD) { return true; } else if (synchronizationPolicyDecision != null){ return false; } return super.isAdd(); } public boolean isModify() { if (synchronizationPolicyDecision == SynchronizationPolicyDecision.KEEP) { return true; } else if (synchronizationPolicyDecision != null){ return false; } return super.isModify(); } public boolean isDelete() { if (synchronizationPolicyDecision == SynchronizationPolicyDecision.DELETE) { return true; } else if (synchronizationPolicyDecision != null){ return false; } if (syncDelta != null && syncDelta.isDelete()) { return true; } return super.isDelete(); } public ResourceType getResource() { return resource; } public void setResource(ResourceType resource) { this.resource = resource; } public boolean isAssigned() { return isAssigned; } public void setAssigned(boolean isAssigned) { 
this.isAssigned = isAssigned; } public boolean isActive() { return isActive; } public void setActive(boolean isActive) { this.isActive = isActive; } public Boolean isLegal() { return isLegal; } public void setLegal(Boolean isLegal) { this.isLegal = isLegal; } public Boolean isLegalOld() { return isLegalOld; } public void setLegalOld(Boolean isLegalOld) { this.isLegalOld = isLegalOld; } public boolean isExists() { return isExists; } public void setExists(boolean exists) { this.isExists = exists; } public SynchronizationPolicyDecision getSynchronizationPolicyDecision() { return synchronizationPolicyDecision; } public void setSynchronizationPolicyDecision(SynchronizationPolicyDecision policyDecision) { this.synchronizationPolicyDecision = policyDecision; } public SynchronizationSituationType getSynchronizationSituationDetected() { return synchronizationSituationDetected; } public void setSynchronizationSituationDetected( SynchronizationSituationType synchronizationSituationDetected) { this.synchronizationSituationDetected = synchronizationSituationDetected; } public SynchronizationSituationType getSynchronizationSituationResolved() { return synchronizationSituationResolved; } public void setSynchronizationSituationResolved( SynchronizationSituationType synchronizationSituationResolved) { this.synchronizationSituationResolved = synchronizationSituationResolved; } public boolean isFullShadow() { return fullShadow; } /** * Returns true if full shadow is available, either loaded or in a create delta. 
*/ public boolean hasFullShadow() { if (synchronizationPolicyDecision == SynchronizationPolicyDecision.ADD) { return true; } return isFullShadow(); } public void setFullShadow(boolean fullShadow) { this.fullShadow = fullShadow; } public ShadowKindType getKind() { ResourceShadowDiscriminator discr = getResourceShadowDiscriminator(); if (discr != null) { return discr.getKind(); } if (getObjectOld()!=null) { return getObjectOld().asObjectable().getKind(); } if (getObjectCurrent()!=null) { return getObjectCurrent().asObjectable().getKind(); } if (getObjectNew()!=null) { return getObjectNew().asObjectable().getKind(); } return ShadowKindType.ACCOUNT; } public PrismValueDeltaSetTriple<PrismPropertyValue<Construction>> getConstructionDeltaSetTriple() { return constructionDeltaSetTriple; } public void setConstructionDeltaSetTriple( PrismValueDeltaSetTriple<PrismPropertyValue<Construction>> constructionDeltaSetTriple) { this.constructionDeltaSetTriple = constructionDeltaSetTriple; } public Construction getOutboundConstruction() { return outboundConstruction; } public void setOutboundConstruction(Construction outboundConstruction) { this.outboundConstruction = outboundConstruction; } public Map<QName, DeltaSetTriple<ItemValueWithOrigin<PrismPropertyValue<?>>>> getSqueezedAttributes() { return squeezedAttributes; } public void setSqueezedAttributes(Map<QName, DeltaSetTriple<ItemValueWithOrigin<PrismPropertyValue<?>>>> squeezedAttributes) { this.squeezedAttributes = squeezedAttributes; } public Map<QName, DeltaSetTriple<ItemValueWithOrigin<PrismContainerValue<ShadowAssociationType>>>> getSqueezedAssociations() { return squeezedAssociations; } public void setSqueezedAssociations( Map<QName, DeltaSetTriple<ItemValueWithOrigin<PrismContainerValue<ShadowAssociationType>>>> squeezedAssociations) { this.squeezedAssociations = squeezedAssociations; } public ResourceObjectTypeDefinitionType getResourceObjectTypeDefinitionType() { if (synchronizationPolicyDecision == 
SynchronizationPolicyDecision.BROKEN) { return null; } ResourceObjectTypeDefinitionType def = ResourceTypeUtil.getResourceObjectTypeDefinitionType( resource, getResourceShadowDiscriminator().getKind(), resourceShadowDiscriminator.getIntent()); return def; } private ResourceSchema getResourceSchema() throws SchemaException { return RefinedResourceSchema.getResourceSchema(resource, getNotNullPrismContext()); } public RefinedResourceSchema getRefinedResourceSchema() throws SchemaException { if (resource == null) { return null; } return RefinedResourceSchema.getRefinedSchema(resource, LayerType.MODEL, getNotNullPrismContext()); } public RefinedObjectClassDefinition getRefinedAccountDefinition() throws SchemaException { RefinedResourceSchema refinedSchema = getRefinedResourceSchema(); if (refinedSchema == null) { return null; } return refinedSchema.getRefinedDefinition(getResourceShadowDiscriminator().getKind(), getResourceShadowDiscriminator().getIntent()); } public Collection<ResourceObjectTypeDependencyType> getDependencies() { if (dependencies == null) { ResourceObjectTypeDefinitionType resourceAccountTypeDefinitionType = getResourceObjectTypeDefinitionType(); if (resourceAccountTypeDefinitionType == null) { // No dependencies. But we cannot set null as that means "unknown". So let's set empty collection instead. 
dependencies = new ArrayList<ResourceObjectTypeDependencyType>(); } else { dependencies = resourceAccountTypeDefinitionType.getDependency(); } } return dependencies; } public ValuePolicyType getAccountPasswordPolicy() { return accountPasswordPolicy; } public void setAccountPasswordPolicy(ValuePolicyType accountPasswordPolicy) { this.accountPasswordPolicy = accountPasswordPolicy; } public ValuePolicyType getEffectivePasswordPolicy() { if (accountPasswordPolicy != null) { return accountPasswordPolicy; } if (getLensContext().getFocusContext().getOrgPasswordPolicy() != null){ return getLensContext().getFocusContext().getOrgPasswordPolicy(); } return getLensContext().getGlobalPasswordPolicy(); } public AssignmentPolicyEnforcementType getAssignmentPolicyEnforcementType() { // TODO: per-resource assignment enforcement ResourceType resource = getResource(); ProjectionPolicyType globalAccountSynchronizationSettings = null; if (resource != null){ globalAccountSynchronizationSettings = resource.getProjection(); } if (globalAccountSynchronizationSettings == null) { globalAccountSynchronizationSettings = getLensContext().getAccountSynchronizationSettings(); } AssignmentPolicyEnforcementType globalAssignmentPolicyEnforcement = MiscSchemaUtil.getAssignmentPolicyEnforcementType(globalAccountSynchronizationSettings); return globalAssignmentPolicyEnforcement; } public boolean isLegalize(){ ResourceType resource = getResource(); ProjectionPolicyType globalAccountSynchronizationSettings = null; if (resource != null){ globalAccountSynchronizationSettings = resource.getProjection(); } if (globalAccountSynchronizationSettings == null) { globalAccountSynchronizationSettings = getLensContext().getAccountSynchronizationSettings(); } if (globalAccountSynchronizationSettings == null){ return false; } if (globalAccountSynchronizationSettings.isLegalize() == null){ return false; } return globalAccountSynchronizationSettings.isLegalize(); } /** * Recomputes the new state of account (accountNew). 
It is computed by applying deltas to the old state (accountOld). * Assuming that oldAccount is already set (or is null if it does not exist) */ public void recompute() throws SchemaException { ObjectDelta<ShadowType> accDelta = getDelta(); PrismObject<ShadowType> base = getObjectCurrent(); if (base == null) { base = getObjectOld(); } ObjectDelta<ShadowType> syncDelta = getSyncDelta(); if (base == null && syncDelta != null && ChangeType.ADD.equals(syncDelta.getChangeType())) { PrismObject<ShadowType> objectToAdd = syncDelta.getObjectToAdd(); if (objectToAdd != null) { PrismObjectDefinition<ShadowType> objectDefinition = objectToAdd.getDefinition(); // TODO: remove constructor, use some factory method instead base = new PrismObject<ShadowType>(objectToAdd.getElementName(), objectDefinition, getNotNullPrismContext()); base = syncDelta.computeChangedObject(base); } } if (accDelta == null) { // No change setObjectNew(base); return; } if (base == null && accDelta.isModify()) { RefinedObjectClassDefinition rAccountDef = getRefinedAccountDefinition(); if (rAccountDef != null) { base = (PrismObject<ShadowType>) rAccountDef.createBlankShadow(); } } setObjectNew(accDelta.computeChangedObject(base)); } public void clearIntermediateResults() { constructionDeltaSetTriple = null; outboundConstruction = null; squeezedAttributes = null; } /** * Distribute the resource that's in the context into all the prism objects (old, new) and deltas. * The resourceRef will not just contain the OID but also full resource object. This may optimize handling * of the objects in upper layers (e.g. GUI). 
*/ public void distributeResource() { ResourceType resourceType = getResource(); if (resourceType == null) { return; } PrismObject<ResourceType> resource = resourceType.asPrismObject(); distributeResourceObject(getObjectOld(), resource); distributeResourceObject(getObjectCurrent(), resource); distributeResourceObject(getObjectNew(), resource); distributeResourceDelta(getPrimaryDelta(), resource); distributeResourceDelta(getSecondaryDelta(), resource); } private void distributeResourceObject(PrismObject<ShadowType> object, PrismObject<ResourceType> resource) { if (object == null) { return; } PrismReference resourceRef = object.findReference(ShadowType.F_RESOURCE_REF); if (resourceRef != null) { distributeResourceValues(resourceRef.getValues(), resource); } } private void distributeResourceValue(PrismReferenceValue resourceRefVal, PrismObject<ResourceType> resource) { if (resourceRefVal != null) { resourceRefVal.setObject(resource); } } private void distributeResourceDelta(ObjectDelta<ShadowType> delta, PrismObject<ResourceType> resource) { if (delta == null) { return; } if (delta.isAdd()) { distributeResourceObject(delta.getObjectToAdd(), resource); } else if (delta.isModify()) { ReferenceDelta referenceDelta = delta.findReferenceModification(ShadowType.F_RESOURCE_REF); if (referenceDelta != null) { distributeResourceValues(referenceDelta.getValuesToAdd(), resource); distributeResourceValues(referenceDelta.getValuesToDelete(), resource); distributeResourceValues(referenceDelta.getValuesToReplace(), resource); } } // Nothing to do for DELETE delta } private void distributeResourceValues(Collection<PrismReferenceValue> values, PrismObject<ResourceType> resource) { if (values == null) { return; } for(PrismReferenceValue pval: values) { distributeResourceValue(pval, resource); } } /** * Returns delta suitable for execution. The primary and secondary deltas may not make complete sense all by themselves. * E.g. 
they may both be MODIFY deltas even in case that the account should be created. The deltas begin to make sense * only if combined with sync decision. This method provides the deltas all combined and ready for execution. */ public ObjectDelta<ShadowType> getExecutableDelta() throws SchemaException { SynchronizationPolicyDecision policyDecision = getSynchronizationPolicyDecision(); ObjectDelta<ShadowType> origDelta = getDelta(); if (policyDecision == SynchronizationPolicyDecision.ADD) { if (origDelta == null || origDelta.isModify()) { // We need to convert modify delta to ADD ObjectDelta<ShadowType> addDelta = new ObjectDelta<ShadowType>(getObjectTypeClass(), ChangeType.ADD, getPrismContext()); RefinedObjectClassDefinition rAccount = getRefinedAccountDefinition(); if (rAccount == null) { throw new IllegalStateException("Definition for account type " + getResourceShadowDiscriminator() + " not found in the context, but it should be there"); } PrismObject<ShadowType> newAccount = (PrismObject<ShadowType>) rAccount.createBlankShadow(); addDelta.setObjectToAdd(newAccount); if (origDelta != null) { addDelta.merge(origDelta); } return addDelta; } } else if (policyDecision == SynchronizationPolicyDecision.KEEP) { // Any delta is OK } else if (policyDecision == SynchronizationPolicyDecision.DELETE) { ObjectDelta<ShadowType> deleteDelta = new ObjectDelta<ShadowType>(getObjectTypeClass(), ChangeType.DELETE, getPrismContext()); String oid = getOid(); if (oid == null) { throw new IllegalStateException( "Internal error: account context OID is null during attempt to create delete secondary delta; context=" +this); } deleteDelta.setOid(oid); return deleteDelta; } else { // This is either UNLINK or null, both are in fact the same as KEEP // Any delta is OK } return origDelta; } public void checkConsistence() { checkConsistence(null, true, false); } public void checkConsistence(String contextDesc, boolean fresh, boolean force) { if (synchronizationPolicyDecision == 
SynchronizationPolicyDecision.IGNORE) { // No not check these. they may be quite wild. return; } super.checkConsistence(contextDesc); if (synchronizationPolicyDecision == SynchronizationPolicyDecision.BROKEN) { return; } if (fresh && !force) { if (resource == null) { throw new IllegalStateException("Null resource in "+this + (contextDesc == null ? "" : " in " +contextDesc)); } if (resourceShadowDiscriminator == null) { throw new IllegalStateException("Null resource account type in "+this + (contextDesc == null ? "" : " in " +contextDesc)); } } if (syncDelta != null) { try { syncDelta.checkConsistence(true, true, true); } catch (IllegalArgumentException e) { throw new IllegalArgumentException(e.getMessage()+"; in "+getElementDesc()+" sync delta in "+this + (contextDesc == null ? "" : " in " +contextDesc), e); } catch (IllegalStateException e) { throw new IllegalStateException(e.getMessage()+"; in "+getElementDesc()+" sync delta in "+this + (contextDesc == null ? "" : " in " +contextDesc), e); } } } protected boolean isRequireSecondardyDeltaOid() { if (synchronizationPolicyDecision == SynchronizationPolicyDecision.ADD || synchronizationPolicyDecision == SynchronizationPolicyDecision.BROKEN || synchronizationPolicyDecision == SynchronizationPolicyDecision.IGNORE) { return false; } if (getResourceShadowDiscriminator() != null && getResourceShadowDiscriminator().getOrder() > 0) { // These may not have the OID yet return false; } return super.isRequireSecondardyDeltaOid(); } @Override public void cleanup() { super.cleanup(); synchronizationPolicyDecision = null; // isLegal = null; // isLegalOld = null; isAssigned = false; isActive = false; } @Override public void normalize() { super.normalize(); if (syncDelta != null) { syncDelta.normalize(); } } @Override public void reset() { super.reset(); wave = -1; fullShadow = false; isAssigned = false; isActive = false; synchronizationPolicyDecision = null; constructionDeltaSetTriple = null; outboundConstruction = null; 
dependencies = null; squeezedAttributes = null; accountPasswordPolicy = null; } @Override public void adopt(PrismContext prismContext) throws SchemaException { super.adopt(prismContext); if (syncDelta != null) { prismContext.adopt(syncDelta); } } @Override public LensProjectionContext clone(LensContext<? extends ObjectType> lensContext) { LensProjectionContext clone = new LensProjectionContext(lensContext, resourceShadowDiscriminator); copyValues(clone, lensContext); return clone; } protected void copyValues(LensProjectionContext clone, LensContext<? extends ObjectType> lensContext) { super.copyValues(clone, lensContext); // do NOT clone transient values such as accountConstructionDeltaSetTriple // these are not meant to be cloned and they are also not directly clonnable clone.dependencies = this.dependencies; clone.doReconciliation = this.doReconciliation; clone.fullShadow = this.fullShadow; clone.isAssigned = this.isAssigned; clone.outboundConstruction = this.outboundConstruction; clone.synchronizationPolicyDecision = this.synchronizationPolicyDecision; clone.resource = this.resource; clone.resourceShadowDiscriminator = this.resourceShadowDiscriminator; clone.squeezedAttributes = cloneSqueezedAttributes(); if (this.syncDelta != null) { clone.syncDelta = this.syncDelta.clone(); } clone.wave = this.wave; } private Map<QName, DeltaSetTriple<ItemValueWithOrigin<PrismPropertyValue<?>>>> cloneSqueezedAttributes() { if (squeezedAttributes == null) { return null; } Map<QName, DeltaSetTriple<ItemValueWithOrigin<PrismPropertyValue<?>>>> clonedMap = new HashMap<QName, DeltaSetTriple<ItemValueWithOrigin<PrismPropertyValue<?>>>>(); Cloner<ItemValueWithOrigin<PrismPropertyValue<?>>> cloner = new Cloner<ItemValueWithOrigin<PrismPropertyValue<?>>>() { @Override public ItemValueWithOrigin<PrismPropertyValue<?>> clone(ItemValueWithOrigin<PrismPropertyValue<?>> original) { return original.clone(); } }; for (Entry<QName, DeltaSetTriple<ItemValueWithOrigin<PrismPropertyValue<?>>>> 
entry: squeezedAttributes.entrySet()) { clonedMap.put(entry.getKey(), entry.getValue().clone(cloner)); } return clonedMap; } /** * Returns true if the projection has any value for specified attribute. */ public boolean hasValueForAttribute(QName attributeName) throws SchemaException { ItemPath attrPath = new ItemPath(ShadowType.F_ATTRIBUTES, attributeName); if (getObjectNew() != null) { PrismProperty<?> attrNew = getObjectNew().findProperty(attrPath); if (attrNew != null && !attrNew.isEmpty()) { return true; } } return false; } private boolean hasValueForAttribute(QName attributeName, Collection<PrismPropertyValue<Construction>> acPpvSet) { if (acPpvSet == null) { return false; } for (PrismPropertyValue<Construction> acPpv: acPpvSet) { Construction ac = acPpv.getValue(); if (ac.hasValueForAttribute(attributeName)) { return true; } } return false; } public AccountOperation getOperation() { if (isAdd()) { return AccountOperation.ADD; } if (isDelete()) { return AccountOperation.DELETE; } return AccountOperation.MODIFY; } @Override public void checkEncrypted() { super.checkEncrypted(); if (syncDelta != null) { CryptoUtil.checkEncrypted(syncDelta); } } public String getHumanReadableName() { StringBuilder sb = new StringBuilder(); sb.append("account("); String humanReadableAccountIdentifier = getHumanReadableIdentifier(); if (StringUtils.isEmpty(humanReadableAccountIdentifier)) { sb.append("no ID"); } else { sb.append("ID "); sb.append(humanReadableAccountIdentifier); } ResourceShadowDiscriminator discr = getResourceShadowDiscriminator(); if (discr != null) { sb.append(", type '"); sb.append(discr.getIntent()); sb.append("', "); if (discr.getOrder() != 0) { sb.append("order ").append(discr.getOrder()).append(", "); } } else { sb.append(" (no discriminator) "); } sb.append(getResource()); sb.append(")"); return sb.toString(); } private String getHumanReadableIdentifier() { PrismObject<ShadowType> object = getObjectNew(); if (object == null) { object = getObjectOld(); } if 
(object == null) { object = getObjectCurrent(); } if (object == null) { return null; } if (object.canRepresent(ShadowType.class)) { PrismObject<ShadowType> shadow = (PrismObject<ShadowType>)object; Collection<ResourceAttribute<?>> identifiers = ShadowUtil.getIdentifiers(shadow); if (identifiers == null) { return null; } StringBuilder sb = new StringBuilder(); Iterator<ResourceAttribute<?>> iterator = identifiers.iterator(); while (iterator.hasNext()) { ResourceAttribute<?> id = iterator.next(); sb.append(id.toHumanReadableString()); if (iterator.hasNext()) { sb.append(","); } } return sb.toString(); } else { return object.toString(); } } @Override public String debugDump() { return debugDump(0); } @Override public String debugDump(int indent) { return debugDump(indent, true); } public String debugDump(int indent, boolean showTriples) { StringBuilder sb = new StringBuilder(); SchemaDebugUtil.indentDebugDump(sb, indent); sb.append("PROJECTION "); sb.append(getObjectTypeClass() == null ? "null" : getObjectTypeClass().getSimpleName()); sb.append(" "); sb.append(getResourceShadowDiscriminator()); if (resource != null) { sb.append(" : "); sb.append(resource.getName().getOrig()); } sb.append("\n"); SchemaDebugUtil.indentDebugDump(sb, indent + 1); sb.append("OID: ").append(getOid()); sb.append(", wave ").append(wave); if (fullShadow) { sb.append(", full"); } else { sb.append(", shadow"); } sb.append(", exists=").append(isExists); sb.append(", assigned=").append(isAssigned); sb.append(", active=").append(isActive); sb.append(", legal=").append(isLegalOld).append("->").append(isLegal); sb.append(", recon=").append(doReconciliation); sb.append(", syncIntent=").append(getSynchronizationIntent()); sb.append(", decision=").append(synchronizationPolicyDecision); if (!isFresh()) { sb.append(", NOT FRESH"); } if (resourceShadowDiscriminator != null && resourceShadowDiscriminator.isThombstone()) { sb.append(", THOMBSTONE"); } if (syncAbsoluteTrigger) { sb.append(", SYNC TRIGGER"); } 
// --- tail of debugDump(...): appends iteration info and the labeled dumps of the
// --- context state (old/current/new object and the tracked deltas) to "sb".
if (getIteration() != 0) {
    sb.append(", iteration=").append(getIteration()).append(" (").append(getIterationToken()).append(")");
}
sb.append("\n");
DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("old"), getObjectOld(), indent + 1);
sb.append("\n");
DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("current"), getObjectCurrent(), indent + 1);
sb.append("\n");
DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("new"), getObjectNew(), indent + 1);
sb.append("\n");
DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("primary delta"), getPrimaryDelta(), indent + 1);
sb.append("\n");
DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("secondary delta"), getSecondaryDelta(), indent + 1);
sb.append("\n");
DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("sync delta"), getSyncDelta(), indent + 1);
sb.append("\n");
DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("executed deltas"), getExecutedDeltas(), indent+1);
// Triple/squeezed structures are verbose; dumped only when explicitly requested.
if (showTriples) {
    sb.append("\n");
    DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("constructionDeltaSetTriple"), constructionDeltaSetTriple, indent + 1);
    sb.append("\n");
    DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("outbound account construction"), outboundConstruction, indent + 1);
    sb.append("\n");
    DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("squeezed attributes"), squeezedAttributes, indent + 1);
    sb.append("\n");
    DebugUtil.debugDumpWithLabel(sb, getDebugDumpTitle("squeezed associations"), squeezedAssociations, indent + 1);
    // This is just a debug thing
    // sb.append("\n");
    // DebugUtil.indentDebugDump(sb, indent);
    // sb.append("ACCOUNT dependencies\n");
    // sb.append(DebugUtil.debugDump(dependencies, indent + 1));
}
return sb.toString();
}

/** Default short description used in log/debug output when no better name is available. */
@Override
protected String getElementDefaultDesc() {
    return "projection";
}

/** Compact one-line form: {@code LensProjectionContext(<type>:<oid>[ on <resource>])}. */
@Override
public String toString() {
    return "LensProjectionContext(" + (getObjectTypeClass() == null ? "null" : getObjectTypeClass().getSimpleName()) + ":" + getOid() + ( resource == null ? "" : " on " + resource ) + ")";
}

/**
 * Return a human readable name of the projection object suitable for logs.
 */
public String toHumanReadableString() {
    if (resourceShadowDiscriminator == null) {
        // NOTE(review): renders as "(null<resource>)" with no separator after "null" --
        // possibly a missing space; confirm the intended format before changing.
        return "(null" + resource + ")";
    }
    if (resource != null) {
        return "("+getKindValue(resourceShadowDiscriminator.getKind()) + " ("+resourceShadowDiscriminator.getIntent()+") on " + resource + ")";
    } else {
        // Resource object not resolved; fall back to the OID carried by the discriminator.
        return "("+getKindValue(resourceShadowDiscriminator.getKind()) + " ("+resourceShadowDiscriminator.getIntent()+") on " + resourceShadowDiscriminator.getResourceOid() + ")";
    }
}

/** Human-readable kind of this projection, or the generic "resource object" when unknown. */
public String getHumanReadableKind() {
    if (resourceShadowDiscriminator == null) {
        return "resource object";
    }
    return getKindValue(resourceShadowDiscriminator.getKind());
}

/** Null-safe string form of a {@link ShadowKindType}. */
private String getKindValue(ShadowKindType kind) {
    if (kind == null) {
        return "null";
    }
    return kind.value();
}

/** Short element description for log messages: the shadow kind, or "shadow" when unknown. */
@Override
protected String getElementDesc() {
    if (resourceShadowDiscriminator == null) {
        return "shadow";
    }
    return getKindValue(resourceShadowDiscriminator.getKind());
}

/**
 * Serializes this projection context into a new value of the given prism container
 * (the counterpart of {@link #fromLensProjectionContextType}).
 *
 * @param lensProjectionContextTypeContainer container receiving the new serialized value
 * @throws SchemaException if the sync delta cannot be converted to its bean form
 */
public void addToPrismContainer(PrismContainer<LensProjectionContextType> lensProjectionContextTypeContainer) throws SchemaException {
    LensProjectionContextType lensProjectionContextType = lensProjectionContextTypeContainer.createNewValue().asContainerable();
    // Common (superclass) state first, then projection-specific fields.
    super.storeIntoLensElementContextType(lensProjectionContextType);
    lensProjectionContextType.setSyncDelta(syncDelta != null ? DeltaConvertor.toObjectDeltaType(syncDelta) : null);
    lensProjectionContextType.setWave(wave);
    lensProjectionContextType.setResourceShadowDiscriminator(resourceShadowDiscriminator != null ? resourceShadowDiscriminator.toResourceShadowDiscriminatorType() : null);
    lensProjectionContextType.setFullShadow(fullShadow);
    lensProjectionContextType.setIsAssigned(isAssigned);
    lensProjectionContextType.setIsActive(isActive);
    lensProjectionContextType.setIsLegal(isLegal);
    lensProjectionContextType.setIsLegalOld(isLegalOld);
    lensProjectionContextType.setIsExists(isExists);
    lensProjectionContextType.setSynchronizationPolicyDecision(synchronizationPolicyDecision != null ? synchronizationPolicyDecision.toSynchronizationPolicyDecisionType() : null);
    lensProjectionContextType.setDoReconciliation(doReconciliation);
    lensProjectionContextType.setSynchronizationSituationDetected(synchronizationSituationDetected);
    lensProjectionContextType.setSynchronizationSituationResolved(synchronizationSituationResolved);
    lensProjectionContextType.setAccountPasswordPolicy(accountPasswordPolicy);
    lensProjectionContextType.setSyncAbsoluteTrigger(syncAbsoluteTrigger);
}

/**
 * Re-creates a projection context from its serialized bean form
 * (the counterpart of {@link #addToPrismContainer}). Absent boolean beans
 * default to {@code false}; an absent wave defaults to 0.
 *
 * @throws SystemException if the serialized form lacks the object type class
 */
public static LensProjectionContext fromLensProjectionContextType(LensProjectionContextType projectionContextType, LensContext lensContext, OperationResult result) throws SchemaException, ConfigurationException, ObjectNotFoundException, CommunicationException {
    String objectTypeClassString = projectionContextType.getObjectTypeClass();
    if (StringUtils.isEmpty(objectTypeClassString)) {
        // NOTE(review): the value is only validated here, never used below -- presumably it is
        // consumed by retrieveFromLensElementContextType; confirm against the superclass.
        throw new SystemException("Object type class is undefined in LensProjectionContextType");
    }
    ResourceShadowDiscriminator resourceShadowDiscriminator = ResourceShadowDiscriminator.fromResourceShadowDiscriminatorType(projectionContextType.getResourceShadowDiscriminator());
    LensProjectionContext projectionContext = new LensProjectionContext(lensContext, resourceShadowDiscriminator);
    // Restore the common (superclass) state, then the projection-specific fields.
    projectionContext.retrieveFromLensElementContextType(projectionContextType, result);
    if (projectionContextType.getSyncDelta() != null) {
        projectionContext.syncDelta = DeltaConvertor.createObjectDelta(projectionContextType.getSyncDelta(), lensContext.getPrismContext());
    } else {
        projectionContext.syncDelta = null;
    }
    projectionContext.wave = projectionContextType.getWave() != null ? projectionContextType.getWave() : 0;
    projectionContext.fullShadow = projectionContextType.isFullShadow() != null ? projectionContextType.isFullShadow() : false;
    projectionContext.isAssigned = projectionContextType.isIsAssigned() != null ? projectionContextType.isIsAssigned() : false;
    projectionContext.isActive = projectionContextType.isIsActive() != null ? projectionContextType.isIsActive() : false;
    projectionContext.isLegal = projectionContextType.isIsLegal();
    // NOTE(review): isLegalOld is written by addToPrismContainer but never restored here --
    // confirm whether the asymmetry is intentional.
    projectionContext.isExists = projectionContextType.isIsExists() != null ? projectionContextType.isIsExists() : false;
    projectionContext.synchronizationPolicyDecision = SynchronizationPolicyDecision.fromSynchronizationPolicyDecisionType(projectionContextType.getSynchronizationPolicyDecision());
    projectionContext.doReconciliation = projectionContextType.isDoReconciliation() != null ? projectionContextType.isDoReconciliation() : false;
    projectionContext.synchronizationSituationDetected = projectionContextType.getSynchronizationSituationDetected();
    projectionContext.synchronizationSituationResolved = projectionContextType.getSynchronizationSituationResolved();
    projectionContext.accountPasswordPolicy = projectionContextType.getAccountPasswordPolicy();
    projectionContext.syncAbsoluteTrigger = projectionContextType.isSyncAbsoluteTrigger();
    return projectionContext;
}

// determines whether full shadow is present, based on operation result got from provisioning
public void determineFullShadowFlag(OperationResultType fetchResult) {
    // A partial or fatal provisioning error means the shadow data may be incomplete.
    if (fetchResult != null && (fetchResult.getStatus() == OperationResultStatusType.PARTIAL_ERROR || fetchResult.getStatus() == OperationResultStatusType.FATAL_ERROR)) {
        // todo what about other kinds of status? [e.g. in-progress]
        setFullShadow(false);
    } else {
        setFullShadow(true);
    }
}
}
sabriarabacioglu/engerek
model/model-impl/src/main/java/com/evolveum/midpoint/model/impl/lens/LensProjectionContext.java
Java
apache-2.0
42,728
/*
 * Copyright 2017 Exorath
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.exorath.plugin.game.cakewars.rewards;

import com.exorath.plugin.game.cakewars.Main;
import com.exorath.service.currency.api.CurrencyServiceAPI;
import com.exorath.victoryHandler.rewards.CurrencyReward;
import net.md_5.bungee.api.ChatColor;

/**
 * Victory reward that grants crumbs for each kill a player made during a
 * Cake Wars game. The payout amount and the human-readable reason are
 * recomputed every time a kill is registered.
 *
 * <p>Created by toonsev on 5/31/2017.
 */
public class KillsReward extends CurrencyReward {

    /** Number of crumbs awarded per kill. */
    public static final int CRUMBS_PER_KILL = 2;

    /** Running kill count; drives both the reward amount and the reason text. */
    private int kills;

    /**
     * Creates a kills reward paying out in the gold-colored "Crumbs" currency.
     * Starts with an amount of zero until kills are registered.
     *
     * @param currencyServiceAPI service used to credit the currency on payout
     */
    public KillsReward(CurrencyServiceAPI currencyServiceAPI) {
        super(null, currencyServiceAPI, Main.CRUMBS_CURRENCY, 0);
        setCurrencyColor(ChatColor.GOLD);
        setCurrencyName("Crumbs");
    }

    /**
     * Registers one more kill: increments the counter, recomputes the total
     * crumb amount and refreshes the displayed reason.
     */
    public void addKill() {
        kills++;
        setAmount(kills * CRUMBS_PER_KILL);
        // Bug fix: the reason previously read "Killing 1 Players" after a single kill;
        // use the singular form when exactly one kill has been recorded.
        setReason("Killing " + kills + (kills == 1 ? " Player" : " Players"));
    }
}
Exorath/CakeWarsGamePlugin
src/main/java/com/exorath/plugin/game/cakewars/rewards/KillsReward.java
Java
apache-2.0
1,398
#!/usr/bin/env python # pylint: disable=missing-docstring # flake8: noqa: T001 # ___ ___ _ _ ___ ___ _ _____ ___ ___ # / __| __| \| | __| _ \ /_\_ _| __| \ # | (_ | _|| .` | _|| / / _ \| | | _|| |) | # \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____ # | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _| # | |) | (_) | | .` | (_) || | | _|| |) | | | | # |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_| # # Copyright 2016 Red Hat, Inc. and/or its affiliates # and other contributors as indicated by the @author tags. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # -*- -*- -*- Begin included fragment: lib/import.py -*- -*- -*- ''' OpenShiftCLI class that wraps the oc commands in a subprocess ''' # pylint: disable=too-many-lines from __future__ import print_function import atexit import copy import json import os import re import shutil import subprocess import tempfile # pylint: disable=import-error try: import ruamel.yaml as yaml except ImportError: import yaml from ansible.module_utils.basic import AnsibleModule # -*- -*- -*- End included fragment: lib/import.py -*- -*- -*- # -*- -*- -*- Begin included fragment: doc/registry -*- -*- -*- DOCUMENTATION = ''' --- module: oc_adm_registry short_description: Module to manage openshift registry description: - Manage openshift registry programmatically. 
options: state: description: - The desired action when managing openshift registry - present - update or create the registry - absent - tear down the registry service and deploymentconfig - list - returns the current representiation of a registry required: false default: False aliases: [] kubeconfig: description: - The path for the kubeconfig file to use for authentication required: false default: /etc/origin/master/admin.kubeconfig aliases: [] debug: description: - Turn on debug output. required: false default: False aliases: [] name: description: - The name of the registry required: false default: None aliases: [] namespace: description: - The selector when filtering on node labels required: false default: None aliases: [] images: description: - The image to base this registry on - ${component} will be replaced with --type required: 'openshift3/ose-${component}:${version}' default: None aliases: [] latest_images: description: - If true, attempt to use the latest image for the registry instead of the latest release. required: false default: False aliases: [] labels: description: - A set of labels to uniquely identify the registry and its components. required: false default: None aliases: [] enforce_quota: description: - If set, the registry will refuse to write blobs if they exceed quota limits required: False default: False aliases: [] mount_host: description: - If set, the registry volume will be created as a host-mount at this path. required: False default: False aliases: [] ports: description: - A comma delimited list of ports or port pairs to expose on the registry pod. The default is set for 5000. required: False default: [5000] aliases: [] replicas: description: - The replication factor of the registry; commonly 2 when high availability is desired. required: False default: 1 aliases: [] selector: description: - Selector used to filter nodes on deployment. Used to run registries on a specific set of nodes. 
required: False default: None aliases: [] service_account: description: - Name of the service account to use to run the registry pod. required: False default: 'registry' aliases: [] tls_certificate: description: - An optional path to a PEM encoded certificate (which may contain the private key) for serving over TLS required: false default: None aliases: [] tls_key: description: - An optional path to a PEM encoded private key for serving over TLS required: false default: None aliases: [] volume_mounts: description: - The volume mounts for the registry. required: false default: None aliases: [] daemonset: description: - Use a daemonset instead of a deployment config. required: false default: False aliases: [] edits: description: - A list of modifications to make on the deploymentconfig required: false default: None aliases: [] env_vars: description: - A dictionary of modifications to make on the deploymentconfig. e.g. FOO: BAR required: false default: None aliases: [] force: description: - Force a registry update. 
required: false default: False aliases: [] author: - "Kenny Woodson <kwoodson@redhat.com>" extends_documentation_fragment: [] ''' EXAMPLES = ''' - name: create a secure registry oc_adm_registry: name: docker-registry service_account: registry replicas: 2 namespace: default selector: type=infra images: "registry.ops.openshift.com/openshift3/ose-${component}:${version}" env_vars: REGISTRY_CONFIGURATION_PATH: /etc/registryconfig/config.yml REGISTRY_HTTP_TLS_CERTIFICATE: /etc/secrets/registry.crt REGISTRY_HTTP_TLS_KEY: /etc/secrets/registry.key REGISTRY_HTTP_SECRET: supersecret volume_mounts: - path: /etc/secrets name: dockercerts type: secret secret_name: registry-secret - path: /etc/registryconfig name: dockersecrets type: secret secret_name: docker-registry-config edits: - key: spec.template.spec.containers[0].livenessProbe.httpGet.scheme value: HTTPS action: put - key: spec.template.spec.containers[0].readinessProbe.httpGet.scheme value: HTTPS action: put - key: spec.strategy.rollingParams value: intervalSeconds: 1 maxSurge: 50% maxUnavailable: 50% timeoutSeconds: 600 updatePeriodSeconds: 1 action: put - key: spec.template.spec.containers[0].resources.limits.memory value: 2G action: update - key: spec.template.spec.containers[0].resources.requests.memory value: 1G action: update register: registryout ''' # -*- -*- -*- End included fragment: doc/registry -*- -*- -*- # -*- -*- -*- Begin included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*- class YeditException(Exception): # pragma: no cover ''' Exception class for Yedit ''' pass # pylint: disable=too-many-public-methods class Yedit(object): # pragma: no cover ''' Class to modify yaml files ''' re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$" re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z{}/_-]+)" com_sep = set(['.', '#', '|', ':']) # pylint: disable=too-many-arguments def __init__(self, filename=None, content=None, content_type='yaml', separator='.', backup=False): self.content = content self._separator 
= separator self.filename = filename self.__yaml_dict = content self.content_type = content_type self.backup = backup self.load(content_type=self.content_type) if self.__yaml_dict is None: self.__yaml_dict = {} @property def separator(self): ''' getter method for separator ''' return self._separator @separator.setter def separator(self, inc_sep): ''' setter method for separator ''' self._separator = inc_sep @property def yaml_dict(self): ''' getter method for yaml_dict ''' return self.__yaml_dict @yaml_dict.setter def yaml_dict(self, value): ''' setter method for yaml_dict ''' self.__yaml_dict = value @staticmethod def parse_key(key, sep='.'): '''parse the key allowing the appropriate separator''' common_separators = list(Yedit.com_sep - set([sep])) return re.findall(Yedit.re_key.format(''.join(common_separators)), key) @staticmethod def valid_key(key, sep='.'): '''validate the incoming key''' common_separators = list(Yedit.com_sep - set([sep])) if not re.match(Yedit.re_valid_key.format(''.join(common_separators)), key): return False return True @staticmethod def remove_entry(data, key, sep='.'): ''' remove data at location key ''' if key == '' and isinstance(data, dict): data.clear() return True elif key == '' and isinstance(data, list): del data[:] return True if not (key and Yedit.valid_key(key, sep)) and \ isinstance(data, (list, dict)): return None key_indexes = Yedit.parse_key(key, sep) for arr_ind, dict_key in key_indexes[:-1]: if dict_key and isinstance(data, dict): data = data.get(dict_key) elif (arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1): data = data[int(arr_ind)] else: return None # process last index for remove # expected list entry if key_indexes[-1][0]: if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501 del data[int(key_indexes[-1][0])] return True # expected dict entry elif key_indexes[-1][1]: if isinstance(data, dict): del data[key_indexes[-1][1]] return True @staticmethod def 
add_entry(data, key, item=None, sep='.'): ''' Get an item from a dictionary with key notation a.b.c d = {'a': {'b': 'c'}}} key = a#b return c ''' if key == '': pass elif (not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for arr_ind, dict_key in key_indexes[:-1]: if dict_key: if isinstance(data, dict) and dict_key in data and data[dict_key]: # noqa: E501 data = data[dict_key] continue elif data and not isinstance(data, dict): raise YeditException("Unexpected item type found while going through key " + "path: {} (at key: {})".format(key, dict_key)) data[dict_key] = {} data = data[dict_key] elif (arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1): data = data[int(arr_ind)] else: raise YeditException("Unexpected item type found while going through key path: {}".format(key)) if key == '': data = item # process last index for add # expected list entry elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: # noqa: E501 data[int(key_indexes[-1][0])] = item # expected dict entry elif key_indexes[-1][1] and isinstance(data, dict): data[key_indexes[-1][1]] = item # didn't add/update to an existing list, nor add/update key to a dict # so we must have been provided some syntax like a.b.c[<int>] = "data" for a # non-existent array else: raise YeditException("Error adding to object at path: {}".format(key)) return data @staticmethod def get_entry(data, key, sep='.'): ''' Get an item from a dictionary with key notation a.b.c d = {'a': {'b': 'c'}}} key = a.b return c ''' if key == '': pass elif (not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict))): return None key_indexes = Yedit.parse_key(key, sep) for arr_ind, dict_key in key_indexes: if dict_key and isinstance(data, dict): data = data.get(dict_key) elif (arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1): data = data[int(arr_ind)] else: return None 
return data @staticmethod def _write(filename, contents): ''' Actually write the file contents to disk. This helps with mocking. ''' tmp_filename = filename + '.yedit' with open(tmp_filename, 'w') as yfd: yfd.write(contents) os.rename(tmp_filename, filename) def write(self): ''' write to file ''' if not self.filename: raise YeditException('Please specify a filename.') if self.backup and self.file_exists(): shutil.copy(self.filename, self.filename + '.orig') # Try to set format attributes if supported try: self.yaml_dict.fa.set_block_style() except AttributeError: pass # Try to use RoundTripDumper if supported. try: Yedit._write(self.filename, yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper)) except AttributeError: Yedit._write(self.filename, yaml.safe_dump(self.yaml_dict, default_flow_style=False)) return (True, self.yaml_dict) def read(self): ''' read from file ''' # check if it exists if self.filename is None or not self.file_exists(): return None contents = None with open(self.filename) as yfd: contents = yfd.read() return contents def file_exists(self): ''' return whether file exists ''' if os.path.exists(self.filename): return True return False def load(self, content_type='yaml'): ''' return yaml file ''' contents = self.read() if not contents and not self.content: return None if self.content: if isinstance(self.content, dict): self.yaml_dict = self.content return self.yaml_dict elif isinstance(self.content, str): contents = self.content # check if it is yaml try: if content_type == 'yaml' and contents: # Try to set format attributes if supported try: self.yaml_dict.fa.set_block_style() except AttributeError: pass # Try to use RoundTripLoader if supported. 
try: self.yaml_dict = yaml.safe_load(contents, yaml.RoundTripLoader) except AttributeError: self.yaml_dict = yaml.safe_load(contents) # Try to set format attributes if supported try: self.yaml_dict.fa.set_block_style() except AttributeError: pass elif content_type == 'json' and contents: self.yaml_dict = json.loads(contents) except yaml.YAMLError as err: # Error loading yaml or json raise YeditException('Problem with loading yaml file. {}'.format(err)) return self.yaml_dict def get(self, key): ''' get a specified key''' try: entry = Yedit.get_entry(self.yaml_dict, key, self.separator) except KeyError: entry = None return entry def pop(self, path, key_or_item): ''' remove a key, value pair from a dict or an item for a list''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if entry is None: return (False, self.yaml_dict) if isinstance(entry, dict): # AUDIT:maybe-no-member makes sense due to fuzzy types # pylint: disable=maybe-no-member if key_or_item in entry: entry.pop(key_or_item) return (True, self.yaml_dict) return (False, self.yaml_dict) elif isinstance(entry, list): # AUDIT:maybe-no-member makes sense due to fuzzy types # pylint: disable=maybe-no-member ind = None try: ind = entry.index(key_or_item) except ValueError: return (False, self.yaml_dict) entry.pop(ind) return (True, self.yaml_dict) return (False, self.yaml_dict) def delete(self, path): ''' remove path from a dict''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if entry is None: return (False, self.yaml_dict) result = Yedit.remove_entry(self.yaml_dict, path, self.separator) if not result: return (False, self.yaml_dict) return (True, self.yaml_dict) def exists(self, path, value): ''' check if value exists at path''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if isinstance(entry, list): if value in entry: return True return False elif 
isinstance(entry, dict): if isinstance(value, dict): rval = False for key, val in value.items(): if entry[key] != val: rval = False break else: rval = True return rval return value in entry return entry == value def append(self, path, value): '''append value to a list''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if entry is None: self.put(path, []) entry = Yedit.get_entry(self.yaml_dict, path, self.separator) if not isinstance(entry, list): return (False, self.yaml_dict) # AUDIT:maybe-no-member makes sense due to loading data from # a serialized format. # pylint: disable=maybe-no-member entry.append(value) return (True, self.yaml_dict) # pylint: disable=too-many-arguments def update(self, path, value, index=None, curr_value=None): ''' put path, value into a dict ''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if isinstance(entry, dict): # AUDIT:maybe-no-member makes sense due to fuzzy types # pylint: disable=maybe-no-member if not isinstance(value, dict): raise YeditException('Cannot replace key, value entry in dict with non-dict type. 
' + 'value=[{}] type=[{}]'.format(value, type(value))) entry.update(value) return (True, self.yaml_dict) elif isinstance(entry, list): # AUDIT:maybe-no-member makes sense due to fuzzy types # pylint: disable=maybe-no-member ind = None if curr_value: try: ind = entry.index(curr_value) except ValueError: return (False, self.yaml_dict) elif index is not None: ind = index if ind is not None and entry[ind] != value: entry[ind] = value return (True, self.yaml_dict) # see if it exists in the list try: ind = entry.index(value) except ValueError: # doesn't exist, append it entry.append(value) return (True, self.yaml_dict) # already exists, return if ind is not None: return (False, self.yaml_dict) return (False, self.yaml_dict) def put(self, path, value): ''' put path, value into a dict ''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError: entry = None if entry == value: return (False, self.yaml_dict) # deepcopy didn't work # Try to use ruamel.yaml and fallback to pyyaml try: tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader) except AttributeError: tmp_copy = copy.deepcopy(self.yaml_dict) # set the format attributes if available try: tmp_copy.fa.set_block_style() except AttributeError: pass result = Yedit.add_entry(tmp_copy, path, value, self.separator) if result is None: return (False, self.yaml_dict) # When path equals "" it is a special case. 
# "" refers to the root of the document # Only update the root path (entire document) when its a list or dict if path == '': if isinstance(result, list) or isinstance(result, dict): self.yaml_dict = result return (True, self.yaml_dict) return (False, self.yaml_dict) self.yaml_dict = tmp_copy return (True, self.yaml_dict) def create(self, path, value): ''' create a yaml file ''' if not self.file_exists(): # deepcopy didn't work # Try to use ruamel.yaml and fallback to pyyaml try: tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader) except AttributeError: tmp_copy = copy.deepcopy(self.yaml_dict) # set the format attributes if available try: tmp_copy.fa.set_block_style() except AttributeError: pass result = Yedit.add_entry(tmp_copy, path, value, self.separator) if result is not None: self.yaml_dict = tmp_copy return (True, self.yaml_dict) return (False, self.yaml_dict) @staticmethod def get_curr_value(invalue, val_type): '''return the current value''' if invalue is None: return None curr_value = invalue if val_type == 'yaml': curr_value = yaml.load(invalue) elif val_type == 'json': curr_value = json.loads(invalue) return curr_value @staticmethod def parse_value(inc_value, vtype=''): '''determine value type passed''' true_bools = ['y', 'Y', 'yes', 'Yes', 'YES', 'true', 'True', 'TRUE', 'on', 'On', 'ON', ] false_bools = ['n', 'N', 'no', 'No', 'NO', 'false', 'False', 'FALSE', 'off', 'Off', 'OFF'] # It came in as a string but you didn't specify value_type as string # we will convert to bool if it matches any of the above cases if isinstance(inc_value, str) and 'bool' in vtype: if inc_value not in true_bools and inc_value not in false_bools: raise YeditException('Not a boolean type. 
str=[{}] vtype=[{}]'.format(inc_value, vtype)) elif isinstance(inc_value, bool) and 'str' in vtype: inc_value = str(inc_value) # There is a special case where '' will turn into None after yaml loading it so skip if isinstance(inc_value, str) and inc_value == '': pass # If vtype is not str then go ahead and attempt to yaml load it. elif isinstance(inc_value, str) and 'str' not in vtype: try: inc_value = yaml.safe_load(inc_value) except Exception: raise YeditException('Could not determine type of incoming value. ' + 'value=[{}] vtype=[{}]'.format(type(inc_value), vtype)) return inc_value @staticmethod def process_edits(edits, yamlfile): '''run through a list of edits and process them one-by-one''' results = [] for edit in edits: value = Yedit.parse_value(edit['value'], edit.get('value_type', '')) if edit.get('action') == 'update': # pylint: disable=line-too-long curr_value = Yedit.get_curr_value( Yedit.parse_value(edit.get('curr_value')), edit.get('curr_value_format')) rval = yamlfile.update(edit['key'], value, edit.get('index'), curr_value) elif edit.get('action') == 'append': rval = yamlfile.append(edit['key'], value) else: rval = yamlfile.put(edit['key'], value) if rval[0]: results.append({'key': edit['key'], 'edit': rval[1]}) return {'changed': len(results) > 0, 'results': results} # pylint: disable=too-many-return-statements,too-many-branches @staticmethod def run_ansible(params): '''perform the idempotent crud operations''' yamlfile = Yedit(filename=params['src'], backup=params['backup'], separator=params['separator']) state = params['state'] if params['src']: rval = yamlfile.load() if yamlfile.yaml_dict is None and state != 'present': return {'failed': True, 'msg': 'Error opening file [{}]. 
Verify that the '.format(params['src']) + 'file exists, that it is has correct permissions, and is valid yaml.'} if state == 'list': if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) yamlfile.yaml_dict = content if params['key']: rval = yamlfile.get(params['key']) or {} return {'changed': False, 'result': rval, 'state': state} elif state == 'absent': if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) yamlfile.yaml_dict = content if params['update']: rval = yamlfile.pop(params['key'], params['value']) else: rval = yamlfile.delete(params['key']) if rval[0] and params['src']: yamlfile.write() return {'changed': rval[0], 'result': rval[1], 'state': state} elif state == 'present': # check if content is different than what is in the file if params['content']: content = Yedit.parse_value(params['content'], params['content_type']) # We had no edits to make and the contents are the same if yamlfile.yaml_dict == content and \ params['value'] is None: return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state} yamlfile.yaml_dict = content # If we were passed a key, value then # we enapsulate it in a list and process it # Key, Value passed to the module : Converted to Edits list # edits = [] _edit = {} if params['value'] is not None: _edit['value'] = params['value'] _edit['value_type'] = params['value_type'] _edit['key'] = params['key'] if params['update']: _edit['action'] = 'update' _edit['curr_value'] = params['curr_value'] _edit['curr_value_format'] = params['curr_value_format'] _edit['index'] = params['index'] elif params['append']: _edit['action'] = 'append' edits.append(_edit) elif params['edits'] is not None: edits = params['edits'] if edits: results = Yedit.process_edits(edits, yamlfile) # if there were changes and a src provided to us we need to write if results['changed'] and params['src']: yamlfile.write() return {'changed': results['changed'], 'result': 
results['results'], 'state': state} # no edits to make if params['src']: # pylint: disable=redefined-variable-type rval = yamlfile.write() return {'changed': rval[0], 'result': rval[1], 'state': state} # We were passed content but no src, key or value, or edits. Return contents in memory return {'changed': False, 'result': yamlfile.yaml_dict, 'state': state} return {'failed': True, 'msg': 'Unkown state passed'} # -*- -*- -*- End included fragment: ../../lib_utils/src/class/yedit.py -*- -*- -*- # -*- -*- -*- Begin included fragment: lib/base.py -*- -*- -*- # pylint: disable=too-many-lines # noqa: E301,E302,E303,T001 class OpenShiftCLIError(Exception): '''Exception class for openshiftcli''' pass ADDITIONAL_PATH_LOOKUPS = ['/usr/local/bin', os.path.expanduser('~/bin')] def locate_oc_binary(): ''' Find and return oc binary file ''' # https://github.com/openshift/openshift-ansible/issues/3410 # oc can be in /usr/local/bin in some cases, but that may not # be in $PATH due to ansible/sudo paths = os.environ.get("PATH", os.defpath).split(os.pathsep) + ADDITIONAL_PATH_LOOKUPS oc_binary = 'oc' # Use shutil.which if it is available, otherwise fallback to a naive path search try: which_result = shutil.which(oc_binary, path=os.pathsep.join(paths)) if which_result is not None: oc_binary = which_result except AttributeError: for path in paths: if os.path.exists(os.path.join(path, oc_binary)): oc_binary = os.path.join(path, oc_binary) break return oc_binary # pylint: disable=too-few-public-methods class OpenShiftCLI(object): ''' Class to wrap the command line tools ''' def __init__(self, namespace, kubeconfig='/etc/origin/master/admin.kubeconfig', verbose=False, all_namespaces=False): ''' Constructor for OpenshiftCLI ''' self.namespace = namespace self.verbose = verbose self.kubeconfig = Utils.create_tmpfile_copy(kubeconfig) self.all_namespaces = all_namespaces self.oc_binary = locate_oc_binary() # Pylint allows only 5 arguments to be passed. 
# pylint: disable=too-many-arguments def _replace_content(self, resource, rname, content, force=False, sep='.'): ''' replace the current object with the content ''' res = self._get(resource, rname) if not res['results']: return res fname = Utils.create_tmpfile(rname + '-') yed = Yedit(fname, res['results'][0], separator=sep) changes = [] for key, value in content.items(): changes.append(yed.put(key, value)) if any([change[0] for change in changes]): yed.write() atexit.register(Utils.cleanup, [fname]) return self._replace(fname, force) return {'returncode': 0, 'updated': False} def _replace(self, fname, force=False): '''replace the current object with oc replace''' # We are removing the 'resourceVersion' to handle # a race condition when modifying oc objects yed = Yedit(fname) results = yed.delete('metadata.resourceVersion') if results[0]: yed.write() cmd = ['replace', '-f', fname] if force: cmd.append('--force') return self.openshift_cmd(cmd) def _create_from_content(self, rname, content): '''create a temporary file and then call oc create on it''' fname = Utils.create_tmpfile(rname + '-') yed = Yedit(fname, content=content) yed.write() atexit.register(Utils.cleanup, [fname]) return self._create(fname) def _create(self, fname): '''call oc create on a filename''' return self.openshift_cmd(['create', '-f', fname]) def _delete(self, resource, name=None, selector=None): '''call oc delete on a resource''' cmd = ['delete', resource] if selector is not None: cmd.append('--selector={}'.format(selector)) elif name is not None: cmd.append(name) else: raise OpenShiftCLIError('Either name or selector is required when calling delete.') return self.openshift_cmd(cmd) def _process(self, template_name, create=False, params=None, template_data=None): # noqa: E501 '''process a template template_name: the name of the template to process create: whether to send to oc create after processing params: the parameters for the template template_data: the incoming template's data; instead of 
a file ''' cmd = ['process'] if template_data: cmd.extend(['-f', '-']) else: cmd.append(template_name) if params: param_str = ["{}={}".format(key, str(value).replace("'", r'"')) for key, value in params.items()] cmd.append('-v') cmd.extend(param_str) results = self.openshift_cmd(cmd, output=True, input_data=template_data) if results['returncode'] != 0 or not create: return results fname = Utils.create_tmpfile(template_name + '-') yed = Yedit(fname, results['results']) yed.write() atexit.register(Utils.cleanup, [fname]) return self.openshift_cmd(['create', '-f', fname]) def _get(self, resource, name=None, selector=None): '''return a resource by name ''' cmd = ['get', resource] if selector is not None: cmd.append('--selector={}'.format(selector)) elif name is not None: cmd.append(name) cmd.extend(['-o', 'json']) rval = self.openshift_cmd(cmd, output=True) # Ensure results are retuned in an array if 'items' in rval: rval['results'] = rval['items'] elif not isinstance(rval['results'], list): rval['results'] = [rval['results']] return rval def _schedulable(self, node=None, selector=None, schedulable=True): ''' perform oadm manage-node scheduable ''' cmd = ['manage-node'] if node: cmd.extend(node) else: cmd.append('--selector={}'.format(selector)) cmd.append('--schedulable={}'.format(schedulable)) return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # noqa: E501 def _list_pods(self, node=None, selector=None, pod_selector=None): ''' perform oadm list pods node: the node in which to list pods selector: the label selector filter if provided pod_selector: the pod selector filter if provided ''' cmd = ['manage-node'] if node: cmd.extend(node) else: cmd.append('--selector={}'.format(selector)) if pod_selector: cmd.append('--pod-selector={}'.format(pod_selector)) cmd.extend(['--list-pods', '-o', 'json']) return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') # pylint: disable=too-many-arguments def _evacuate(self, node=None, 
selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False): ''' perform oadm manage-node evacuate ''' cmd = ['manage-node'] if node: cmd.extend(node) else: cmd.append('--selector={}'.format(selector)) if dry_run: cmd.append('--dry-run') if pod_selector: cmd.append('--pod-selector={}'.format(pod_selector)) if grace_period: cmd.append('--grace-period={}'.format(int(grace_period))) if force: cmd.append('--force') cmd.append('--evacuate') return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') def _version(self): ''' return the openshift version''' return self.openshift_cmd(['version'], output=True, output_type='raw') def _import_image(self, url=None, name=None, tag=None): ''' perform image import ''' cmd = ['import-image'] image = '{0}'.format(name) if tag: image += ':{0}'.format(tag) cmd.append(image) if url: cmd.append('--from={0}/{1}'.format(url, image)) cmd.append('-n{0}'.format(self.namespace)) cmd.append('--confirm') return self.openshift_cmd(cmd) def _run(self, cmds, input_data): ''' Actually executes the command. This makes mocking easier. 
''' curr_env = os.environ.copy() curr_env.update({'KUBECONFIG': self.kubeconfig}) proc = subprocess.Popen(cmds, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=curr_env) stdout, stderr = proc.communicate(input_data) return proc.returncode, stdout.decode('utf-8'), stderr.decode('utf-8') # pylint: disable=too-many-arguments,too-many-branches def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None): '''Base command for oc ''' cmds = [self.oc_binary] if oadm: cmds.append('adm') cmds.extend(cmd) if self.all_namespaces: cmds.extend(['--all-namespaces']) elif self.namespace is not None and self.namespace.lower() not in ['none', 'emtpy']: # E501 cmds.extend(['-n', self.namespace]) rval = {} results = '' err = None if self.verbose: print(' '.join(cmds)) try: returncode, stdout, stderr = self._run(cmds, input_data) except OSError as ex: returncode, stdout, stderr = 1, '', 'Failed to execute {}: {}'.format(subprocess.list2cmdline(cmds), ex) rval = {"returncode": returncode, "results": results, "cmd": ' '.join(cmds)} if returncode == 0: if output: if output_type == 'json': try: rval['results'] = json.loads(stdout) except ValueError as verr: if "No JSON object could be decoded" in verr.args: err = verr.args elif output_type == 'raw': rval['results'] = stdout if self.verbose: print("STDOUT: {0}".format(stdout)) print("STDERR: {0}".format(stderr)) if err: rval.update({"err": err, "stderr": stderr, "stdout": stdout, "cmd": cmds}) else: rval.update({"stderr": stderr, "stdout": stdout, "results": {}}) return rval class Utils(object): # pragma: no cover ''' utilities for openshiftcli modules ''' @staticmethod def _write(filename, contents): ''' Actually write the file contents to disk. This helps with mocking. 
''' with open(filename, 'w') as sfd: sfd.write(contents) @staticmethod def create_tmp_file_from_contents(rname, data, ftype='yaml'): ''' create a file in tmp with name and contents''' tmp = Utils.create_tmpfile(prefix=rname) if ftype == 'yaml': # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage # pylint: disable=no-member if hasattr(yaml, 'RoundTripDumper'): Utils._write(tmp, yaml.dump(data, Dumper=yaml.RoundTripDumper)) else: Utils._write(tmp, yaml.safe_dump(data, default_flow_style=False)) elif ftype == 'json': Utils._write(tmp, json.dumps(data)) else: Utils._write(tmp, data) # Register cleanup when module is done atexit.register(Utils.cleanup, [tmp]) return tmp @staticmethod def create_tmpfile_copy(inc_file): '''create a temporary copy of a file''' tmpfile = Utils.create_tmpfile('lib_openshift-') Utils._write(tmpfile, open(inc_file).read()) # Cleanup the tmpfile atexit.register(Utils.cleanup, [tmpfile]) return tmpfile @staticmethod def create_tmpfile(prefix='tmp'): ''' Generates and returns a temporary file name ''' with tempfile.NamedTemporaryFile(prefix=prefix, delete=False) as tmp: return tmp.name @staticmethod def create_tmp_files_from_contents(content, content_type=None): '''Turn an array of dict: filename, content into a files array''' if not isinstance(content, list): content = [content] files = [] for item in content: path = Utils.create_tmp_file_from_contents(item['path'] + '-', item['data'], ftype=content_type) files.append({'name': os.path.basename(item['path']), 'path': path}) return files @staticmethod def cleanup(files): '''Clean up on exit ''' for sfile in files: if os.path.exists(sfile): if os.path.isdir(sfile): shutil.rmtree(sfile) elif os.path.isfile(sfile): os.remove(sfile) @staticmethod def exists(results, _name): ''' Check to see if the results include the name ''' if not results: return False if Utils.find_result(results, _name): return True return False @staticmethod def find_result(results, _name): ''' Find the specified 
result by name''' rval = None for result in results: if 'metadata' in result and result['metadata']['name'] == _name: rval = result break return rval @staticmethod def get_resource_file(sfile, sfile_type='yaml'): ''' return the service file ''' contents = None with open(sfile) as sfd: contents = sfd.read() if sfile_type == 'yaml': # AUDIT:no-member makes sense here due to ruamel.YAML/PyYAML usage # pylint: disable=no-member if hasattr(yaml, 'RoundTripLoader'): contents = yaml.load(contents, yaml.RoundTripLoader) else: contents = yaml.safe_load(contents) elif sfile_type == 'json': contents = json.loads(contents) return contents @staticmethod def filter_versions(stdout): ''' filter the oc version output ''' version_dict = {} version_search = ['oc', 'openshift', 'kubernetes'] for line in stdout.strip().split('\n'): for term in version_search: if not line: continue if line.startswith(term): version_dict[term] = line.split()[-1] # horrible hack to get openshift version in Openshift 3.2 # By default "oc version in 3.2 does not return an "openshift" version if "openshift" not in version_dict: version_dict["openshift"] = version_dict["oc"] return version_dict @staticmethod def add_custom_versions(versions): ''' create custom versions strings ''' versions_dict = {} for tech, version in versions.items(): # clean up "-" from version if "-" in version: version = version.split("-")[0] if version.startswith('v'): versions_dict[tech + '_numeric'] = version[1:].split('+')[0] # "v3.3.0.33" is what we have, we want "3.3" versions_dict[tech + '_short'] = version[1:4] return versions_dict @staticmethod def openshift_installed(): ''' check if openshift is installed ''' import yum yum_base = yum.YumBase() if yum_base.rpmdb.searchNevra(name='atomic-openshift'): return True return False # Disabling too-many-branches. 
This is a yaml dictionary comparison function # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements @staticmethod def check_def_equal(user_def, result_def, skip_keys=None, debug=False): ''' Given a user defined definition, compare it with the results given back by our query. ''' # Currently these values are autogenerated and we do not need to check them skip = ['metadata', 'status'] if skip_keys: skip.extend(skip_keys) for key, value in result_def.items(): if key in skip: continue # Both are lists if isinstance(value, list): if key not in user_def: if debug: print('User data does not have key [%s]' % key) print('User data: %s' % user_def) return False if not isinstance(user_def[key], list): if debug: print('user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key])) return False if len(user_def[key]) != len(value): if debug: print("List lengths are not equal.") print("key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value))) print("user_def: %s" % user_def[key]) print("value: %s" % value) return False for values in zip(user_def[key], value): if isinstance(values[0], dict) and isinstance(values[1], dict): if debug: print('sending list - list') print(type(values[0])) print(type(values[1])) result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug) if not result: print('list compare returned false') return False elif value != user_def[key]: if debug: print('value should be identical') print(user_def[key]) print(value) return False # recurse on a dictionary elif isinstance(value, dict): if key not in user_def: if debug: print("user_def does not have key [%s]" % key) return False if not isinstance(user_def[key], dict): if debug: print("dict returned false: not instance of dict") return False # before passing ensure keys match api_values = set(value.keys()) - set(skip) user_values = set(user_def[key].keys()) - set(skip) if api_values != user_values: if debug: print("keys are 
not equal in dict") print(user_values) print(api_values) return False result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug) if not result: if debug: print("dict returned false") print(result) return False # Verify each key, value pair is the same else: if key not in user_def or value != user_def[key]: if debug: print("value not equal; user_def does not have key") print(key) print(value) if key in user_def: print(user_def[key]) return False if debug: print('returning true') return True class OpenShiftCLIConfig(object): '''Generic Config''' def __init__(self, rname, namespace, kubeconfig, options): self.kubeconfig = kubeconfig self.name = rname self.namespace = namespace self._options = options @property def config_options(self): ''' return config options ''' return self._options def to_option_list(self, ascommalist=''): '''return all options as a string if ascommalist is set to the name of a key, and the value of that key is a dict, format the dict as a list of comma delimited key=value pairs''' return self.stringify(ascommalist) def stringify(self, ascommalist=''): ''' return the options hash as cli params in a string if ascommalist is set to the name of a key, and the value of that key is a dict, format the dict as a list of comma delimited key=value pairs ''' rval = [] for key in sorted(self.config_options.keys()): data = self.config_options[key] if data['include'] \ and (data['value'] or isinstance(data['value'], int)): if key == ascommalist: val = ','.join(['{}={}'.format(kk, vv) for kk, vv in sorted(data['value'].items())]) else: val = data['value'] rval.append('--{}={}'.format(key.replace('_', '-'), val)) return rval # -*- -*- -*- End included fragment: lib/base.py -*- -*- -*- # -*- -*- -*- Begin included fragment: lib/deploymentconfig.py -*- -*- -*- # pylint: disable=too-many-public-methods class DeploymentConfig(Yedit): ''' Class to model an openshift DeploymentConfig''' default_deployment_config = ''' apiVersion: v1 kind: 
DeploymentConfig metadata: name: default_dc namespace: default spec: replicas: 0 selector: default_dc: default_dc strategy: resources: {} rollingParams: intervalSeconds: 1 maxSurge: 0 maxUnavailable: 25% timeoutSeconds: 600 updatePercent: -25 updatePeriodSeconds: 1 type: Rolling template: metadata: spec: containers: - env: - name: default value: default image: default imagePullPolicy: IfNotPresent name: default_dc ports: - containerPort: 8000 hostPort: 8000 protocol: TCP name: default_port resources: {} terminationMessagePath: /dev/termination-log dnsPolicy: ClusterFirst hostNetwork: true nodeSelector: type: compute restartPolicy: Always securityContext: {} serviceAccount: default serviceAccountName: default terminationGracePeriodSeconds: 30 triggers: - type: ConfigChange ''' replicas_path = "spec.replicas" env_path = "spec.template.spec.containers[0].env" volumes_path = "spec.template.spec.volumes" container_path = "spec.template.spec.containers" volume_mounts_path = "spec.template.spec.containers[0].volumeMounts" def __init__(self, content=None): ''' Constructor for deploymentconfig ''' if not content: content = DeploymentConfig.default_deployment_config super(DeploymentConfig, self).__init__(content=content) def add_env_value(self, key, value): ''' add key, value pair to env array ''' rval = False env = self.get_env_vars() if env: env.append({'name': key, 'value': value}) rval = True else: result = self.put(DeploymentConfig.env_path, {'name': key, 'value': value}) rval = result[0] return rval def exists_env_value(self, key, value): ''' return whether a key, value pair exists ''' results = self.get_env_vars() if not results: return False for result in results: if result['name'] == key and result['value'] == value: return True return False def exists_env_key(self, key): ''' return whether a key, value pair exists ''' results = self.get_env_vars() if not results: return False for result in results: if result['name'] == key: return True return False def 
get_env_var(self, key): '''return a environment variables ''' results = self.get(DeploymentConfig.env_path) or [] if not results: return None for env_var in results: if env_var['name'] == key: return env_var return None def get_env_vars(self): '''return a environment variables ''' return self.get(DeploymentConfig.env_path) or [] def delete_env_var(self, keys): '''delete a list of keys ''' if not isinstance(keys, list): keys = [keys] env_vars_array = self.get_env_vars() modified = False idx = None for key in keys: for env_idx, env_var in enumerate(env_vars_array): if env_var['name'] == key: idx = env_idx break if idx: modified = True del env_vars_array[idx] if modified: return True return False def update_env_var(self, key, value): '''place an env in the env var list''' env_vars_array = self.get_env_vars() idx = None for env_idx, env_var in enumerate(env_vars_array): if env_var['name'] == key: idx = env_idx break if idx: env_vars_array[idx]['value'] = value else: self.add_env_value(key, value) return True def exists_volume_mount(self, volume_mount): ''' return whether a volume mount exists ''' exist_volume_mounts = self.get_volume_mounts() if not exist_volume_mounts: return False volume_mount_found = False for exist_volume_mount in exist_volume_mounts: if exist_volume_mount['name'] == volume_mount['name']: volume_mount_found = True break return volume_mount_found def exists_volume(self, volume): ''' return whether a volume exists ''' exist_volumes = self.get_volumes() volume_found = False for exist_volume in exist_volumes: if exist_volume['name'] == volume['name']: volume_found = True break return volume_found def find_volume_by_name(self, volume, mounts=False): ''' return the index of a volume ''' volumes = [] if mounts: volumes = self.get_volume_mounts() else: volumes = self.get_volumes() for exist_volume in volumes: if exist_volume['name'] == volume['name']: return exist_volume return None def get_replicas(self): ''' return replicas setting ''' return 
self.get(DeploymentConfig.replicas_path) def get_volume_mounts(self): '''return volume mount information ''' return self.get_volumes(mounts=True) def get_volumes(self, mounts=False): '''return volume mount information ''' if mounts: return self.get(DeploymentConfig.volume_mounts_path) or [] return self.get(DeploymentConfig.volumes_path) or [] def delete_volume_by_name(self, volume): '''delete a volume ''' modified = False exist_volume_mounts = self.get_volume_mounts() exist_volumes = self.get_volumes() del_idx = None for idx, exist_volume in enumerate(exist_volumes): if 'name' in exist_volume and exist_volume['name'] == volume['name']: del_idx = idx break if del_idx != None: del exist_volumes[del_idx] modified = True del_idx = None for idx, exist_volume_mount in enumerate(exist_volume_mounts): if 'name' in exist_volume_mount and exist_volume_mount['name'] == volume['name']: del_idx = idx break if del_idx != None: del exist_volume_mounts[idx] modified = True return modified def add_volume_mount(self, volume_mount): ''' add a volume or volume mount to the proper location ''' exist_volume_mounts = self.get_volume_mounts() if not exist_volume_mounts and volume_mount: self.put(DeploymentConfig.volume_mounts_path, [volume_mount]) else: exist_volume_mounts.append(volume_mount) def add_volume(self, volume): ''' add a volume or volume mount to the proper location ''' exist_volumes = self.get_volumes() if not volume: return if not exist_volumes: self.put(DeploymentConfig.volumes_path, [volume]) else: exist_volumes.append(volume) def update_replicas(self, replicas): ''' update replicas value ''' self.put(DeploymentConfig.replicas_path, replicas) def update_volume(self, volume): '''place an env in the env var list''' exist_volumes = self.get_volumes() if not volume: return False # update the volume update_idx = None for idx, exist_vol in enumerate(exist_volumes): if exist_vol['name'] == volume['name']: update_idx = idx break if update_idx != None: exist_volumes[update_idx] = 
volume else: self.add_volume(volume) return True def update_volume_mount(self, volume_mount): '''place an env in the env var list''' modified = False exist_volume_mounts = self.get_volume_mounts() if not volume_mount: return False # update the volume mount for exist_vol_mount in exist_volume_mounts: if exist_vol_mount['name'] == volume_mount['name']: if 'mountPath' in exist_vol_mount and \ str(exist_vol_mount['mountPath']) != str(volume_mount['mountPath']): exist_vol_mount['mountPath'] = volume_mount['mountPath'] modified = True break if not modified: self.add_volume_mount(volume_mount) modified = True return modified def needs_update_volume(self, volume, volume_mount): ''' verify a volume update is needed ''' exist_volume = self.find_volume_by_name(volume) exist_volume_mount = self.find_volume_by_name(volume, mounts=True) results = [] results.append(exist_volume['name'] == volume['name']) if 'secret' in volume: results.append('secret' in exist_volume) results.append(exist_volume['secret']['secretName'] == volume['secret']['secretName']) results.append(exist_volume_mount['name'] == volume_mount['name']) results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath']) elif 'emptyDir' in volume: results.append(exist_volume_mount['name'] == volume['name']) results.append(exist_volume_mount['mountPath'] == volume_mount['mountPath']) elif 'persistentVolumeClaim' in volume: pvc = 'persistentVolumeClaim' results.append(pvc in exist_volume) if results[-1]: results.append(exist_volume[pvc]['claimName'] == volume[pvc]['claimName']) if 'claimSize' in volume[pvc]: results.append(exist_volume[pvc]['claimSize'] == volume[pvc]['claimSize']) elif 'hostpath' in volume: results.append('hostPath' in exist_volume) results.append(exist_volume['hostPath']['path'] == volume_mount['mountPath']) return not all(results) def needs_update_replicas(self, replicas): ''' verify whether a replica update is needed ''' current_reps = self.get(DeploymentConfig.replicas_path) return not 
current_reps == replicas # -*- -*- -*- End included fragment: lib/deploymentconfig.py -*- -*- -*- # -*- -*- -*- Begin included fragment: lib/secret.py -*- -*- -*- # pylint: disable=too-many-instance-attributes class SecretConfig(object): ''' Handle secret options ''' # pylint: disable=too-many-arguments def __init__(self, sname, namespace, kubeconfig, secrets=None): ''' constructor for handling secret options ''' self.kubeconfig = kubeconfig self.name = sname self.namespace = namespace self.secrets = secrets self.data = {} self.create_dict() def create_dict(self): ''' assign the correct properties for a secret dict ''' self.data['apiVersion'] = 'v1' self.data['kind'] = 'Secret' self.data['metadata'] = {} self.data['metadata']['name'] = self.name self.data['metadata']['namespace'] = self.namespace self.data['data'] = {} if self.secrets: for key, value in self.secrets.items(): self.data['data'][key] = value # pylint: disable=too-many-instance-attributes class Secret(Yedit): ''' Class to wrap the oc command line tools ''' secret_path = "data" kind = 'secret' def __init__(self, content): '''secret constructor''' super(Secret, self).__init__(content=content) self._secrets = None @property def secrets(self): '''secret property getter''' if self._secrets is None: self._secrets = self.get_secrets() return self._secrets @secrets.setter def secrets(self): '''secret property setter''' if self._secrets is None: self._secrets = self.get_secrets() return self._secrets def get_secrets(self): ''' returns all of the defined secrets ''' return self.get(Secret.secret_path) or {} def add_secret(self, key, value): ''' add a secret ''' if self.secrets: self.secrets[key] = value else: self.put(Secret.secret_path, {key: value}) return True def delete_secret(self, key): ''' delete secret''' try: del self.secrets[key] except KeyError as _: return False return True def find_secret(self, key): ''' find secret''' rval = None try: rval = self.secrets[key] except KeyError as _: return None 
return {'key': key, 'value': rval} def update_secret(self, key, value): ''' update a secret''' if key in self.secrets: self.secrets[key] = value else: self.add_secret(key, value) return True # -*- -*- -*- End included fragment: lib/secret.py -*- -*- -*- # -*- -*- -*- Begin included fragment: lib/service.py -*- -*- -*- # pylint: disable=too-many-instance-attributes class ServiceConfig(object): ''' Handle service options ''' # pylint: disable=too-many-arguments def __init__(self, sname, namespace, ports, selector=None, labels=None, cluster_ip=None, portal_ip=None, session_affinity=None, service_type=None, external_ips=None): ''' constructor for handling service options ''' self.name = sname self.namespace = namespace self.ports = ports self.selector = selector self.labels = labels self.cluster_ip = cluster_ip self.portal_ip = portal_ip self.session_affinity = session_affinity self.service_type = service_type self.external_ips = external_ips self.data = {} self.create_dict() def create_dict(self): ''' instantiates a service dict ''' self.data['apiVersion'] = 'v1' self.data['kind'] = 'Service' self.data['metadata'] = {} self.data['metadata']['name'] = self.name self.data['metadata']['namespace'] = self.namespace if self.labels: self.data['metadata']['labels'] = {} for lab, lab_value in self.labels.items(): self.data['metadata']['labels'][lab] = lab_value self.data['spec'] = {} if self.ports: self.data['spec']['ports'] = self.ports else: self.data['spec']['ports'] = [] if self.selector: self.data['spec']['selector'] = self.selector self.data['spec']['sessionAffinity'] = self.session_affinity or 'None' if self.cluster_ip: self.data['spec']['clusterIP'] = self.cluster_ip if self.portal_ip: self.data['spec']['portalIP'] = self.portal_ip if self.service_type: self.data['spec']['type'] = self.service_type if self.external_ips: self.data['spec']['externalIPs'] = self.external_ips # pylint: disable=too-many-instance-attributes,too-many-public-methods class Service(Yedit): ''' 
Class to model the oc service object ''' port_path = "spec.ports" portal_ip = "spec.portalIP" cluster_ip = "spec.clusterIP" selector_path = 'spec.selector' kind = 'Service' external_ips = "spec.externalIPs" def __init__(self, content): '''Service constructor''' super(Service, self).__init__(content=content) def get_ports(self): ''' get a list of ports ''' return self.get(Service.port_path) or [] def get_selector(self): ''' get the service selector''' return self.get(Service.selector_path) or {} def add_ports(self, inc_ports): ''' add a port object to the ports list ''' if not isinstance(inc_ports, list): inc_ports = [inc_ports] ports = self.get_ports() if not ports: self.put(Service.port_path, inc_ports) else: ports.extend(inc_ports) return True def find_ports(self, inc_port): ''' find a specific port ''' for port in self.get_ports(): if port['port'] == inc_port['port']: return port return None def delete_ports(self, inc_ports): ''' remove a port from a service ''' if not isinstance(inc_ports, list): inc_ports = [inc_ports] ports = self.get(Service.port_path) or [] if not ports: return True removed = False for inc_port in inc_ports: port = self.find_ports(inc_port) if port: ports.remove(port) removed = True return removed def add_cluster_ip(self, sip): '''add cluster ip''' self.put(Service.cluster_ip, sip) def add_portal_ip(self, pip): '''add cluster ip''' self.put(Service.portal_ip, pip) def get_external_ips(self): ''' get a list of external_ips ''' return self.get(Service.external_ips) or [] def add_external_ips(self, inc_external_ips): ''' add an external_ip to the external_ips list ''' if not isinstance(inc_external_ips, list): inc_external_ips = [inc_external_ips] external_ips = self.get_external_ips() if not external_ips: self.put(Service.external_ips, inc_external_ips) else: external_ips.extend(inc_external_ips) return True def find_external_ips(self, inc_external_ip): ''' find a specific external IP ''' val = None try: idx = 
self.get_external_ips().index(inc_external_ip) val = self.get_external_ips()[idx] except ValueError: pass return val def delete_external_ips(self, inc_external_ips): ''' remove an external IP from a service ''' if not isinstance(inc_external_ips, list): inc_external_ips = [inc_external_ips] external_ips = self.get(Service.external_ips) or [] if not external_ips: return True removed = False for inc_external_ip in inc_external_ips: external_ip = self.find_external_ips(inc_external_ip) if external_ip: external_ips.remove(external_ip) removed = True return removed # -*- -*- -*- End included fragment: lib/service.py -*- -*- -*- # -*- -*- -*- Begin included fragment: lib/volume.py -*- -*- -*- class Volume(object): ''' Class to represent an openshift volume object''' volume_mounts_path = {"pod": "spec.containers[0].volumeMounts", "dc": "spec.template.spec.containers[0].volumeMounts", "rc": "spec.template.spec.containers[0].volumeMounts", } volumes_path = {"pod": "spec.volumes", "dc": "spec.template.spec.volumes", "rc": "spec.template.spec.volumes", } @staticmethod def create_volume_structure(volume_info): ''' return a properly structured volume ''' volume_mount = None volume = {'name': volume_info['name']} volume_type = volume_info['type'].lower() if volume_type == 'secret': volume['secret'] = {} volume[volume_info['type']] = {'secretName': volume_info['secret_name']} volume_mount = {'mountPath': volume_info['path'], 'name': volume_info['name']} elif volume_type == 'emptydir': volume['emptyDir'] = {} volume_mount = {'mountPath': volume_info['path'], 'name': volume_info['name']} elif volume_type == 'pvc' or volume_type == 'persistentvolumeclaim': volume['persistentVolumeClaim'] = {} volume['persistentVolumeClaim']['claimName'] = volume_info['claimName'] volume['persistentVolumeClaim']['claimSize'] = volume_info['claimSize'] elif volume_type == 'hostpath': volume['hostPath'] = {} volume['hostPath']['path'] = volume_info['path'] elif volume_type == 'configmap': 
volume['configMap'] = {} volume['configMap']['name'] = volume_info['configmap_name'] volume_mount = {'mountPath': volume_info['path'], 'name': volume_info['name']} return (volume, volume_mount) # -*- -*- -*- End included fragment: lib/volume.py -*- -*- -*- # -*- -*- -*- Begin included fragment: class/oc_version.py -*- -*- -*- # pylint: disable=too-many-instance-attributes class OCVersion(OpenShiftCLI): ''' Class to wrap the oc command line tools ''' # pylint allows 5 # pylint: disable=too-many-arguments def __init__(self, config, debug): ''' Constructor for OCVersion ''' super(OCVersion, self).__init__(None, config) self.debug = debug def get(self): '''get and return version information ''' results = {} version_results = self._version() if version_results['returncode'] == 0: filtered_vers = Utils.filter_versions(version_results['results']) custom_vers = Utils.add_custom_versions(filtered_vers) results['returncode'] = version_results['returncode'] results.update(filtered_vers) results.update(custom_vers) return results raise OpenShiftCLIError('Problem detecting openshift version.') @staticmethod def run_ansible(params): '''run the idempotent ansible code''' oc_version = OCVersion(params['kubeconfig'], params['debug']) if params['state'] == 'list': #pylint: disable=protected-access result = oc_version.get() return {'state': params['state'], 'results': result, 'changed': False} # -*- -*- -*- End included fragment: class/oc_version.py -*- -*- -*- # -*- -*- -*- Begin included fragment: class/oc_adm_registry.py -*- -*- -*- class RegistryException(Exception): ''' Registry Exception Class ''' pass class RegistryConfig(OpenShiftCLIConfig): ''' RegistryConfig is a DTO for the registry. 
''' def __init__(self, rname, namespace, kubeconfig, registry_options): super(RegistryConfig, self).__init__(rname, namespace, kubeconfig, registry_options) class Registry(OpenShiftCLI): ''' Class to wrap the oc command line tools ''' volume_mount_path = 'spec.template.spec.containers[0].volumeMounts' volume_path = 'spec.template.spec.volumes' env_path = 'spec.template.spec.containers[0].env' def __init__(self, registry_config, verbose=False): ''' Constructor for Registry a registry consists of 3 or more parts - dc/docker-registry - svc/docker-registry Parameters: :registry_config: :verbose: ''' super(Registry, self).__init__(registry_config.namespace, registry_config.kubeconfig, verbose) self.version = OCVersion(registry_config.kubeconfig, verbose) self.svc_ip = None self.portal_ip = None self.config = registry_config self.verbose = verbose self.registry_parts = [{'kind': 'dc', 'name': self.config.name}, {'kind': 'svc', 'name': self.config.name}, ] self.__prepared_registry = None self.volume_mounts = [] self.volumes = [] if self.config.config_options['volume_mounts']['value']: for volume in self.config.config_options['volume_mounts']['value']: volume_info = {'secret_name': volume.get('secret_name', None), 'name': volume.get('name', None), 'type': volume.get('type', None), 'path': volume.get('path', None), 'claimName': volume.get('claim_name', None), 'claimSize': volume.get('claim_size', None), } vol, vol_mount = Volume.create_volume_structure(volume_info) self.volumes.append(vol) self.volume_mounts.append(vol_mount) self.dconfig = None self.svc = None @property def deploymentconfig(self): ''' deploymentconfig property ''' return self.dconfig @deploymentconfig.setter def deploymentconfig(self, config): ''' setter for deploymentconfig property ''' self.dconfig = config @property def service(self): ''' service property ''' return self.svc @service.setter def service(self, config): ''' setter for service property ''' self.svc = config @property def 
prepared_registry(self): ''' prepared_registry property ''' if not self.__prepared_registry: results = self.prepare_registry() if not results or ('returncode' in results and results['returncode'] != 0): raise RegistryException('Could not perform registry preparation. {}'.format(results)) self.__prepared_registry = results return self.__prepared_registry @prepared_registry.setter def prepared_registry(self, data): ''' setter method for prepared_registry attribute ''' self.__prepared_registry = data def get(self): ''' return the self.registry_parts ''' self.deploymentconfig = None self.service = None rval = 0 for part in self.registry_parts: result = self._get(part['kind'], name=part['name']) if result['returncode'] == 0 and part['kind'] == 'dc': self.deploymentconfig = DeploymentConfig(result['results'][0]) elif result['returncode'] == 0 and part['kind'] == 'svc': self.service = Service(result['results'][0]) if result['returncode'] != 0: rval = result['returncode'] return {'returncode': rval, 'deploymentconfig': self.deploymentconfig, 'service': self.service} def exists(self): '''does the object exist?''' if self.deploymentconfig and self.service: return True return False def delete(self, complete=True): '''return all pods ''' parts = [] for part in self.registry_parts: if not complete and part['kind'] == 'svc': continue parts.append(self._delete(part['kind'], part['name'])) # Clean up returned results rval = 0 for part in parts: # pylint: disable=invalid-sequence-index if 'returncode' in part and part['returncode'] != 0: rval = part['returncode'] return {'returncode': rval, 'results': parts} def prepare_registry(self): ''' prepare a registry for instantiation ''' options = self.config.to_option_list(ascommalist='labels') cmd = ['registry'] cmd.extend(options) cmd.extend(['--dry-run=True', '-o', 'json']) results = self.openshift_cmd(cmd, oadm=True, output=True, output_type='json') # probably need to parse this # pylint thinks results is a string # pylint: 
disable=no-member if results['returncode'] != 0 and 'items' not in results['results']: raise RegistryException('Could not perform registry preparation. {}'.format(results)) service = None deploymentconfig = None # pylint: disable=invalid-sequence-index for res in results['results']['items']: if res['kind'] == 'DeploymentConfig': deploymentconfig = DeploymentConfig(res) elif res['kind'] == 'Service': service = Service(res) # Verify we got a service and a deploymentconfig if not service or not deploymentconfig: return results # results will need to get parsed here and modifications added deploymentconfig = DeploymentConfig(self.add_modifications(deploymentconfig)) # modify service ip if self.svc_ip: service.put('spec.clusterIP', self.svc_ip) if self.portal_ip: service.put('spec.portalIP', self.portal_ip) # the dry-run doesn't apply the selector correctly if self.service: service.put('spec.selector', self.service.get_selector()) # need to create the service and the deploymentconfig service_file = Utils.create_tmp_file_from_contents('service', service.yaml_dict) deployment_file = Utils.create_tmp_file_from_contents('deploymentconfig', deploymentconfig.yaml_dict) return {"service": service, "service_file": service_file, "service_update": False, "deployment": deploymentconfig, "deployment_file": deployment_file, "deployment_update": False} def create(self): '''Create a registry''' results = [] self.needs_update() # if the object is none, then we need to create it # if the object needs an update, then we should call replace # Handle the deploymentconfig if self.deploymentconfig is None: results.append(self._create(self.prepared_registry['deployment_file'])) elif self.prepared_registry['deployment_update']: results.append(self._replace(self.prepared_registry['deployment_file'])) # Handle the service if self.service is None: results.append(self._create(self.prepared_registry['service_file'])) elif self.prepared_registry['service_update']: 
results.append(self._replace(self.prepared_registry['service_file'])) # Clean up returned results rval = 0 for result in results: # pylint: disable=invalid-sequence-index if 'returncode' in result and result['returncode'] != 0: rval = result['returncode'] return {'returncode': rval, 'results': results} def update(self): '''run update for the registry. This performs a replace if required''' # Store the current service IP if self.service: svcip = self.service.get('spec.clusterIP') if svcip: self.svc_ip = svcip portip = self.service.get('spec.portalIP') if portip: self.portal_ip = portip results = [] if self.prepared_registry['deployment_update']: results.append(self._replace(self.prepared_registry['deployment_file'])) if self.prepared_registry['service_update']: results.append(self._replace(self.prepared_registry['service_file'])) # Clean up returned results rval = 0 for result in results: if result['returncode'] != 0: rval = result['returncode'] return {'returncode': rval, 'results': results} def add_modifications(self, deploymentconfig): ''' update a deployment config with changes ''' # The environment variable for REGISTRY_HTTP_SECRET is autogenerated # We should set the generated deploymentconfig to the in memory version # the following modifications will overwrite if needed if self.deploymentconfig: result = self.deploymentconfig.get_env_var('REGISTRY_HTTP_SECRET') if result: deploymentconfig.update_env_var('REGISTRY_HTTP_SECRET', result['value']) # Currently we know that our deployment of a registry requires a few extra modifications # Modification 1 # we need specific environment variables to be set for key, value in self.config.config_options['env_vars'].get('value', {}).items(): if not deploymentconfig.exists_env_key(key): deploymentconfig.add_env_value(key, value) else: deploymentconfig.update_env_var(key, value) # Modification 2 # we need specific volume variables to be set for volume in self.volumes: deploymentconfig.update_volume(volume) for vol_mount in 
self.volume_mounts: deploymentconfig.update_volume_mount(vol_mount) # Modification 3 # Edits edit_results = [] for edit in self.config.config_options['edits'].get('value', []): if edit['action'] == 'put': edit_results.append(deploymentconfig.put(edit['key'], edit['value'])) if edit['action'] == 'update': edit_results.append(deploymentconfig.update(edit['key'], edit['value'], edit.get('index', None), edit.get('curr_value', None))) if edit['action'] == 'append': edit_results.append(deploymentconfig.append(edit['key'], edit['value'])) if edit_results and not any([res[0] for res in edit_results]): return None return deploymentconfig.yaml_dict def needs_update(self): ''' check to see if we need to update ''' exclude_list = ['clusterIP', 'portalIP', 'type', 'protocol'] if self.service is None or \ not Utils.check_def_equal(self.prepared_registry['service'].yaml_dict, self.service.yaml_dict, exclude_list, debug=self.verbose): self.prepared_registry['service_update'] = True exclude_list = ['dnsPolicy', 'terminationGracePeriodSeconds', 'restartPolicy', 'timeoutSeconds', 'livenessProbe', 'readinessProbe', 'terminationMessagePath', 'securityContext', 'imagePullPolicy', 'protocol', # ports.portocol: TCP 'type', # strategy: {'type': 'rolling'} 'defaultMode', # added on secrets 'activeDeadlineSeconds', # added in 1.5 for timeouts ] if self.deploymentconfig is None or \ not Utils.check_def_equal(self.prepared_registry['deployment'].yaml_dict, self.deploymentconfig.yaml_dict, exclude_list, debug=self.verbose): self.prepared_registry['deployment_update'] = True return self.prepared_registry['deployment_update'] or self.prepared_registry['service_update'] or False # In the future, we would like to break out each ansible state into a function. 
# pylint: disable=too-many-branches,too-many-return-statements @staticmethod def run_ansible(params, check_mode): '''run idempotent ansible code''' registry_options = {'images': {'value': params['images'], 'include': True}, 'latest_images': {'value': params['latest_images'], 'include': True}, 'labels': {'value': params['labels'], 'include': True}, 'ports': {'value': ','.join(params['ports']), 'include': True}, 'replicas': {'value': params['replicas'], 'include': True}, 'selector': {'value': params['selector'], 'include': True}, 'service_account': {'value': params['service_account'], 'include': True}, 'mount_host': {'value': params['mount_host'], 'include': True}, 'env_vars': {'value': params['env_vars'], 'include': False}, 'volume_mounts': {'value': params['volume_mounts'], 'include': False}, 'edits': {'value': params['edits'], 'include': False}, 'tls_key': {'value': params['tls_key'], 'include': True}, 'tls_certificate': {'value': params['tls_certificate'], 'include': True}, } # Do not always pass the daemonset and enforce-quota parameters because they are not understood # by old versions of oc. # Default value is false. So, it's safe to not pass an explicit false value to oc versions which # understand these parameters. 
if params['daemonset']: registry_options['daemonset'] = {'value': params['daemonset'], 'include': True} if params['enforce_quota']: registry_options['enforce_quota'] = {'value': params['enforce_quota'], 'include': True} rconfig = RegistryConfig(params['name'], params['namespace'], params['kubeconfig'], registry_options) ocregistry = Registry(rconfig, params['debug']) api_rval = ocregistry.get() state = params['state'] ######## # get ######## if state == 'list': if api_rval['returncode'] != 0: return {'failed': True, 'msg': api_rval} return {'changed': False, 'results': api_rval, 'state': state} ######## # Delete ######## if state == 'absent': if not ocregistry.exists(): return {'changed': False, 'state': state} if check_mode: return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a delete.'} # Unsure as to why this is angry with the return type. # pylint: disable=redefined-variable-type api_rval = ocregistry.delete() if api_rval['returncode'] != 0: return {'failed': True, 'msg': api_rval} return {'changed': True, 'results': api_rval, 'state': state} if state == 'present': ######## # Create ######## if not ocregistry.exists(): if check_mode: return {'changed': True, 'msg': 'CHECK_MODE: Would have performed a create.'} api_rval = ocregistry.create() if api_rval['returncode'] != 0: return {'failed': True, 'msg': api_rval} return {'changed': True, 'results': api_rval, 'state': state} ######## # Update ######## if not params['force'] and not ocregistry.needs_update(): return {'changed': False, 'state': state} if check_mode: return {'changed': True, 'msg': 'CHECK_MODE: Would have performed an update.'} api_rval = ocregistry.update() if api_rval['returncode'] != 0: return {'failed': True, 'msg': api_rval} return {'changed': True, 'results': api_rval, 'state': state} return {'failed': True, 'msg': 'Unknown state passed. 
%s' % state} # -*- -*- -*- End included fragment: class/oc_adm_registry.py -*- -*- -*- # -*- -*- -*- Begin included fragment: ansible/oc_adm_registry.py -*- -*- -*- def main(): ''' ansible oc module for registry ''' module = AnsibleModule( argument_spec=dict( state=dict(default='present', type='str', choices=['present', 'absent']), debug=dict(default=False, type='bool'), namespace=dict(default='default', type='str'), name=dict(default=None, required=True, type='str'), kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'), images=dict(default=None, type='str'), latest_images=dict(default=False, type='bool'), labels=dict(default=None, type='dict'), ports=dict(default=['5000'], type='list'), replicas=dict(default=1, type='int'), selector=dict(default=None, type='str'), service_account=dict(default='registry', type='str'), mount_host=dict(default=None, type='str'), volume_mounts=dict(default=None, type='list'), env_vars=dict(default={}, type='dict'), edits=dict(default=[], type='list'), enforce_quota=dict(default=False, type='bool'), force=dict(default=False, type='bool'), daemonset=dict(default=False, type='bool'), tls_key=dict(default=None, type='str'), tls_certificate=dict(default=None, type='str'), ), supports_check_mode=True, ) results = Registry.run_ansible(module.params, module.check_mode) if 'failed' in results: module.fail_json(**results) module.exit_json(**results) if __name__ == '__main__': main() # -*- -*- -*- End included fragment: ansible/oc_adm_registry.py -*- -*- -*-
DG-i/openshift-ansible
roles/lib_openshift/library/oc_adm_registry.py
Python
apache-2.0
94,103
<?php

namespace Google\AdsApi\AdManager\v202111;

/**
 * Ad Manager API error carrying a precision-related {@see $reason} code
 * (exact reason semantics are defined by the service WSDL — TODO confirm
 * against the v202111 PrecisionError.Reason enumeration).
 *
 * This file was generated from WSDL. DO NOT EDIT.
 */
class PrecisionError extends \Google\AdsApi\AdManager\v202111\ApiError
{

    /**
     * @var string $reason
     */
    protected $reason = null;

    /**
     * All parameters are optional; unset values stay null.
     *
     * @param string $fieldPath
     * @param \Google\AdsApi\AdManager\v202111\FieldPathElement[] $fieldPathElements
     * @param string $trigger
     * @param string $errorString
     * @param string $reason
     */
    public function __construct($fieldPath = null, array $fieldPathElements = null, $trigger = null, $errorString = null, $reason = null)
    {
        // Common ApiError fields are handled by the parent; only $reason is local.
        parent::__construct($fieldPath, $fieldPathElements, $trigger, $errorString);
        $this->reason = $reason;
    }

    /**
     * @return string
     */
    public function getReason()
    {
        return $this->reason;
    }

    /**
     * Fluent setter: returns $this for chaining.
     *
     * @param string $reason
     * @return \Google\AdsApi\AdManager\v202111\PrecisionError
     */
    public function setReason($reason)
    {
        $this->reason = $reason;
        return $this;
    }

}
googleads/googleads-php-lib
src/Google/AdsApi/AdManager/v202111/PrecisionError.php
PHP
apache-2.0
1,074
package com.zxinsight.classifier.ruleengine.admin; import java.rmi.RemoteException; import java.util.Map; import javax.rules.admin.LocalRuleExecutionSetProvider; import javax.rules.admin.RuleAdministrator; import javax.rules.admin.RuleExecutionSet; import javax.rules.admin.RuleExecutionSetDeregistrationException; import javax.rules.admin.RuleExecutionSetProvider; import javax.rules.admin.RuleExecutionSetRegisterException; @SuppressWarnings("rawtypes") public class RuleAdministratorImpl implements RuleAdministrator { @Override public void deregisterRuleExecutionSet(String bindUri, Map properties) throws RuleExecutionSetDeregistrationException, RemoteException { RuleExecutionSetRepository repository = RuleExecutionSetRepository .getInstance(); if (repository.getRuleExecutionSet(bindUri) == null) { throw new RuleExecutionSetDeregistrationException( "no execution set bound to: " + bindUri); } repository.unregisterRuleExecutionSet(bindUri); } @Override public LocalRuleExecutionSetProvider getLocalRuleExecutionSetProvider( Map properties) throws RemoteException { return new LocalRuleExecutionSetProviderImple(); } @Override public RuleExecutionSetProvider getRuleExecutionSetProvider(Map properties) throws RemoteException { return new RuleExecutionSetProviderImpl(); } @Override public void registerRuleExecutionSet(String bindUri, RuleExecutionSet ruleExecutionSet, Map properties) throws RuleExecutionSetRegisterException, RemoteException { RuleExecutionSetRepository repository = RuleExecutionSetRepository .getInstance(); repository.registerRuleExecutionSet(bindUri, ruleExecutionSet); } }
kevin-ww/commentClassifier
src/main/java/com/zxinsight/classifier/ruleengine/admin/RuleAdministratorImpl.java
Java
apache-2.0
1,738
'use strict';

// AngularJS application module for the playground UI.
// NOTE: the injectable parameter names in the .config() function below are
// significant to AngularJS dependency injection — do not rename or minify
// them without ng-annotate/array-style annotations.
angular.module('playgroundApp', [
  'playgroundApp.filters',
  'playgroundApp.services',
  'playgroundApp.directives',
  'ngRoute',
  'ui.bootstrap',
  'ui',
])

.config(function($locationProvider, $routeProvider, $httpProvider, $dialogProvider) {

  // Use HTML5 history-API URLs (no "#" prefix).
  $locationProvider.html5Mode(true);

  // TODO: add list of promises to be resolved for injection
  // TODO: resolved promises are injected into controller
  // TODO: see http://www.youtube.com/watch?v=P6KITGRQujQ
  // MainController / ProjectController are presumably globals defined in
  // another script bundle — TODO confirm load order.
  $routeProvider
  .when('/playground/', {
    templateUrl: '/playground/main.html',
    controller: MainController,
  })
  .when('/playground/p/:project_id/', {
    templateUrl: '/playground/project.html',
    controller: ProjectController,
    reloadOnSearch: false,
  });

  // Intercepts every $http request/response (error handling, auth, etc. —
  // see the pgHttpInterceptor service for specifics).
  $httpProvider.interceptors.push('pgHttpInterceptor');

  // TODO: test these defaults?
  $dialogProvider.options({
    backdropFade: true,
    modalFade: true,
  });
})

// Configuration consumed by the angular-ui CodeMirror directive.
.value('ui.config', {
  codemirror: {
    lineNumbers: true,
    matchBrackets: true,
    autofocus: true,
    undoDepth: 440, // default = 40
  }
});
jackpunt/playground
app/js/app.js
JavaScript
apache-2.0
1,117
package trendli.me.makhana.common.entities; import java.util.Arrays; import java.util.Collections; import java.util.List; public enum ActionType { MOVE( "Moving", "newTile" ), FABRICATING( "Fabricating" ); private final String verb; private final List< String > dataKeys; private ActionType( String verb, String... dataKeys ) { this.verb = verb; if ( dataKeys != null ) { this.dataKeys = Arrays.asList( dataKeys ); } else { this.dataKeys = Collections.emptyList( ); } } /** * @return the dataKeys */ public List< String > getDataKeys( ) { return dataKeys; } /** * @return the verb */ public String getVerb( ) { return verb; } }
elliottmb/makhana
common/src/main/java/trendli/me/makhana/common/entities/ActionType.java
Java
apache-2.0
806
/*
Copyright 2020 Google LLC

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

	https://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package kilt

import (
	"github.com/google/kilt/pkg/rework"

	log "github.com/golang/glog"
	"github.com/spf13/cobra"
)

// reworkCmd is the `kilt rework` subcommand. The actual behavior is selected
// by mutually exclusive flags (--begin/--finish/--abort/...) in runRework.
var reworkCmd = &cobra.Command{
	Use:   "rework",
	Short: "Rework the patches belonging to patchsets",
	Long: `Rework patchsets, allowing patches to be redistributed and re-ordered in the branch.

The rework command will create a working area detached form the current kilt branch where
modifications can be staged without changing the original branch. Kilt will examine the patchsets in
the branch and determine which patches belonging to patchsets need to be reworked, and create a
queue of operations that the user will drive. The user can also perform other rework-related
operations, such as re-ordering or merging patches. Once the user is finished, kilt will verify
that the rework is valid, and modify the previous kilt branch to point to the result of the rework.

A rework is considered valid if the end state is identical to the initial state -- the diff between
them is empty.`,
	Args: argsRework,
	Run:  runRework,
}

// reworkFlags holds the parsed command-line flags. Exactly one of the
// operation flags is expected to be set; runRework picks the first match
// in its switch (finish > abort > skip > validate > continue > begin).
var reworkFlags = struct {
	begin     bool
	finish    bool
	validate  bool
	rContinue bool
	abort     bool
	skip      bool
	force     bool
	auto      bool
	patchsets []string
	all       bool
}{}

func init() {
	rootCmd.AddCommand(reworkCmd)
	// --begin defaults to true and is hidden: it is the implicit operation
	// when no other operation flag is given.
	reworkCmd.Flags().BoolVar(&reworkFlags.begin, "begin", true, "begin rework")
	reworkCmd.Flags().MarkHidden("begin")
	reworkCmd.Flags().BoolVar(&reworkFlags.finish, "finish", false, "validate and finish rework")
	reworkCmd.Flags().BoolVar(&reworkFlags.abort, "abort", false, "abort rework")
	reworkCmd.Flags().BoolVarP(&reworkFlags.force, "force", "f", false, "when finishing, force finish rework, regardless of validation")
	reworkCmd.Flags().BoolVar(&reworkFlags.validate, "validate", false, "validate rework")
	reworkCmd.Flags().BoolVar(&reworkFlags.rContinue, "continue", false, "continue rework")
	reworkCmd.Flags().BoolVar(&reworkFlags.skip, "skip", false, "skip rework step")
	reworkCmd.Flags().BoolVar(&reworkFlags.auto, "auto", false, "attempt to automatically complete rework")
	reworkCmd.Flags().BoolVarP(&reworkFlags.all, "all", "a", false, "specify all patchsets for rework")
	reworkCmd.Flags().StringSliceVarP(&reworkFlags.patchsets, "patchset", "p", nil, "specify individual patchset for rework")
}

// argsRework accepts any positional arguments (no validation performed).
func argsRework(*cobra.Command, []string) error {
	return nil
}

// runRework dispatches to the rework.Command constructor matching the first
// set operation flag, then executes it (fully with --auto/--finish, or a
// single step otherwise) and persists the rework state.
func runRework(cmd *cobra.Command, args []string) {
	var c *rework.Command
	var err error
	switch {
	case reworkFlags.finish:
		// Finishing implies driving the remaining queue to completion.
		reworkFlags.auto = true
		c, err = rework.NewFinishCommand(reworkFlags.force)
	case reworkFlags.abort:
		c, err = rework.NewAbortCommand()
	case reworkFlags.skip:
		c, err = rework.NewSkipCommand()
	case reworkFlags.validate:
		c, err = rework.NewValidateCommand()
	case reworkFlags.rContinue:
		c, err = rework.NewContinueCommand()
	case reworkFlags.begin:
		// FloatingTargets are always reworked; add either all patchsets or
		// just the ones named via --patchset.
		targets := []rework.TargetSelector{rework.FloatingTargets{}}
		if reworkFlags.all {
			targets = append(targets, rework.AllTargets{})
		} else if len(reworkFlags.patchsets) > 0 {
			for _, p := range reworkFlags.patchsets {
				targets = append(targets, rework.PatchsetTarget{Name: p})
			}
		}
		c, err = rework.NewBeginCommand(targets...)
	default:
		log.Exitf("No operation specified")
	}
	if err != nil {
		log.Exitf("Rework failed: %v", err)
	}
	if reworkFlags.auto {
		err = c.ExecuteAll()
	} else {
		err = c.Execute()
	}
	if err != nil {
		// NOTE(review): execution failure is logged but not fatal —
		// presumably intentional so the state below is still saved; confirm.
		log.Errorf("Rework failed: %v", err)
	}
	if err = c.Save(); err != nil {
		log.Exitf("Failed to save rework state: %v", err)
	}
}
google/kilt
pkg/cmd/kilt/rework.go
GO
apache-2.0
4,097
/*
 * Copyright 2014-2015 Nikos Grammatikos
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://raw.githubusercontent.com/nikosgram13/OglofusProtection/master/LICENSE
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package me.nikosgram.oglofus.protection;

import com.google.common.base.Optional;
import com.sk89q.intake.argument.ArgumentException;
import com.sk89q.intake.argument.ArgumentParseException;
import com.sk89q.intake.argument.CommandArgs;
import com.sk89q.intake.parametric.ProvisionException;
import me.nikosgram.oglofus.protection.api.ActionResponse;
import me.nikosgram.oglofus.protection.api.CommandExecutor;
import me.nikosgram.oglofus.protection.api.entity.User;
import me.nikosgram.oglofus.protection.api.message.MessageType;
import me.nikosgram.oglofus.protection.api.region.ProtectionRank;
import me.nikosgram.oglofus.protection.api.region.ProtectionRegion;
import me.nikosgram.oglofus.protection.api.region.ProtectionStaff;
import org.apache.commons.lang3.ClassUtils;
import org.spongepowered.api.entity.player.Player;
import org.spongepowered.api.service.user.UserStorage;
import org.spongepowered.api.util.command.CommandSource;

import javax.annotation.Nullable;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;

/**
 * Sponge-backed {@link ProtectionStaff}: the owner plus the officer/member
 * roster of one protection region, loaded eagerly from the
 * {@code oglofus_regions} storage via {@code sponge.connector}.
 *
 * NOTE(review): many mutation methods (invite/kick/promote/demote/changeRank
 * overloads) are unimplemented stubs that return {@code null} — callers must
 * not rely on them yet.
 */
public class OglofusProtectionStaff implements ProtectionStaff {
    // Roster loaded once in the constructor; not refreshed afterwards.
    private final List<User> staff = new ArrayList<User>();
    private final Map<UUID, ProtectionRank> ranks = new HashMap<UUID, ProtectionRank>();
    private final User owner;
    private final ProtectionRegion region;
    private final OglofusSponge sponge;

    /**
     * Loads the owner and the staff/rank map for {@code region} from storage.
     * NOTE(review): the chained {@code .get()} calls assume the owner row and
     * every staff user resolve successfully — a missing row would throw.
     */
    protected OglofusProtectionStaff(ProtectionRegion region, OglofusSponge sponge) {
        this.region = region;
        this.sponge = sponge;
        owner = sponge.getUserManager().getUser(UUID.fromString(sponge.connector.getString(
                "oglofus_regions", "uuid", region.getUuid().toString(), "owner"
        ).get())).get();
        // Map of player-uuid string -> rank name for this region.
        Map<String, String> staff = sponge.connector.getStringMap(
                "oglofus_regions", "uuid", region.getUuid().toString(), new String[]{"player", "rank"}
        );
        for (String uid : staff.keySet()) {
            UUID uuid = UUID.fromString(uid);
            this.staff.add(sponge.getUserManager().getUser(uuid).get());
            ranks.put(uuid, ProtectionRank.valueOf(staff.get(uid)));
        }
    }

    @Override
    public UUID getOwnerUuid() {
        return owner.getUuid();
    }

    @Override
    public User getOwner() {
        return owner;
    }

    /**
     * Adapts the owner to {@code tClass} (Player or User supported).
     * NOTE(review): the User branch casts {@code storage.get(...).orNull()}
     * (a User, possibly null) to {@code Optional<T>} — this looks like a bug
     * (likely meant {@code (Optional<T>) storage.get(owner.getUuid())});
     * confirm before relying on it.
     */
    @Override
    @SuppressWarnings("unchecked")
    public <T> Optional<T> getOwnerAs(Class<T> tClass) {
        if (ClassUtils.isAssignable(tClass, Player.class)) {
            return (Optional<T>) sponge.server.getPlayer(owner.getUuid());
        } else if (ClassUtils.isAssignable(tClass, User.class)) {
            UserStorage storage;
            if ((storage = sponge.game.getServiceManager().provide(UserStorage.class).orNull()) != null) {
                return (Optional<T>) storage.get(owner.getUuid()).orNull();
            }
        }
        return Optional.absent();
    }

    // Officers adapted to tClass; only Player adaptation is implemented —
    // any other type yields an empty list. Offline officers are skipped.
    @Override
    @SuppressWarnings("unchecked")
    public <T> Collection<T> getOfficersAs(Class<T> tClass) {
        List<T> returned = new ArrayList<T>();
        if (ClassUtils.isAssignable(tClass, Player.class)) {
            for (UUID uuid : getOfficersUuid()) {
                Player player;
                if ((player = sponge.server.getPlayer(uuid).orNull()) != null) {
                    returned.add((T) player);
                }
            }
        }
        return returned;
    }

    @Override
    public Collection<UUID> getOfficersUuid() {
        List<UUID> returned = new ArrayList<UUID>();
        for (User user : getOfficers()) {
            returned.add(user.getUuid());
        }
        return returned;
    }

    @Override
    public Collection<User> getOfficers() {
        List<User> returned = new ArrayList<User>();
        for (User user : this) {
            if (ranks.get(user.getUuid()).equals(ProtectionRank.Officer)) {
                returned.add(user);
            }
        }
        return returned;
    }

    // Same adaptation pattern as getOfficersAs, for members.
    @Override
    @SuppressWarnings("unchecked")
    public <T> Collection<T> getMembersAs(Class<T> tClass) {
        List<T> returned = new ArrayList<T>();
        if (ClassUtils.isAssignable(tClass, Player.class)) {
            for (UUID uuid : getMembersUuid()) {
                Player player;
                if ((player = sponge.server.getPlayer(uuid).orNull()) != null) {
                    returned.add((T) player);
                }
            }
        }
        return returned;
    }

    @Override
    public Collection<UUID> getMembersUuid() {
        List<UUID> returned = new ArrayList<UUID>();
        for (User user : getMembers()) {
            returned.add(user.getUuid());
        }
        return returned;
    }

    @Override
    public Collection<User> getMembers() {
        List<User> returned = new ArrayList<User>();
        for (User user : this) {
            if (ranks.get(user.getUuid()).equals(ProtectionRank.Member)) {
                returned.add(user);
            }
        }
        return returned;
    }

    // Whole staff (owner excluded — iteration covers the staff list only)
    // adapted to tClass; only Player adaptation implemented.
    @Override
    @SuppressWarnings("unchecked")
    public <T> Collection<T> getStaffAs(Class<T> tClass) {
        List<T> returned = new ArrayList<T>();
        if (ClassUtils.isAssignable(tClass, Player.class)) {
            for (User user : this) {
                Player player;
                if ((player = sponge.server.getPlayer(user.getUuid()).orNull()) != null) {
                    returned.add((T) player);
                }
            }
        }
        return returned;
    }

    @Override
    public Collection<UUID> getStaffUuid() {
        Collection<UUID> returned = new ArrayList<UUID>();
        for (User user : this) {
            returned.add(user.getUuid());
        }
        return returned;
    }

    // ---- rank predicates ------------------------------------------------

    @Override
    public boolean isOwner(UUID target) {
        return owner.getUuid().equals(target);
    }

    @Override
    public boolean isOwner(User target) {
        return owner.getUuid().equals(target.getUuid());
    }

    @Override
    public boolean isOfficer(UUID target) {
        return ranks.containsKey(target) && ranks.get(target).equals(ProtectionRank.Officer);
    }

    @Override
    public boolean isOfficer(User target) {
        return ranks.containsKey(target.getUuid()) && ranks.get(target.getUuid()).equals(ProtectionRank.Officer);
    }

    @Override
    public boolean isMember(UUID target) {
        return ranks.containsKey(target) && ranks.get(target).equals(ProtectionRank.Member);
    }

    @Override
    public boolean isMember(User target) {
        return ranks.containsKey(target.getUuid()) && ranks.get(target.getUuid()).equals(ProtectionRank.Member);
    }

    @Override
    public boolean isStaff(UUID target) {
        return ranks.containsKey(target);
    }

    @Override
    public boolean isStaff(User target) {
        return ranks.containsKey(target.getUuid());
    }

    // ---- access checks: rank OR a bypass permission ---------------------

    @Override
    public boolean hasOwnerAccess(UUID target) {
        return isOwner(target) || sponge.getUserManager().getUser(target).get().hasPermission("oglofus.protection.bypass.owner");
    }

    @Override
    public boolean hasOwnerAccess(User target) {
        return isOwner(target) || target.hasPermission("oglofus.protection.bypass.owner");
    }

    @Override
    public boolean hasOfficerAccess(UUID target) {
        return isOfficer(target) || sponge.getUserManager().getUser(target).get().hasPermission("oglofus.protection.bypass.officer");
    }

    @Override
    public boolean hasOfficerAccess(User target) {
        return isOfficer(target) || target.hasPermission("oglofus.protection.bypass.officer");
    }

    // NOTE(review): this UUID overload checks the ".officer" bypass
    // permission while the User overload below checks ".member" — looks like
    // a copy-paste bug; confirm intended permission node.
    @Override
    public boolean hasMemberAccess(UUID target) {
        return isMember(target) || sponge.getUserManager().getUser(target).get().hasPermission("oglofus.protection.bypass.officer");
    }

    @Override
    public boolean hasMemberAccess(User target) {
        return isMember(target) || target.hasPermission("oglofus.protection.bypass.member");
    }

    @Override
    public ProtectionRank getRank(UUID target) {
        return ranks.containsKey(target) ? ranks.get(target) : ProtectionRank.None;
    }

    @Override
    public ProtectionRank getRank(User target) {
        return ranks.containsKey(target.getUuid()) ? ranks.get(target.getUuid()) : ProtectionRank.None;
    }

    // ---- broadcasts -----------------------------------------------------

    @Override
    public void broadcast(String message) {
        broadcast(MessageType.CHAT, message);
    }

    @Override
    public void broadcast(String message, ProtectionRank rank) {
        broadcast(MessageType.CHAT, message, rank);
    }

    // Sends to every staff user (owner not included in the iterator).
    @Override
    public void broadcast(MessageType type, String message) {
        for (User user : this) {
            user.sendMessage(type, message);
        }
    }

    // Sends only to users of the given rank; Owner reaches just the owner.
    @Override
    public void broadcast(MessageType type, String message, ProtectionRank rank) {
        switch (rank) {
            case Member:
                for (User user : getMembers()) {
                    user.sendMessage(type, message);
                }
                break;
            case Officer:
                for (User user : getOfficers()) {
                    user.sendMessage(type, message);
                }
                break;
            case Owner:
                owner.sendMessage(type, message);
                break;
        }
    }

    @Override
    public void broadcastRaw(Object message) {
        for (User user : this) {
            user.sendMessage(message);
        }
    }

    @Override
    public void broadcastRaw(Object message, ProtectionRank rank) {
        switch (rank) {
            case Member:
                for (User user : getMembers()) {
                    user.sendMessage(message);
                }
                break;
            case Officer:
                for (User user : getOfficers()) {
                    user.sendMessage(message);
                }
                break;
            case Owner:
                owner.sendMessage(message);
                break;
        }
    }

    @Override
    public void broadcastRaw(MessageType type, Object message) {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public void broadcastRaw(MessageType type, Object message, ProtectionRank rank) {
        throw new UnsupportedOperationException("Not supported yet.");
    }

    @Override
    public ActionResponse reFlag() {
        //TODO: make it.
        return null;
    }

    // ---- invite / kick / promote / demote / changeRank ------------------
    // Only invite(Object, UUID), invite(UUID) and kick(Object, UUID) have
    // real logic; the remaining overloads are unimplemented stubs.

    @Override
    public ActionResponse invite(Object sender, UUID target) {
        return sponge.getUserManager().invite(sender, target, region);
    }

    @Override
    public ActionResponse invite(CommandExecutor sender, UUID target) {
        return null;
    }

    @Override
    public ActionResponse invite(Object sender, User target) {
        return null;
    }

    @Override
    public ActionResponse invite(CommandExecutor sender, User target) {
        return null;
    }

    @Override
    public ActionResponse invite(UUID target) {
        return sponge.getUserManager().invite(target, region);
    }

    @Override
    public ActionResponse invite(User target) {
        return null;
    }

    /**
     * Kicks {@code target} if {@code sender} is a player with owner access on
     * this region, or a non-player command source with the global bypass
     * permission. Failure messages: "access" (denied) / "object" (bad sender).
     */
    @Override
    public ActionResponse kick(Object sender, UUID target) {
        if (sender instanceof CommandSource) {
            if (sender instanceof Player) {
                if (region.getProtectionStaff().hasOwnerAccess(((Player) sender).getUniqueId())) {
                    //TODO: call the handler PlayerKickHandler.
                    return kick(target);
                }
                return ActionResponse.Failure.setMessage("access");
            }
            if (((CommandSource) sender).hasPermission("oglofus.protection.bypass")) {
                return kick(target);
            }
            return ActionResponse.Failure.setMessage("access");
        }
        return ActionResponse.Failure.setMessage("object");
    }

    @Override
    public ActionResponse kick(CommandExecutor sender, UUID target) {
        return null;
    }

    @Override
    public ActionResponse kick(Object sender, User target) {
        return null;
    }

    @Override
    public ActionResponse kick(CommandExecutor sender, User target) {
        return null;
    }

    @Override
    public ActionResponse kick(UUID target) {
        //TODO: call the handler PlayerKickHandler.
        return null;
    }

    @Override
    public ActionResponse kick(User target) {
        return null;
    }

    @Override
    public ActionResponse promote(Object sender, UUID target) {
        return null;
    }

    @Override
    public ActionResponse promote(CommandExecutor sender, UUID target) {
        return null;
    }

    @Override
    public ActionResponse promote(Object sender, User target) {
        return null;
    }

    @Override
    public ActionResponse promote(CommandExecutor sender, User target) {
        return null;
    }

    @Override
    public ActionResponse promote(UUID target) {
        return null;
    }

    @Override
    public ActionResponse promote(User target) {
        return null;
    }

    @Override
    public ActionResponse demote(Object sender, UUID target) {
        return null;
    }

    @Override
    public ActionResponse demote(CommandExecutor sender, UUID target) {
        return null;
    }

    @Override
    public ActionResponse demote(Object sender, User target) {
        return null;
    }

    @Override
    public ActionResponse demote(CommandExecutor sender, User target) {
        return null;
    }

    @Override
    public ActionResponse demote(UUID target) {
        return null;
    }

    @Override
    public ActionResponse demote(User target) {
        return null;
    }

    @Override
    public ActionResponse changeRank(Object sender, UUID target, ProtectionRank rank) {
        return null;
    }

    @Override
    public ActionResponse changeRank(CommandExecutor sender, UUID target, ProtectionRank rank) {
        return null;
    }

    @Override
    public ActionResponse changeRank(Object sender, User target, ProtectionRank rank) {
        return null;
    }

    @Override
    public ActionResponse changeRank(CommandExecutor sender, User target, ProtectionRank rank) {
        return null;
    }

    @Override
    public ActionResponse changeRank(UUID target, ProtectionRank rank) {
        return null;
    }

    @Override
    public ActionResponse changeRank(User target, ProtectionRank rank) {
        return null;
    }

    @Override
    public Iterator<User> iterator() {
        return staff.iterator();
    }

    @Override
    public boolean isProvided() {
        return false;
    }

    /**
     * Intake argument provider: resolves the next argument to a staff User by
     * name, throwing an ArgumentParseException if unknown or not staff.
     */
    @Nullable
    @Override
    public User get(CommandArgs arguments, List<? extends Annotation> modifiers) throws ArgumentException, ProvisionException {
        String name = arguments.next();
        Optional<User> user = sponge.getUserManager().getUser(name);
        if (user.isPresent() && isStaff(user.get())) {
            return user.get();
        } else {
            throw new ArgumentParseException(String.format("I can't find the Staff with name '%s'.", name));
        }
    }

    // Tab-completion: staff names starting with the given prefix.
    @Override
    public List<String> getSuggestions(String prefix) {
        List<String> returned = new ArrayList<String>();
        for (User user : this) {
            if (user.getName().startsWith(prefix)) {
                returned.add(user.getName());
            }
        }
        return returned;
    }
}
Oglofus/OglofusProtection
sponge/src/main/java/me/nikosgram/oglofus/protection/OglofusProtectionStaff.java
Java
apache-2.0
16,214
package commons; import org.makagiga.commons.ConfigFile; import org.makagiga.test.AbstractEnumTest; import org.makagiga.test.Test; import org.makagiga.test.TestMethod; import org.makagiga.test.Tester; @Test(className = ConfigFile.Format.class) public final class TestConfigFile_Format extends AbstractEnumTest<ConfigFile.Format> { // public public TestConfigFile_Format() { super( ConfigFile.Format.values(), ConfigFile.Format.DESKTOP, ConfigFile.Format.INI ); } @Test public void test_commons() { for (final ConfigFile.Format i : ConfigFile.Format.values()) { assertIllegalArgumentException(new Tester.Code() { public void run() throws Throwable { i.validateGroup(null); } } ); assertIllegalArgumentException(new Tester.Code() { public void run() throws Throwable { i.validateGroup(""); } } ); assertIllegalArgumentException(new Tester.Code() { public void run() throws Throwable { i.validateKey(null); } } ); assertIllegalArgumentException(new Tester.Code() { public void run() throws Throwable { i.validateKey(""); } } ); } final String LONG_VALUE = "AZaz09-"; final String SHORT_VALUE = "X"; // DESKTOP ConfigFile.Format f = ConfigFile.Format.DESKTOP; assertIllegalArgumentException(new Tester.Code() { public void run() throws Throwable { ConfigFile.Format.DESKTOP.validateGroup("["); } } ); assertIllegalArgumentException(new Tester.Code() { public void run() throws Throwable { ConfigFile.Format.DESKTOP.validateGroup("]"); } } ); assert f.validateGroup(SHORT_VALUE) == SHORT_VALUE; assert f.validateGroup(LONG_VALUE) == LONG_VALUE; assertIllegalArgumentException(new Tester.Code() { public void run() throws Throwable { ConfigFile.Format.DESKTOP.validateKey("="); } } ); assert f.validateKey(SHORT_VALUE) == SHORT_VALUE; assert f.validateKey(LONG_VALUE) == LONG_VALUE; f.validateGroup(" "); f.validateGroup("Foo Bar"); // INI f = ConfigFile.Format.INI; assert f.validateGroup(SHORT_VALUE) == SHORT_VALUE; assert f.validateGroup(LONG_VALUE) == LONG_VALUE; assert f.validateKey(SHORT_VALUE) == 
SHORT_VALUE; assert f.validateKey(LONG_VALUE) == LONG_VALUE; } @Test( methods = @TestMethod(name = "equals", parameters = "String, String") ) public void test_equals() { ConfigFile.Format f; f = ConfigFile.Format.DESKTOP; assert f.equals("foo", "foo"); assert !f.equals("foo", "FOO"); f = ConfigFile.Format.INI; assert f.equals("foo", "foo"); assert f.equals("foo", "FOO"); } @Test( methods = @TestMethod(name = "escape", parameters = "String") ) public void test_escape() { assertNull(ConfigFile.Format.escape(null)); assertEmpty(ConfigFile.Format.escape("")); assertEquals("\\tFoo\\sBar\\r\\nBaz\\\\", ConfigFile.Format.escape("\tFoo Bar\r\nBaz\\")); } @Test( methods = @TestMethod(name = "unescape", parameters = "String") ) public void test_unescape() { assertNull(ConfigFile.Format.unescape(null)); assertEmpty(ConfigFile.Format.unescape("")); assertEquals("Foo Bar", ConfigFile.Format.unescape("Foo Bar")); assertEquals("\tFoo Bar\r\nBaz\\", ConfigFile.Format.unescape("\\tFoo\\sBar\\r\\nBaz\\\\")); assertEquals("\n\n \\\\", ConfigFile.Format.unescape("\\n\\n\\s\\s\\\\\\\\")); } @Test( methods = @TestMethod(name = "getComment") ) public void test_getComment() { assert ConfigFile.Format.DESKTOP.getComment().equals("#"); assert ConfigFile.Format.INI.getComment().equals(";"); } @Test( methods = @TestMethod(name = "getEOL") ) public void test_getEOL() { assert ConfigFile.Format.DESKTOP.getEOL().equals("\n"); assert ConfigFile.Format.INI.getEOL().equals("\r\n"); } @Test( methods = @TestMethod(name = "getSuffix") ) public void test_getSuffix() { assert ConfigFile.Format.DESKTOP.getSuffix().equals(".desktop"); assert ConfigFile.Format.INI.getSuffix().equals(".ini"); } @Test( methods = @TestMethod(name = "isCaseSensitive") ) public void test_isCaseSensitive() { assert ConfigFile.Format.DESKTOP.isCaseSensitive(); assert !ConfigFile.Format.INI.isCaseSensitive(); } }
stuffer2325/Makagiga
test/src/commons/TestConfigFile_Format.java
Java
apache-2.0
4,186
package org.apache.rave.portal.service.impl; import org.apache.rave.model.ExcercicesHasTrainingPlan; import org.apache.rave.model.Serie; import org.apache.rave.model.TrainingPlan; import org.apache.rave.portal.repository.ExcercicesHasTrainingPlanRepository; import org.apache.rave.portal.repository.SerieRepository; import org.apache.rave.portal.repository.TrainingPlanRepository; import org.apache.rave.portal.service.TrainingPlanService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import java.util.ArrayList; import java.util.Collection; /** * Created by fhernandez on 23/09/14. */ @Service public class DefaultTrainingPlanService implements TrainingPlanService { private final Logger logger = LoggerFactory.getLogger(DefaultTrainingPlanService.class); private final TrainingPlanRepository trainingPlanRepository; private final ExcercicesHasTrainingPlanRepository exercisesHasTrainingPlanRepository; private final SerieRepository serieRepository; @Autowired public DefaultTrainingPlanService(TrainingPlanRepository trainingPlanRepository,ExcercicesHasTrainingPlanRepository exercisesHasTrainingPlanRepository,SerieRepository serieRepository) { this.trainingPlanRepository = trainingPlanRepository; this.exercisesHasTrainingPlanRepository = exercisesHasTrainingPlanRepository; this.serieRepository = serieRepository; } @Override @Transactional public TrainingPlan getById(Long trainingPlanId) { TrainingPlan trainingPlan =trainingPlanRepository.getById(trainingPlanId); if(trainingPlan!=null) { trainingPlan.getExercisesHasTrainingplans().size(); } return trainingPlan; } @Transactional public TrainingPlan save(TrainingPlan newPlan) { Collection<ExcercicesHasTrainingPlan> exerciseList=newPlan.getExercisesHasTrainingplans(); try { if(newPlan.getEntityId()==null) { newPlan = 
trainingPlanRepository.save(newPlan); } for (ExcercicesHasTrainingPlan exerciseHasTraining : exerciseList) { Serie serie = serieRepository.save(exerciseHasTraining.getSerie()); exerciseHasTraining.setSerie(serie); exerciseHasTraining.setSerieId(serie.getEntityId()); exerciseHasTraining.setTrainingplanId(newPlan.getEntityId()); exerciseHasTraining.setTrainingPlan(newPlan); } exercisesHasTrainingPlanRepository.saveList(exerciseList); }catch(Exception e){ logger.error("Exception saving plan " + e); } return newPlan; } public Collection<TrainingPlan> getByTrainerID(Long trainerId){ return trainingPlanRepository.getByTrainerID(trainerId); } }
lletsica/my_test_repo
rave-components/rave-core/src/main/java/org/apache/rave/portal/service/impl/DefaultTrainingPlanService.java
Java
apache-2.0
2,990
/* =========================================================================== Copyright 2002-2010 Martin Dvorak Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =========================================================================== */ package com.mindcognition.mindraider.ui.swing.trash; import java.awt.BorderLayout; import java.awt.GridLayout; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.FocusEvent; import java.util.HashMap; import javax.swing.JButton; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JToolBar; import javax.swing.JTree; import javax.swing.event.TreeExpansionEvent; import javax.swing.event.TreeExpansionListener; import javax.swing.event.TreeModelEvent; import javax.swing.event.TreeModelListener; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.event.TreeWillExpandListener; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.DefaultTreeModel; import javax.swing.tree.ExpandVetoException; import javax.swing.tree.MutableTreeNode; import javax.swing.tree.TreePath; import javax.swing.tree.TreeSelectionModel; import org.apache.commons.lang.ArrayUtils; import org.apache.log4j.Logger; import com.emental.mindraider.core.MindRaider; import com.emental.mindraider.core.rest.Metadata; import com.emental.mindraider.core.rest.ResourceDescriptor; import 
com.emental.mindraider.core.rest.resource.FolderResource; import com.emental.mindraider.core.rest.resource.OutlineResource; import com.emental.mindraider.ui.dialogs.ProgressDialogJFrame; import com.emental.mindraider.ui.gfx.IconsRegistry; import com.mindcognition.mindraider.application.model.label.LabelCustodianListener; import com.mindcognition.mindraider.l10n.Messages; import com.mindcognition.mindraider.ui.swing.dialogs.RestoreNotebookJDialog; import com.mindcognition.mindraider.ui.swing.explorer.ExplorerJPanel; import com.mindcognition.mindraider.utils.SwingWorker; public class TrashJPanel extends JPanel implements TreeWillExpandListener, TreeExpansionListener, LabelCustodianListener { private static final Logger logger = Logger.getLogger(TrashJPanel.class); public static final int LEVEL_ROOT = 0; public static final int LEVEL_FOLDERS = 1; public static final int LEVEL_NOTEBOOKS = 2; /* * UI components */ protected DefaultMutableTreeNode rootNode; protected DefaultTreeModel treeModel; protected final JTree tree; protected JButton undoButton, emptyButton, deleteButton; private Toolkit toolkit = Toolkit.getDefaultToolkit(); /* * model */ private HashMap treeNodeToResourceUriMap; /* * singleton */ private static TrashJPanel singleton; public static TrashJPanel getInstance() { if (singleton == null) { singleton = new TrashJPanel(); } return singleton; } private ResourceDescriptor[] discardedNotebooksDescriptors; /** * Constructor. 
*/ private TrashJPanel() { treeNodeToResourceUriMap = new HashMap(); rootNode = new DefaultMutableTreeNode(Messages.getString("TrashJPanel.notebookArchive")); treeModel = new DefaultTreeModel(rootNode); treeModel.addTreeModelListener(new MyTreeModelListener()); tree = new JTree(treeModel); tree.setEditable(false); tree.getSelectionModel().setSelectionMode( TreeSelectionModel.SINGLE_TREE_SELECTION); tree.addTreeExpansionListener(this); tree.addTreeWillExpandListener(this); tree.setShowsRootHandles(true); tree.putClientProperty("JTree.lineStyle", "Angled"); // tree rendered // TODO implement own renderer in order to tooltips tree.setCellRenderer(new TrashTreeCellRenderer(IconsRegistry .getImageIcon("trashFull.png"), IconsRegistry .getImageIcon("explorerNotebookIcon.png"))); setLayout(new BorderLayout()); // control panel JToolBar tp = new JToolBar(); tp.setLayout(new GridLayout(1, 6)); undoButton = new JButton("", IconsRegistry .getImageIcon("trashUndo.png")); undoButton.setEnabled(false); undoButton.setToolTipText("Restore Outline"); undoButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree .getLastSelectedPathComponent(); if (node == null) { return; } new RestoreNotebookJDialog( (String)treeNodeToResourceUriMap.get(node), "Restore Outline", "Restore", true); } }); tp.add(undoButton); deleteButton = new JButton("", IconsRegistry .getImageIcon("explorerDeleteSmall.png")); deleteButton.setToolTipText("Delete Outline"); deleteButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree .getLastSelectedPathComponent(); if (node == null) { return; } int result = JOptionPane.showConfirmDialog( MindRaider.mainJFrame, "Do you really want to DELETE this Outline?", "Delete Outline", JOptionPane.YES_NO_OPTION); if (result == JOptionPane.YES_OPTION) { MindRaider.labelCustodian 
.deleteOutline((String) treeNodeToResourceUriMap .get(node)); refresh(); ExplorerJPanel.getInstance().refresh(); } } }); tp.add(deleteButton); emptyButton = new JButton("", IconsRegistry .getImageIcon("trashEmpty.png")); emptyButton.setToolTipText(Messages.getString("TrashJPanel.emptyArchive")); emptyButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { int result = JOptionPane .showConfirmDialog( MindRaider.mainJFrame, "Do you really want to DELETE all discarded Outlines?", "Empty Trash", JOptionPane.YES_NO_OPTION); if (result == JOptionPane.YES_OPTION) { final SwingWorker worker = new SwingWorker() { public Object construct() { ProgressDialogJFrame progressDialogJFrame = new ProgressDialogJFrame( "Empty Trash", "<html><br>&nbsp;&nbsp;<b>Deleting:</b>&nbsp;&nbsp;</html>"); try { ResourceDescriptor[] resourceDescriptors = MindRaider.labelCustodian .getDiscardedOutlineDescriptors(); if (resourceDescriptors != null) { for (int i = 0; i < resourceDescriptors.length; i++) { MindRaider.labelCustodian .deleteOutline(resourceDescriptors[i] .getUri()); } refresh(); } } finally { if (progressDialogJFrame != null) { progressDialogJFrame.dispose(); } } return null; } }; worker.start(); } } }); tp.add(emptyButton); add(tp, BorderLayout.NORTH); // add the tree JScrollPane scrollPane = new JScrollPane(tree); add(scrollPane); // build the whole tree buildTree(); // click handler tree.addTreeSelectionListener(new TreeSelectionListener() { public void valueChanged(TreeSelectionEvent e) { DefaultMutableTreeNode node = (DefaultMutableTreeNode) tree .getLastSelectedPathComponent(); if (node == null) { return; } logger.debug("Tree selection path: " + node.getPath()[node.getLevel()]); enableDisableToolbarButtons(node.getLevel()); } }); } /** * Build tree. This method is called on startup and tree refresh in order to * reload disc content. Adding/removing of particular nodes during the * program run is performed on individual nodes. 
*/ void buildTree() { discardedNotebooksDescriptors = MindRaider.labelCustodian .getDiscardedOutlineDescriptors(); if (!ArrayUtils.isEmpty(discardedNotebooksDescriptors)) { for (int i = 0; i < discardedNotebooksDescriptors.length; i++) { addDiscardedNotebookNode(discardedNotebooksDescriptors[i] .getLabel(), discardedNotebooksDescriptors[i].getUri()); } // now expland all rows for (int i = 0; i < tree.getRowCount(); i++) { tree.expandRow(i); } } tree.setSelectionRow(0); enableDisableToolbarButtons(0); } /** * Add discarded notebook node. * * @param uri * notebook node. * @return the node. */ public DefaultMutableTreeNode addDiscardedNotebookNode(String label, String uri) { DefaultMutableTreeNode parent = null; Object child = label; DefaultMutableTreeNode childNode = new DefaultMutableTreeNode(child); // store node to map to be able to get URI from node object treeNodeToResourceUriMap.put(childNode, uri); if (parent == null) { parent = rootNode; } treeModel.insertNodeInto(childNode, parent, parent.getChildCount()); return childNode; } /** * Call this method in order to update the tree. */ public void refresh() { clear(); buildTree(); } /** * Move notebook up in the folder. * * @param notebookUri * @param folderUri */ protected boolean moveNotebookUp(String folderUri, String notebookUri) { logger.debug(" moveNotebookUp: " + folderUri + " " + notebookUri); if (folderUri != null && notebookUri != null) { try { // add notebook to folder boolean result = MindRaider.labelCustodian.moveNotebookUp( folderUri, notebookUri); // TODO PERFORMANCE move it just in the tree instead of refresh refresh(); return result; } catch (Exception e1) { logger.error("moveNotebookUp(String, String)", e1); JOptionPane.showMessageDialog(TrashJPanel.this, "Outline Manipulation Error", "Unable to move outline up: " + e1.getMessage(), JOptionPane.ERROR_MESSAGE); return false; } } logger.debug("Outline wont be added URIs are null!"); return false; } /** * Move notebook down in the folder. 
* * @param notebookUri * @param folderUri */ protected boolean moveNotebookDown(String folderUri, String notebookUri) { logger.debug(" moveNotebookDown: " + folderUri + " " + notebookUri); if (folderUri != null && notebookUri != null) { try { boolean result = MindRaider.labelCustodian.moveNotebookDown( folderUri, notebookUri); // TODO PERFORMANCE move it just in the tree instead of refresh refresh(); return result; } catch (Exception e1) { logger.error("moveNotebookDown(String, String)", e1); JOptionPane.showMessageDialog(TrashJPanel.this, "Outline Manipulation Error", "Unable to move outline down: " + e1.getMessage(), JOptionPane.ERROR_MESSAGE); return false; } } logger.debug("Outline wont be added URIs are null!"); return false; } /** * Add notebook node to folder node (on new notebook creation). * * @param notebookUri * newly created notebook URI. */ public void addNotebookToFolder(String notebookUri) { logger.debug(" URI of created notebook is: " + notebookUri); if (notebookUri != null) { // add notebook to selected folder TreePath treePath = tree.getSelectionPath(); String folderUri = (String) treeNodeToResourceUriMap.get(treePath .getLastPathComponent()); logger.debug("Enclosing folder URI is: " + folderUri); if (folderUri != null) { try { // add notebook to folder MindRaider.labelCustodian.addOutline(folderUri, notebookUri); // now add it in the tree OutlineResource notebookResource = MindRaider.outlineCustodian .getActiveOutlineResource(); addNotebookNode((DefaultMutableTreeNode) treePath .getLastPathComponent(), notebookResource.resource .getMetadata().getUri().toASCIIString(), notebookResource.getLabel()); } catch (Exception e1) { logger.error("addNotebookToFolder(String)", e1); JOptionPane.showMessageDialog(TrashJPanel.this, "Outline Creation Error", "Unable to add Outline to folder: " + e1.getMessage(), JOptionPane.ERROR_MESSAGE); return; } } } else { logger .debug("Outline wont be added to folder - it's URI is null!"); } } /** * Remove all nodes except 
the root node. */ public void clear() { rootNode.removeAllChildren(); treeModel.reload(); treeNodeToResourceUriMap.clear(); } /** * Remove the currently selected node. */ public void removeCurrentNode() { TreePath currentSelection = tree.getSelectionPath(); if (currentSelection != null) { DefaultMutableTreeNode currentNode = (DefaultMutableTreeNode) (currentSelection .getLastPathComponent()); MutableTreeNode parent = (MutableTreeNode) (currentNode.getParent()); if (parent != null) { treeModel.removeNodeFromParent(currentNode); return; } } // Either there was no selection, or the root was selected. toolkit.beep(); } /** * Add child to the currently selected node. */ public DefaultMutableTreeNode addObject(Object child) { DefaultMutableTreeNode parentNode = null; TreePath parentPath = tree.getSelectionPath(); if (parentPath == null) { parentNode = rootNode; } else { parentNode = (DefaultMutableTreeNode) (parentPath .getLastPathComponent()); } return addObject(parentNode, child, true); } public DefaultMutableTreeNode addObject(DefaultMutableTreeNode parent, Object child) { return addObject(parent, child, false); } /** * Add folder node. * * @param uri * folder URI. * @return the node. */ public DefaultMutableTreeNode addFolderNode(String uri) { DefaultMutableTreeNode parent = null; // get label from URI FolderResource resource = new FolderResource(MindRaider.labelCustodian .get(uri)); Object child = resource.getLabel(); DefaultMutableTreeNode childNode = new DefaultMutableTreeNode(child); // store node to map to be able to get URI from node object treeNodeToResourceUriMap.put(childNode, uri); if (parent == null) { parent = rootNode; } treeModel.insertNodeInto(childNode, parent, parent.getChildCount()); return childNode; } /** * Add notebook node. * * @param parent * folder node. * @param uri * notebook URI. * @param label * notebook label. * @return the node. 
*/ public DefaultMutableTreeNode addNotebookNode( DefaultMutableTreeNode parent, String uri, String label) { Object child = label; DefaultMutableTreeNode childNode = new DefaultMutableTreeNode(child); // store node to map to be able to get URI from node object treeNodeToResourceUriMap.put(childNode, uri); if (parent == null) { parent = rootNode; } treeModel.insertNodeInto(childNode, parent, parent.getChildCount()); return childNode; } /** * Add an child object to a parent object. * * @param parent * the parent object. * @param child * the child object. * @param shouldBeVisible * if <code>true</code> the object should be visible. * @return Returns a <code>DefaultMutableTreeNode</code> */ public DefaultMutableTreeNode addObject(DefaultMutableTreeNode parent, Object child, boolean shouldBeVisible) { DefaultMutableTreeNode childNode = new DefaultMutableTreeNode(child); if (parent == null) { parent = rootNode; } treeModel.insertNodeInto(childNode, parent, parent.getChildCount()); // Make sure the user can see the lovely new node. if (shouldBeVisible) { tree.scrollPathToVisible(new TreePath(childNode.getPath())); } return childNode; } /** * Custom MyTreeModelListerer class. */ class MyTreeModelListener implements TreeModelListener { /** * Logger for this class. */ private final Logger logger = Logger .getLogger(MyTreeModelListener.class); /** * @see javax.swing.event.TreeModelListener#treeNodesChanged(javax.swing.event.TreeModelEvent) */ public void treeNodesChanged(TreeModelEvent e) { DefaultMutableTreeNode node; node = (DefaultMutableTreeNode) (e.getTreePath() .getLastPathComponent()); /* * If the event lists children, then the changed node is the child * of the node we've already gotten. Otherwise, the changed node and * the specified node are the same. 
*/ // ToDo try { int index = e.getChildIndices()[0]; node = (DefaultMutableTreeNode) (node.getChildAt(index)); } catch (NullPointerException exc) { // } logger.debug("The user has finished editing the node."); logger.debug("New value: " + node.getUserObject()); } public void treeNodesInserted(TreeModelEvent e) { } public void treeNodesRemoved(TreeModelEvent e) { } public void treeStructureChanged(TreeModelEvent e) { } } public void treeCollapsed(TreeExpansionEvent e) { logger.debug("Tree colapsed event..." + e.getPath()); } /** * @see javax.swing.event.TreeExpansionListener#treeExpanded(javax.swing.event.TreeExpansionEvent) */ public void treeExpanded(TreeExpansionEvent e) { logger.debug("Tree expanded event..." + e.getPath()); } /** * @see javax.swing.event.TreeWillExpandListener#treeWillCollapse(javax.swing.event.TreeExpansionEvent) */ public void treeWillCollapse(TreeExpansionEvent e) throws ExpandVetoException { logger.debug("Tree will collapse " + e.getPath()); } /** * @see javax.swing.event.TreeWillExpandListener#treeWillExpand(javax.swing.event.TreeExpansionEvent) */ public void treeWillExpand(TreeExpansionEvent e) throws ExpandVetoException { logger.debug("Tree will expand " + e.getPath()); /* * DefaultMutableTreeNode node = (DefaultMutableTreeNode) * tree.getLastSelectedPathComponent(); if (node == null) { return; } * logger.debug(""+node.getPath()[node.getLevel()]); // buttons * disabling switch(node.getLevel()) { case LEVEL_FOLDERS: // disconnect * childrens from the node Enumeration enumeration=node.children(); // * delete nodes itself while (enumeration.hasMoreElements()) { Object * object=enumeration.nextElement(); * treeNodeToResourceUriMap.remove(object); * treeModel.removeNodeFromParent((MutableTreeNode)object); } // get * folder URI logger.debug("Expanding folder: * "+treeNodeToResourceUriMap.get(node)); FolderResource folder =new * FolderResource(MindRaider.folderCustodian.get((String)treeNodeToResourceUriMap.get(node))); * String[] 
notebookUris=folder.getNotebookUris(); if (notebookUris != * null) { for (int i= 0; i < notebookUris.length; i++) { * NotebookResource notebook=new * NotebookResource(MindRider.notebookCustodian.get(notebookUris[i])); * addNotebookNode(node,notebook.resource.metadata.uri.toASCIIString(),notebook.getLabel()); } } } */ } /** * @see com.emental.LabelCustodianListener.folder.FolderCustodianListener#folderCreated() */ public void labelCreated(FolderResource folder) { Metadata meta = folder.getResource().getMetadata(); logger.debug("Folder created: " + meta.getUri().toASCIIString()); // handle creation of the folder addFolderNode(meta.getUri().toASCIIString()); } /** * @see java.awt.event.FocusListener#focusGained(java.awt.event.FocusEvent) */ public void focusGained(FocusEvent arg0) { // TODO Auto-generated method stub } /** * Change status in the toolbar buttons. * * @param level * The level could be <code>LEVEL_ROOT</code> or * <code>LEVEL_FOLDERS</code> */ protected void enableDisableToolbarButtons(int level) { // buttons disabling switch (level) { case LEVEL_ROOT: undoButton.setEnabled(false); deleteButton.setEnabled(false); emptyButton.setEnabled(true); break; case LEVEL_FOLDERS: undoButton.setEnabled(true); deleteButton.setEnabled(true); emptyButton.setEnabled(true); break; } } private static final long serialVersionUID = 5028293540089775890L; }
dvorka/mindraider
mr7/src/main/java/com/mindcognition/mindraider/ui/swing/trash/TrashJPanel.java
Java
apache-2.0
24,481
package com.fpliu.newton.ui.list; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.GridView; /** * @author 792793182@qq.com 2017-06-30. */ public interface IGrid<T, V extends GridView> extends ICommon<T> { V getGridView(); void setItemAdapter(ItemAdapter<T> itemAdapter); ItemAdapter<T> getItemAdapter(); void setOnItemClickListener(AdapterView.OnItemClickListener listener); int getItemViewTypeCount(); int getItemViewType(int position); View getItemView(int position, View convertView, ViewGroup parent); void notifyDataSetChanged(); void setNumColumns(int numColumns); }
leleliu008/Android-List
library/src/main/java/com/fpliu/newton/ui/list/IGrid.java
Java
apache-2.0
688
/******************************************************************************* * Copyright (c) 2012-2013 University of Stuttgart. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and the Apache License 2.0 which both accompany this distribution, * and are available at http://www.eclipse.org/legal/epl-v10.html * and http://www.apache.org/licenses/LICENSE-2.0 * * Contributors: * Oliver Kopp - initial API and implementation *******************************************************************************/ /** * This package contains the REST resources * * Mostly, they produces Viewables, where a JSP and the current resource is * passed As the JSP itself handles plain Java objects and not Responses, the * resources have also methods returning POJOs. This might be ugly design, but * was quick to implement. * * The package structure is mirrored in src/main/webapp/jsp to ease finding the * JSPs belonging to a resource. * * The resources are <em>not</em> in line with the resource model of the TOSCA * container. Especially, we do not employ HATEOAS here. */ package org.eclipse.winery.repository.resources;
YannicSowoidnich/winery
org.eclipse.winery.repository/src/main/java/org/eclipse/winery/repository/resources/package-info.java
Java
apache-2.0
1,234
/** * Package: MAG - VistA Imaging WARNING: Per VHA Directive 2004-038, this routine should not be modified. Date Created: Jul 10, 2012 Site Name: Washington OI Field Office, Silver Spring, MD Developer: VHAISWWERFEJ Description: ;; +--------------------------------------------------------------------+ ;; Property of the US Government. ;; No permission to copy or redistribute this software is given. ;; Use of unreleased versions of this software requires the user ;; to execute a written test agreement with the VistA Imaging ;; Development Office of the Department of Veterans Affairs, ;; telephone (301) 734-0100. ;; ;; The Food and Drug Administration classifies this software as ;; a Class II medical device. As such, it may not be changed ;; in any way. Modifications to this software may result in an ;; adulterated medical device under 21CFR820, the use of which ;; is considered to be a violation of US Federal Statutes. ;; +--------------------------------------------------------------------+ */ package gov.va.med.imaging.pathology.rest.translator; import java.util.Date; import org.junit.Test; import static org.junit.Assert.*; /** * @author VHAISWWERFEJ * */ public class PathologyRestTranslatorTest { @Test public void testDateTranslation() { try { Date date = PathologyRestTranslator.translateDate("201207101435"); System.out.println("Date: " + date); } catch(Exception ex) { ex.printStackTrace(); fail(ex.getMessage()); } } }
VHAINNOVATIONS/Telepathology
Source/Java/PathologyWebApp/main/test/java/gov/va/med/imaging/pathology/rest/translator/PathologyRestTranslatorTest.java
Java
apache-2.0
1,621
/* * Trap * An anti-pryer server for better privacy * * This file is a part of Trap project * * Copyright 2016 Rain Lee <raincious@gmail.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package logger import ( "github.com/raincious/trap/trap/core/types" "bufio" "fmt" "time" ) type FilePrinter struct { writer *bufio.Writer writeCounts uint16 } func NewFilePrinter(w *bufio.Writer) (*FilePrinter, *types.Throw) { _, writeErr := w.Write([]byte("")) if writeErr != nil { return nil, types.ConvertError(writeErr) } return &FilePrinter{ writer: w, }, nil } func (l *FilePrinter) save(w types.String, c types.String, t time.Time, m types.String) { _, err := l.writer.WriteString(fmt.Sprintf("<%s> %s [%s]: %s\r\n", w, c, t.Format(time.StampMilli), m)) if err != nil { panic(fmt.Errorf("Can't write log file due to error: %s", err)) } l.writeCounts += 1 if l.writeCounts > 10 { l.writer.Flush() l.writeCounts = 0 } } func (l *FilePrinter) Info(c types.String, t time.Time, m types.String) { l.save("INF", c, t, m) } func (l *FilePrinter) Debug(c types.String, t time.Time, m types.String) { l.save("DBG", c, t, m) } func (l *FilePrinter) Warning(c types.String, t time.Time, m types.String) { l.save("WRN", c, t, m) } func (l *FilePrinter) Error(c types.String, t time.Time, m types.String) { l.save("ERR", c, t, m) } func (l *FilePrinter) Print(c types.String, t time.Time, m types.String) { l.save("DEF", c, t, m) }
raincious/trap
trap/logger/file.go
GO
apache-2.0
1,990
define([ 'jquery', 'underscore', 'backbone', 'app' ], function ( $, _, Backbone, app ) { var Models = {}, Collections = {}, Views = {}; Models.Project = Backbone.Model.extend(); Collections.Projects = Backbone.Model.extend({ model: Models.Project, url: function() { return app.api('projects/' + this.get('platform') + (this.get('uri') ? '/' + this.get('uri') : '')); }, parse: function(res) { return { projects: res }; } }); Models.Project = Backbone.Model.extend({ url: function() { return app.api('projects?' + this.get('params')); } }); return { Models: Models, Collections: Collections, Views: Views }; });
Redmart/os-mobilizer
frontend/source/js/modules/Projects.js
JavaScript
apache-2.0
666
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Train a ResNet-50 model on ImageNet on TPU.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import csv import os import re import sys import time from absl import app from absl import flags import tensorflow.compat.v1 as tf # For Cloud environment, add parent directory for imports sys.path.append(os.path.dirname(os.path.abspath(sys.path[0]))) from official.resnet import imagenet_input # pylint: disable=g-import-not-at-top from official.resnet import resnet_main from tensorflow.contrib import cluster_resolver as contrib_cluster_resolver from tensorflow.contrib import tpu as contrib_tpu from tensorflow.python.estimator import estimator FLAGS = tf.flags.FLAGS CKPT_PATTERN = r'model\.ckpt-(?P<gs>[0-9]+)\.data' flags.DEFINE_string( 'data_dir_small', default=None, help=('The directory where the resized (160x160) ImageNet input data is ' 'stored. This is only to be used in conjunction with the ' 'resnet_benchmark.py script.')) flags.DEFINE_bool( 'use_fast_lr', default=False, help=('Enabling this uses a faster learning rate schedule along with ' 'different image sizes in the input pipeline. 
This is only to be ' 'used in conjunction with the resnet_benchmark.py script.')) # Number of training and evaluation images in the standard ImageNet dataset NUM_TRAIN_IMAGES = 1281167 NUM_EVAL_IMAGES = 50000 def main(unused_argv): tpu_cluster_resolver = contrib_cluster_resolver.TPUClusterResolver( FLAGS.tpu, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project) config = contrib_tpu.RunConfig( cluster=tpu_cluster_resolver, model_dir=FLAGS.model_dir, save_checkpoints_steps=FLAGS.iterations_per_loop, keep_checkpoint_max=None, tpu_config=contrib_tpu.TPUConfig( iterations_per_loop=FLAGS.iterations_per_loop, num_shards=FLAGS.num_cores, per_host_input_for_training=contrib_tpu.InputPipelineConfig.PER_HOST_V2)) # pylint: disable=line-too-long # Input pipelines are slightly different (with regards to shuffling and # preprocessing) between training and evaluation. imagenet_train = imagenet_input.ImageNetInput( is_training=True, data_dir=FLAGS.data_dir, use_bfloat16=True, transpose_input=FLAGS.transpose_input) imagenet_eval = imagenet_input.ImageNetInput( is_training=False, data_dir=FLAGS.data_dir, use_bfloat16=True, transpose_input=FLAGS.transpose_input) if FLAGS.use_fast_lr: resnet_main.LR_SCHEDULE = [ # (multiplier, epoch to start) tuples (1.0, 4), (0.1, 21), (0.01, 35), (0.001, 43) ] imagenet_train_small = imagenet_input.ImageNetInput( is_training=True, image_size=128, data_dir=FLAGS.data_dir_small, num_parallel_calls=FLAGS.num_parallel_calls, use_bfloat16=True, transpose_input=FLAGS.transpose_input, cache=True) imagenet_eval_small = imagenet_input.ImageNetInput( is_training=False, image_size=128, data_dir=FLAGS.data_dir_small, num_parallel_calls=FLAGS.num_parallel_calls, use_bfloat16=True, transpose_input=FLAGS.transpose_input, cache=True) imagenet_train_large = imagenet_input.ImageNetInput( is_training=True, image_size=288, data_dir=FLAGS.data_dir, num_parallel_calls=FLAGS.num_parallel_calls, use_bfloat16=True, transpose_input=FLAGS.transpose_input) imagenet_eval_large = 
imagenet_input.ImageNetInput( is_training=False, image_size=288, data_dir=FLAGS.data_dir, num_parallel_calls=FLAGS.num_parallel_calls, use_bfloat16=True, transpose_input=FLAGS.transpose_input) resnet_classifier = contrib_tpu.TPUEstimator( use_tpu=FLAGS.use_tpu, model_fn=resnet_main.resnet_model_fn, config=config, train_batch_size=FLAGS.train_batch_size, eval_batch_size=FLAGS.eval_batch_size) if FLAGS.mode == 'train': current_step = estimator._load_global_step_from_checkpoint_dir(FLAGS.model_dir) # pylint: disable=protected-access,line-too-long batches_per_epoch = NUM_TRAIN_IMAGES / FLAGS.train_batch_size tf.logging.info('Training for %d steps (%.2f epochs in total). Current' ' step %d.' % (FLAGS.train_steps, FLAGS.train_steps / batches_per_epoch, current_step)) start_timestamp = time.time() # This time will include compilation time # Write a dummy file at the start of training so that we can measure the # runtime at each checkpoint from the file write time. tf.gfile.MkDir(FLAGS.model_dir) if not tf.gfile.Exists(os.path.join(FLAGS.model_dir, 'START')): with tf.gfile.GFile(os.path.join(FLAGS.model_dir, 'START'), 'w') as f: f.write(str(start_timestamp)) if FLAGS.use_fast_lr: small_steps = int(18 * NUM_TRAIN_IMAGES / FLAGS.train_batch_size) normal_steps = int(41 * NUM_TRAIN_IMAGES / FLAGS.train_batch_size) large_steps = int(min(50 * NUM_TRAIN_IMAGES / FLAGS.train_batch_size, FLAGS.train_steps)) resnet_classifier.train( input_fn=imagenet_train_small.input_fn, max_steps=small_steps) resnet_classifier.train( input_fn=imagenet_train.input_fn, max_steps=normal_steps) resnet_classifier.train( input_fn=imagenet_train_large.input_fn, max_steps=large_steps) else: resnet_classifier.train( input_fn=imagenet_train.input_fn, max_steps=FLAGS.train_steps) else: assert FLAGS.mode == 'eval' start_timestamp = tf.gfile.Stat( os.path.join(FLAGS.model_dir, 'START')).mtime_nsec results = [] eval_steps = NUM_EVAL_IMAGES // FLAGS.eval_batch_size ckpt_steps = set() all_files = 
tf.gfile.ListDirectory(FLAGS.model_dir) for f in all_files: mat = re.match(CKPT_PATTERN, f) if mat is not None: ckpt_steps.add(int(mat.group('gs'))) ckpt_steps = sorted(list(ckpt_steps)) tf.logging.info('Steps to be evaluated: %s' % str(ckpt_steps)) for step in ckpt_steps: ckpt = os.path.join(FLAGS.model_dir, 'model.ckpt-%d' % step) batches_per_epoch = NUM_TRAIN_IMAGES // FLAGS.train_batch_size current_epoch = step // batches_per_epoch if FLAGS.use_fast_lr: if current_epoch < 18: eval_input_fn = imagenet_eval_small.input_fn if current_epoch >= 18 and current_epoch < 41: eval_input_fn = imagenet_eval.input_fn if current_epoch >= 41: # 41: eval_input_fn = imagenet_eval_large.input_fn else: eval_input_fn = imagenet_eval.input_fn end_timestamp = tf.gfile.Stat(ckpt + '.index').mtime_nsec elapsed_hours = (end_timestamp - start_timestamp) / (1e9 * 3600.0) tf.logging.info('Starting to evaluate.') eval_start = time.time() # This time will include compilation time eval_results = resnet_classifier.evaluate( input_fn=eval_input_fn, steps=eval_steps, checkpoint_path=ckpt) eval_time = int(time.time() - eval_start) tf.logging.info('Eval results: %s. Elapsed seconds: %d' % (eval_results, eval_time)) results.append([ current_epoch, elapsed_hours, '%.2f' % (eval_results['top_1_accuracy'] * 100), '%.2f' % (eval_results['top_5_accuracy'] * 100), ]) time.sleep(60) with tf.gfile.GFile(os.path.join(FLAGS.model_dir, 'results.tsv'), 'wb') as tsv_file: # pylint: disable=line-too-long writer = csv.writer(tsv_file, delimiter='\t') writer.writerow(['epoch', 'hours', 'top1Accuracy', 'top5Accuracy']) writer.writerows(results) if __name__ == '__main__': tf.logging.set_verbosity(tf.logging.INFO) app.run(main)
tensorflow/tpu
models/official/resnet/benchmark/resnet_benchmark.py
Python
apache-2.0
8,651
<?php
// Admin control panel — requires an authenticated session.
require_once "db_config.php";
session_start();

// Fixed: the redirect header must be "Location: ..." (the original
// "location : index.php" contains a space before the colon, which makes
// browsers ignore it), and the script must exit afterwards — otherwise
// the protected page body is still sent to anonymous visitors.
if (!array_key_exists('id', $_SESSION)) {
    header("Location: index.php");
    exit;
}
?>
<!DOCTYPE html>
<!--[if lt IE 7]>      <html class="no-js lt-ie9 lt-ie8 lt-ie7"> <![endif]-->
<!--[if IE 7]>         <html class="no-js lt-ie9 lt-ie8"> <![endif]-->
<!--[if IE 8]>         <html class="no-js lt-ie9"> <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js"> <!--<![endif]-->
    <head>
        <meta charset="utf-8">
        <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
        <title></title>
        <meta name="description" content="">
        <meta name="viewport" content="width=device-width">

        <link rel="stylesheet" href="css/bootstrap.min.css">
        <style>
            body {
                padding-top: 60px;
                padding-bottom: 40px;
            }
        </style>
        <link rel="stylesheet" href="css/bootstrap-responsive.min.css">
        <link rel="stylesheet" href="css/main.css">

        <script src="js/vendor/modernizr-2.6.2-respond-1.1.0.min.js"></script>
    </head>
    <body>
        <!--[if lt IE 7]>
            <p class="chromeframe">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> or <a href="http://www.google.com/chromeframe/?redirect=true">activate Google Chrome Frame</a> to improve your experience.</p>
        <![endif]-->

        <?php require_once "menu_func.php"; ?>

        <div class="container">

            <!-- Main hero unit for a primary marketing message or call to action -->
            <div class="hero-unit">
                <!-- htmlspecialchars added: session data must be escaped
                     before being echoed into HTML (XSS hardening). -->
                <h1>Добре дошъл <?php echo htmlspecialchars($_SESSION['user'], ENT_QUOTES, 'UTF-8'); ?></h1>
                <p>Да по едитнем малко ? </p>
            </div>

            <!-- Example row of columns -->
            <div class="row">
                <div class="span4">
                    <h2>Добавяне на Меню</h2>
                    <p>
                        <form class="navbar-form pull-right" action="addmenu.php" method="post">
                            <input class="span2" type="text" placeholder="Име" name="menu_name" id="menu_name"><br />
                            <input class="span2" type="text" placeholder="Линк" name="menu_addr" id="menu_addr"><br />
                            <select name="menu_type" id="menu_type" >
                                <option value ="top">Меню</option>
                                <option value ="dropdown">Drop</option>
                                <option value ="usermenu">User</option>
                            </select>
                            <br />
                            <input type="submit"class="btn" name="Submit" value="Submit" />
                        </form>
                    </p>
                </div>
            </div>

            <hr>

            <footer>
                <p>&copy; Company 2012</p>
            </footer>

        </div> <!-- /container -->

        <script src="//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
        <script>window.jQuery || document.write('<script src="js/vendor/jquery-1.9.1.min.js"><\/script>')</script>
        <script src="js/vendor/bootstrap.min.js"></script>
        <script src="js/main.js"></script>
        <script>
            var _gaq=[['_setAccount','UA-XXXXX-X'],['_trackPageview']];
            (function(d,t){var g=d.createElement(t),s=d.getElementsByTagName(t)[0];
            g.src=('https:'==location.protocol?'//ssl':'//www')+'.google-analytics.com/ga.js';
            s.parentNode.insertBefore(g,s)}(document,'script'));
        </script>
    </body>
</html>
PuloV/homepage
admin/acp.php
PHP
apache-2.0
6,900
package pl.matisoft.soy.config; import com.google.template.soy.jssrc.SoyJsSrcOptions; import com.google.template.soy.tofu.SoyTofuOptions; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.web.context.support.ServletContextResource; import org.springframework.web.servlet.ViewResolver; import pl.matisoft.soy.ContentNegotiator; import pl.matisoft.soy.DefaultContentNegotiator; import pl.matisoft.soy.SoyTemplateViewResolver; import pl.matisoft.soy.bundle.DefaultSoyMsgBundleResolver; import pl.matisoft.soy.bundle.SoyMsgBundleResolver; import pl.matisoft.soy.compile.DefaultTofuCompiler; import pl.matisoft.soy.compile.TofuCompiler; import pl.matisoft.soy.data.DefaultToSoyDataConverter; import pl.matisoft.soy.data.ToSoyDataConverter; import pl.matisoft.soy.data.adjust.ModelAdjuster; import pl.matisoft.soy.data.adjust.SpringModelAdjuster; import pl.matisoft.soy.global.compile.CompileTimeGlobalModelResolver; import pl.matisoft.soy.global.compile.EmptyCompileTimeGlobalModelResolver; import pl.matisoft.soy.global.runtime.EmptyGlobalRuntimeModelResolver; import pl.matisoft.soy.global.runtime.GlobalRuntimeModelResolver; import pl.matisoft.soy.holder.CompiledTemplatesHolder; import pl.matisoft.soy.holder.DefaultCompiledTemplatesHolder; import pl.matisoft.soy.locale.LocaleProvider; import pl.matisoft.soy.locale.SpringLocaleProvider; import pl.matisoft.soy.render.DefaultTemplateRenderer; import pl.matisoft.soy.render.TemplateRenderer; import pl.matisoft.soy.template.DefaultTemplateFilesResolver; import pl.matisoft.soy.template.TemplateFilesResolver; import javax.inject.Inject; import javax.servlet.ServletContext; /** * Created with IntelliJ IDEA. 
* User: mati * Date: 12/11/2013 * Time: 19:55 */ @Configuration public class SpringSoyViewBaseConfig { @Value("${soy.hot.reload.mode:false}") private boolean hotReloadMode; @Value("${soy.templates.resolve.recursively:true}") private boolean recursive; @Value("${soy.templates.file.extension:soy}") private String fileExtension; @Value("${soy.templates.directory:/WEB-INF/templates}") private String templatesPath; @Value("${soy.i18n.xliff.path:xliffs/messages}") private String messagesPath; @Value("${soy.encoding:utf-8}") private String encoding; @Value("${soy.i18n.fallback.to.english:true}") private boolean fallbackToEnglish; @Value("${soy.preCompile.templates:false}") private boolean preCompileTemplates; @Value("${soy.indexView:index}") private String indexView; @Value("${soy.logical.prefix:soy:}") private String logicalPrefix; @Value("${soy.resolver.order:2147483647}") private int order; @Inject private ServletContext servletContext; @Bean public LocaleProvider soyLocaleProvider() { return new SpringLocaleProvider(); } @Bean public DefaultTemplateFilesResolver soyTemplateFilesResolver() throws Exception { final DefaultTemplateFilesResolver defaultTemplateFilesResolver = new DefaultTemplateFilesResolver(); defaultTemplateFilesResolver.setHotReloadMode(hotReloadMode); defaultTemplateFilesResolver.setRecursive(recursive); defaultTemplateFilesResolver.setFilesExtension(fileExtension); defaultTemplateFilesResolver.setTemplatesLocation(new ServletContextResource(servletContext, templatesPath)); return defaultTemplateFilesResolver; } @Bean public CompileTimeGlobalModelResolver soyCompileTimeGlobalModelResolver() { return new EmptyCompileTimeGlobalModelResolver(); } @Bean public ToSoyDataConverter soyToSoyDataConverter() { return new DefaultToSoyDataConverter(); } @Bean public SoyJsSrcOptions soyJsSourceOptions() { return new SoyJsSrcOptions(); } @Bean public SoyTofuOptions soyTofuOptions() { final SoyTofuOptions soyTofuOptions = new SoyTofuOptions(); 
soyTofuOptions.setUseCaching(!hotReloadMode); return soyTofuOptions; } @Bean public TofuCompiler soyTofuCompiler(final CompileTimeGlobalModelResolver compileTimeGlobalModelResolver, final SoyJsSrcOptions soyJsSrcOptions, final SoyTofuOptions soyTofuOptions) { final DefaultTofuCompiler defaultTofuCompiler = new DefaultTofuCompiler(); defaultTofuCompiler.setHotReloadMode(hotReloadMode); defaultTofuCompiler.setCompileTimeGlobalModelResolver(compileTimeGlobalModelResolver); defaultTofuCompiler.setSoyJsSrcOptions(soyJsSrcOptions); defaultTofuCompiler.setSoyTofuOptions(soyTofuOptions); return defaultTofuCompiler; } @Bean public SoyMsgBundleResolver soyMsgBundleResolver() { final DefaultSoyMsgBundleResolver defaultSoyMsgBundleResolver = new DefaultSoyMsgBundleResolver(); defaultSoyMsgBundleResolver.setHotReloadMode(hotReloadMode); defaultSoyMsgBundleResolver.setMessagesPath(messagesPath); defaultSoyMsgBundleResolver.setFallbackToEnglish(fallbackToEnglish); return defaultSoyMsgBundleResolver; } @Bean public CompiledTemplatesHolder soyTemplatesHolder(final TemplateFilesResolver templateFilesResolver, final TofuCompiler tofuCompiler) throws Exception { final DefaultCompiledTemplatesHolder defaultCompiledTemplatesHolder = new DefaultCompiledTemplatesHolder(); defaultCompiledTemplatesHolder.setHotReloadMode(hotReloadMode); defaultCompiledTemplatesHolder.setPreCompileTemplates(preCompileTemplates); defaultCompiledTemplatesHolder.setTemplatesFileResolver(templateFilesResolver); defaultCompiledTemplatesHolder.setTofuCompiler(tofuCompiler); return defaultCompiledTemplatesHolder; } @Bean public TemplateRenderer soyTemplateRenderer(final ToSoyDataConverter toSoyDataConverter) { final DefaultTemplateRenderer defaultTemplateRenderer = new DefaultTemplateRenderer(); defaultTemplateRenderer.setHotReloadMode(hotReloadMode); defaultTemplateRenderer.setToSoyDataConverter(toSoyDataConverter); return defaultTemplateRenderer; } @Bean public ModelAdjuster soySpringModelAdjuster() { return new 
SpringModelAdjuster(); } @Bean public GlobalRuntimeModelResolver soyGlobalRuntimeModelResolver() { return new EmptyGlobalRuntimeModelResolver(); } @Bean public ContentNegotiator contentNegotiator() { return new DefaultContentNegotiator(); } @Bean public ViewResolver soyViewResolver(final CompiledTemplatesHolder compiledTemplatesHolder, final ModelAdjuster modelAdjuster, final TemplateRenderer templateRenderer, final LocaleProvider localeProvider, final GlobalRuntimeModelResolver globalRuntimeModelResolver, final ContentNegotiator contentNegotiator, final SoyMsgBundleResolver msgBundleResolver) throws Exception { final SoyTemplateViewResolver soyTemplateViewResolver = new SoyTemplateViewResolver(); soyTemplateViewResolver.setSoyMsgBundleResolver(msgBundleResolver); soyTemplateViewResolver.setCompiledTemplatesHolder(compiledTemplatesHolder); soyTemplateViewResolver.setEncoding(encoding); soyTemplateViewResolver.setGlobalRuntimeModelResolver(globalRuntimeModelResolver); soyTemplateViewResolver.setHotReloadMode(hotReloadMode); soyTemplateViewResolver.setIndexView(indexView); soyTemplateViewResolver.setLocaleProvider(localeProvider); soyTemplateViewResolver.setModelAdjuster(modelAdjuster); soyTemplateViewResolver.setTemplateRenderer(templateRenderer); soyTemplateViewResolver.setPrefix(logicalPrefix); soyTemplateViewResolver.setOrder(order); soyTemplateViewResolver.setRedirectContextRelative(true); soyTemplateViewResolver.setRedirectHttp10Compatible(true); soyTemplateViewResolver.setContentNegotiator(contentNegotiator); return soyTemplateViewResolver; } }
matiwinnetou/spring-soy-view
spring-soy-view/src/main/java/pl/matisoft/soy/config/SpringSoyViewBaseConfig.java
Java
apache-2.0
8,365
from socket import inet_ntoa
from struct import pack


def calcDottedNetmask(mask):
    """Convert a CIDR prefix length to a dotted-decimal netmask string.

    Fixed: the original used ``xrange``, which does not exist on Python 3;
    ``range`` works on both interpreters. Also validates the prefix length.

    :param mask: prefix length, an integer in [0, 32] (e.g. 24).
    :return: dotted-decimal netmask string (e.g. '255.255.255.0').
    :raises ValueError: if mask is outside [0, 32].
    """
    if not 0 <= mask <= 32:
        raise ValueError("prefix length must be in [0, 32], got %r" % (mask,))
    # Set the top `mask` bits of a 32-bit word.
    bits = 0
    for i in range(32 - mask, 32):
        bits |= (1 << i)
    # Pack as a big-endian unsigned int and render dotted-decimal.
    packed_value = pack('!I', bits)
    return inet_ntoa(packed_value)
openbmc/openbmc-test-automation
lib/pythonutil.py
Python
apache-2.0
245
//------------------------------------------------------------------------------------- // ExportXmlParser.cpp // // Simple callback non-validating XML parser implementation. // // Microsoft XNA Developer Connection. // Copyright © Microsoft Corporation. All rights reserved. //------------------------------------------------------------------------------------- #include "stdafx.h" #include "ExportXmlParser.h" namespace ATG { //------------------------------------------------------------------------------------- // Name: XMLParser::XMLParser //------------------------------------------------------------------------------------- XMLParser::XMLParser() { m_pWritePtr = m_pWriteBuf; m_pReadPtr = m_pReadBuf; m_pISAXCallback = NULL; m_hFile = INVALID_HANDLE_VALUE; } //------------------------------------------------------------------------------------- // Name: XMLParser::~XMLParser //------------------------------------------------------------------------------------- XMLParser::~XMLParser() { } //------------------------------------------------------------------------------------- // Name: XMLParser::FillBuffer // Desc: Reads a block from the current open file //------------------------------------------------------------------------------------- VOID XMLParser::FillBuffer() { DWORD NChars; m_pReadPtr = m_pReadBuf; if( m_hFile == NULL ) { if( m_uInXMLBufferCharsLeft > XML_READ_BUFFER_SIZE ) NChars = XML_READ_BUFFER_SIZE; else NChars = m_uInXMLBufferCharsLeft; CopyMemory( m_pReadBuf, m_pInXMLBuffer, NChars ); m_uInXMLBufferCharsLeft -= NChars; m_pInXMLBuffer += NChars; } else { ReadFile( m_hFile, m_pReadBuf, XML_READ_BUFFER_SIZE, &NChars, NULL ); } m_dwCharsConsumed += NChars; __int64 iProgress = ( (__int64)m_dwCharsConsumed * 1000 ) / (__int64)m_dwCharsTotal; m_pISAXCallback->SetParseProgress( (DWORD)iProgress ); m_pReadBuf[ NChars ] = '\0'; m_pReadBuf[ NChars + 1] = '\0'; } //------------------------------------------------------------------------------------- // Name: 
XMLParser::SkipNextAdvance // Desc: Puts the last character read back on the input stream //------------------------------------------------------------------------------------- VOID XMLParser::SkipNextAdvance() { m_bSkipNextAdvance = TRUE; } //------------------------------------------------------------------------------------- // Name: XMLParser::ConsumeSpace // Desc: Skips spaces in the current stream //------------------------------------------------------------------------------------- HRESULT XMLParser::ConsumeSpace() { HRESULT hr; // Skip spaces if( FAILED( hr = AdvanceCharacter() ) ) return hr; while ( ( m_Ch == ' ' ) || ( m_Ch == '\t' ) || ( m_Ch == '\n' ) || ( m_Ch == '\r' ) ) { if( FAILED( hr = AdvanceCharacter() ) ) return hr; } SkipNextAdvance(); return S_OK; } //------------------------------------------------------------------------------------- // Name: XMLParser::ConvertEscape // Desc: Copies and converts an escape sequence into m_pWriteBuf //------------------------------------------------------------------------------------- HRESULT XMLParser::ConvertEscape() { HRESULT hr; WCHAR wVal = 0; if( FAILED( hr = AdvanceCharacter() ) ) return hr; // all escape sequences start with &, so ignore the first character if( FAILED( hr = AdvanceCharacter() ) ) return hr; if ( m_Ch == '#' ) // character as hex or decimal { if( FAILED( hr = AdvanceCharacter() ) ) return hr; if ( m_Ch == 'x' ) // hex number { if( FAILED( hr = AdvanceCharacter() ) ) return hr; while ( m_Ch != ';' ) { wVal *= 16; if ( ( m_Ch >= '0' ) && ( m_Ch <= '9' ) ) { wVal += m_Ch - '0'; } else if ( ( m_Ch >= 'a' ) && ( m_Ch <= 'f' ) ) { wVal += m_Ch - 'a' + 10; } else if ( ( m_Ch >= 'A' ) && ( m_Ch <= 'F' ) ) { wVal += m_Ch - 'A' + 10; } else { Error( E_INVALID_XML_SYNTAX, "Expected hex digit as part of &#x escape sequence" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceCharacter() ) ) return hr; } } else // decimal number { while ( m_Ch != ';' ) { wVal *= 10; if ( ( m_Ch >= '0' ) && 
( m_Ch <= '9' ) ) { wVal += m_Ch - '0'; } else { Error( E_INVALID_XML_SYNTAX, "Expected decimal digit as part of &# escape sequence" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceCharacter() ) ) return hr; } } // copy character into the buffer m_Ch = wVal; return S_OK; } // must be an entity reference WCHAR *pEntityRefVal = m_pWritePtr; UINT EntityRefLen; SkipNextAdvance(); if( FAILED( hr = AdvanceName() ) ) return hr; EntityRefLen = (UINT)( m_pWritePtr - pEntityRefVal ); m_pWritePtr = pEntityRefVal; if ( EntityRefLen == 0 ) { Error( E_INVALID_XML_SYNTAX, "Expecting entity name after &" ); return E_INVALID_XML_SYNTAX; } if( !wcsncmp( pEntityRefVal, L"lt", EntityRefLen ) ) wVal = '<'; else if( !wcsncmp( pEntityRefVal, L"gt", EntityRefLen ) ) wVal = '>'; else if( !wcsncmp( pEntityRefVal, L"amp", EntityRefLen ) ) wVal = '&'; else if( !wcsncmp( pEntityRefVal, L"apos", EntityRefLen ) ) wVal = '\''; else if( !wcsncmp( pEntityRefVal, L"quot", EntityRefLen ) ) wVal = '"'; else { Error( E_INVALID_XML_SYNTAX, "Unrecognized entity name after & - (should be lt, gt, amp, apos, or quot)" ); return E_INVALID_XML_SYNTAX; // return false if unrecognized token sequence } if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != ';' ) { Error( E_INVALID_XML_SYNTAX, "Expected terminating ; for entity reference" ); return E_INVALID_XML_SYNTAX; // malformed reference - needs terminating ; } m_Ch = wVal; return S_OK; } //------------------------------------------------------------------------------------- // Name: XMLParser::AdvanceAttrVal // Desc: Copies an attribute value into m_pWrite buf, skipping surrounding quotes //------------------------------------------------------------------------------------- HRESULT XMLParser::AdvanceAttrVal() { HRESULT hr; WCHAR wQuoteChar; if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( ( m_Ch != '"' ) && ( m_Ch != '\'' ) ) { Error( E_INVALID_XML_SYNTAX, "Attribute values must be enclosed in quotes" ); return E_INVALID_XML_SYNTAX; 
} wQuoteChar = m_Ch; for( ;; ) { if( FAILED( hr = AdvanceCharacter() ) ) return hr; else if( m_Ch == wQuoteChar ) break; else if( m_Ch == '&' ) { SkipNextAdvance(); if( FAILED( hr = ConvertEscape() ) ) return hr; } else if( m_Ch == '<' ) { Error( E_INVALID_XML_SYNTAX, "Illegal character '<' in element tag" ); return E_INVALID_XML_SYNTAX; } // copy character into the buffer if( m_pWritePtr - m_pWriteBuf >= XML_WRITE_BUFFER_SIZE ) { Error( E_INVALID_XML_SYNTAX, "Total element tag size may not be more than %d characters", XML_WRITE_BUFFER_SIZE ); return E_INVALID_XML_SYNTAX; } *m_pWritePtr = m_Ch; m_pWritePtr++; } return S_OK; } //------------------------------------------------------------------------------------- // Name: XMLParser::AdvanceName // Desc: Copies a name into the m_pWriteBuf - returns TRUE on success, FALSE on failure // Ignores leading whitespace. Currently does not support unicode names //------------------------------------------------------------------------------------- HRESULT XMLParser::AdvanceName() { HRESULT hr; if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( ( ( m_Ch < 'A' ) || ( m_Ch > 'Z' ) ) && ( ( m_Ch < 'a' ) || ( m_Ch > 'z' ) ) && ( m_Ch != '_' ) && ( m_Ch != ':' ) ) { Error( E_INVALID_XML_SYNTAX, "Names must start with an alphabetic character or _ or :" ); return E_INVALID_XML_SYNTAX; } while( ( ( m_Ch >= 'A' ) && ( m_Ch <= 'Z' ) ) || ( ( m_Ch >= 'a' ) && ( m_Ch <= 'z' ) ) || ( ( m_Ch >= '0' ) && ( m_Ch <= '9' ) ) || ( m_Ch == '_' ) || ( m_Ch == ':' ) || ( m_Ch == '-' ) || ( m_Ch == '.' 
) ) { if( m_pWritePtr - m_pWriteBuf >= XML_WRITE_BUFFER_SIZE ) { Error( E_INVALID_XML_SYNTAX, "Total element tag size may not be more than %d characters", XML_WRITE_BUFFER_SIZE ); return E_INVALID_XML_SYNTAX; } *m_pWritePtr = m_Ch; m_pWritePtr++; if( FAILED( hr = AdvanceCharacter() ) ) return hr; } SkipNextAdvance(); return S_OK; } //------------------------------------------------------------------------------------- // Name: XMLParser::AdvanceCharacter // Desc: Copies the character at *m_pReadPtr to m_Ch // handling difference in UTF16 / UTF8, and big/little endian // and getting another chunk of the file if needed // Returns S_OK if there are more characters, E_ABORT for no characters to read //------------------------------------------------------------------------------------- HRESULT XMLParser::AdvanceCharacter( BOOL bOkToFail ) { if( m_bSkipNextAdvance ) { m_bSkipNextAdvance = FALSE; return S_OK; } // If we hit EOF in the middle of a character, // it's ok-- we'll just have a corrupt last character // (the buffer is padded with double NULLs ) if( *m_pReadPtr == '\0' ) { // Read more from the file FillBuffer(); // We are at EOF if it is still NULL if( *m_pReadPtr == '\0' ) { if( !bOkToFail ) { Error( E_INVALID_XML_SYNTAX, "Unexpected EOF while parsing XML file" ); return E_INVALID_XML_SYNTAX; } else { return E_FAIL; } } } if( m_bUnicode == FALSE ) { m_Ch = *((CHAR *)m_pReadPtr); m_pReadPtr++; } else // if( m_bUnicode == TRUE ) { m_Ch = *((WCHAR *)m_pReadPtr); if( m_bReverseBytes ) { m_Ch = ( m_Ch << 8 ) + ( m_Ch >> 8 ); } m_pReadPtr += 2; } if( m_Ch == '\n' ) { m_pISAXCallback->m_LineNum++; m_pISAXCallback->m_LinePos = 0; } else if( m_Ch != '\r' ) m_pISAXCallback->m_LinePos++; return S_OK; } //------------------------------------------------------------------------------------- // Name: XMLParser::AdvanceElement // Desc: Builds <element> data, calls callback //------------------------------------------------------------------------------------- HRESULT 
XMLParser::AdvanceElement() { HRESULT hr; // write ptr at the beginning of the buffer m_pWritePtr = m_pWriteBuf; if( FAILED( hr = AdvanceCharacter() ) ) return hr; // if first character wasn't '<', we wouldn't be here if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch == '!' ) { if( FAILED( hr = AdvanceCharacter() ) ) return hr; if ( m_Ch == '-' ) { if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != '-' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '-' after '<!-'" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceComment() ) ) return hr; return S_OK; } if( m_Ch != '[' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != 'C' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != 'D' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != 'A' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != 'T' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != 'A' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != '[' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '<![CDATA['" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = AdvanceCDATA() ) ) return hr; } else if( m_Ch == '/' ) { WCHAR *pEntityRefVal = m_pWritePtr; if( FAILED( hr = AdvanceName() ) ) return hr; if( FAILED( m_pISAXCallback->ElementEnd( pEntityRefVal, (UINT) ( m_pWritePtr - pEntityRefVal ) ) ) ) return E_ABORT; if( FAILED( hr = ConsumeSpace() ) ) return hr; if( FAILED( hr = AdvanceCharacter() ) 
) return hr; if( m_Ch != '>' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '>' after name for closing entity reference" ); return E_INVALID_XML_SYNTAX; } } else if( m_Ch == '?' ) { // just skip any xml header tag since not really important after identifying character set for( ;; ) { if( FAILED( hr = AdvanceCharacter() ) ) return hr; if ( m_Ch == '>' ) return S_OK; } } else { XMLAttribute Attributes[ XML_MAX_ATTRIBUTES_PER_ELEMENT ]; UINT NumAttrs; WCHAR *pEntityRefVal = m_pWritePtr; UINT EntityRefLen; NumAttrs = 0; SkipNextAdvance(); // Entity tag if( FAILED( hr = AdvanceName() ) ) return hr; EntityRefLen = (UINT)( m_pWritePtr - pEntityRefVal ); if( FAILED( hr = ConsumeSpace() ) ) return hr; if( FAILED( hr = AdvanceCharacter() ) ) return hr; // read attributes while( ( m_Ch != '>' ) && ( m_Ch != '/' ) ) { SkipNextAdvance(); if ( NumAttrs >= XML_MAX_ATTRIBUTES_PER_ELEMENT ) { Error( E_INVALID_XML_SYNTAX, "Elements may not have more than %d attributes", XML_MAX_ATTRIBUTES_PER_ELEMENT ); return E_INVALID_XML_SYNTAX; } Attributes[ NumAttrs ].strName = m_pWritePtr; // Attribute name if( FAILED( hr = AdvanceName() ) ) return hr; Attributes[ NumAttrs ].NameLen = (UINT)( m_pWritePtr - Attributes[ NumAttrs ].strName ); if( FAILED( hr = ConsumeSpace() ) ) return hr; if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != '=' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '=' character after attribute name" ); return E_INVALID_XML_SYNTAX; } if( FAILED( hr = ConsumeSpace() ) ) return hr; Attributes[ NumAttrs ].strValue = m_pWritePtr; if( FAILED( hr = AdvanceAttrVal() ) ) return hr; Attributes[ NumAttrs ].ValueLen = (UINT)( m_pWritePtr - Attributes[ NumAttrs ].strValue ); ++NumAttrs; if( FAILED( hr = ConsumeSpace() ) ) return hr; if( FAILED( hr = AdvanceCharacter() ) ) return hr; } if( m_Ch == '/' ) { if( FAILED( hr = AdvanceCharacter() ) ) return hr; if( m_Ch != '>' ) { Error( E_INVALID_XML_SYNTAX, "Expecting '>' after '/' in element tag" ); return E_INVALID_XML_SYNTAX; } 
if( FAILED( m_pISAXCallback->ElementBegin( pEntityRefVal, EntityRefLen, Attributes, NumAttrs ) ) ) return E_ABORT; if( FAILED( m_pISAXCallback->ElementEnd( pEntityRefVal, EntityRefLen ) ) ) return E_ABORT; } else { if( FAILED( m_pISAXCallback->ElementBegin( pEntityRefVal, EntityRefLen, Attributes, NumAttrs ) ) ) return E_ABORT; } } return S_OK; } //------------------------------------------------------------------------------------- // Name: XMLParser::AdvanceCDATA // Desc: Read a CDATA section //------------------------------------------------------------------------------------- HRESULT XMLParser::AdvanceCDATA() { HRESULT hr; WORD wStage = 0; if( FAILED( m_pISAXCallback->CDATABegin() ) ) return E_ABORT; for( ;; ) { if( FAILED( hr = AdvanceCharacter() ) ) return hr; *m_pWritePtr = m_Ch; m_pWritePtr++; if( ( m_Ch == ']' ) && ( wStage == 0 ) ) wStage = 1; else if( ( m_Ch == ']' ) && ( wStage == 1 ) ) wStage = 2; else if( ( m_Ch == '>' ) && ( wStage == 2 ) ) { m_pWritePtr -= 3; break; } else wStage = 0; if( m_pWritePtr - m_pWriteBuf >= XML_WRITE_BUFFER_SIZE ) { if( FAILED( m_pISAXCallback->CDATAData( m_pWriteBuf, (UINT)( m_pWritePtr - m_pWriteBuf ), TRUE ) ) ) return E_ABORT; m_pWritePtr = m_pWriteBuf; } } if( FAILED( m_pISAXCallback->CDATAData( m_pWriteBuf, (UINT)( m_pWritePtr - m_pWriteBuf ), FALSE ) ) ) return E_ABORT; m_pWritePtr = m_pWriteBuf; if( FAILED( m_pISAXCallback->CDATAEnd() ) ) return E_ABORT; return S_OK; } //------------------------------------------------------------------------------------- // Name: XMLParser::AdvanceComment // Desk: Skips over a comment //------------------------------------------------------------------------------------- HRESULT XMLParser::AdvanceComment() { HRESULT hr; WORD wStage; wStage = 0; for( ;; ) { if( FAILED( hr = AdvanceCharacter() ) ) return hr; if (( m_Ch == '-' ) && ( wStage == 0 )) wStage = 1; else if (( m_Ch == '-' ) && ( wStage == 1 )) wStage = 2; else if (( m_Ch == '>' ) && ( wStage == 2 )) break; else wStage = 
0; } return S_OK; } //------------------------------------------------------------------------------------- // Name: XMLParser::RegisterSAXCallbackInterface // Desc: Registers callback interface //------------------------------------------------------------------------------------- VOID XMLParser::RegisterSAXCallbackInterface( ISAXCallback *pISAXCallback ) { m_pISAXCallback = pISAXCallback; } //------------------------------------------------------------------------------------- // Name: XMLParser::GetSAXCallbackInterface // Desc: Returns current callback interface //------------------------------------------------------------------------------------- ISAXCallback* XMLParser::GetSAXCallbackInterface() { return m_pISAXCallback; } //------------------------------------------------------------------------------------- // Name: XMLParser::MainParseLoop // Desc: Main Loop to Parse Data - source agnostic //------------------------------------------------------------------------------------- HRESULT XMLParser::MainParseLoop() { BOOL bWhiteSpaceOnly = TRUE; HRESULT hr = S_OK; if( FAILED( m_pISAXCallback->StartDocument() ) ) return E_ABORT; m_pWritePtr = m_pWriteBuf; FillBuffer(); if ( *((WCHAR *) m_pReadBuf ) == 0xFEFF ) { m_bUnicode = TRUE; m_bReverseBytes = FALSE; m_pReadPtr += 2; } else if ( *((WCHAR *) m_pReadBuf ) == 0xFFFE ) { m_bUnicode = TRUE; m_bReverseBytes = TRUE; m_pReadPtr += 2; } else if ( *((WCHAR *) m_pReadBuf ) == 0x003C ) { m_bUnicode = TRUE; m_bReverseBytes = FALSE; } else if ( *((WCHAR *) m_pReadBuf ) == 0x3C00 ) { m_bUnicode = TRUE; m_bReverseBytes = TRUE; } else if ( m_pReadBuf[ 0 ] == 0x3C ) { m_bUnicode = FALSE; m_bReverseBytes = FALSE; } else { Error( E_INVALID_XML_SYNTAX, "Unrecognized encoding (parser does not support UTF-8 language encodings)" ); return E_INVALID_XML_SYNTAX; } for( ;; ) { if( FAILED( AdvanceCharacter( TRUE ) ) ) { if ( ( (UINT) ( m_pWritePtr - m_pWriteBuf ) != 0 ) && ( !bWhiteSpaceOnly ) ) { if( FAILED( 
m_pISAXCallback->ElementContent( m_pWriteBuf, (UINT)( m_pWritePtr - m_pWriteBuf ), FALSE ) ) ) return E_ABORT; bWhiteSpaceOnly = TRUE; } if( FAILED( m_pISAXCallback->EndDocument() ) ) return E_ABORT; return S_OK; } if( m_Ch == '<' ) { if( ( (UINT) ( m_pWritePtr - m_pWriteBuf ) != 0 ) && ( !bWhiteSpaceOnly ) ) { if( FAILED( m_pISAXCallback->ElementContent( m_pWriteBuf, (UINT)( m_pWritePtr - m_pWriteBuf ), FALSE ) ) ) return E_ABORT; bWhiteSpaceOnly = TRUE; } SkipNextAdvance(); m_pWritePtr = m_pWriteBuf; if( FAILED( hr = AdvanceElement() ) ) return hr; m_pWritePtr = m_pWriteBuf; } else { if( m_Ch == '&' ) { SkipNextAdvance(); if( FAILED( hr = ConvertEscape() ) ) return hr; } if( bWhiteSpaceOnly && ( m_Ch != ' ' ) && ( m_Ch != '\n' ) && ( m_Ch != '\r' ) && ( m_Ch != '\t' ) ) { bWhiteSpaceOnly = FALSE; } *m_pWritePtr = m_Ch; m_pWritePtr++; if( m_pWritePtr - m_pWriteBuf >= XML_WRITE_BUFFER_SIZE ) { if( !bWhiteSpaceOnly ) { if( FAILED( m_pISAXCallback->ElementContent( m_pWriteBuf, ( UINT ) ( m_pWritePtr - m_pWriteBuf ), TRUE ) ) ) { return E_ABORT; } } m_pWritePtr = m_pWriteBuf; bWhiteSpaceOnly = TRUE; } } } } //------------------------------------------------------------------------------------- // Name: XMLParser::ParseXMLFile // Desc: Builds element data //------------------------------------------------------------------------------------- HRESULT XMLParser::ParseXMLFile( CONST CHAR *strFilename ) { HRESULT hr; if( m_pISAXCallback == NULL ) return E_NOINTERFACE; m_pISAXCallback->m_LineNum = 1; m_pISAXCallback->m_LinePos = 0; m_pISAXCallback->m_strFilename = strFilename; // save this off only while we parse the file m_bSkipNextAdvance = FALSE; m_pReadPtr = m_pReadBuf; m_pReadBuf[ 0 ] = '\0'; m_pReadBuf[ 1 ] = '\0'; m_pInXMLBuffer = NULL; m_uInXMLBufferCharsLeft = 0; m_hFile = CreateFile( strFilename, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_FLAG_SEQUENTIAL_SCAN, NULL ); if( m_hFile == INVALID_HANDLE_VALUE ) { Error( E_COULD_NOT_OPEN_FILE, "Error 
opening file" ); hr = E_COULD_NOT_OPEN_FILE; } else { LARGE_INTEGER iFileSize; GetFileSizeEx( m_hFile, &iFileSize ); m_dwCharsTotal = (DWORD)iFileSize.QuadPart; m_dwCharsConsumed = 0; hr = MainParseLoop(); } // Close the file if( m_hFile != INVALID_HANDLE_VALUE ) CloseHandle( m_hFile ); m_hFile = INVALID_HANDLE_VALUE; // we no longer own strFilename, so un-set it m_pISAXCallback->m_strFilename = NULL; return hr; } //------------------------------------------------------------------------------------- // Name: XMLParser::ParseXMLFile // Desc: Builds element data //------------------------------------------------------------------------------------- HRESULT XMLParser::ParseXMLBuffer( CONST CHAR *strBuffer, UINT uBufferSize ) { HRESULT hr; if( m_pISAXCallback == NULL ) return E_NOINTERFACE; m_pISAXCallback->m_LineNum = 1; m_pISAXCallback->m_LinePos = 0; m_pISAXCallback->m_strFilename = ""; // save this off only while we parse the file m_bSkipNextAdvance = FALSE; m_pReadPtr = m_pReadBuf; m_pReadBuf[ 0 ] = '\0'; m_pReadBuf[ 1 ] = '\0'; m_hFile = NULL; m_pInXMLBuffer = strBuffer; m_uInXMLBufferCharsLeft = uBufferSize; m_dwCharsTotal = m_uInXMLBufferCharsLeft; m_dwCharsConsumed = 0; hr = MainParseLoop(); // we no longer own strFilename, so un-set it m_pISAXCallback->m_strFilename = NULL; return hr; } //------------------------------------------------------------------------------------- // XMLParser::Error() // Logs an error through the callback interface //------------------------------------------------------------------------------------- VOID XMLParser::Error( HRESULT hErr, CONST CHAR* strFormat, ... ) { CONST INT MAX_OUTPUT_STR = 160; CHAR strBuffer[ MAX_OUTPUT_STR ]; va_list pArglist; va_start( pArglist, strFormat ); vsprintf_s( strBuffer, strFormat, pArglist ); m_pISAXCallback->Error( hErr, strBuffer ); va_end( pArglist ); } } // namespace ATG
MorrigansWings/GamePhysics
PlanetSim/DirectX/Samples/C++/Misc/ContentExporter/ExportObjects/ExportXmlParser.cpp
C++
apache-2.0
29,004
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.execution.testframework.sm.runner; import com.intellij.execution.testframework.sm.SMTestRunnerConnectionUtil; import com.intellij.execution.testframework.sm.runner.events.*; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import java.util.*; /** * This class fires events to SMTRunnerEventsListener in event dispatch thread. * * @author: Roman Chernyatchik */ public class GeneralToSMTRunnerEventsConvertor extends GeneralTestEventsProcessor { private final Map<String, SMTestProxy> myRunningTestsFullNameToProxy = ContainerUtil.newConcurrentMap(); private final TestSuiteStack mySuitesStack; private final Map<String, List<SMTestProxy>> myCurrentChildren = new HashMap<>(); private boolean myIsTestingFinished; public GeneralToSMTRunnerEventsConvertor(Project project, @NotNull SMTestProxy.SMRootTestProxy testsRootNode, @NotNull String testFrameworkName) { super(project, testFrameworkName, testsRootNode); mySuitesStack = new TestSuiteStack(testFrameworkName); } @Override protected SMTestProxy createProxy(String testName, String locationHint, String metaInfo, String id, String parentNodeId) { SMTestProxy proxy = super.createProxy(testName, locationHint, metaInfo, id, parentNodeId); SMTestProxy currentSuite = getCurrentSuite(); currentSuite.addChild(proxy); return proxy; } @Override protected SMTestProxy createSuite(String suiteName, String locationHint, String metaInfo, String id, String parentNodeId) { SMTestProxy newSuite = super.createSuite(suiteName, locationHint, metaInfo, id, parentNodeId); 
final SMTestProxy parentSuite = getCurrentSuite(); parentSuite.addChild(newSuite); mySuitesStack.pushSuite(newSuite); return newSuite; } @Override public void onSuiteTreeEnded(String suiteName) { myBuildTreeRunnables.add(() -> mySuitesStack.popSuite(suiteName)); super.onSuiteTreeEnded(suiteName); } @Override public void onStartTesting() { //fire mySuitesStack.pushSuite(myTestsRootProxy); myTestsRootProxy.setStarted(); //fire fireOnTestingStarted(myTestsRootProxy); } @Override public void onTestsReporterAttached() { fireOnTestsReporterAttached(myTestsRootProxy); } @Override public void onFinishTesting() { fireOnBeforeTestingFinished(myTestsRootProxy); // has been already invoked! // We don't know whether process was destroyed by user // or it finished after all tests have been run // Lets assume, if at finish all suites except root suite are passed // then all is ok otherwise process was terminated by user if (myIsTestingFinished) { // has been already invoked! return; } myIsTestingFinished = true; // We don't know whether process was destroyed by user // or it finished after all tests have been run // Lets assume, if at finish all suites except root suite are passed // then all is ok otherwise process was terminated by user if (!isTreeComplete(myRunningTestsFullNameToProxy.keySet(), myTestsRootProxy)) { myTestsRootProxy.setTerminated(); myRunningTestsFullNameToProxy.clear(); } mySuitesStack.clear(); myTestsRootProxy.setFinished(); myCurrentChildren.clear(); //fire events fireOnTestingFinished(myTestsRootProxy); super.onFinishTesting(); } @Override public void setPrinterProvider(@NotNull TestProxyPrinterProvider printerProvider) { } @Override public void onTestStarted(@NotNull final TestStartedEvent testStartedEvent) { //Duplicated event // creates test // adds to running tests map //Progress started //fire events final String testName = testStartedEvent.getName(); final String locationUrl = testStartedEvent.getLocationUrl(); final boolean isConfig = 
testStartedEvent.isConfig(); final String fullName = getFullTestName(testName); if (myRunningTestsFullNameToProxy.containsKey(fullName)) { //Duplicated event logProblem("Test [" + fullName + "] has been already started"); if (SMTestRunnerConnectionUtil.isInDebugMode()) { return; } } SMTestProxy parentSuite = getCurrentSuite(); SMTestProxy testProxy = findChild(parentSuite, locationUrl != null ? locationUrl : fullName, false); if (testProxy == null) { // creates test testProxy = new SMTestProxy(testName, false, locationUrl, testStartedEvent.getMetainfo(), false); testProxy.setConfig(isConfig); if (myTreeBuildBeforeStart) testProxy.setTreeBuildBeforeStart(); if (myLocator != null) { testProxy.setLocator(myLocator); } parentSuite.addChild(testProxy); } // adds to running tests map myRunningTestsFullNameToProxy.put(fullName, testProxy); //Progress started testProxy.setStarted(); //fire events fireOnTestStarted(testProxy); } @Override public void onSuiteStarted(@NotNull final TestSuiteStartedEvent suiteStartedEvent) { //new suite //Progress started //fire event final String suiteName = suiteStartedEvent.getName(); final String locationUrl = suiteStartedEvent.getLocationUrl(); SMTestProxy parentSuite = getCurrentSuite(); SMTestProxy newSuite = findChild(parentSuite, locationUrl != null ? 
locationUrl : suiteName, true); if (newSuite == null) { //new suite newSuite = new SMTestProxy(suiteName, true, locationUrl, suiteStartedEvent.getMetainfo(), parentSuite.isPreservePresentableName()); if (myTreeBuildBeforeStart) { newSuite.setTreeBuildBeforeStart(); } if (myLocator != null) { newSuite.setLocator(myLocator); } parentSuite.addChild(newSuite); } initCurrentChildren(newSuite, true); mySuitesStack.pushSuite(newSuite); //Progress started newSuite.setSuiteStarted(); //fire event fireOnSuiteStarted(newSuite); } private void initCurrentChildren(SMTestProxy newSuite, boolean preferSuite) { if (myTreeBuildBeforeStart) { for (SMTestProxy proxy : newSuite.getChildren()) { if (!proxy.isFinal() || preferSuite && proxy.isSuite()) { String url = proxy.getLocationUrl(); if (url != null) { myCurrentChildren.computeIfAbsent(url, l -> new ArrayList<>()).add(proxy); } myCurrentChildren.computeIfAbsent(proxy.getName(), l -> new ArrayList<>()).add(proxy); } } } } private SMTestProxy findChild(SMTestProxy parentSuite, String fullName, boolean preferSuite) { if (myTreeBuildBeforeStart) { Set<SMTestProxy> acceptedProxies = new LinkedHashSet<>(); Collection<? 
extends SMTestProxy> children = myCurrentChildren.get(fullName); if (children == null) { initCurrentChildren(parentSuite, preferSuite); children = myCurrentChildren.get(fullName); } if (children != null) { //null if child started second time for (SMTestProxy proxy : children) { if (!proxy.isFinal() || preferSuite && proxy.isSuite()) { acceptedProxies.add(proxy); } } if (!acceptedProxies.isEmpty()) { return acceptedProxies.stream() .filter(proxy -> proxy.isSuite() == preferSuite && proxy.getParent() == parentSuite) .findFirst() .orElse(acceptedProxies.iterator().next()); } } } return null; } @Override public void onTestFinished(@NotNull final TestFinishedEvent testFinishedEvent) { final String testName = testFinishedEvent.getName(); final Long duration = testFinishedEvent.getDuration(); final String fullTestName = getFullTestName(testName); final SMTestProxy testProxy = getProxyByFullTestName(fullTestName); if (testProxy == null) { logProblem("Test wasn't started! TestFinished event: name = {" + testName + "}. " + cannotFindFullTestNameMsg(fullTestName)); return; } testProxy.setDuration(duration != null ? 
duration : 0); testProxy.setFrameworkOutputFile(testFinishedEvent.getOutputFile()); testProxy.setFinished(); myRunningTestsFullNameToProxy.remove(fullTestName); clearCurrentChildren(fullTestName, testProxy); //fire events fireOnTestFinished(testProxy); } private void clearCurrentChildren(String fullTestName, SMTestProxy testProxy) { myCurrentChildren.remove(fullTestName); String url = testProxy.getLocationUrl(); if (url != null) { myCurrentChildren.remove(url); } } @Override public void onSuiteFinished(@NotNull final TestSuiteFinishedEvent suiteFinishedEvent) { //fire events final String suiteName = suiteFinishedEvent.getName(); final SMTestProxy mySuite = mySuitesStack.popSuite(suiteName); if (mySuite != null) { mySuite.setFinished(); myCurrentChildren.remove(suiteName); String locationUrl = mySuite.getLocationUrl(); if (locationUrl != null) { myCurrentChildren.remove(locationUrl); } //fire events fireOnSuiteFinished(mySuite); } } @Override public void onUncapturedOutput(@NotNull final String text, final Key outputType) { final SMTestProxy currentProxy = findCurrentTestOrSuite(); currentProxy.addOutput(text, outputType); } @Override public void onError(@NotNull final String localizedMessage, @Nullable final String stackTrace, final boolean isCritical) { final SMTestProxy currentProxy = findCurrentTestOrSuite(); currentProxy.addError(localizedMessage, stackTrace, isCritical); } @Override public void onTestFailure(@NotNull final TestFailedEvent testFailedEvent) { // if hasn't been already reported // 1. report // 2. 
add failure // fire event final String testName = testFailedEvent.getName(); if (testName == null) { logProblem("No test name specified in " + testFailedEvent); return; } final String localizedMessage = testFailedEvent.getLocalizedFailureMessage(); final String stackTrace = testFailedEvent.getStacktrace(); final boolean isTestError = testFailedEvent.isTestError(); final String comparisionFailureActualText = testFailedEvent.getComparisonFailureActualText(); final String comparisionFailureExpectedText = testFailedEvent.getComparisonFailureExpectedText(); final boolean inDebugMode = SMTestRunnerConnectionUtil.isInDebugMode(); final String fullTestName = getFullTestName(testName); SMTestProxy testProxy = getProxyByFullTestName(fullTestName); if (testProxy == null) { logProblem("Test wasn't started! TestFailure event: name = {" + testName + "}" + ", message = {" + localizedMessage + "}" + ", stackTrace = {" + stackTrace + "}. " + cannotFindFullTestNameMsg(fullTestName)); if (inDebugMode) { return; } else { // if hasn't been already reported // 1. report onTestStarted(new TestStartedEvent(testName, null)); // 2. 
add failure testProxy = getProxyByFullTestName(fullTestName); } } if (testProxy == null) { return; } if (comparisionFailureActualText != null && comparisionFailureExpectedText != null) { testProxy.setTestComparisonFailed(localizedMessage, stackTrace, comparisionFailureActualText, comparisionFailureExpectedText, testFailedEvent); } else if (comparisionFailureActualText == null && comparisionFailureExpectedText == null) { testProxy.setTestFailed(localizedMessage, stackTrace, isTestError); } else { testProxy.setTestFailed(localizedMessage, stackTrace, isTestError); logProblem("Comparison failure actual and expected texts should be both null or not null.\n" + "Expected:\n" + comparisionFailureExpectedText + "\n" + "Actual:\n" + comparisionFailureActualText); } // fire event fireOnTestFailed(testProxy); } @Override public void onTestIgnored(@NotNull final TestIgnoredEvent testIgnoredEvent) { // try to fix // 1. report test opened // 2. report failure // fire event final String testName = testIgnoredEvent.getName(); if (testName == null) { logProblem("TestIgnored event: no name"); } String ignoreComment = testIgnoredEvent.getIgnoreComment(); final String stackTrace = testIgnoredEvent.getStacktrace(); final String fullTestName = getFullTestName(testName); SMTestProxy testProxy = getProxyByFullTestName(fullTestName); if (testProxy == null) { final boolean debugMode = SMTestRunnerConnectionUtil.isInDebugMode(); logProblem("Test wasn't started! " + "TestIgnored event: name = {" + testName + "}, " + "message = {" + ignoreComment + "}. " + cannotFindFullTestNameMsg(fullTestName)); if (debugMode) { return; } else { // try to fix // 1. report test opened onTestStarted(new TestStartedEvent(testName, null)); // 2. 
report failure testProxy = getProxyByFullTestName(fullTestName); } } if (testProxy == null) { return; } testProxy.setTestIgnored(ignoreComment, stackTrace); // fire event fireOnTestIgnored(testProxy); } @Override public void onTestOutput(@NotNull final TestOutputEvent testOutputEvent) { final String testName = testOutputEvent.getName(); final String text = testOutputEvent.getText(); final Key outputType = testOutputEvent.getOutputType(); final String fullTestName = getFullTestName(testName); final SMTestProxy testProxy = getProxyByFullTestName(fullTestName); if (testProxy == null) { logProblem("Test wasn't started! TestOutput event: name = {" + testName + "}, " + "outputType = " + outputType + ", " + "text = {" + text + "}. " + cannotFindFullTestNameMsg(fullTestName)); return; } testProxy.addOutput(text, outputType); } @Override public void onTestsCountInSuite(final int count) { fireOnTestsCountInSuite(count); } @NotNull protected final SMTestProxy getCurrentSuite() { final SMTestProxy currentSuite = mySuitesStack.getCurrentSuite(); if (currentSuite != null) { return currentSuite; } // current suite shouldn't be null otherwise test runner isn't correct // or may be we are in debug mode logProblem("Current suite is undefined. Root suite will be used."); return myTestsRootProxy; } protected String getFullTestName(final String testName) { // Test name should be unique return testName; } protected int getRunningTestsQuantity() { return myRunningTestsFullNameToProxy.size(); } @Nullable protected SMTestProxy getProxyByFullTestName(final String fullTestName) { return myRunningTestsFullNameToProxy.get(fullTestName); } @TestOnly protected void clearInternalSuitesStack() { mySuitesStack.clear(); } private String cannotFindFullTestNameMsg(String fullTestName) { return "Cant find running test for [" + fullTestName + "]. 
Current running tests: {" + dumpRunningTestsNames() + "}"; } private StringBuilder dumpRunningTestsNames() { final Set<String> names = myRunningTestsFullNameToProxy.keySet(); final StringBuilder namesDump = new StringBuilder(); for (String name : names) { namesDump.append('[').append(name).append(']').append(','); } return namesDump; } /* * Remove listeners, etc */ @Override public void dispose() { super.dispose(); if (!myRunningTestsFullNameToProxy.isEmpty()) { final Application application = ApplicationManager.getApplication(); if (!application.isHeadlessEnvironment() && !application.isUnitTestMode()) { logProblem("Not all events were processed! " + dumpRunningTestsNames()); } } myRunningTestsFullNameToProxy.clear(); mySuitesStack.clear(); } private SMTestProxy findCurrentTestOrSuite() { //if we can locate test - we will send output to it, otherwise to current test suite SMTestProxy currentProxy = null; Iterator<SMTestProxy> iterator = myRunningTestsFullNameToProxy.values().iterator(); if (iterator.hasNext()) { //current test currentProxy = iterator.next(); if (iterator.hasNext()) { //if there are multiple tests running call put output to the suite currentProxy = null; } } if (currentProxy == null) { //current suite // // ProcessHandler can fire output available event before processStarted event final SMTestProxy currentSuite = mySuitesStack.getCurrentSuite(); currentProxy = currentSuite != null ? currentSuite : myTestsRootProxy; } return currentProxy; } }
msebire/intellij-community
platform/smRunner/src/com/intellij/execution/testframework/sm/runner/GeneralToSMTRunnerEventsConvertor.java
Java
apache-2.0
17,626
package br.copacabana; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import javax.cache.Cache; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.web.servlet.ModelAndView; import br.com.copacabana.cb.entities.Address; import br.com.copacabana.cb.entities.Client; import br.com.copacabana.cb.entities.MealOrder; import br.com.copacabana.cb.entities.OrderedPlate; import br.com.copacabana.cb.entities.Plate; import br.com.copacabana.cb.entities.Restaurant; import br.com.copacabana.cb.entities.TurnType; import br.com.copacabana.cb.entities.WorkingHours.DayOfWeek; import br.copacabana.order.paypal.PayPalProperties.PayPalConfKeys; import br.copacabana.spring.AddressManager; import br.copacabana.spring.ClientManager; import br.copacabana.spring.ConfigurationManager; import br.copacabana.spring.PlateManager; import br.copacabana.spring.RestaurantManager; import br.copacabana.usecase.control.UserActionManager; import br.copacabana.util.TimeController; import com.google.appengine.api.datastore.Key; import com.google.appengine.api.datastore.KeyFactory; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonArray; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.google.gson.JsonPrimitive; /** * @author Rafael Coutinho */ public class PlaceOrderController extends JsonViewController { private String formView; private String successView; @Override protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) throws Exception { Map<String, Object> model = new HashMap<String, Object>(); model.put("mode", "view"); try { Cache cache = CacheController.getCache(); if (cache.get(PayPalConfKeys.pppFixedRate.name()) == null) { 
ConfigurationManager cm = new ConfigurationManager(); cache.put(PayPalConfKeys.pppFixedRate.name(), cm.getConfigurationValue(PayPalConfKeys.pppFixedRate.name())); cache.put(PayPalConfKeys.pppPercentageValue.name(), cm.getConfigurationValue(PayPalConfKeys.pppPercentageValue.name())); } if (!Authentication.isUserLoggedIn(request.getSession())) { String orderData = request.getParameter("orderData"); request.getSession().setAttribute("orderData", orderData); model.put("forwardUrl", "/continueOrder.jsp"); UserActionManager.startOrderNotLogged(orderData, request.getSession().getId()); return new ModelAndView(getFormView(), model); } else { String orderData = ""; JsonObject user = Authentication.getLoggedUser(request.getSession()); String loggedUserId = user.get("entity").getAsJsonObject().get("id").getAsString(); if (request.getParameter("orderData") == null) { orderData = (String) request.getSession().getAttribute("orderData"); } else { orderData = request.getParameter("orderData"); } log.log(Level.INFO, "OrderJSon: {0}", orderData); JsonParser pa = new JsonParser(); JsonObject orderDataJson = (JsonObject) pa.parse(orderData); ClientManager cman = new ClientManager(); Client c = cman.find(KeyFactory.stringToKey(loggedUserId), Client.class); MealOrder mo = getMealOrder(c, orderDataJson); request.getSession().setAttribute("clientPhone", ""); DateSerializer dateSerializer = new DateSerializer(request); DateDeSerializer dateDeSerializer = new DateDeSerializer(request); GsonBuilder gsonBuilder = GsonBuilderFactory.getInstance();// new // GsonBuilder().setPrettyPrinting().serializeNulls().excludeFieldsWithoutExposeAnnotation(); gsonBuilder.registerTypeAdapter(Date.class, dateSerializer); gsonBuilder.registerTypeAdapter(Date.class, dateDeSerializer); gsonBuilder.registerTypeAdapter(Key.class, new KeyDeSerializer()); gsonBuilder.registerTypeAdapter(Key.class, new KeySerializer()); Gson gson = gsonBuilder.create(); model.putAll(updateModelData(mo, c, gson)); String json = 
gson.toJson(mo); // Or use new json = GsonBuilderFactory.escapeString(json); request.getSession().setAttribute("orderData", json); UserActionManager.startOrder(json, loggedUserId, request.getSession().getId()); return new ModelAndView(getSuccessView(), model); } } catch (Exception e) { log.log(Level.SEVERE, "Failed to place order."); try { String orderData = ""; log.log(Level.SEVERE, "Checking logged user."); JsonObject user = Authentication.getLoggedUser(request.getSession()); if (user == null) { log.log(Level.SEVERE, "user is not logged in."); } String loggedUserId = user.get("entity").getAsJsonObject().get("id").getAsString(); log.log(Level.SEVERE, "logged user id {0}", loggedUserId); if (request.getParameter("orderData") == null) { log.log(Level.SEVERE, "Order is not in request, checking session"); orderData = (String) request.getSession().getAttribute("orderData"); } else { log.log(Level.SEVERE, "Order is in request"); orderData = request.getParameter("orderData"); } if (orderData == null) { log.log(Level.SEVERE, "Order was null!"); } log.log(Level.SEVERE, "Order is order :" + orderData); log.log(Level.SEVERE, "Exception was {0}.", e); log.log(Level.SEVERE, "Error was {0}.", e.getMessage()); UserActionManager.registerMajorError(request, e, loggedUserId, request.getSession().getId(), "placing order"); } catch (Exception ex) { log.log(Level.SEVERE, "Failed during loggin of error was {0}.", e); UserActionManager.registerMajorError(request, e, "placing order 2"); } throw e; } } public static Map<String, Object> updateModelData(MealOrder mo, Client c, Gson gson) { Map<String, Object> model = new HashMap<String, Object>(); RestaurantManager rman = new RestaurantManager(); Restaurant r = rman.getRestaurant(mo.getRestaurant()); Boolean b = r.getOnlyForRetrieval(); if (b != null && true == b) { model.put("onlyForRetrieval", Boolean.TRUE); } else { model.put("onlyForRetrieval", Boolean.FALSE); } model.put("restaurantAddressKey", KeyFactory.keyToString(r.getAddress())); 
model.put("clientCpf", c.getCpf()); model.put("level", c.getLevel().ordinal()); JsonObject json = new JsonObject(); ConfigurationManager cm = new ConfigurationManager(); String hasSpecificLogic = cm.getConfigurationValue("hasSpecificLogic"); model.put("noTakeAwayOrders", "false"); if (hasSpecificLogic != null && hasSpecificLogic.endsWith("true")) { json = getSteakHouseSpecificData(mo, c, gson); getMakisSpecificLogic(mo, c, gson, json); getPapagaiosSpecificLogic(mo, c, gson, json); getPizzadoroSpecificLogic(mo,c,gson,json); if (noTakeAwayOrders(mo) == true) { model.put("noTakeAwayOrders", "true"); } } model.put("hasSpecificLogic", json.toString()); if (json.get("javascript") != null && json.get("javascript").getAsString().length() > 0) { model.put("hasSpecificLogicJavascript", json.get("javascript").getAsString()); } Address restAddress = new AddressManager().getAddress(r.getAddress()); model.put("restaurantAddress", gson.toJson(restAddress)); return model; } private static boolean noTakeAwayOrders(MealOrder mo) { ConfigurationManager cm = new ConfigurationManager(); String ids = cm.getConfigurationValue("no.takeaway.ids"); String restId = KeyFactory.keyToString(mo.getRestaurant()); if (ids.contains(restId)) { return true; } return false; } private static void getPapagaiosSpecificLogic(MealOrder mo, Client c, Gson gson, JsonObject json) { ConfigurationManager cm = new ConfigurationManager(); String idStr = cm.getConfigurationValue("papagaios.id"); if (idStr != null && idStr.length() > 0) { Key k = KeyFactory.stringToKey(idStr); if (k.equals(mo.getRestaurant())) { json.add("javascript", new JsonPrimitive("/scripts/custom/papagaios.js")); } } } private static void getPizzadoroSpecificLogic(MealOrder mo, Client c, Gson gson, JsonObject json) { ConfigurationManager cm = new ConfigurationManager(); String idStr = cm.getConfigurationValue("pizzadoro.id"); if (idStr != null && idStr.length() > 0) { Key k = KeyFactory.stringToKey(idStr); if (k.equals(mo.getRestaurant())) { 
json.add("javascript", new JsonPrimitive("/scripts/custom/pizzadoro.js")); } } } private static void getMakisSpecificLogic(MealOrder mo, Client c, Gson gson, JsonObject json) { try { ConfigurationManager cm = new ConfigurationManager(); PlateManager pm = new PlateManager(); String makisIdStr = cm.getConfigurationValue("makis.Id"); if (makisIdStr != null && makisIdStr.length() > 0) { Key makis = KeyFactory.stringToKey(makisIdStr); if (makis != null && makis.equals(mo.getRestaurant())) { String packageId = cm.getConfigurationValue("makis.package.id"); if (packageId != null && packageId.length() > 0) { json.add("makisPackageCostId", new JsonPrimitive(packageId)); json.add("makisMsg", new JsonPrimitive(cm.getConfigurationValue("makis.msg"))); boolean isIncluded = false; Key packageKey = KeyFactory.stringToKey(packageId); for (Iterator<OrderedPlate> iterator = mo.getPlates().iterator(); iterator.hasNext();) { OrderedPlate plate = (OrderedPlate) iterator.next(); if (Boolean.FALSE.equals(plate.getIsFraction()) && plate.getPlate().equals(packageKey)) { isIncluded = true; break; } } if (isIncluded == false) { Plate packagePlate = pm.get(packageKey); OrderedPlate oplate = new OrderedPlate(); oplate.setName(packagePlate.getName()); oplate.setPrice(packagePlate.getPrice()); oplate.setPriceInCents(packagePlate.getPriceInCents()); oplate.setQty(1); oplate.setPlate(packageKey); mo.getPlates().add(oplate); } } } } } catch (Exception e) { log.log(Level.SEVERE, "failed to add makis specific logic", e); } } private static JsonObject getSteakHouseSpecificData(MealOrder mo, Client c, Gson gson) { JsonObject json = new JsonObject(); json.add("freeDelivery", new JsonPrimitive("false")); try { ConfigurationManager cm = new ConfigurationManager(); String steakIdStr = cm.getConfigurationValue("steakHouse.Id"); if (steakIdStr != null && steakIdStr.length() > 0) { Key steak = KeyFactory.stringToKey(steakIdStr); if (steak.equals(mo.getRestaurant())) { if 
(!TimeController.getDayOfWeek().equals(DayOfWeek.SATURDAY) && !TimeController.getDayOfWeek().equals(DayOfWeek.SUNDAY)) { if (TimeController.getCurrentTurn().equals(TurnType.LUNCH)) { String foodCatsStr = cm.getConfigurationValue("steakHouse.FoodCats"); if (foodCatsStr != null && foodCatsStr.length() > 0) { String[] foodCatsArray = foodCatsStr.split("\\|"); Set<Key> foodCats = new HashSet<Key>(); for (int i = 0; i < foodCatsArray.length; i++) { if (foodCatsArray[i].length() > 0) { foodCats.add(KeyFactory.stringToKey(foodCatsArray[i])); } } List<OrderedPlate> plates = mo.getPlates(); PlateManager pm = new PlateManager(); for (Iterator iterator = plates.iterator(); iterator.hasNext();) { OrderedPlate orderedPlate = (OrderedPlate) iterator.next(); Plate p = null; if (Boolean.TRUE.equals(orderedPlate.getIsFraction())) { p = pm.getPlate(orderedPlate.getFractionPlates().iterator().next()); } else { p = pm.getPlate(orderedPlate.getPlate()); } if (!foodCats.contains(p.getFoodCategory())) { json.add("freeDelivery", new JsonPrimitive("false")); return json; } } json.add("freeDelivery", new JsonPrimitive("true")); json.add("msg", new JsonPrimitive(cm.getConfigurationValue("steakHouse.msg"))); } } } } } } catch (Exception e) { log.log(Level.SEVERE, "Could not set up things for SteakHouse", e); } return json; } public MealOrder getMealOrder(Client c, JsonObject sessionOderData) { MealOrder mo = new MealOrder(); mo.setClient(c); if (c.getContact() != null) { mo.setClientPhone(c.getContact().getPhone()); } mo.setAddress(getAddress(sessionOderData, c)); mo.setObservation(getObservation(sessionOderData)); mo.setRestaurant(getRestKey(sessionOderData)); mo.setPlates(getPlates(sessionOderData)); return mo; } private Key getAddress(JsonObject sessionOderData, Client c) { try { if (sessionOderData.get("address") == null) { if (c.getMainAddress() != null) { return c.getMainAddress(); } else { return null; } } else { if (sessionOderData.get("address") != null && 
!sessionOderData.get("address").isJsonNull() ) { return KeyFactory.stringToKey(sessionOderData.get("address").getAsString()); }else{ return null; } } } catch (Exception e) { log.log(Level.SEVERE, "no address da sessão havia {0}", sessionOderData.get("address")); log.log(Level.SEVERE, "Error ao buscar endereço de cliente ou em sessão", e); return null; } } public List<OrderedPlate> getPlates(JsonObject sessionOderData) { List<OrderedPlate> orderedPlates = new ArrayList<OrderedPlate>(); JsonArray array = sessionOderData.get("plates").getAsJsonArray(); for (int i = 0; i < array.size(); i++) { JsonObject pjson = array.get(i).getAsJsonObject(); orderedPlates.add(getOrdered(pjson)); } return orderedPlates; } private OrderedPlate getOrdered(JsonObject pjson) { OrderedPlate oplate = new OrderedPlate(); oplate.setName(pjson.get("name").getAsString()); oplate.setPrice(pjson.get("price").getAsDouble()); oplate.setPriceInCents(Double.valueOf(pjson.get("price").getAsDouble() * 100.0).intValue()); oplate.setQty(pjson.get("qty").getAsInt()); if (pjson.get("isFraction").getAsBoolean() == true) { oplate.setIsFraction(Boolean.TRUE); Set<Key> fractionPlates = new HashSet<Key>(); JsonArray fractionKeys = pjson.get("fractionKeys").getAsJsonArray(); for (int i = 0; i < fractionKeys.size(); i++) { Key fractionKey = KeyFactory.stringToKey(fractionKeys.get(i).getAsString()); fractionPlates.add(fractionKey); } oplate.setFractionPlates(fractionPlates); return oplate; } else { String pkey = ""; if (pjson.get("plate").isJsonObject()) { pkey = pjson.get("plate").getAsJsonObject().get("id").getAsString(); } else { pkey = pjson.get("plate").getAsString(); } oplate.setPlate(KeyFactory.stringToKey(pkey)); return oplate; } } public Key getRestKey(JsonObject sessionOderData) { String restKey; if (sessionOderData.get("restaurant") != null) { if (sessionOderData.get("restaurant").isJsonObject()) { restKey = sessionOderData.get("restaurant").getAsJsonObject().get("id").getAsString(); } else { restKey = 
sessionOderData.get("restaurant").getAsString(); } } else { restKey = sessionOderData.get("plates").getAsJsonArray().get(0).getAsJsonObject().get("plate").getAsJsonObject().get("value").getAsJsonObject().get("restaurant").getAsString(); } return KeyFactory.stringToKey(restKey); } public String getObservation(JsonObject sessionOderData) { return sessionOderData.get("observation").getAsString(); } public String getFormView() { return formView; } public void setFormView(String formView) { this.formView = formView; } public String getSuccessView() { return successView; } public void setSuccessView(String successView) { this.successView = successView; } }
rafaelcoutinho/comendobemdelivery
src/br/copacabana/PlaceOrderController.java
Java
apache-2.0
16,224
current_dir = File.dirname(__FILE__) log_level :info log_location STDOUT node_name "user" client_key "#{ENV['HOME']}/.ssh/user.pem" validation_client_name "user-validator" validation_key "#{current_dir}/validator.pem" chef_server_url "https://api.opscode.com/organizations/user-organization" cache_type 'BasicFile' cache_options(:path => "#{ENV['HOME']}/.chef/checksums" ) cookbook_path "#{current_dir}/../cookbooks" # required to extract the right interface for knife ssh knife[:ssh_attribute] = "ipaddress" knife[:joyent_username] = ENV['SDC_USERNAME'] || 'user' knife[:joyent_keyname] = ENV['SDC_CLI_KEY_ID'] || 'keyname' knife[:joyent_keyfile] = ENV['SDC_CLI_IDENTITY'] || "#{ENV['HOME']}/.ssh/id_rsa" knife[:joyent_api_url] = 'https://us-sw-1.api.joyentcloud.com/'
wanelo-chef/smartos-chef-repo
.chef/knife.rb
Ruby
apache-2.0
874
using System.Threading; using System.Threading.Tasks; using MediatR; namespace CoreDocker.Core.Framework.CommandQuery { public class MediatorCommander : ICommander { private readonly IMediator _mediator; public MediatorCommander(IMediator mediator) { _mediator = mediator; } #region Implementation of ICommander public async Task Notify<T>(T notificationRequest, CancellationToken cancellationToken) where T : CommandNotificationBase { await _mediator.Publish(notificationRequest); } public async Task<CommandResult> Execute<T>(T commandRequest, CancellationToken cancellationToken) where T : CommandRequestBase { return await _mediator.Send(commandRequest); } #endregion } }
rolfwessels/CoreDocker
src/CoreDocker.Core/Framework/CommandQuery/MediatorCommander.cs
C#
apache-2.0
828
package hska.iwi.eShopMaster.model.businessLogic.manager.impl; import com.fasterxml.jackson.databind.ObjectMapper; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.WebResource; import hska.iwi.eShopMaster.model.businessLogic.manager.CategoryManager; import hska.iwi.eShopMaster.model.businessLogic.manager.entity.Category; import hska.iwi.eShopMaster.model.businessLogic.manager.entity.User; import java.util.List; import javax.ws.rs.core.MediaType; import org.apache.log4j.Logger; public class CategoryManagerImpl implements CategoryManager { private final static String BASIS_URL_CATEGORY = "http://localhost:8081/api/catalog/category/"; private final Logger logger = Logger.getLogger(CategoryManagerImpl.class); private final ObjectMapper parser = new ObjectMapper(); private final User currentUser; public CategoryManagerImpl(User currentUser) { this.currentUser = currentUser; } @Override public List<Category> getCategories() { List<Category> categories = null; try { Client client = Client.create(); WebResource webResource = client .resource(BASIS_URL_CATEGORY); ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON_TYPE) .get(ClientResponse.class); categories = parser.readValue(response.getEntity(String.class), List.class); } catch (Exception ex) { logger.error(ex); } return categories; } @Override public Category getCategory(int id) { Category category = null; try { Client client = Client.create(); WebResource webResource = client .resource(BASIS_URL_CATEGORY) .path(String.valueOf(id)); ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON_TYPE) .get(ClientResponse.class); category = parser.readValue(response.getEntity(String.class), Category.class); } catch (Exception ex) { logger.error(ex); } return category; } @Override public void addCategory(String name) { Category category = new Category(name); try { Client client = Client.create(); WebResource webResource = 
client .resource(BASIS_URL_CATEGORY); webResource.type(MediaType.APPLICATION_JSON_TYPE) .accept(MediaType.APPLICATION_JSON_TYPE) .header("usr", currentUser.getUsername()) .header("pass", currentUser.getPassword()) .post(ClientResponse.class, parser.writeValueAsString(category)); } catch (Exception ex) { logger.error(ex); } } @Override public void delCategoryById(int id) { try { Client client = Client.create(); WebResource webResource = client .resource(BASIS_URL_CATEGORY) .path(String.valueOf(id)); webResource.accept(MediaType.APPLICATION_JSON_TYPE) .header("usr", currentUser.getUsername()) .header("pass", currentUser.getPassword()) .delete(); } catch (Exception ex) { logger.error(ex); } } }
Am3o/eShop
WebShopStart/src/main/java/hska/iwi/eShopMaster/model/businessLogic/manager/impl/CategoryManagerImpl.java
Java
apache-2.0
3,084
function f() { var x=arguments[12]; }
freedot/tstolua
tests/cases/compiler/arguments.ts
TypeScript
apache-2.0
41
import App from '../containers/App'; import { PageNotFound } from '../components'; import homeRoute from '../features/home/route'; import taggrRoute from '../features/taggr/route'; const routes = [{ path: '/', component: App, childRoutes: [ homeRoute, taggrRoute, { path: '*', name: 'Page not found', component: PageNotFound }, ], }]; // Handle isIndex property of route config: // 1. remove the first child with isIndex=true from childRoutes // 2. assign it to the indexRoute property of the parent. function handleIndexRoute(route) { if (!route.childRoutes || !route.childRoutes.length) { return; } route.childRoutes = route.childRoutes.filter(child => { // eslint-disable-line if (child.isIndex) { /* istanbul ignore next */ if (process.env.NODE_ENV === 'dev' && route.indexRoute) { console.error('More than one index route: ', route); } /* istanbul ignore else */ if (!route.indexRoute) { delete child.path; // eslint-disable-line route.indexRoute = child; // eslint-disable-line return false; } } return true; }); route.childRoutes.forEach(handleIndexRoute); } routes.forEach(handleIndexRoute); export default routes;
thehig/taggr
src/common/routeConfig.js
JavaScript
apache-2.0
1,246
/** * Created by txs on 2016/10/17. */ public class Student { String name; int grade; @Override public String toString() { String temp = ""; temp += "name: " + name + "\n"; temp += "grade: " + grade + "\n"; return temp; } @Override public boolean equals(Object obj) { if(this==obj) return true; boolean r = false; if(obj instanceof Student){ Student temp = (Student)obj; if(this.name.equals(temp.name) && this.grade == temp.grade) r = true; } return r; } }
txs72/BUPTJava
slides/06/overrding/Student.java
Java
apache-2.0
625
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.vcs; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.util.containers.ContainerUtil; import consulo.util.lang.Pair; import consulo.util.lang.StringUtil; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.Collections; import java.util.List; public final class VcsConsoleLine { private final List<Pair<String, ConsoleViewContentType>> myChunks; private VcsConsoleLine(@Nonnull List<Pair<String, ConsoleViewContentType>> chunks) { myChunks = chunks; } public void print(@Nonnull ConsoleView console) { ConsoleViewContentType lastType = ConsoleViewContentType.NORMAL_OUTPUT; for (Pair<String, ConsoleViewContentType> chunk : myChunks) { console.print(chunk.first, chunk.second); lastType = chunk.second; } console.print("\n", lastType); } @Nullable public static VcsConsoleLine create(@Nullable String message, @Nonnull ConsoleViewContentType contentType) { return create(Collections.singletonList(Pair.create(message, contentType))); } @Nullable public static VcsConsoleLine create(@Nonnull List<Pair<String, ConsoleViewContentType>> lineChunks) { List<Pair<String, ConsoleViewContentType>> chunks = ContainerUtil.filter(lineChunks, it -> !StringUtil.isEmptyOrSpaces(it.first)); if (chunks.isEmpty()) return null; return new VcsConsoleLine(chunks); } }
consulo/consulo
modules/base/vcs-api/src/main/java/com/intellij/openapi/vcs/VcsConsoleLine.java
Java
apache-2.0
1,604
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.brixcms.web.nodepage; import org.apache.wicket.IRequestTarget; import org.apache.wicket.Page; import org.apache.wicket.PageParameters; import org.apache.wicket.RequestCycle; import org.apache.wicket.model.IModel; import org.apache.wicket.request.target.component.IPageRequestTarget; import org.apache.wicket.util.lang.Objects; import org.apache.wicket.util.string.StringValue; import org.brixcms.exception.BrixException; import org.brixcms.jcr.wrapper.BrixNode; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.TreeSet; public class BrixPageParameters implements Serializable { // ------------------------------ FIELDS ------------------------------ private static final long serialVersionUID = 1L; private List<String> indexedParameters = null; ; private List<QueryStringParameter> queryStringParameters = null; // -------------------------- STATIC METHODS -------------------------- public static boolean equals(BrixPageParameters p1, BrixPageParameters p2) { if (Objects.equal(p1, p2)) { return true; } if (p1 == null && p2.getIndexedParamsCount() == 0 && p2.getQueryParamKeys().isEmpty()) { return true; } if (p2 == null && p1.getIndexedParamsCount() == 0 && p1.getQueryParamKeys().isEmpty()) { return true; } return false; } public int getIndexedParamsCount() { return indexedParameters != null ? 
indexedParameters.size() : 0; } public static BrixPageParameters getCurrent() { IRequestTarget target = RequestCycle.get().getRequestTarget(); // this is required for getting current page parameters from page constructor // (the actual page instance is not constructed yet. if (target instanceof PageParametersRequestTarget) { return ((PageParametersRequestTarget) target).getPageParameters(); } else { return getCurrentPage().getBrixPageParameters(); } } // --------------------------- CONSTRUCTORS --------------------------- public BrixPageParameters() { } public BrixPageParameters(PageParameters params) { if (params != null) { for (String name : params.keySet()) { addQueryParam(name, params.get(name)); } } } public void addQueryParam(String name, Object value) { addQueryParam(name, value, -1); } public BrixPageParameters(BrixPageParameters copy) { if (copy == null) { throw new IllegalArgumentException("Copy argument may not be null."); } if (copy.indexedParameters != null) this.indexedParameters = new ArrayList<String>(copy.indexedParameters); if (copy.queryStringParameters != null) this.queryStringParameters = new ArrayList<QueryStringParameter>( copy.queryStringParameters); } // ------------------------ CANONICAL METHODS ------------------------ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj instanceof BrixPageParameters == false) { return false; } BrixPageParameters rhs = (BrixPageParameters) obj; if (!Objects.equal(indexedParameters, rhs.indexedParameters)) { return false; } if (queryStringParameters == null || rhs.queryStringParameters == null) { return rhs.queryStringParameters == queryStringParameters; } if (queryStringParameters.size() != rhs.queryStringParameters.size()) { return false; } for (String key : getQueryParamKeys()) { List<StringValue> values1 = getQueryParams(key); Set<String> v1 = new TreeSet<String>(); List<StringValue> values2 = rhs.getQueryParams(key); Set<String> v2 = new TreeSet<String>(); for 
(StringValue sv : values1) { v1.add(sv.toString()); } for (StringValue sv : values2) { v2.add(sv.toString()); } if (v1.equals(v2) == false) { return false; } } return true; } public Set<String> getQueryParamKeys() { if (queryStringParameters == null || queryStringParameters.isEmpty()) { return Collections.emptySet(); } Set<String> set = new TreeSet<String>(); for (QueryStringParameter entry : queryStringParameters) { set.add(entry.key); } return Collections.unmodifiableSet(set); } public List<StringValue> getQueryParams(String name) { if (name == null) { throw new IllegalArgumentException("Parameter name may not be null."); } if (queryStringParameters != null) { List<StringValue> result = new ArrayList<StringValue>(); for (QueryStringParameter entry : queryStringParameters) { if (entry.key.equals(name)) { result.add(StringValue.valueOf(entry.value)); } } return Collections.unmodifiableList(result); } else { return Collections.emptyList(); } } // -------------------------- OTHER METHODS -------------------------- public void addQueryParam(String name, Object value, int index) { if (name == null) { throw new IllegalArgumentException("Parameter name may not be null."); } if (value == null) { throw new IllegalArgumentException("Parameter value may not be null."); } if (queryStringParameters == null) queryStringParameters = new ArrayList<QueryStringParameter>(1); QueryStringParameter entry = new QueryStringParameter(name, value.toString()); if (index == -1) queryStringParameters.add(entry); else queryStringParameters.add(index, entry); } void assign(BrixPageParameters other) { if (this != other) { this.indexedParameters = other.indexedParameters; this.queryStringParameters = other.queryStringParameters; } } public void clearIndexedParams() { this.indexedParameters = null; } public void clearQueryParams() { this.queryStringParameters = null; } public StringValue getIndexedParam(int index) { if (indexedParameters != null) { if (index >= 0 && index < 
indexedParameters.size()) { String value = indexedParameters.get(index); return StringValue.valueOf(value); } } return StringValue.valueOf((String) null); } public StringValue getQueryParam(String name) { if (name == null) { throw new IllegalArgumentException("Parameter name may not be null."); } if (queryStringParameters != null) { for (QueryStringParameter entry : queryStringParameters) { if (entry.key.equals(name)) { return StringValue.valueOf(entry.value); } } } return StringValue.valueOf((String) null); } public List<QueryStringParameter> getQueryStringParams() { if (queryStringParameters == null) { return Collections.emptyList(); } else { return Collections.unmodifiableList(new ArrayList<QueryStringParameter>( queryStringParameters)); } } ; public void removeIndexedParam(int index) { if (indexedParameters != null) { if (index >= 0 && index < indexedParameters.size()) { indexedParameters.remove(index); } } } public void setIndexedParam(int index, Object object) { if (indexedParameters == null) indexedParameters = new ArrayList<String>(index); for (int i = indexedParameters.size(); i <= index; ++i) { indexedParameters.add(null); } String value = object != null ? 
object.toString() : null; indexedParameters.set(index, value); } public void setQueryParam(String name, Object value) { setQueryParam(name, value, -1); } public void setQueryParam(String name, Object value, int index) { removeQueryParam(name); if (value != null) { addQueryParam(name, value); } } public void removeQueryParam(String name) { if (name == null) { throw new IllegalArgumentException("Parameter name may not be null."); } if (queryStringParameters != null) { for (Iterator<QueryStringParameter> i = queryStringParameters.iterator(); i.hasNext();) { QueryStringParameter e = i.next(); if (e.key.equals(name)) { i.remove(); } } } } public String toCallbackURL() { return urlFor(getCurrentPage()); } /** * Constructs a url to the specified page appending these page parameters * * @param page * @return url */ public String urlFor(BrixNodeWebPage page) { IRequestTarget target = new BrixNodeRequestTarget(page, this); return RequestCycle.get().urlFor(target).toString(); } static BrixNodeWebPage getCurrentPage() { IRequestTarget target = RequestCycle.get().getRequestTarget(); BrixNodeWebPage page = null; if (target != null && target instanceof IPageRequestTarget) { Page p = ((IPageRequestTarget) target).getPage(); if (p instanceof BrixNodeWebPage) { page = (BrixNodeWebPage) p; } } if (page == null) { throw new BrixException( "Couldn't obtain the BrixNodeWebPage instance from RequestTarget."); } return page; } /** * Constructs a url to the specified page appending these page parameters * * @param * @return url */ public String urlFor(IModel<BrixNode> node) { IRequestTarget target = new BrixNodeRequestTarget(node, this); return RequestCycle.get().urlFor(target).toString(); } // -------------------------- INNER CLASSES -------------------------- public static class QueryStringParameter implements Serializable { private static final long serialVersionUID = 1L; private final String key; private final String value; public QueryStringParameter(String key, String value) { 
this.key = key; this.value = value; } public String getKey() { return key; } public String getValue() { return value; } } }
kbachl/brix-cms-backup
brix-core/src/main/java/org/brixcms/web/nodepage/BrixPageParameters.java
Java
apache-2.0
11,587
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.09.09 at 01:22:27 PM CEST
//

package test;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlValue;


/**
 * <p>Java class for anonymous complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;attribute name="content-type" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
 *       &lt;attribute name="seq" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" />
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
// NOTE(review): JAXB-generated binding for the <fpage> element. Do not hand-edit;
// regenerate from the source schema instead (edits are lost on recompilation, per
// the generated header above).
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "content"
})
@XmlRootElement(name = "fpage")
public class Fpage {

    // Element text content of <fpage>.
    @XmlValue
    protected String content;
    // Maps the "content-type" XML attribute (schema type anySimpleType, so plain String).
    @XmlAttribute(name = "content-type")
    @XmlSchemaType(name = "anySimpleType")
    protected String contentType;
    // Maps the "seq" XML attribute; attribute name defaults to the field name.
    @XmlAttribute
    @XmlSchemaType(name = "anySimpleType")
    protected String seq;

    /**
     * Gets the value of the content property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getContent() {
        return content;
    }

    /**
     * Sets the value of the content property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setContent(String value) {
        this.content = value;
    }

    /**
     * Gets the value of the contentType property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getContentType() {
        return contentType;
    }

    /**
     * Sets the value of the contentType property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setContentType(String value) {
        this.contentType = value;
    }

    /**
     * Gets the value of the seq property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getSeq() {
        return seq;
    }

    /**
     * Sets the value of the seq property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setSeq(String value) {
        this.seq = value;
    }

}
BlueBrain/bluima
modules/bluima_xml/src/test/Fpage.java
Java
apache-2.0
3,031
'use strict'; const Task = require('co-task'); const sql = require('../api/helpers/sql'); module.exports = { up: function (queryInterface, Sequelize) { return Task.spawn(function* () { yield queryInterface.addColumn('ClassicSalads', 'ClassicSaladCatagoryId', Sequelize.INTEGER); yield sql.foreignKeyUp(queryInterface, 'ClassicSalads', 'ClassicSaladCatagoryId', 'ClassicSaladCatagories', 'id'); }); }, down: function (queryInterface, Sequelize) { return Task.spawn(function* () { yield sql.foreignKeyDown(queryInterface, 'ClassicSalads', 'ClassicSaladCatagoryId', 'ClassicSaladCatagories', 'id'); yield queryInterface.removeColumn('ClassicSalads', 'ClassicSaladCatagoryId'); }); } };
arcana261/SaladioBackEnd
migrations/20161221201621-associate-classicsaladcatagory-and-classicsalad.js
JavaScript
apache-2.0
734
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oozie.action.hadoop; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.oozie.action.ActionExecutorException; import org.apache.oozie.util.XLog; import org.jdom.Element; import org.jdom.Namespace; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.List; public abstract class ScriptLanguageActionExecutor extends JavaActionExecutor { public ScriptLanguageActionExecutor(String type) { super(type); } @Override public List<Class<?>> getLauncherClasses() { return null; } protected boolean shouldAddScriptToCache(){ return true; } @Override protected Configuration setupLauncherConf(Configuration conf, Element actionXml, Path appPath, Context context) throws ActionExecutorException { super.setupLauncherConf(conf, actionXml, appPath, context); if(shouldAddScriptToCache()) { addScriptToCache(conf, actionXml, appPath, context); } return conf; } protected void addScriptToCache(Configuration conf, Element actionXml, Path appPath, Context context) throws ActionExecutorException { Namespace ns = actionXml.getNamespace(); 
String script = actionXml.getChild("script", ns).getTextTrim(); String name = new Path(script).getName(); String scriptContent = context.getProtoActionConf().get(this.getScriptName()); Path scriptFile = null; if (scriptContent != null) { // Create script on filesystem if this is // an http submission job; FSDataOutputStream dos = null; try { Path actionPath = context.getActionDir(); scriptFile = new Path(actionPath, script); FileSystem fs = context.getAppFileSystem(); dos = fs.create(scriptFile); dos.write(scriptContent.getBytes(StandardCharsets.UTF_8)); addToCache(conf, actionPath, script + "#" + name, false); } catch (Exception ex) { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FAILED_OPERATION", XLog .format("Not able to write script file {0} on hdfs", scriptFile), ex); } finally { try { if (dos != null) { dos.close(); } } catch (IOException ex) { XLog.getLog(getClass()).error("Error: " + ex.getMessage()); } } } else { addToCache(conf, appPath, script + "#" + name, false); } } protected abstract String getScriptName(); }
cbaenziger/oozie
core/src/main/java/org/apache/oozie/action/hadoop/ScriptLanguageActionExecutor.java
Java
apache-2.0
3,702
// Code generated - DO NOT EDIT.
// NOTE(review): generated field-getter decoder for NextHop; change the
// generator/template, not this file.

package topology

import (
	"github.com/skydive-project/skydive/graffiti/getter"
	"strings"
)

// GetFieldBool: NextHop exposes no boolean fields, so this always misses.
func (obj *NextHop) GetFieldBool(key string) (bool, error) {
	return false, getter.ErrFieldNotFound
}

// GetFieldInt64 returns the int64-valued field named by key
// ("Priority" or "IfIndex").
func (obj *NextHop) GetFieldInt64(key string) (int64, error) {
	switch key {
	case "Priority":
		return int64(obj.Priority), nil
	case "IfIndex":
		return int64(obj.IfIndex), nil
	}
	return 0, getter.ErrFieldNotFound
}

// GetFieldString returns the string-valued field named by key
// ("IP" is stringified; "MAC" is converted directly).
func (obj *NextHop) GetFieldString(key string) (string, error) {
	switch key {
	case "IP":
		return obj.IP.String(), nil
	case "MAC":
		return string(obj.MAC), nil
	}
	return "", getter.ErrFieldNotFound
}

// GetFieldKeys lists every queryable field name on NextHop.
func (obj *NextHop) GetFieldKeys() []string {
	return []string{
		"Priority",
		"IP",
		"MAC",
		"IfIndex",
	}
}

// MatchBool: no boolean fields exist, so no predicate can match.
func (obj *NextHop) MatchBool(key string, predicate getter.BoolPredicate) bool {
	return false
}

// MatchInt64 applies predicate to the int64 field named by key, if present.
func (obj *NextHop) MatchInt64(key string, predicate getter.Int64Predicate) bool {
	if b, err := obj.GetFieldInt64(key); err == nil {
		return predicate(b)
	}
	return false
}

// MatchString applies predicate to the string field named by key, if present.
func (obj *NextHop) MatchString(key string, predicate getter.StringPredicate) bool {
	if b, err := obj.GetFieldString(key); err == nil {
		return predicate(b)
	}
	return false
}

// GetField resolves key as a string field first, then as an int64 field.
func (obj *NextHop) GetField(key string) (interface{}, error) {
	if s, err := obj.GetFieldString(key); err == nil {
		return s, nil
	}
	if i, err := obj.GetFieldInt64(key); err == nil {
		return i, nil
	}
	return nil, getter.ErrFieldNotFound
}

func init() {
	// References the strings package so the import above stays used even when
	// the generator emits no other call into it (keeps the file compiling).
	strings.Index("", ".")
}
skydive-project/skydive
topology/nexthop_gendecoder.go
GO
apache-2.0
1,511
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.lang.ant.config.execution;

import com.intellij.execution.filters.Filter;
import com.intellij.execution.filters.OpenFileHyperlinkInfo;
import com.intellij.execution.filters.TextConsoleBuilder;
import com.intellij.execution.filters.TextConsoleBuilderFactory;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.io.File;
import java.io.OutputStream;

/**
 * Plain-text presentation of Ant build output. Renders every {@link AntMessage}
 * into a {@link ConsoleView}, feeding text through a do-nothing
 * {@link ProcessHandler} so the console's standard output pipeline (including
 * hyperlink filters) is reused. Two filters turn compiler- and JUnit-style
 * messages into clickable file links.
 */
public final class PlainTextView implements AntOutputView {
  private final ConsoleView myConsole;
  private final Project myProject;
  // Command line echoed at build start/failure; set via setBuildCommandLine().
  private String myCommandLine;
  // Dummy handler: exists only so the console has a process to attach to and
  // we can push text through notifyTextAvailable().
  private final LightProcessHandler myProcessHandler = new LightProcessHandler();

  public PlainTextView(Project project) {
    myProject = project;
    TextConsoleBuilder builder = TextConsoleBuilderFactory.getInstance().createBuilder(project);
    builder.addFilter(new AntMessageFilter());
    builder.addFilter(new JUnitFilter());
    myConsole = builder.getConsole();
    myConsole.attachToProcess(myProcessHandler);
  }

  /** Releases the underlying console. */
  public void dispose() {
    Disposer.dispose(myConsole);
  }

  @Override
  public String getId() {
    return "_text_view_";
  }

  @Override
  public JComponent getComponent() {
    return myConsole.getComponent();
  }

  /**
   * Prints the message text to stdout output. Always returns {@code null}
   * (no per-message UI object is created for the plain-text view).
   */
  @Override
  @Nullable
  public Object addMessage(AntMessage message) {
    print(message.getText() + "\n", ProcessOutputTypes.STDOUT);
    return null;
  }

  // Routes text into the console through the attached process handler;
  // `type` selects the console content category (stdout/system/etc.).
  private void print(String text, Key type) {
    myProcessHandler.notifyTextAvailable(text, type);
  }

  public void addMessages(AntMessage[] messages) {
    for (AntMessage message : messages) {
      addMessage(message);
    }
  }

  /**
   * Prints a javac-style message; when a line number is known, a formatted
   * file:line header (built by TreeView.printMessage) is printed first.
   */
  @Override
  public void addJavacMessage(AntMessage message, String url) {
    if (message.getLine() > 0) {
      String msg = TreeView.printMessage(message, url);
      print(msg, ProcessOutputTypes.STDOUT);
    }
    print(message.getText(), ProcessOutputTypes.STDOUT);
  }

  /**
   * Prints an exception message; unless a full trace was requested, only the
   * first line (up to the first CRLF) is shown.
   */
  @Override
  public void addException(AntMessage exception, boolean showFullTrace) {
    String text = exception.getText();
    if (!showFullTrace) {
      int index = text.indexOf("\r\n");
      if (index != -1) {
        text = text.substring(0, index) + "\n";
      }
    }
    print(text, ProcessOutputTypes.STDOUT);
  }

  public void clearAllMessages() {
    myConsole.clear();
  }

  // NOTE(review): myCommandLine is printed as-is here; if setBuildCommandLine()
  // was never called it renders as "null" — confirm callers always set it first.
  @Override
  public void startBuild(AntMessage message) {
    print(myCommandLine + "\n", ProcessOutputTypes.SYSTEM);
    addMessage(message);
  }

  @Override
  public void buildFailed(AntMessage message) {
    print(myCommandLine + "\n", ProcessOutputTypes.SYSTEM);
    addMessage(message);
  }

  @Override
  public void startTarget(AntMessage message) {
    addMessage(message);
  }

  @Override
  public void startTask(AntMessage message) {
    addMessage(message);
  }

  @Override
  public void finishBuild(String messageText) {
    print("\n" + messageText + "\n", ProcessOutputTypes.SYSTEM);
  }

  /** No-op: the plain-text view has no per-target state to finalize. */
  @Override
  public void finishTarget() {
  }

  /** No-op: the plain-text view has no per-task state to finalize. */
  @Override
  public void finishTask() {
  }

  @Override
  @Nullable
  public Object getData(@NotNull String dataId) {
    return null;
  }

  public void setBuildCommandLine(String commandLine) {
    myCommandLine = commandLine;
  }

  /**
   * Converts JUnit-style failure lines into clickable hyperlinks using
   * HyperlinkUtil's parser; offsets are translated from line-local indices to
   * console-global offsets.
   */
  private final class JUnitFilter implements Filter {
    @Override
    @Nullable
    public Result applyFilter(String line, int entireLength) {
      HyperlinkUtil.PlaceInfo placeInfo = HyperlinkUtil.parseJUnitMessage(myProject, line);
      if (placeInfo == null) {
        return null;
      }

      // entireLength is the console length including this line, so the line
      // starts at entireLength - line.length().
      int textStartOffset = entireLength - line.length();
      int highlightStartOffset = textStartOffset + placeInfo.getLinkStartIndex();
      // +1: link end index is inclusive, Result's end offset is exclusive.
      int highlightEndOffset = textStartOffset + placeInfo.getLinkEndIndex() + 1;
      OpenFileHyperlinkInfo info = new OpenFileHyperlinkInfo(myProject, placeInfo.getFile(), placeInfo.getLine(), placeInfo.getColumn());
      return new Result(highlightStartOffset, highlightEndOffset, info);
    }
  }

  /**
   * Recognizes compiler-style "path:line: message" prefixes and links them to
   * the referenced file/line.
   */
  private final class AntMessageFilter implements Filter {
    @Override
    public Result applyFilter(String line, int entireLength) {
      int afterLineNumberIndex = line.indexOf(": "); // end of file_name_and_line_number sequence
      if (afterLineNumberIndex == -1) {
        return null;
      }

      String fileAndLineNumber = line.substring(0, afterLineNumberIndex);
      // lastIndexOf so a drive letter's ':' (e.g. "C:\...") isn't mistaken
      // for the line-number separator.
      int index = fileAndLineNumber.lastIndexOf(':');

      if (index == -1) {
        return null;
      }

      final String fileName = fileAndLineNumber.substring(0, index);
      String lineNumberStr = fileAndLineNumber.substring(index + 1).trim();
      int lineNumber;
      try {
        lineNumber = Integer.parseInt(lineNumberStr);
      }
      catch (NumberFormatException e) {
        return null;
      }

      final VirtualFile file = LocalFileSystem.getInstance().findFileByPath(fileName.replace(File.separatorChar, '/'));
      if (file == null) {
        return null;
      }

      int textStartOffset = entireLength - line.length();
      int highlightEndOffset = textStartOffset + afterLineNumberIndex;
      // lineNumber - 1: messages are 1-based, the editor API is 0-based.
      OpenFileHyperlinkInfo info = new OpenFileHyperlinkInfo(myProject, file, lineNumber - 1);
      return new Result(textStartOffset, highlightEndOffset, info);
    }
  }

  /**
   * Process handler with no real process behind it: destroy/detach are
   * unsupported, and there is no process input stream. Used purely as a text
   * conduit into the console.
   */
  private static class LightProcessHandler extends ProcessHandler {
    @Override
    protected void destroyProcessImpl() {
      throw new UnsupportedOperationException();
    }

    @Override
    protected void detachProcessImpl() {
      throw new UnsupportedOperationException();
    }

    @Override
    public boolean detachIsDefault() {
      return false;
    }

    @Override
    @Nullable
    public OutputStream getProcessInput() {
      return null;
    }
  }
}
mdanielwork/intellij-community
plugins/ant/src/com/intellij/lang/ant/config/execution/PlainTextView.java
Java
apache-2.0
6,686
/* Copyright 2012-2022 Marco De Salvo Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ using RDFSharp.Model; using RDFSharp.Semantics.OWL; using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace RDFSharp.Semantics.SKOS { /// <summary> /// RDFSKOSConceptScheme represents an instance of skos:ConceptScheme within an ontology data. /// </summary> public class RDFSKOSConceptScheme : RDFOntologyFact, IEnumerable<RDFSKOSConcept> { #region Properties /// <summary> /// Count of the concepts composing the scheme /// </summary> public long ConceptsCount => this.Concepts.Count; /// <summary> /// Count of the collections composing the scheme /// </summary> public long CollectionsCount => this.Collections.Count; /// <summary> /// Count of the ordered collections composing the scheme /// </summary> public long OrderedCollectionsCount => this.OrderedCollections.Count; /// <summary> /// Count of the labels composing the scheme /// </summary> public long LabelsCount => this.Labels.Count; /// <summary> /// Gets the enumerator on the concepts of the scheme for iteration /// </summary> public IEnumerator<RDFSKOSConcept> ConceptsEnumerator => this.Concepts.Values.GetEnumerator(); /// <summary> /// Gets the enumerator on the collections of the scheme for iteration /// </summary> public IEnumerator<RDFSKOSCollection> CollectionsEnumerator => this.Collections.Values.GetEnumerator(); /// <summary> /// Gets the enumerator on the ordered collections of 
the scheme for iteration /// </summary> public IEnumerator<RDFSKOSOrderedCollection> OrderedCollectionsEnumerator => this.OrderedCollections.Values.GetEnumerator(); /// <summary> /// Gets the enumerator on the labels of the scheme for iteration /// </summary> public IEnumerator<RDFSKOSLabel> LabelsEnumerator => this.Labels.Values.GetEnumerator(); /// <summary> /// Annotations describing concepts of the scheme /// </summary> public RDFSKOSAnnotations Annotations { get; internal set; } /// <summary> /// Relations describing concepts of the scheme /// </summary> public RDFSKOSRelations Relations { get; internal set; } /// <summary> /// Concepts contained in the scheme (encodes the 'skos:inScheme' relation) /// </summary> internal Dictionary<long, RDFSKOSConcept> Concepts { get; set; } /// <summary> /// Collections contained in the scheme /// </summary> internal Dictionary<long, RDFSKOSCollection> Collections { get; set; } /// <summary> /// OrderedCollections contained in the scheme /// </summary> internal Dictionary<long, RDFSKOSOrderedCollection> OrderedCollections { get; set; } /// <summary> /// Labels contained in the scheme /// </summary> internal Dictionary<long, RDFSKOSLabel> Labels { get; set; } #endregion #region Ctors /// <summary> /// Default-ctor to build a conceptScheme with the given name /// </summary> public RDFSKOSConceptScheme(RDFResource conceptName) : base(conceptName) { this.Concepts = new Dictionary<long, RDFSKOSConcept>(); this.Collections = new Dictionary<long, RDFSKOSCollection>(); this.OrderedCollections = new Dictionary<long, RDFSKOSOrderedCollection>(); this.Labels = new Dictionary<long, RDFSKOSLabel>(); this.Annotations = new RDFSKOSAnnotations(); this.Relations = new RDFSKOSRelations(); } #endregion #region Interfaces /// <summary> /// Exposes a typed enumerator on the scheme's concepts /// </summary> IEnumerator<RDFSKOSConcept> IEnumerable<RDFSKOSConcept>.GetEnumerator() => this.ConceptsEnumerator; /// <summary> /// Exposes an untyped 
enumerator on the scheme's concepts /// </summary> IEnumerator IEnumerable.GetEnumerator() => this.ConceptsEnumerator; #endregion #region Methods #region Add /// <summary> /// Adds the given concept to the scheme /// </summary> public RDFSKOSConceptScheme AddConcept(RDFSKOSConcept concept) { if (concept != null) { if (!this.Concepts.ContainsKey(concept.PatternMemberID)) this.Concepts.Add(concept.PatternMemberID, concept); } return this; } /// <summary> /// Adds the given collection to the scheme /// </summary> public RDFSKOSConceptScheme AddCollection(RDFSKOSCollection collection) { if (collection != null) { if (!this.Collections.ContainsKey(collection.PatternMemberID)) { this.Collections.Add(collection.PatternMemberID, collection); //Also add concepts of the collection foreach (var cn in collection.Concepts.Values) this.AddConcept(cn); //Also adds collections of the collection foreach (var cl in collection.Collections.Values) this.AddCollection(cl); } } return this; } /// <summary> /// Adds the given ordered collection to the scheme /// </summary> public RDFSKOSConceptScheme AddOrderedCollection(RDFSKOSOrderedCollection orderedCollection) { if (orderedCollection != null) { if (!this.OrderedCollections.ContainsKey(orderedCollection.PatternMemberID)) { this.OrderedCollections.Add(orderedCollection.PatternMemberID, orderedCollection); //Also add concepts of the ordered collection foreach (var cn in orderedCollection.Concepts.Values.OrderBy(x => x.Item1)) this.AddConcept(cn.Item2); } } return this; } /// <summary> /// Adds the given label to the scheme /// </summary> public RDFSKOSConceptScheme AddLabel(RDFSKOSLabel label) { if (label != null) { if (!this.Labels.ContainsKey(label.PatternMemberID)) this.Labels.Add(label.PatternMemberID, label); } return this; } #endregion #region Remove /// <summary> /// Removes the given concept from the scheme /// </summary> public RDFSKOSConceptScheme RemoveConcept(RDFSKOSConcept concept) { if (concept != null) { if 
(this.Concepts.ContainsKey(concept.PatternMemberID)) this.Concepts.Remove(concept.PatternMemberID); } return this; } /// <summary> /// Removes the given collection from the scheme /// </summary> public RDFSKOSConceptScheme RemoveCollection(RDFSKOSCollection collection) { if (collection != null) { if (this.Collections.ContainsKey(collection.PatternMemberID)) this.Collections.Remove(collection.PatternMemberID); } return this; } /// <summary> /// Removes the given ordered collection from the scheme /// </summary> public RDFSKOSConceptScheme RemoveOrderedCollection(RDFSKOSOrderedCollection orderedCollection) { if (orderedCollection != null) { if (this.OrderedCollections.ContainsKey(orderedCollection.PatternMemberID)) this.OrderedCollections.Remove(orderedCollection.PatternMemberID); } return this; } /// <summary> /// Removes the given label from the scheme /// </summary> public RDFSKOSConceptScheme RemoveLabel(RDFSKOSLabel label) { if (label != null) { if (this.Labels.ContainsKey(label.PatternMemberID)) this.Labels.Remove(label.PatternMemberID); } return this; } #endregion #region Select /// <summary> /// Selects the concept represented by the given string from the scheme /// </summary> public RDFSKOSConcept SelectConcept(string concept) { if (concept != null) { long conceptID = RDFModelUtilities.CreateHash(concept); if (this.Concepts.ContainsKey(conceptID)) return this.Concepts[conceptID]; } return null; } /// <summary> /// Selects the collection represented by the given string from the scheme /// </summary> public RDFSKOSCollection SelectCollection(string collection) { if (collection != null) { long collectionID = RDFModelUtilities.CreateHash(collection); if (this.Collections.ContainsKey(collectionID)) return this.Collections[collectionID]; } return null; } /// <summary> /// Selects the ordered collection represented by the given string from the scheme /// </summary> public RDFSKOSOrderedCollection SelectOrderedCollection(string orderedCollection) { if 
(orderedCollection != null) { long orderedCollectionID = RDFModelUtilities.CreateHash(orderedCollection); if (this.OrderedCollections.ContainsKey(orderedCollectionID)) return this.OrderedCollections[orderedCollectionID]; } return null; } /// <summary> /// Selects the label represented by the given string from the scheme /// </summary> public RDFSKOSLabel SelectLabel(string label) { if (label != null) { long labelID = RDFModelUtilities.CreateHash(label); if (this.Labels.ContainsKey(labelID)) return this.Labels[labelID]; } return null; } #endregion #region Set /// <summary> /// Builds a new intersection scheme from this scheme and a given one /// </summary> public RDFSKOSConceptScheme IntersectWith(RDFSKOSConceptScheme conceptScheme) { RDFSKOSConceptScheme result = new RDFSKOSConceptScheme(new RDFResource()); if (conceptScheme != null) { //Add intersection concepts foreach (RDFSKOSConcept c in this) { if (conceptScheme.Concepts.ContainsKey(c.PatternMemberID)) result.AddConcept(c); } //Add intersection collections foreach (RDFSKOSCollection c in this.Collections.Values) { if (conceptScheme.Collections.ContainsKey(c.PatternMemberID)) result.AddCollection(c); } //Add intersection ordered collections foreach (RDFSKOSOrderedCollection o in this.OrderedCollections.Values) { if (conceptScheme.OrderedCollections.ContainsKey(o.PatternMemberID)) result.AddOrderedCollection(o); } //Add intersection labels foreach (RDFSKOSLabel l in this.Labels.Values) { if (conceptScheme.Labels.ContainsKey(l.PatternMemberID)) result.AddLabel(l); } //Add intersection relations result.Relations.TopConcept = this.Relations.TopConcept.IntersectWith(conceptScheme.Relations.TopConcept); result.Relations.Broader = this.Relations.Broader.IntersectWith(conceptScheme.Relations.Broader); result.Relations.BroaderTransitive = this.Relations.BroaderTransitive.IntersectWith(conceptScheme.Relations.BroaderTransitive); result.Relations.BroadMatch = 
this.Relations.BroadMatch.IntersectWith(conceptScheme.Relations.BroadMatch); result.Relations.Narrower = this.Relations.Narrower.IntersectWith(conceptScheme.Relations.Narrower); result.Relations.NarrowerTransitive = this.Relations.NarrowerTransitive.IntersectWith(conceptScheme.Relations.NarrowerTransitive); result.Relations.NarrowMatch = this.Relations.NarrowMatch.IntersectWith(conceptScheme.Relations.NarrowMatch); result.Relations.Related = this.Relations.Related.IntersectWith(conceptScheme.Relations.Related); result.Relations.RelatedMatch = this.Relations.RelatedMatch.IntersectWith(conceptScheme.Relations.RelatedMatch); result.Relations.SemanticRelation = this.Relations.SemanticRelation.IntersectWith(conceptScheme.Relations.SemanticRelation); result.Relations.MappingRelation = this.Relations.MappingRelation.IntersectWith(conceptScheme.Relations.MappingRelation); result.Relations.CloseMatch = this.Relations.CloseMatch.IntersectWith(conceptScheme.Relations.CloseMatch); result.Relations.ExactMatch = this.Relations.ExactMatch.IntersectWith(conceptScheme.Relations.ExactMatch); result.Relations.Notation = this.Relations.Notation.IntersectWith(conceptScheme.Relations.Notation); result.Relations.PrefLabel = this.Relations.PrefLabel.IntersectWith(conceptScheme.Relations.PrefLabel); result.Relations.AltLabel = this.Relations.AltLabel.IntersectWith(conceptScheme.Relations.AltLabel); result.Relations.HiddenLabel = this.Relations.HiddenLabel.IntersectWith(conceptScheme.Relations.HiddenLabel); result.Relations.LiteralForm = this.Relations.LiteralForm.IntersectWith(conceptScheme.Relations.LiteralForm); result.Relations.LabelRelation = this.Relations.LabelRelation.IntersectWith(conceptScheme.Relations.LabelRelation); //Add intersection annotations result.Annotations.PrefLabel = this.Annotations.PrefLabel.IntersectWith(conceptScheme.Annotations.PrefLabel); result.Annotations.AltLabel = this.Annotations.AltLabel.IntersectWith(conceptScheme.Annotations.AltLabel); 
result.Annotations.HiddenLabel = this.Annotations.HiddenLabel.IntersectWith(conceptScheme.Annotations.HiddenLabel); result.Annotations.Note = this.Annotations.Note.IntersectWith(conceptScheme.Annotations.Note); result.Annotations.ChangeNote = this.Annotations.ChangeNote.IntersectWith(conceptScheme.Annotations.ChangeNote); result.Annotations.EditorialNote = this.Annotations.EditorialNote.IntersectWith(conceptScheme.Annotations.EditorialNote); result.Annotations.HistoryNote = this.Annotations.HistoryNote.IntersectWith(conceptScheme.Annotations.HistoryNote); result.Annotations.ScopeNote = this.Annotations.ScopeNote.IntersectWith(conceptScheme.Annotations.ScopeNote); result.Annotations.Definition = this.Annotations.Definition.IntersectWith(conceptScheme.Annotations.Definition); result.Annotations.Example = this.Annotations.Example.IntersectWith(conceptScheme.Annotations.Example); } return result; } /// <summary> /// Builds a new union scheme from this scheme and a given one /// </summary> public RDFSKOSConceptScheme UnionWith(RDFSKOSConceptScheme conceptScheme) { RDFSKOSConceptScheme result = new RDFSKOSConceptScheme(new RDFResource()); //Add concepts from this scheme foreach (RDFSKOSConcept c in this) result.AddConcept(c); //Add collections from this scheme foreach (RDFSKOSCollection c in this.Collections.Values) result.AddCollection(c); //Add ordered collections from this scheme foreach (RDFSKOSOrderedCollection o in this.OrderedCollections.Values) result.AddOrderedCollection(o); //Add labels from this scheme foreach (RDFSKOSLabel l in this.Labels.Values) result.AddLabel(l); //Add relations from this scheme result.Relations.TopConcept = result.Relations.TopConcept.UnionWith(this.Relations.TopConcept); result.Relations.Broader = result.Relations.Broader.UnionWith(this.Relations.Broader); result.Relations.BroaderTransitive = result.Relations.BroaderTransitive.UnionWith(this.Relations.BroaderTransitive); result.Relations.BroadMatch = 
result.Relations.BroadMatch.UnionWith(this.Relations.BroadMatch); result.Relations.Narrower = result.Relations.Narrower.UnionWith(this.Relations.Narrower); result.Relations.NarrowerTransitive = result.Relations.NarrowerTransitive.UnionWith(this.Relations.NarrowerTransitive); result.Relations.NarrowMatch = result.Relations.NarrowMatch.UnionWith(this.Relations.NarrowMatch); result.Relations.Related = result.Relations.Related.UnionWith(this.Relations.Related); result.Relations.RelatedMatch = result.Relations.RelatedMatch.UnionWith(this.Relations.RelatedMatch); result.Relations.SemanticRelation = result.Relations.SemanticRelation.UnionWith(this.Relations.SemanticRelation); result.Relations.MappingRelation = result.Relations.MappingRelation.UnionWith(this.Relations.MappingRelation); result.Relations.CloseMatch = result.Relations.CloseMatch.UnionWith(this.Relations.CloseMatch); result.Relations.ExactMatch = result.Relations.ExactMatch.UnionWith(this.Relations.ExactMatch); result.Relations.Notation = result.Relations.Notation.UnionWith(this.Relations.Notation); result.Relations.PrefLabel = result.Relations.PrefLabel.UnionWith(this.Relations.PrefLabel); result.Relations.AltLabel = result.Relations.AltLabel.UnionWith(this.Relations.AltLabel); result.Relations.HiddenLabel = result.Relations.HiddenLabel.UnionWith(this.Relations.HiddenLabel); result.Relations.LiteralForm = result.Relations.LiteralForm.UnionWith(this.Relations.LiteralForm); result.Relations.LabelRelation = result.Relations.LabelRelation.UnionWith(this.Relations.LabelRelation); //Add annotations from this scheme result.Annotations.PrefLabel = result.Annotations.PrefLabel.UnionWith(this.Annotations.PrefLabel); result.Annotations.AltLabel = result.Annotations.AltLabel.UnionWith(this.Annotations.AltLabel); result.Annotations.HiddenLabel = result.Annotations.HiddenLabel.UnionWith(this.Annotations.HiddenLabel); result.Annotations.Note = result.Annotations.Note.UnionWith(this.Annotations.Note); 
result.Annotations.ChangeNote = result.Annotations.ChangeNote.UnionWith(this.Annotations.ChangeNote); result.Annotations.EditorialNote = result.Annotations.EditorialNote.UnionWith(this.Annotations.EditorialNote); result.Annotations.HistoryNote = result.Annotations.HistoryNote.UnionWith(this.Annotations.HistoryNote); result.Annotations.ScopeNote = result.Annotations.ScopeNote.UnionWith(this.Annotations.ScopeNote); result.Annotations.Definition = result.Annotations.Definition.UnionWith(this.Annotations.Definition); result.Annotations.Example = result.Annotations.Example.UnionWith(this.Annotations.Example); //Manage the given scheme if (conceptScheme != null) { //Add concepts from the given scheme foreach (RDFSKOSConcept c in conceptScheme) result.AddConcept(c); //Add collections from the given scheme foreach (RDFSKOSCollection c in conceptScheme.Collections.Values) result.AddCollection(c); //Add ordered collections from the given scheme foreach (RDFSKOSOrderedCollection o in conceptScheme.OrderedCollections.Values) result.AddOrderedCollection(o); //Add labels from the given scheme foreach (RDFSKOSLabel l in conceptScheme.Labels.Values) result.AddLabel(l); //Add relations from the given scheme result.Relations.TopConcept = result.Relations.TopConcept.UnionWith(conceptScheme.Relations.TopConcept); result.Relations.Broader = result.Relations.Broader.UnionWith(conceptScheme.Relations.Broader); result.Relations.BroaderTransitive = result.Relations.BroaderTransitive.UnionWith(conceptScheme.Relations.BroaderTransitive); result.Relations.BroadMatch = result.Relations.BroadMatch.UnionWith(conceptScheme.Relations.BroadMatch); result.Relations.Narrower = result.Relations.Narrower.UnionWith(conceptScheme.Relations.Narrower); result.Relations.NarrowerTransitive = result.Relations.NarrowerTransitive.UnionWith(conceptScheme.Relations.NarrowerTransitive); result.Relations.NarrowMatch = result.Relations.NarrowMatch.UnionWith(conceptScheme.Relations.NarrowMatch); 
result.Relations.Related = result.Relations.Related.UnionWith(conceptScheme.Relations.Related); result.Relations.RelatedMatch = result.Relations.RelatedMatch.UnionWith(conceptScheme.Relations.RelatedMatch); result.Relations.SemanticRelation = result.Relations.SemanticRelation.UnionWith(conceptScheme.Relations.SemanticRelation); result.Relations.MappingRelation = result.Relations.MappingRelation.UnionWith(conceptScheme.Relations.MappingRelation); result.Relations.CloseMatch = result.Relations.CloseMatch.UnionWith(conceptScheme.Relations.CloseMatch); result.Relations.ExactMatch = result.Relations.ExactMatch.UnionWith(conceptScheme.Relations.ExactMatch); result.Relations.Notation = result.Relations.Notation.UnionWith(conceptScheme.Relations.Notation); result.Relations.PrefLabel = result.Relations.PrefLabel.UnionWith(conceptScheme.Relations.PrefLabel); result.Relations.AltLabel = result.Relations.AltLabel.UnionWith(conceptScheme.Relations.AltLabel); result.Relations.HiddenLabel = result.Relations.HiddenLabel.UnionWith(conceptScheme.Relations.HiddenLabel); result.Relations.LiteralForm = result.Relations.LiteralForm.UnionWith(conceptScheme.Relations.LiteralForm); result.Relations.LabelRelation = result.Relations.LabelRelation.UnionWith(conceptScheme.Relations.LabelRelation); //Add annotations from the given scheme result.Annotations.PrefLabel = result.Annotations.PrefLabel.UnionWith(conceptScheme.Annotations.PrefLabel); result.Annotations.AltLabel = result.Annotations.AltLabel.UnionWith(conceptScheme.Annotations.AltLabel); result.Annotations.HiddenLabel = result.Annotations.HiddenLabel.UnionWith(conceptScheme.Annotations.HiddenLabel); result.Annotations.Note = result.Annotations.Note.UnionWith(conceptScheme.Annotations.Note); result.Annotations.ChangeNote = result.Annotations.ChangeNote.UnionWith(conceptScheme.Annotations.ChangeNote); result.Annotations.EditorialNote = result.Annotations.EditorialNote.UnionWith(conceptScheme.Annotations.EditorialNote); 
result.Annotations.HistoryNote = result.Annotations.HistoryNote.UnionWith(conceptScheme.Annotations.HistoryNote); result.Annotations.ScopeNote = result.Annotations.ScopeNote.UnionWith(conceptScheme.Annotations.ScopeNote); result.Annotations.Definition = result.Annotations.Definition.UnionWith(conceptScheme.Annotations.Definition); result.Annotations.Example = result.Annotations.Example.UnionWith(conceptScheme.Annotations.Example); } return result; } /// <summary> /// Builds a new difference scheme from this scheme and a given one /// </summary> public RDFSKOSConceptScheme DifferenceWith(RDFSKOSConceptScheme conceptScheme) { RDFSKOSConceptScheme result = new RDFSKOSConceptScheme(new RDFResource()); if (conceptScheme != null) { //Add difference concepts foreach (RDFSKOSConcept c in this) { if (!conceptScheme.Concepts.ContainsKey(c.PatternMemberID)) result.AddConcept(c); } //Add difference collections foreach (RDFSKOSCollection c in this.Collections.Values) { if (!conceptScheme.Collections.ContainsKey(c.PatternMemberID)) result.AddCollection(c); } //Add difference ordered collections foreach (RDFSKOSOrderedCollection o in this.OrderedCollections.Values) { if (!conceptScheme.OrderedCollections.ContainsKey(o.PatternMemberID)) result.AddOrderedCollection(o); } //Add difference labels foreach (RDFSKOSLabel l in this.Labels.Values) { if (!conceptScheme.Labels.ContainsKey(l.PatternMemberID)) result.AddLabel(l); } //Add difference relations result.Relations.TopConcept = this.Relations.TopConcept.DifferenceWith(conceptScheme.Relations.TopConcept); result.Relations.Broader = this.Relations.Broader.DifferenceWith(conceptScheme.Relations.Broader); result.Relations.BroaderTransitive = this.Relations.BroaderTransitive.DifferenceWith(conceptScheme.Relations.BroaderTransitive); result.Relations.BroadMatch = this.Relations.BroadMatch.DifferenceWith(conceptScheme.Relations.BroadMatch); result.Relations.Narrower = 
this.Relations.Narrower.DifferenceWith(conceptScheme.Relations.Narrower); result.Relations.NarrowerTransitive = this.Relations.NarrowerTransitive.DifferenceWith(conceptScheme.Relations.NarrowerTransitive); result.Relations.NarrowMatch = this.Relations.NarrowMatch.DifferenceWith(conceptScheme.Relations.NarrowMatch); result.Relations.Related = this.Relations.Related.DifferenceWith(conceptScheme.Relations.Related); result.Relations.RelatedMatch = this.Relations.RelatedMatch.DifferenceWith(conceptScheme.Relations.RelatedMatch); result.Relations.SemanticRelation = this.Relations.SemanticRelation.DifferenceWith(conceptScheme.Relations.SemanticRelation); result.Relations.MappingRelation = this.Relations.MappingRelation.DifferenceWith(conceptScheme.Relations.MappingRelation); result.Relations.CloseMatch = this.Relations.CloseMatch.DifferenceWith(conceptScheme.Relations.CloseMatch); result.Relations.ExactMatch = this.Relations.ExactMatch.DifferenceWith(conceptScheme.Relations.ExactMatch); result.Relations.Notation = this.Relations.Notation.DifferenceWith(conceptScheme.Relations.Notation); result.Relations.PrefLabel = this.Relations.PrefLabel.DifferenceWith(conceptScheme.Relations.PrefLabel); result.Relations.AltLabel = this.Relations.AltLabel.DifferenceWith(conceptScheme.Relations.AltLabel); result.Relations.HiddenLabel = this.Relations.HiddenLabel.DifferenceWith(conceptScheme.Relations.HiddenLabel); result.Relations.LiteralForm = this.Relations.LiteralForm.DifferenceWith(conceptScheme.Relations.LiteralForm); result.Relations.LabelRelation = this.Relations.LabelRelation.DifferenceWith(conceptScheme.Relations.LabelRelation); //Add difference annotations result.Annotations.PrefLabel = this.Annotations.PrefLabel.DifferenceWith(conceptScheme.Annotations.PrefLabel); result.Annotations.AltLabel = this.Annotations.AltLabel.DifferenceWith(conceptScheme.Annotations.AltLabel); result.Annotations.HiddenLabel = 
this.Annotations.HiddenLabel.DifferenceWith(conceptScheme.Annotations.HiddenLabel); result.Annotations.Note = this.Annotations.Note.DifferenceWith(conceptScheme.Annotations.Note); result.Annotations.ChangeNote = this.Annotations.ChangeNote.DifferenceWith(conceptScheme.Annotations.ChangeNote); result.Annotations.EditorialNote = this.Annotations.EditorialNote.DifferenceWith(conceptScheme.Annotations.EditorialNote); result.Annotations.HistoryNote = this.Annotations.HistoryNote.DifferenceWith(conceptScheme.Annotations.HistoryNote); result.Annotations.ScopeNote = this.Annotations.ScopeNote.DifferenceWith(conceptScheme.Annotations.ScopeNote); result.Annotations.Definition = this.Annotations.Definition.DifferenceWith(conceptScheme.Annotations.Definition); result.Annotations.Example = this.Annotations.Example.DifferenceWith(conceptScheme.Annotations.Example); } else { //Add concepts from this scheme foreach (RDFSKOSConcept c in this) result.AddConcept(c); //Add collections from this scheme foreach (RDFSKOSCollection c in this.Collections.Values) result.AddCollection(c); //Add ordered collections from this scheme foreach (RDFSKOSOrderedCollection o in this.OrderedCollections.Values) result.AddOrderedCollection(o); //Add labels from this scheme foreach (RDFSKOSLabel l in this.Labels.Values) result.AddLabel(l); //Add relations from this scheme result.Relations.TopConcept = result.Relations.TopConcept.UnionWith(this.Relations.TopConcept); result.Relations.Broader = result.Relations.Broader.UnionWith(this.Relations.Broader); result.Relations.BroaderTransitive = result.Relations.BroaderTransitive.UnionWith(this.Relations.BroaderTransitive); result.Relations.BroadMatch = result.Relations.BroadMatch.UnionWith(this.Relations.BroadMatch); result.Relations.Narrower = result.Relations.Narrower.UnionWith(this.Relations.Narrower); result.Relations.NarrowerTransitive = result.Relations.NarrowerTransitive.UnionWith(this.Relations.NarrowerTransitive); result.Relations.NarrowMatch = 
result.Relations.NarrowMatch.UnionWith(this.Relations.NarrowMatch); result.Relations.Related = result.Relations.Related.UnionWith(this.Relations.Related); result.Relations.RelatedMatch = result.Relations.RelatedMatch.UnionWith(this.Relations.RelatedMatch); result.Relations.SemanticRelation = result.Relations.SemanticRelation.UnionWith(this.Relations.SemanticRelation); result.Relations.MappingRelation = result.Relations.MappingRelation.UnionWith(this.Relations.MappingRelation); result.Relations.CloseMatch = result.Relations.CloseMatch.UnionWith(this.Relations.CloseMatch); result.Relations.ExactMatch = result.Relations.ExactMatch.UnionWith(this.Relations.ExactMatch); result.Relations.Notation = result.Relations.Notation.UnionWith(this.Relations.Notation); result.Relations.PrefLabel = result.Relations.PrefLabel.UnionWith(this.Relations.PrefLabel); result.Relations.AltLabel = result.Relations.AltLabel.UnionWith(this.Relations.AltLabel); result.Relations.HiddenLabel = result.Relations.HiddenLabel.UnionWith(this.Relations.HiddenLabel); result.Relations.LiteralForm = result.Relations.LiteralForm.UnionWith(this.Relations.LiteralForm); result.Relations.LabelRelation = result.Relations.LabelRelation.UnionWith(this.Relations.LabelRelation); //Add annotations from this scheme result.Annotations.PrefLabel = result.Annotations.PrefLabel.UnionWith(this.Annotations.PrefLabel); result.Annotations.AltLabel = result.Annotations.AltLabel.UnionWith(this.Annotations.AltLabel); result.Annotations.HiddenLabel = result.Annotations.HiddenLabel.UnionWith(this.Annotations.HiddenLabel); result.Annotations.Note = result.Annotations.Note.UnionWith(this.Annotations.Note); result.Annotations.ChangeNote = result.Annotations.ChangeNote.UnionWith(this.Annotations.ChangeNote); result.Annotations.EditorialNote = result.Annotations.EditorialNote.UnionWith(this.Annotations.EditorialNote); result.Annotations.HistoryNote = result.Annotations.HistoryNote.UnionWith(this.Annotations.HistoryNote); 
result.Annotations.ScopeNote = result.Annotations.ScopeNote.UnionWith(this.Annotations.ScopeNote); result.Annotations.Definition = result.Annotations.Definition.UnionWith(this.Annotations.Definition); result.Annotations.Example = result.Annotations.Example.UnionWith(this.Annotations.Example); } return result; } #endregion #region Convert /// <summary> /// Gets a graph representation of this scheme, exporting inferences according to the selected behavior /// </summary> public RDFGraph ToRDFGraph(RDFSemanticsEnums.RDFOntologyInferenceExportBehavior infexpBehavior) => this.ToRDFOntologyData().ToRDFGraph(infexpBehavior); /// <summary> /// Asynchronously gets a graph representation of this scheme, exporting inferences according to the selected behavior /// </summary> public Task<RDFGraph> ToRDFGraphAsync(RDFSemanticsEnums.RDFOntologyInferenceExportBehavior infexpBehavior) => Task.Run(() => ToRDFGraph(infexpBehavior)); /// <summary> /// Gets an ontology data representation of this scheme /// </summary> public RDFOntologyData ToRDFOntologyData() { RDFOntologyData result = new RDFOntologyData(); //ConceptScheme result.AddFact(this); result.AddClassTypeRelation(this, RDFVocabulary.SKOS.CONCEPT_SCHEME.ToRDFOntologyClass()); //Concepts foreach (RDFSKOSConcept c in this) { result.AddFact(c); result.AddClassTypeRelation(c, RDFVocabulary.SKOS.CONCEPT.ToRDFOntologyClass()); result.AddAssertionRelation(c, RDFVocabulary.SKOS.IN_SCHEME.ToRDFOntologyObjectProperty(), this); } //Collections foreach (RDFSKOSCollection c in this.Collections.Values) { result.AddAssertionRelation(c, RDFVocabulary.SKOS.IN_SCHEME.ToRDFOntologyObjectProperty(), this); result = result.UnionWith(c.ToRDFOntologyData()); } //OrderedCollections foreach (RDFSKOSOrderedCollection o in this.OrderedCollections.Values) { result.AddAssertionRelation(o, RDFVocabulary.SKOS.IN_SCHEME.ToRDFOntologyObjectProperty(), this); result = result.UnionWith(o.ToRDFOntologyData()); } //Labels foreach (RDFSKOSLabel l in 
this.Labels.Values) { result.AddFact(l); result.AddClassTypeRelation(l, RDFVocabulary.SKOS.SKOSXL.LABEL.ToRDFOntologyClass()); result.AddAssertionRelation(l, RDFVocabulary.SKOS.IN_SCHEME.ToRDFOntologyObjectProperty(), this); } //Assertions result.Relations.Assertions = result.Relations.Assertions.UnionWith(this.Relations.TopConcept) .UnionWith(this.Relations.Broader) .UnionWith(this.Relations.BroaderTransitive) .UnionWith(this.Relations.BroadMatch) .UnionWith(this.Relations.Narrower) .UnionWith(this.Relations.NarrowerTransitive) .UnionWith(this.Relations.NarrowMatch) .UnionWith(this.Relations.Related) .UnionWith(this.Relations.RelatedMatch) .UnionWith(this.Relations.SemanticRelation) .UnionWith(this.Relations.MappingRelation) .UnionWith(this.Relations.CloseMatch) .UnionWith(this.Relations.ExactMatch) .UnionWith(this.Relations.Notation) .UnionWith(this.Relations.PrefLabel) .UnionWith(this.Relations.AltLabel) .UnionWith(this.Relations.HiddenLabel) .UnionWith(this.Relations.LiteralForm) .UnionWith(this.Relations.LabelRelation); //Annotations result.Annotations.CustomAnnotations = result.Annotations.CustomAnnotations.UnionWith(this.Annotations.PrefLabel) .UnionWith(this.Annotations.AltLabel) .UnionWith(this.Annotations.HiddenLabel) .UnionWith(this.Annotations.Note) .UnionWith(this.Annotations.ChangeNote) .UnionWith(this.Annotations.EditorialNote) .UnionWith(this.Annotations.HistoryNote) .UnionWith(this.Annotations.ScopeNote) .UnionWith(this.Annotations.Definition) .UnionWith(this.Annotations.Example); return result; } /// <summary> /// Asynchronously gets an ontology data representation of this scheme /// </summary> public Task<RDFOntologyData> ToRDFOntologyDataAsync() => Task.Run(() => ToRDFOntologyData()); #endregion #endregion } }
mdesalvo/RDFSharp
RDFSharp/Semantics/SKOS/Abstractions/RDFSKOSConceptScheme.cs
C#
apache-2.0
41,109
# frozen_string_literal: true module Beta module Types module Objects class PrefectureType < Beta::Types::Objects::Base implements GraphQL::Types::Relay::Node global_id_field :id field :annict_id, Integer, null: false field :name, String, null: false end end end end
annict/annict
app/graphql/beta/types/objects/prefecture_type.rb
Ruby
apache-2.0
326
/*
 * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.orientechnologies.orient.core.command.script;

import java.util.Map;
import java.util.Map.Entry;

import javax.script.Bindings;
import javax.script.Invocable;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptException;

import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.command.OCommandExecutorAbstract;
import com.orientechnologies.orient.core.command.OCommandRequest;
import com.orientechnologies.orient.core.db.record.ODatabaseRecordTx;
import com.orientechnologies.orient.core.metadata.function.OFunction;

/**
 * Executes Script Commands. Looks up a server-side function by name (the command text) in the
 * database's function library, then invokes it through the script engine registered for the
 * function's language.
 *
 * @see OCommandScript
 * @author Luca Garulli
 *
 */
public class OCommandExecutorFunction extends OCommandExecutorAbstract {

  /** The function-invocation request this executor was parsed from. */
  protected OCommandFunction request;

  public OCommandExecutorFunction() {
  }

  /**
   * Binds this executor to the given request. The request text is the name of the function to run.
   *
   * @param iRequest the function command request
   * @return this executor, for chaining
   */
  @SuppressWarnings("unchecked")
  public OCommandExecutorFunction parse(final OCommandRequest iRequest) {
    request = (OCommandFunction) iRequest;
    return this;
  }

  /**
   * Executes the function with no extra context variables.
   *
   * @param iArgs function arguments, may be null
   * @return the function's return value
   */
  public Object execute(final Map<Object, Object> iArgs) {
    return executeInContext(null, iArgs);
  }

  /**
   * Executes the function inside the given context.
   *
   * @param iContext extra variables to bind into the script scope, may be null
   * @param iArgs function arguments, passed by position; may be null
   * @return the function's return value
   * @throws OCommandScriptException if the function does not exist or its evaluation fails
   */
  public Object executeInContext(final Map<String, Object> iContext, final Map<Object, Object> iArgs) {
    parserText = request.getText();

    final ODatabaseRecordTx db = (ODatabaseRecordTx) getDatabase();
    final OFunction f = db.getMetadata().getFunctionLibrary().getFunction(parserText);

    // Fail fast with a meaningful error instead of an NPE on f.getLanguage() below.
    if (f == null)
      throw new OCommandScriptException("Function not found: " + parserText, parserText, 0);

    final OScriptManager scriptManager = Orient.instance().getScriptManager();
    final ScriptEngine scriptEngine = scriptManager.getEngine(f.getLanguage());
    final Bindings binding = scriptManager.bind(scriptEngine, db, iContext, iArgs);

    try {
      scriptEngine.setBindings(binding, ScriptContext.ENGINE_SCOPE);

      // COMPILE FUNCTION LIBRARY
      scriptEngine.eval(scriptManager.getLibrary(db, f.getLanguage()));

      if (scriptEngine instanceof Invocable) {
        // INVOKE AS FUNCTION. PARAMS ARE PASSED BY POSITION
        final Invocable invocableEngine = (Invocable) scriptEngine;
        Object[] args = null;
        if (iArgs != null) {
          args = new Object[iArgs.size()];
          int i = 0;
          for (Entry<Object, Object> arg : iArgs.entrySet())
            args[i++] = arg.getValue();
        }
        return invocableEngine.invokeFunction(parserText, args);

      } else {
        // INVOKE THE CODE SNIPPET
        // Guard against a null argument map: the Invocable branch above tolerates
        // iArgs == null, so this branch must not NPE on iArgs.values().
        final Object[] args = iArgs == null ? new Object[0] : iArgs.values().toArray();
        return scriptEngine.eval(invokeFunction(f, args), binding);
      }

    } catch (ScriptException e) {
      throw new OCommandScriptException("Error on execution of the script", request.getText(), e.getColumnNumber(), e);
    } catch (NoSuchMethodException e) {
      throw new OCommandScriptException("Error on execution of the script", request.getText(), 0, e);
    } finally {
      // Always release the bindings, even on failure.
      scriptManager.unbind(binding);
    }
  }

  /** Function invocations may modify data, so they are never idempotent. */
  public boolean isIdempotent() {
    return false;
  }

  @Override
  protected void throwSyntaxErrorException(String iText) {
    throw new OCommandScriptException("Error on execution of the script: " + iText, request.getText(), 0);
  }

  /**
   * Builds a source snippet of the form {@code name(arg1,arg2);} for engines that are not
   * {@link Invocable}.
   * NOTE(review): arguments are interpolated via their toString() — string arguments are not
   * quoted or escaped here; confirm callers only pass literals safe for the target language.
   *
   * @param f the function to call
   * @param iArgs positional argument values (never null)
   * @return the generated call expression
   */
  protected String invokeFunction(final OFunction f, Object[] iArgs) {
    final StringBuilder code = new StringBuilder();
    code.append(f.getName());
    code.append('(');
    int i = 0;
    for (Object a : iArgs) {
      if (i++ > 0)
        code.append(',');
      code.append(a);
    }
    code.append(");");
    return code.toString();
  }
}
redox/OrientDB
core/src/main/java/com/orientechnologies/orient/core/command/script/OCommandExecutorFunction.java
Java
apache-2.0
4,208
import java.util.Scanner;

/**
 * @author Oleg Cherednik
 * @since 13.07.2018
 */
public class Solution {

    /**
     * Returns the index of the single character whose removal turns {@code s} into a
     * palindrome, or {@code -1} when {@code s} already is one. Relies on the problem
     * guarantee that removing at most one character always suffices.
     */
    static int palindromeIndex(String s) {
        int lo = 0;
        int hi = s.length() - 1;

        while (lo < hi) {
            if (s.charAt(lo) != s.charAt(hi)) {
                // First mismatch: one of the two ends must be dropped. If the inner
                // range without the right end is a palindrome, drop the right end;
                // otherwise (by the problem guarantee) dropping the left end works.
                return isPalindrome(s, lo, hi - 1) ? hi : lo;
            }
            lo++;
            hi--;
        }

        return -1;
    }

    /** Checks whether s[from..to] (both inclusive) reads the same in both directions. */
    private static boolean isPalindrome(String s, int from, int to) {
        while (from < to) {
            if (s.charAt(from) != s.charAt(to)) {
                return false;
            }
            from++;
            to--;
        }
        return true;
    }

    private static final Scanner scanner = new Scanner(System.in);

    public static void main(String[] args) {
        int q = scanner.nextInt();
        scanner.skip("(\r\n|[\n\r\u2028\u2029\u0085])?");

        for (int query = 0; query < q; query++) {
            String s = scanner.nextLine();
            int result = palindromeIndex(s);
            System.out.println(String.valueOf(result));
        }

        scanner.close();
    }
}
oleg-cherednik/hackerrank
Algorithms/Strings/Palindrome Index/Solution.java
Java
apache-2.0
921
package de.newsarea.homecockpit.connector.facade.registration.util;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;

/**
 * Static reflection helpers used by the connector registration layer to discover classes on the
 * classpath and to locate constructors and setter methods at runtime.
 */
public final class ClassLoaderHelper {

    private static Logger log = LoggerFactory.getLogger(ClassLoaderHelper.class);

    /** Utility class; not instantiable. */
    private ClassLoaderHelper() {
    }

    /**
     * Returns the first public constructor of the given class.
     *
     * @param clazz class to inspect
     * @return first public constructor, or {@code null} if there is none or a
     *         {@link SecurityException} prevents access
     */
    public static Constructor<?> determineFirstConstructor(Class<?> clazz) {
        try {
            for (Constructor<?> constructor : clazz.getConstructors()) {
                return constructor;
            }
        } catch (SecurityException e) {
            log.error(e.getMessage(), e);
        }
        return null;
    }

    /**
     * Returns a public constructor whose parameter types are assignable from the given
     * argument types.
     *
     * @param clazz         class to inspect
     * @param argumentTypes the argument types the caller intends to pass
     * @return a matching public constructor, or {@code null} if none matches
     */
    public static Constructor<?> determineConstructorByArgumentTypes(Class<?> clazz, Class<?>[] argumentTypes) {
        try {
            for (Constructor<?> constructor : clazz.getConstructors()) {
                if (isAssignableFrom(constructor, argumentTypes)) {
                    return constructor;
                }
            }
        } catch (SecurityException e) {
            log.error(e.getMessage(), e);
        }
        return null;
    }

    /** Checks arity and element-wise assignability of the constructor's parameter types. */
    private static boolean isAssignableFrom(Constructor<?> constructor, Class<?>[] argumentTypes) {
        Class<?>[] constructorArgTypes = constructor.getParameterTypes();
        if (constructorArgTypes.length != argumentTypes.length) {
            return false;
        }
        // ~
        for (int i = 0; i < argumentTypes.length; i++) {
            if (!argumentTypes[i].isAssignableFrom(constructorArgTypes[i])) {
                return false;
            }
        }
        return true;
    }

    /**
     * Scans the current thread's context class loader for all classes in the given package,
     * descending into sub-packages reachable as filesystem directories.
     *
     * @param packageName dot-separated package name, e.g. {@code "com.example.plugins"}
     * @return all classes found below the package
     * @throws ClassNotFoundException if a discovered class file cannot be loaded
     * @throws IOException            if the classpath resources cannot be enumerated
     */
    public static List<Class<?>> determineClasses(String packageName) throws ClassNotFoundException, IOException {
        ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        assert classLoader != null;
        String path = packageName.replace('.', '/');
        Enumeration<URL> resources = classLoader.getResources(path);
        List<File> dirs = new ArrayList<>();
        while (resources.hasMoreElements()) {
            URL resource = resources.nextElement();
            // NOTE(review): only "%20" is un-escaped here; other URL-encoded characters in the
            // path would survive. Confirm paths never contain other escapes, or switch to
            // URLDecoder if they can.
            dirs.add(new File(resource.getFile().replaceAll("%20", " ")));
        }
        ArrayList<Class<?>> classes = new ArrayList<>();
        for (File directory : dirs) {
            classes.addAll(findClasses(directory, packageName));
        }
        return classes;
    }

    /**
     * Recursively collects all classes below the given directory, interpreting sub-directories
     * as sub-packages.
     *
     * @param directory   directory corresponding to {@code packageName}
     * @param packageName dot-separated package name of {@code directory}
     * @return classes found below the directory; empty if it does not exist or cannot be listed
     * @throws ClassNotFoundException if a discovered class file cannot be loaded
     */
    public static List<Class<?>> findClasses(File directory, String packageName) throws ClassNotFoundException {
        List<Class<?>> classes = new ArrayList<>();
        if (!directory.exists()) {
            return classes;
        }
        File[] files = directory.listFiles();
        if (files == null) {
            // File.listFiles() returns null on I/O errors or when the path is not a directory;
            // treat that like an empty directory instead of throwing a NullPointerException.
            return classes;
        }
        for (File file : files) {
            if (file.isDirectory()) {
                assert !file.getName().contains(".");
                classes.addAll(findClasses(file, packageName + "." + file.getName()));
            } else if (file.getName().endsWith(".class")) {
                // Strip the ".class" suffix (6 characters) to obtain the simple class name.
                classes.add(Class.forName(packageName + '.' + file.getName().substring(0, file.getName().length() - 6)));
            }
        }
        return classes;
    }

    /**
     * Returns the first public method named {@code "set" + name} (case-insensitive).
     *
     * @param clazz class to inspect
     * @param name  property name, without the {@code set} prefix
     * @return the matching setter method, or {@code null} if none exists
     */
    public static Method determineSetterMethod(Class<?> clazz, String name) {
        for (Method method : clazz.getMethods()) {
            if (method.getName().equalsIgnoreCase("set" + name)) {
                return method;
            }
        }
        return null;
    }
}
RBernhardt/homecockpit-connectors
connectors-facade/src/main/java/de/newsarea/homecockpit/connector/facade/registration/util/ClassLoaderHelper.java
Java
apache-2.0
3,378
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @author Upendra Jariya
 * @sponsor Douglas Johnson
 * @version 1.0
 * @since 2014-11-10
 */
package tools.datasync.utils;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.log4j.Logger;

import tools.datasync.api.utils.HashGenerator;

/**
 * MD5-based {@link HashGenerator} implementation, exposed as a lazily created singleton.
 */
public class Md5HashGenerator implements HashGenerator {

    // Shared singleton instance, created on first use by getInstance().
    private static Md5HashGenerator instance = null;

    private static Logger LOG = Logger.getLogger(Md5HashGenerator.class
            .getName());

    // Private: instances are only handed out through getInstance().
    private Md5HashGenerator() {
    }

    /**
     * Returns the shared generator instance. Synchronized so that concurrent
     * first calls cannot create two instances.
     */
    public static synchronized Md5HashGenerator getInstance() {
        if (instance == null) {
            instance = new Md5HashGenerator();
        }
        return instance;
    }

    /**
     * Returns a hex checksum of the given string, or {@code null} when hashing fails
     * (the failure is logged, not rethrown).
     *
     * NOTE(review): this computes md5Hex(md5(data)) — the raw MD5 digest bytes are
     * hashed a second time before hex-encoding. Verify this double hashing is
     * intentional; a plain MD5 hex of the input would be DigestUtils.md5Hex(data).
     * Changing it would invalidate any previously stored checksums.
     */
    public String generate(String data) {
        try {
            byte[] digest = DigestUtils.md5(data);
            return (DigestUtils.md5Hex(digest));
        } catch (Exception e) {
            LOG.warn("Error while generating checksum on value [" + data
                    + "]", e);
            return null;
        }
    }

    /**
     * Re-generates the checksum for {@code data} and compares it to the expected hash.
     *
     * NOTE(review): if generate() returns null (hash failure), this throws a
     * NullPointerException instead of returning false — confirm that is acceptable.
     */
    public boolean validate(String data, String hash) {
        String newHash = generate(data);
        return newHash.equals(hash);
    }
}
datasynctools/sync-tools-prototype
data-sync-tools-core/src/main/java/tools/datasync/utils/Md5HashGenerator.java
Java
apache-2.0
1,858
/*global describe, beforeEach, it*/
'use strict';

var assert = require('yeoman-generator').assert;
var helper = require('./helper');

// Tests for the "connection" sub-generator: scaffolding a REST connection
// descriptor file from interactive prompt answers.
describe('mcap:connections', function () {

    beforeEach(function (done) {
        var answers = {
            name: 'MyApp'
        };
        // Creates a generator with the default options / arguments
        helper.createAppGenerator({
            answers: answers
        }, done);
    });

    it('creates expected files', function (done) {
        // File the sub-generator is expected to write.
        var expectedFiles = [
            'connections/sap.json'
        ];
        // Full JSON content expected inside connections/sap.json.
        var expectedContent = {
            name: 'SAP',
            description: 'SAP API',
            type: 'rest',
            properties: {
                descriptorUrl: 'http://sap.mway.io',
                username: 'admin',
                password: 'root'
            }
        };
        // Prompt answers fed to the sub-generator; note they are flat while the
        // generated file nests the connection details under "properties".
        var answers = {
            name: 'SAP',
            description: 'SAP API',
            descriptorUrl: 'http://sap.mway.io',
            username: 'admin',
            password: 'root'
        };
        helper.createSubGenerator('connection', {answers: answers}, function () {
            assert.file(expectedFiles);
            helper.deepEqual('connections/sap.json', expectedContent);
            done();
        });
    });
});
mwaylabs/mcap-cli
generators/generator-mcap/test/test-connections.js
JavaScript
apache-2.0
1,116
/*
 * Copyright 2018 Google LLC.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.cloud.tools.jib.api;

import java.util.Objects;

/** Holds credentials (username and password). */
public class Credential {

  // A username equal to this sentinel marks the secret as an OAuth 2.0 refresh
  // token rather than a plain password.
  // https://github.com/docker/cli/blob/master/docs/reference/commandline/login.md#credential-helper-protocol
  public static final String OAUTH2_TOKEN_USER_NAME = "<token>";

  private final String username;
  private final String password;

  /**
   * Creates a {@link Credential} holding the given username and password.
   *
   * @param username the username
   * @param password the password
   * @return the new {@link Credential}
   */
  public static Credential from(String username, String password) {
    return new Credential(username, password);
  }

  private Credential(String username, String password) {
    this.username = username;
    this.password = password;
  }

  /**
   * Gets the username.
   *
   * @return the username
   */
  public String getUsername() {
    return username;
  }

  /**
   * Gets the password.
   *
   * @return the password
   */
  public String getPassword() {
    return password;
  }

  /**
   * Tells whether this credential carries an OAuth 2.0 refresh token, i.e. whether the username is
   * the sentinel {@code <token>} value.
   *
   * @return true if this credential is an OAuth 2.0 refresh token.
   */
  public boolean isOAuth2RefreshToken() {
    return OAUTH2_TOKEN_USER_NAME.equals(username);
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof Credential)) {
      return false;
    }
    Credential that = (Credential) obj;
    return username.equals(that.username) && password.equals(that.password);
  }

  @Override
  public int hashCode() {
    return Objects.hash(username, password);
  }

  @Override
  public String toString() {
    return username + ":" + password;
  }
}
GoogleContainerTools/jib
jib-core/src/main/java/com/google/cloud/tools/jib/api/Credential.java
Java
apache-2.0
2,498
const browserSync = require('../../../');
const utils = require('../utils');
const register = require('../../../dist/plugins/clients').ClientEvents.register;
const assert = require('chai').assert;

// Tests for the bs.clients$ observable: registering clients over the socket
// must produce a de-duplicated, growing map of connected clients.
describe('Client connection stream', function () {
    it('does not have duplicates', function (done) {
        browserSync.create({}).subscribe(function (bs) {
            const client = utils.getClientSocket(bs);
            // Register the same client id twice; the stream must de-duplicate it.
            client.emit(register, utils.getClient('123456'));
            client.emit(register, utils.getClient('123456'));
            // Skip the initial emission, then observe the next two snapshots.
            bs.clients$.skip(1)
                .take(2)
                .toArray()
                .subscribe(function (clients) {
                    // Both snapshots must still contain exactly one client.
                    assert.equal(clients[0].size, 1);
                    assert.equal(clients[1].size, 1);
                    const jsClients1 = clients[0].toList().toJS();
                    const jsClients2 = clients[1].toList().toJS();
                    assert.equal(jsClients1[0].id, '123456');
                    assert.equal(jsClients2[0].id, '123456');
                    bs.cleanup();
                    done();
                }, function (err) {done(err)});
        });
    });
    it('allows unique clients', function (done) {
        browserSync.create({}).subscribe(function (bs) {
            const client = utils.getClientSocket(bs);
            // Two distinct ids: the client map must grow from 1 to 2 entries.
            client.emit(register, utils.getClient('xyz'));
            client.emit(register, utils.getClient('zxy'));
            bs.clients$.skip(1)
                .take(2)
                .toArray()
                .subscribe(function (clients) {
                    assert.equal(clients[0].size, 1);
                    assert.equal(clients[1].size, 2);
                    const jsClients1 = clients[0].toList().toJS();
                    const jsClients2 = clients[1].toList().toJS();
                    // Insertion order is preserved in the list view.
                    assert.equal(jsClients1[0].id, 'xyz');
                    assert.equal(jsClients2[0].id, 'xyz');
                    assert.equal(jsClients2[1].id, 'zxy');
                    bs.cleanup();
                    done();
                }, function(err) { done(err) });
        });
    });
    it('allows unique clients (stress)', function (done) {
        browserSync.create({}).subscribe(function (bs) {
            // Register 50 distinct clients (id-1 .. id-50), each over its own socket.
            for (var i = 1, n = 51; i < n; i += 1) {
                utils.getClientSocket(bs).emit(register, utils.getClient('id-' + i));
            }
            bs.clients$.skip(1)
                .take(50)
                .toArray()
                .subscribe(function (clients) {
                    // After 50 registrations the final snapshot holds all 50 clients.
                    assert.equal(clients[49].size, 50);
                    assert.ok(clients[49].get('id-40'));
                    assert.equal(clients[49].get('id-40').get('id'), 'id-40');
                    bs.cleanup();
                    done();
                }, function(err) { done(err) });
        });
    });
});
BrowserSync/browser-sync-core
test/mocha/api/clients$.js
JavaScript
apache-2.0
2,869
using Lucene.Net.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using DataInput = Lucene.Net.Store.DataInput; using DataOutput = Lucene.Net.Store.DataOutput; /// <summary> /// An FST <see cref="Outputs{T}"/> implementation, holding two other outputs. /// <para/> /// @lucene.experimental /// </summary> public class PairOutputs<A, B> : Outputs<PairOutputs<A, B>.Pair> { private readonly Pair NO_OUTPUT; private readonly Outputs<A> outputs1; private readonly Outputs<B> outputs2; /// <summary> /// Holds a single pair of two outputs. 
</summary> public class Pair { public A Output1 { get; private set; } public B Output2 { get; private set; } // use newPair internal Pair(A output1, B output2) { this.Output1 = output1; this.Output2 = output2; } public override bool Equals(object other) { // LUCENENET specific - simplified expression return ReferenceEquals(other, this) || (other is Pair pair && Output1.Equals(pair.Output1) && Output2.Equals(pair.Output2)); } public override int GetHashCode() { return Output1.GetHashCode() + Output2.GetHashCode(); } } public PairOutputs(Outputs<A> outputs1, Outputs<B> outputs2) { this.outputs1 = outputs1; this.outputs2 = outputs2; NO_OUTPUT = new Pair(outputs1.NoOutput, outputs2.NoOutput); } /// <summary> /// Create a new <see cref="Pair"/> </summary> public virtual Pair NewPair(A a, B b) { if (a.Equals(outputs1.NoOutput)) { a = outputs1.NoOutput; } if (b.Equals(outputs2.NoOutput)) { b = outputs2.NoOutput; } if (a.Equals(outputs1.NoOutput) && b.Equals(outputs2.NoOutput)) { return NO_OUTPUT; } else { var p = new Pair(a, b); if (Debugging.AssertsEnabled) Debugging.Assert(Valid(p)); return p; } } // for assert private bool Valid(Pair pair) { bool noOutput1 = pair.Output1.Equals(outputs1.NoOutput); bool noOutput2 = pair.Output2.Equals(outputs2.NoOutput); if (noOutput1 && !pair.Output1.Equals(outputs1.NoOutput)) { return false; } if (noOutput2 && !pair.Output2.Equals(outputs2.NoOutput)) { return false; } if (noOutput1 && noOutput2) { if (!pair.Equals(NO_OUTPUT)) { return false; } else { return true; } } else { return true; } } public override Pair Common(Pair pair1, Pair pair2) { if (Debugging.AssertsEnabled) { Debugging.Assert(Valid(pair1)); Debugging.Assert(Valid(pair2)); } return NewPair(outputs1.Common(pair1.Output1, pair2.Output1), outputs2.Common(pair1.Output2, pair2.Output2)); } public override Pair Subtract(Pair output, Pair inc) { if (Debugging.AssertsEnabled) { Debugging.Assert(Valid(output)); Debugging.Assert(Valid(inc)); } return 
NewPair(outputs1.Subtract(output.Output1, inc.Output1), outputs2.Subtract(output.Output2, inc.Output2)); } public override Pair Add(Pair prefix, Pair output) { if (Debugging.AssertsEnabled) { Debugging.Assert(Valid(prefix)); Debugging.Assert(Valid(output)); } return NewPair(outputs1.Add(prefix.Output1, output.Output1), outputs2.Add(prefix.Output2, output.Output2)); } [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Write(Pair output, DataOutput writer) { if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); outputs1.Write(output.Output1, writer); outputs2.Write(output.Output2, writer); } [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Pair Read(DataInput @in) { A output1 = outputs1.Read(@in); B output2 = outputs2.Read(@in); return NewPair(output1, output2); } public override Pair NoOutput => NO_OUTPUT; [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string OutputToString(Pair output) { if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); return "<pair:" + outputs1.OutputToString(output.Output1) + "," + outputs2.OutputToString(output.Output2) + ">"; } public override string ToString() { return "PairOutputs<" + outputs1 + "," + outputs2 + ">"; } } }
NightOwl888/lucenenet
src/Lucene.Net/Util/Fst/PairOutputs.cs
C#
apache-2.0
6,273
#!/usr/bin/env python """A flow to run checks for a host.""" from grr.lib import aff4 from grr.lib import flow from grr.lib import rdfvalue from grr.lib.checks import checks from grr.proto import flows_pb2 class CheckFlowArgs(rdfvalue.RDFProtoStruct): protobuf = flows_pb2.CheckFlowArgs class CheckRunner(flow.GRRFlow): """This flow runs checks on a host. CheckRunner: - Identifies what checks should be run for a host. - Identifies the artifacts that need to be collected to perform those checks. - Orchestrates collection of the host data. - Routes host data to the relevant checks. - Returns check data ready for reporting. """ friendly_name = "Run Checks" category = "/Checks/" behaviours = flow.GRRFlow.behaviours + "BASIC" @flow.StateHandler(next_state=["MapArtifactData"]) def Start(self): """.""" client = aff4.FACTORY.Open(self.client_id, token=self.token) self.state.Register("knowledge_base", client.Get(client.Schema.KNOWLEDGE_BASE)) self.state.Register("labels", client.GetLabels()) self.state.Register("artifacts_wanted", set()) self.state.Register("artifacts_fetched", set()) self.state.Register("checks_run", []) self.state.Register("checks_with_findings", []) self.state.Register("results_store", None) self.state.Register("host_data", {}) self.CallState(next_state="MapArtifactData") @flow.StateHandler(next_state=["AddResponses", "RunChecks"]) def MapArtifactData(self, responses): """Get processed data, mapped to artifacts.""" self.state.artifacts_wanted = checks.CheckRegistry.SelectArtifacts( os=self.state.knowledge_base.os) # Fetch Artifacts and map results to the artifacts that generated them. # This is an inefficient collection, but necessary because results need to # be mapped to the originating artifact. An alternative would be to have # rdfvalues labeled with originating artifact ids. 
for artifact_id in self.state.artifacts_wanted: self.CallFlow("ArtifactCollectorFlow", artifact_list=[artifact_id], request_data={"artifact_id": artifact_id}, next_state="AddResponses") self.CallState(next_state="RunChecks") @flow.StateHandler() def AddResponses(self, responses): artifact_id = responses.request_data["artifact_id"] # TODO(user): Check whether artifact collection succeeded. self.state.host_data[artifact_id] = list(responses) @flow.StateHandler(next_state=["Done"]) def RunChecks(self, responses): if not responses.success: raise RuntimeError("Checks did not run successfully.") # Hand host data across to checks. Do this after all data has been collected # in case some checks require multiple artifacts/results. for finding in checks.CheckHost(self.state.host_data, os=self.state.knowledge_base.os): self.state.checks_run.append(finding.check_id) if finding.anomaly: self.state.checks_with_findings.append(finding.check_id) self.SendReply(finding)
ojengwa/grr
lib/flows/general/checks.py
Python
apache-2.0
3,088
/* Copyright 2017 - 2022 R. Thomas * Copyright 2017 - 2022 Quarkslab * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <algorithm> #include <string> #include <sstream> #include "LIEF/MachO/hash.hpp" #include "LIEF/MachO/RelocationObject.hpp" #include "pyMachO.hpp" namespace LIEF { namespace MachO { template<class T> using getter_t = T (RelocationObject::*)(void) const; template<class T> using setter_t = void (RelocationObject::*)(T); template<> void create<RelocationObject>(py::module& m) { py::class_<RelocationObject, Relocation>(m, "RelocationObject", R"delim( Class that represents a relocation presents in the MachO object file (``.o``). Usually, this kind of relocation is found in the :class:`lief.MachO.Section`. )delim") .def_property("value", static_cast<getter_t<int32_t>>(&RelocationObject::value), static_cast<setter_t<int32_t>>(&RelocationObject::value), R"delim( For **scattered** relocations, the address of the relocatable expression for the item in the file that needs to be updated if the address is changed. 
For relocatable expressions with the difference of two section addresses, the address from which to subtract (in mathematical terms, the minuend) is contained in the first relocation entry and the address to subtract (the subtrahend) is contained in the second relocation entry.", )delim") .def_property_readonly("is_scattered", &RelocationObject::is_scattered, "``True`` if the relocation is a scattered one") .def("__eq__", &RelocationObject::operator==) .def("__ne__", &RelocationObject::operator!=) .def("__hash__", [] (const RelocationObject& relocation) { return Hash::hash(relocation); }) .def("__str__", [] (const RelocationObject& relocation) { std::ostringstream stream; stream << relocation; std::string str = stream.str(); return str; }); } } }
lief-project/LIEF
api/python/MachO/objects/pyRelocationObject.cpp
C++
apache-2.0
2,566
/* * Copyright (C) 2016 Mkhytar Mkhoian * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.justplay1.shoppist.interactor.units; import com.justplay1.shoppist.executor.PostExecutionThread; import com.justplay1.shoppist.executor.ThreadExecutor; import com.justplay1.shoppist.models.UnitModel; import com.justplay1.shoppist.repository.UnitsRepository; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.util.Collections; import java.util.List; import static com.justplay1.shoppist.ModelUtil.createFakeUnitModel; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; public class UpdateUnitsTest { private UpdateUnits useCase; @Mock private ThreadExecutor mockThreadExecutor; @Mock private PostExecutionThread mockPostExecutionThread; @Mock private UnitsRepository mockUnitsRepository; private List<UnitModel> models; @Before public void setUp() { MockitoAnnotations.initMocks(this); useCase = new UpdateUnits(mockUnitsRepository, mockThreadExecutor, mockPostExecutionThread); models = Collections.singletonList(createFakeUnitModel()); useCase.init(models); } @Test public void updateUnitsUseCase_HappyCase() { useCase.buildUseCaseObservable().subscribe(); verify(mockUnitsRepository).update(models); verifyNoMoreInteractions(mockUnitsRepository); verifyZeroInteractions(mockThreadExecutor); 
verifyZeroInteractions(mockPostExecutionThread); } }
justplay1/Shoppist
domain/src/test/java/com/justplay1/shoppist/interactor/units/UpdateUnitsTest.java
Java
apache-2.0
2,190
package fi.rivermouth.talous.auth; import java.util.ArrayList; import java.util.List; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.authority.SimpleGrantedAuthority; import fi.rivermouth.talous.domain.User; public class UserAuthenticationManager implements AuthenticationManager { @Override public Authentication authenticate(Authentication authentication) { List<GrantedAuthority> grantedAuths = new ArrayList<GrantedAuthority>(); grantedAuths.add(new SimpleGrantedAuthority(User.ROLE)); return new UsernamePasswordAuthenticationToken(authentication.getName(), authentication.getCredentials(), grantedAuths); } }
Rivermouth/Rivermouth-Talous
src/main/java/fi/rivermouth/talous/auth/UserAuthenticationManager.java
Java
apache-2.0
906
#!/usr/bin/python3 ################################################################################ # # Copyright 2014 Stjepan Henc <sthenc@gmail.com> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ################################################################################ import scipy.io.wavfile as wav import numpy as np import copy class Signal: # Data loaders def LoadFromFile(self, file): self.fs, self.s = wav.read(file) self.sLength, self.nChans = self.s.shape def LoadWF(self, waveform, fs): self.s = waveform self.fs = fs self.sLength, self.nChans = self.s.shape def __init__(self, *args): #signal properties self.singlePrecision = 0 self.s = np.array([]) self.fs = 44100 self.sLength = 0 self.nChans = 0 self.weightingFunction = np.hamming #FIXME #STFT properties self.S = np.array([]) self.windowLength = 60 self.nfft = 0 self.nfftUtil = 0 self.overlapRatio = 0.5 self.framesPositions = np.array([]) self.nFrames = 0 self.weightingWindow = np.array([]) self.overlap = 0 # Windowing properties self.sWin = np.array([]) self.sWeights = np.array([]) self.sWin = np.array([]) self.sWeights = np.array([]) if len(args) == 1: if type(args[0]) == type(''): # it's a filename self.LoadFromFile(args[0]) elif type(args[0] == type(self)): # copy data from other signal self.__dict__ = copy.deepcopy(args[0].__dict__) elif len(args) == 2: # args[0] is a signal, args[1] is sample freq. self.LoadWF(args(0), args(1))
sthenc/pyKAM
Signal.py
Python
apache-2.0
2,314
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.rest.action.admin.indices; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.DataStreamAlias; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; /** * The REST handler for get alias and head alias APIs. 
*/ public class RestGetAliasesAction extends BaseRestHandler { @Override public List<Route> routes() { return List.of( new Route(GET, "/_alias"), new Route(GET, "/_aliases"), new Route(GET, "/_alias/{name}"), new Route(HEAD, "/_alias/{name}"), new Route(GET, "/{index}/_alias"), new Route(HEAD, "/{index}/_alias"), new Route(GET, "/{index}/_alias/{name}"), new Route(HEAD, "/{index}/_alias/{name}")); } @Override public String getName() { return "get_aliases_action"; } static RestResponse buildRestResponse(boolean aliasesExplicitlyRequested, String[] requestedAliases, ImmutableOpenMap<String, List<AliasMetadata>> responseAliasMap, Map<String, List<DataStreamAlias>> dataStreamAliases, XContentBuilder builder) throws Exception { final Set<String> indicesToDisplay = new HashSet<>(); final Set<String> returnedAliasNames = new HashSet<>(); for (final ObjectObjectCursor<String, List<AliasMetadata>> cursor : responseAliasMap) { for (final AliasMetadata aliasMetadata : cursor.value) { if (aliasesExplicitlyRequested) { // only display indices that have aliases indicesToDisplay.add(cursor.key); } returnedAliasNames.add(aliasMetadata.alias()); } } // compute explicitly requested aliases that have are not returned in the result final SortedSet<String> missingAliases = new TreeSet<>(); // first wildcard index, leading "-" as an alias name after this index means // that it is an exclusion int firstWildcardIndex = requestedAliases.length; for (int i = 0; i < requestedAliases.length; i++) { if (Regex.isSimpleMatchPattern(requestedAliases[i])) { firstWildcardIndex = i; break; } } for (int i = 0; i < requestedAliases.length; i++) { if (Metadata.ALL.equals(requestedAliases[i]) || Regex.isSimpleMatchPattern(requestedAliases[i]) || (i > firstWildcardIndex && requestedAliases[i].charAt(0) == '-')) { // only explicitly requested aliases will be called out as missing (404) continue; } // check if aliases[i] is subsequently excluded int j = Math.max(i + 1, firstWildcardIndex); for (; j < 
requestedAliases.length; j++) { if (requestedAliases[j].charAt(0) == '-') { // this is an exclude pattern if (Regex.simpleMatch(requestedAliases[j].substring(1), requestedAliases[i]) || Metadata.ALL.equals(requestedAliases[j].substring(1))) { // aliases[i] is excluded by aliases[j] break; } } } if (j == requestedAliases.length) { // explicitly requested aliases[i] is not excluded by any subsequent "-" wildcard in expression if (false == returnedAliasNames.contains(requestedAliases[i])) { // aliases[i] is not in the result set missingAliases.add(requestedAliases[i]); } } } final RestStatus status; builder.startObject(); { if (missingAliases.isEmpty()) { status = RestStatus.OK; } else { status = RestStatus.NOT_FOUND; final String message; if (missingAliases.size() == 1) { message = String.format(Locale.ROOT, "alias [%s] missing", Strings.collectionToCommaDelimitedString(missingAliases)); } else { message = String.format(Locale.ROOT, "aliases [%s] missing", Strings.collectionToCommaDelimitedString(missingAliases)); } builder.field("error", message); builder.field("status", status.getStatus()); } for (final var entry : responseAliasMap) { if (aliasesExplicitlyRequested == false || (aliasesExplicitlyRequested && indicesToDisplay.contains(entry.key))) { builder.startObject(entry.key); { builder.startObject("aliases"); { for (final AliasMetadata alias : entry.value) { AliasMetadata.Builder.toXContent(alias, builder, ToXContent.EMPTY_PARAMS); } } builder.endObject(); } builder.endObject(); } } for (var entry : dataStreamAliases.entrySet()) { builder.startObject(entry.getKey()); { builder.startObject("aliases"); { for (DataStreamAlias alias : entry.getValue()) { builder.startObject(alias.getName()); builder.endObject(); } } builder.endObject(); } builder.endObject(); } } builder.endObject(); return new BytesRestResponse(status, builder); } @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { // The 
TransportGetAliasesAction was improved do the same post processing as is happening here. // We can't remove this logic yet to support mixed clusters. We should be able to remove this logic here // in when 8.0 becomes the new version in the master branch. final boolean namesProvided = request.hasParam("name"); final String[] aliases = request.paramAsStringArrayOrEmptyIfAll("name"); final GetAliasesRequest getAliasesRequest = new GetAliasesRequest(aliases); final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); getAliasesRequest.indices(indices); getAliasesRequest.indicesOptions(IndicesOptions.fromRequest(request, getAliasesRequest.indicesOptions())); getAliasesRequest.local(request.paramAsBoolean("local", getAliasesRequest.local())); //we may want to move this logic to TransportGetAliasesAction but it is based on the original provided aliases, which will //not always be available there (they may get replaced so retrieving request.aliases is not quite the same). return channel -> client.admin().indices().getAliases(getAliasesRequest, new RestBuilderListener<GetAliasesResponse>(channel) { @Override public RestResponse buildResponse(GetAliasesResponse response, XContentBuilder builder) throws Exception { return buildRestResponse(namesProvided, aliases, response.getAliases(), response.getDataStreamAliases(), builder); } }); } }
robin13/elasticsearch
server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java
Java
apache-2.0
9,110
# # Sample : put() : Put a single request message to a queue # require 'wmq' WMQ::QueueManager.connect(q_mgr_name: 'REID') do |qmgr| message = WMQ::Message.new message.data = 'Hello World' message.descriptor = { msg_type: WMQ::MQMT_REQUEST, reply_to_q: 'TEST.REPLY.QUEUE' } qmgr.put(q_name: 'TEST.QUEUE', message: message) end
reidmorrison/rubywmq
examples/put1_c.rb
Ruby
apache-2.0
355
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import axios from 'axios'; import { AxiosResponse } from 'axios'; export function getDruidErrorMessage(e: any) { const data: any = ((e.response || {}).data || {}); return [data.error, data.errorMessage, data.errorClass].filter(Boolean).join(' / ') || e.message; } export async function queryDruidRune(runeQuery: Record<string, any>): Promise<any> { let runeResultResp: AxiosResponse<any>; try { runeResultResp = await axios.post("/druid/v2", runeQuery); } catch (e) { throw new Error(getDruidErrorMessage(e)); } return runeResultResp.data; } export async function queryDruidSql(sqlQuery: Record<string, any>): Promise<any[]> { let sqlResultResp: AxiosResponse<any>; try { sqlResultResp = await axios.post("/druid/v2/sql", sqlQuery); } catch (e) { throw new Error(getDruidErrorMessage(e)); } return sqlResultResp.data; }
liquidm/druid
web-console/src/utils/druid-query.tsx
TypeScript
apache-2.0
1,670
using System; using System.Threading; using System.Threading.Tasks; using Moq; using Moq.Protected; using Riganti.Utils.Infrastructure.Core; using Xunit; #if EFCORE using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Infrastructure; using Riganti.Utils.Infrastructure.EntityFrameworkCore.Transactions; #else using System.Data.Entity; using Riganti.Utils.Infrastructure.EntityFramework.Transactions; #endif #if EFCORE namespace Riganti.Utils.Infrastructure.EntityFrameworkCore.Tests.UnitOfWork #else namespace Riganti.Utils.Infrastructure.EntityFramework.Tests.UnitOfWork #endif { public class EntityFrameworkUnitOfWorkTests { [Fact] public void Commit_CallCommitCoreOnlyIfHasOwnDbContext() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWorkParent); var unitOfWorkChildMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkChild = unitOfWorkChildMock.Object) { unitOfWorkChild.Commit(); } unitOfWorkChildMock.Protected().Verify("CommitCore", Times.Never()); unitOfWorkParent.Commit(); } unitOfWorkParentMock.Protected().Verify("CommitCore", Times.Once()); } [Fact] public void Commit_CorrectChildRequestIgnoredBehavior() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); 
Assert.Throws<ChildCommitPendingException>(() => { using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { using (var unitOfWorkChild = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { unitOfWorkChild.Commit(); } } }); // test that unit of work provider keeps working after caught exception using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { } } } [Fact] public void Commit_CorrectMultipleLayeredReuseParentBehavior() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); using (var unitOfWorkParent = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 1st level, context 1 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 2nd level, context 1 using (unitOfWorkProvider.Create(DbContextOptions.AlwaysCreateOwnContext)) { // 3rd level, context 2 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 4th level, context 2 using (var unitOfWorkParent3 = unitOfWorkProvider.Create(DbContextOptions.AlwaysCreateOwnContext)) { // 5th level, context 3 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 6th level, context 3 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { } using (var unitOfWorkChild3 = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 7th level, context 3 commit requested unitOfWorkChild3.Commit(); } } // commit mandatory, context 3 commit pending unitOfWorkParent3.Commit(); } } } } // commit optional, no reusing child commit pending unitOfWorkParent.Commit(); } } [Fact] public void Commit_UOWHasNotParrent_CallCommitCore() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = 
new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkParent.Commit(); } unitOfWorkParentMock.Protected().Verify("CommitCore", Times.Once()); } [Fact] public void CommitAsync_UOWHasChild_CallCommitCore() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWorkParent); using (var unitOfWorkChild = new EntityFrameworkUnitOfWork(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext)) { unitOfWorkChild.Commit(); } unitOfWorkParent.Commit(); } unitOfWorkParentMock.Protected().Verify("CommitCore", Times.Once()); } [Fact] public async Task CommitAsync_CallCommitCoreOnlyIfHasOwnDbContext() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWorkParent); var unitOfWorkChildMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, 
dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkChild = unitOfWorkChildMock.Object) { await unitOfWorkChild.CommitAsync(); } unitOfWorkChildMock.Protected().Verify("CommitAsyncCore", Times.Never(), new CancellationToken()); await unitOfWorkParent.CommitAsync(); } unitOfWorkParentMock.Protected().Verify("CommitAsyncCore", Times.Once(), new CancellationToken()); } [Fact] public async Task CommitAsync_ThrowIfChildCommitRequestedNotFulfilledByRoot() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); await Assert.ThrowsAsync<ChildCommitPendingException>(async () => { using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { using (var unitOfWorkChild = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { await unitOfWorkChild.CommitAsync(); } } }); // test that unit of work provider keeps working after caught exception using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { } } } [Fact] public async Task CommitAsync_CorrectMultipleLayeredReuseParentBehavior() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); using (var unitOfWorkParent = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 1st level, context 1 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 2nd level, context 1 using (unitOfWorkProvider.Create(DbContextOptions.AlwaysCreateOwnContext)) { // 3rd level, context 2 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 4th level, context 2 using 
(var unitOfWorkParent3 = unitOfWorkProvider.Create(DbContextOptions.AlwaysCreateOwnContext)) { // 5th level, context 3 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 6th level, context 3 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { } using (var unitOfWorkChild3 = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 7th level, context 3 commit requested await unitOfWorkChild3.CommitAsync(); } } // commit mandatory, context 3 commit pending await unitOfWorkParent3.CommitAsync(); } } } } // commit optional, no reusing child commit pending await unitOfWorkParent.CommitAsync(); } } [Fact] public async Task CommitAsync_UOWHasNotParrent_CallCommitCore() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { await unitOfWorkParent.CommitAsync(); } unitOfWorkParentMock.Protected().Verify("CommitAsyncCore", Times.Once(), new CancellationToken()); } [Fact] public async Task Commit_UOWHasChild_CallCommitCore() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWorkParent); using (var unitOfWorkChild = new EntityFrameworkUnitOfWork(unitOfWorkProvider, 
dbContextFactory, DbContextOptions.ReuseParentContext)) { await unitOfWorkChild.CommitAsync(); } await unitOfWorkParent.CommitAsync(); } unitOfWorkParentMock.Protected().Verify("CommitAsyncCore", Times.Once(), new CancellationToken()); } [Fact] public async Task Commit_Transaction_CallRollback() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = scopeMock.Object; await scope.ExecuteAsync(async uowParent => { Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); Assert.Equal(1, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); using (var uowChild = (EntityFrameworkUnitOfWork<InMemoryDbContext>)unitOfWorkProvider.Create()) { await uowChild.CommitAsync(); Assert.Equal(1, uowChild.CommitsCount); Assert.False(uowChild.CommitPending); Assert.Equal(2, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); } throw Assert.Throws<RollbackRequestedException>(() => { uowParent.RollbackTransaction(); }); }); scopeMock.Protected().Verify("AfterRollback", Times.Once()); } [Fact] public async Task Commit_Transaction_CallRollback_UserCatch() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = scopeMock.Object; await scope.ExecuteAsync(async uowParent => { Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); using (var uowChild = (EntityFrameworkUnitOfWork<InMemoryDbContext>)unitOfWorkProvider.Create()) { await uowChild.CommitAsync(); try { 
uowParent.RollbackTransaction(); } catch (Exception) { // user catches any exceptions } } }); scopeMock.Protected().Verify("AfterRollback", Times.Once()); } [Fact] public async Task Commit_Transaction_CallRollback_OnException() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scope = unitOfWorkProvider.CreateTransactionScope(); var exceptionKey = Guid.NewGuid().ToString(); try { await scope.ExecuteAsync(async uowParent => { Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); throw new Exception(exceptionKey); }); } catch (Exception e) when (e.Message == exceptionKey) { // test exception caught, passed } } [Fact] public async Task Commit_Transaction_CallCommit() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = scopeMock.Object; await scope.ExecuteAsync(async uowParent => { Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); Assert.Equal(1, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); }); scopeMock.Protected().Verify("AfterCommit", Times.Once()); } [Fact] public async Task Commit_Transaction_CallCommit_Nesting() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = scopeMock.Object; await scope.ExecuteAsync(async uowParent => { 
Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); Assert.Equal(1, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); using (var uowChild = (EntityFrameworkUnitOfWork<InMemoryDbContext>)unitOfWorkProvider.Create()) { await uowChild.CommitAsync(); Assert.Equal(1, uowChild.CommitsCount); Assert.False(uowChild.CommitPending); Assert.Equal(2, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); using (var uowChildChild = (EntityFrameworkUnitOfWork<InMemoryDbContext>)unitOfWorkProvider.Create()) { await uowChildChild.CommitAsync(); } Assert.Equal(2, uowChild.CommitsCount); Assert.False(uowChild.CommitPending); Assert.Equal(3, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); } }); scopeMock.Protected().Verify("AfterCommit", Times.Once()); } [Fact] public void Commit_Transaction_CallCommit_Sync() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = scopeMock.Object; scope.Execute(uowParent => { Assert.True(uowParent.IsInTransaction); uowParent.Commit(); Assert.Equal(1, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); }); scopeMock.Protected().Verify("AfterCommit", Times.Once()); } [Fact] public void TryGetDbContext_UnitOfWorkRegistryHasUnitOfWork_ReturnCorrectDbContext() { var dbContext = new Mock<DbContext>().Object; Func<DbContext> dbContextFactory = () => dbContext; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWork = new EntityFrameworkUnitOfWork(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext); unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWork); var 
uowDbContext = EntityFrameworkUnitOfWork.TryGetDbContext(unitOfWorkProvider); Assert.NotNull(uowDbContext); Assert.Same(dbContext, uowDbContext); } [Fact] public void TryGetDbContext_UnitOfWorkRegistryHasNotUnitOfWork_ReturnsNull() { var dbContext = new Mock<DbContext>().Object; Func<DbContext> dbContextFactory = () => dbContext; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var value = EntityFrameworkUnitOfWork.TryGetDbContext(unitOfWorkProvider); Assert.Null(value); } [Fact] public async Task CommitAsync_WithCancellationTokenInNestedUow_SavedChangesInParentUow() { var dbContext = new Mock<DbContext>(); Func<DbContext> dbContextFactory = () => dbContext.Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); using (var uow = unitOfWorkProvider.Create()) { using (var nested = unitOfWorkProvider.Create()) { await nested.CommitAsync(new CancellationToken()); // verify, that method has NEVER been called dbContext.Verify(x => x.SaveChangesAsync(It.IsAny<CancellationToken>()), Times.Never); } await uow.CommitAsync(new CancellationToken()); // verify, that method has been called ONCE dbContext.Verify(x => x.SaveChangesAsync(It.IsAny<CancellationToken>()), Times.Once); } } [Fact] public async Task CommitAsync_WithoutCancellationTokenInNestedUow_SavedChangesInParentUow() { var dbContext = new Mock<DbContext>(); Func<DbContext> dbContextFactory = () => dbContext.Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); using (var uow = unitOfWorkProvider.Create()) { using (var nested = unitOfWorkProvider.Create()) { await nested.CommitAsync(); // verify, that method has NEVER been called dbContext.Verify(x => 
x.SaveChangesAsync(It.IsAny<CancellationToken>()), Times.Never); } await uow.CommitAsync(); // verify, that method has been called ONCE dbContext.Verify(x => x.SaveChangesAsync(It.IsAny<CancellationToken>()), Times.Once); } } public class InMemoryDbContext : DbContext { #if EFCORE protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder) { if (!optionsBuilder.IsConfigured) { optionsBuilder .UseInMemoryDatabase(Guid.NewGuid().ToString()) .ConfigureWarnings(w => w.Ignore(InMemoryEventId.TransactionIgnoredWarning)); } } #endif } private static Func<InMemoryDbContext> GetContextFactory() { return () => #if EFCORE new InMemoryDbContext(); #else new Mock<InMemoryDbContext>().Object; #endif } } }
riganti/infrastructure
src/Infrastructure/Tests/Riganti.Utils.Infrastructure.EntityFramework.Tests/UnitOfWork/EntityFrameworkUnitOfWorkTests.cs
C#
apache-2.0
25,742
<?php /** * Created by PhpStorm. * User: vjcspy * Date: 28/05/2016 * Time: 12:56 */ namespace Modules\IzCore\Repositories; use Modules\IzCore\Repositories\Object\DataObject; use Modules\IzCore\Repositories\Theme\View\AdditionViewInterface; use Pingpong\Modules\Repository; /** * Quản lý Theme * Bao gồm: * - Data của theme: Merge data từ bên ngoài * - Quản lý current theme * * @package Modules\IzCore\Repositories */ class Theme extends DataObject { /** * @var */ protected $currentPath; /** * [ *'path'=>['Modules\IzCore\Repositories\Theme\View\AdditionView] * ] * * @var array */ protected $additionData = []; /** * @var \Teepluss\Theme\Contracts\Theme */ protected $theme; /** * @var string */ protected $_currentThemeName; /** * @var string */ protected $_currentLayoutName; /** * [ *'path'=>[] * ] * * @var array */ protected $data = []; /** * @var \Pingpong\Modules\Repository */ protected $module; /** * All Asset * * @var */ private $assets; /** * @var \Modules\IzCore\Entities\Theme */ private $themeModel; /** * Theme constructor. 
* * @param \Pingpong\Modules\Repository $module * @param array $data */ public function __construct( Repository $module, \Modules\IzCore\Entities\Theme $themeModel, array $data = [] ) { $this->themeModel = $themeModel; $this->module = $module; parent::__construct($data); } /** * Dành cho các module ngoài muốn add data vào 1 path nào đó * * @param $path * @param $data */ public function addAdditionData($path, $data) { foreach ($data as $item) { if (!isset($this->additionData[$path])) $this->additionData[$path] = []; $this->additionData[$path][] = $item; } } /** * Merge data from another modules to current path * * @param null $path * * @return $this */ public function initAdditionData($path = null) { if (is_null($path)) $path = $this->getCurrentPath(); if (isset($this->additionData[$path])) { foreach ($this->additionData[$path] as $item) { /** @var AdditionViewInterface $item */ $item = app()->make($item); if (!isset($this->data[$path])) $this->data[$path] = []; $this->data[$path] = array_merge($this->data[$path], $item->handle()); } } return $this; } /** * @return mixed */ public function getCurrentPath() { return $this->currentPath; } /** * @param mixed $currentPath * * @return $this */ public function setCurrentPath($currentPath) { $this->currentPath = $currentPath; return $this; } /** * Set data to view of current path * * @param \Teepluss\Theme\Theme $theme * @param $path * * @return $this */ public function initViewData(\Teepluss\Theme\Theme $theme, $path) { /*Merge from another modules*/ $this->initAdditionData($path); if (isset($this->data[$path])) { foreach ($this->data[$path] as $items) { foreach ($items as $k => $item) { $theme->set($k, $item); } } } return $this; } /** * Get all assets in each theme in each module * * @return array * @throws \Exception */ public function getAssetsTree() { if (is_null($this->assets)) { $this->assets = []; $pathModules = $this->module->getPath(); $moduleDirs = scandir($pathModules); foreach ($moduleDirs as $moduleDir) { if 
(!in_array($moduleDir, [".", ".."])) { /*Path Config/Vendor của module hiện tại*/ $currentModuleThemePaths = $pathModules . '/' . $moduleDir . '/themes'; /*Kiểm tra xem module hiện tại có thư mục themes không*/ if (!file_exists($currentModuleThemePaths)) continue; $themePath = scandir($currentModuleThemePaths); foreach ($themePath as $themDir) { if (!in_array($themDir, [".", ".."])) { $currentThemeDir = $currentModuleThemePaths . '/' . $themDir . '/config.php'; // Check file config.php existed if (!file_exists($currentThemeDir)) continue; $themeConfig = (include $currentThemeDir); if (isset($themeConfig['assets'])) { $assetWithThemeName = []; foreach ($themeConfig['assets'] as $k => $asset) { $asset['theme_name'] = $themDir; $assetWithThemeName[$k] = $asset; } $this->assets = array_merge($this->assets, $assetWithThemeName); } } } } } } return $this->assets; } /** * Retrieve current theme name * * @return string */ public function getCurrentThemeName() { if (is_null($this->_currentThemeName)) $this->_currentThemeName = $this->getTheme()->getThemeName(); return $this->_currentThemeName; } /** * @param string $currentThemeName * * @return $this */ public function setCurrentThemeName($currentThemeName) { $this->_currentThemeName = $currentThemeName; return $this; } /** * @return string */ public function getCurrentLayoutName() { if (is_null($this->_currentLayoutName)) $this->_currentLayoutName = $this->getTheme()->getLayoutName(); return $this->_currentLayoutName; } /** * Retrive current layout use in theme * * @param string $currentLayoutName * * @return $this */ public function setCurrentLayoutName($currentLayoutName) { $this->_currentLayoutName = $currentLayoutName; return $this; } /** * @return \Teepluss\Theme\Theme * @throws \Exception */ public function getTheme() { if (is_null($this->theme)) { $this->theme = app()->make('\Teepluss\Theme\Contracts\Theme'); }; return $this->theme; } /** * Khai báo sự tồn tại của theme trong App * Sử dụng để biết view thuộc theme 
nào. Loại admin hay frontend * * @param $themeName * @param bool $isAdmin * * @return $this */ public function registerTheme($themeName, $isAdmin = true) { /* FIXME: need cache here */ $theme = $this->themeModel->query()->firstOrNew(['name' => $themeName]); $theme->type = $isAdmin == true ? \Modules\IzCore\Entities\Theme::TYPE_ADMIN : \Modules\IzCore\Entities\Theme::TYPE_FRONTEND; $theme->save(); return $this; } }
vjcspy/IzCore
Repositories/Theme.php
PHP
apache-2.0
7,569
package com.ejlchina.searcher.implement; import com.ejlchina.searcher.*; import com.ejlchina.searcher.bean.InheritType; import java.lang.reflect.Field; import java.util.*; import java.lang.reflect.Modifier; import java.util.concurrent.ConcurrentHashMap; /*** * 默认元信息解析器 * @author Troy.Zhou @ 2021-10-30 * @since v3.0.0 */ public class DefaultMetaResolver implements MetaResolver { private final Map<Class<?>, BeanMeta<?>> cache = new ConcurrentHashMap<>(); private SnippetResolver snippetResolver = new DefaultSnippetResolver(); private DbMapping dbMapping; public DefaultMetaResolver() { this(new DefaultDbMapping()); } public DefaultMetaResolver(DbMapping dbMapping) { this.dbMapping = dbMapping; } @Override public <T> BeanMeta<T> resolve(Class<T> beanClass) { @SuppressWarnings("unchecked") BeanMeta<T> beanMeta = (BeanMeta<T>) cache.get(beanClass); if (beanMeta != null) { return beanMeta; } synchronized (cache) { beanMeta = resolveMetadata(beanClass); cache.put(beanClass, beanMeta); return beanMeta; } } protected <T> BeanMeta<T> resolveMetadata(Class<T> beanClass) { DbMapping.Table table = dbMapping.table(beanClass); if (table == null) { throw new SearchException("The class [" + beanClass.getName() + "] can not be searched, because it can not be resolved by " + dbMapping.getClass()); } BeanMeta<T> beanMeta = new BeanMeta<>(beanClass, table.getDataSource(), snippetResolver.resolve(table.getTables()), snippetResolver.resolve(table.getJoinCond()), snippetResolver.resolve(table.getGroupBy()), table.isDistinct()); // 字段解析 Field[] fields = getBeanFields(beanClass); for (int index = 0; index < fields.length; index++) { Field field = fields[index]; if (Modifier.isStatic(field.getModifiers())) { continue; } DbMapping.Column column = dbMapping.column(beanClass, fields[index]); if (column == null) { continue; } field.setAccessible(true); SqlSnippet snippet = snippetResolver.resolve(column.getFieldSql()); // 注意:Oracle 数据库的别名不能以下划线开头 FieldMeta fieldMeta = new FieldMeta(beanMeta, 
field, snippet, "c_" + index, column.isConditional(), column.getOnlyOn()); beanMeta.addFieldMeta(field.getName(), fieldMeta); } if (beanMeta.getFieldCount() == 0) { throw new SearchException("[" + beanClass.getName() + "] is not a valid SearchBean, because there is no field mapping to database."); } return beanMeta; } protected Field[] getBeanFields(Class<?> beanClass) { InheritType iType = dbMapping.inheritType(beanClass); List<Field> fieldList = new ArrayList<>(); Set<String> fieldNames = new HashSet<>(); while (beanClass != Object.class) { for (Field field : beanClass.getDeclaredFields()) { String name = field.getName(); int modifiers = field.getModifiers(); if (field.isSynthetic() || Modifier.isStatic(modifiers) || Modifier.isTransient(modifiers) || fieldNames.contains(name)) { continue; } fieldList.add(field); fieldNames.add(name); } if (iType != InheritType.FIELD && iType != InheritType.ALL) { break; } beanClass = beanClass.getSuperclass(); } return fieldList.toArray(new Field[0]); } public SnippetResolver getSnippetResolver() { return snippetResolver; } public void setSnippetResolver(SnippetResolver snippetResolver) { this.snippetResolver = Objects.requireNonNull(snippetResolver); } public DbMapping getDbMapping() { return dbMapping; } public void setDbMapping(DbMapping dbMapping) { this.dbMapping = Objects.requireNonNull(dbMapping); } }
ejlchina/bean-searcher
bean-searcher/src/main/java/com/ejlchina/searcher/implement/DefaultMetaResolver.java
Java
apache-2.0
4,320
'use strict'; // https://github.com/tc39/proposal-iterator-helpers var aCallable = require('../internals/a-callable'); var anObject = require('../internals/an-object'); var getBuiltIn = require('../internals/get-built-in'); var getMethod = require('../internals/get-method'); var MAX_SAFE_INTEGER = 0x1FFFFFFFFFFFFF; var createMethod = function (TYPE) { var IS_TO_ARRAY = TYPE == 0; var IS_FOR_EACH = TYPE == 1; var IS_EVERY = TYPE == 2; var IS_SOME = TYPE == 3; return function (iterator, fn, target) { anObject(iterator); var Promise = getBuiltIn('Promise'); var next = aCallable(iterator.next); var index = 0; var MAPPING = fn !== undefined; if (MAPPING || !IS_TO_ARRAY) aCallable(fn); return new Promise(function (resolve, reject) { var closeIteration = function (method, argument) { try { var returnMethod = getMethod(iterator, 'return'); if (returnMethod) { return Promise.resolve(returnMethod.call(iterator)).then(function () { method(argument); }, function (error) { reject(error); }); } } catch (error2) { return reject(error2); } method(argument); }; var onError = function (error) { closeIteration(reject, error); }; var loop = function () { try { if (IS_TO_ARRAY && (index > MAX_SAFE_INTEGER) && MAPPING) { throw TypeError('The allowed number of iterations has been exceeded'); } Promise.resolve(anObject(next.call(iterator))).then(function (step) { try { if (anObject(step).done) { if (IS_TO_ARRAY) { target.length = index; resolve(target); } else resolve(IS_SOME ? false : IS_EVERY || undefined); } else { var value = step.value; if (MAPPING) { Promise.resolve(IS_TO_ARRAY ? fn(value, index) : fn(value)).then(function (result) { if (IS_FOR_EACH) { loop(); } else if (IS_EVERY) { result ? loop() : closeIteration(resolve, false); } else if (IS_TO_ARRAY) { target[index++] = result; loop(); } else { result ? 
closeIteration(resolve, IS_SOME || value) : loop(); } }, onError); } else { target[index++] = value; loop(); } } } catch (error) { onError(error); } }, onError); } catch (error2) { onError(error2); } }; loop(); }); }; }; module.exports = { toArray: createMethod(0), forEach: createMethod(1), every: createMethod(2), some: createMethod(3), find: createMethod(4) };
cloudfoundry-community/asp.net5-buildpack
fixtures/node_apps/angular_dotnet/ClientApp/node_modules/core-js/internals/async-iterator-iteration.js
JavaScript
apache-2.0
2,973
/*! * ${copyright} */ sap.ui.require([ "jquery.sap.global", "sap/ui/base/SyncPromise", "sap/ui/model/BindingMode", "sap/ui/model/ChangeReason", "sap/ui/model/ClientListBinding", "sap/ui/model/Context", "sap/ui/model/ContextBinding", "sap/ui/model/Filter", "sap/ui/model/MetaModel", "sap/ui/model/PropertyBinding", "sap/ui/model/Sorter", "sap/ui/model/odata/OperationMode", "sap/ui/model/odata/type/Int64", "sap/ui/model/odata/type/Raw", "sap/ui/model/odata/v4/AnnotationHelper", "sap/ui/model/odata/v4/Context", "sap/ui/model/odata/v4/lib/_Helper", "sap/ui/model/odata/v4/ODataMetaModel", "sap/ui/model/odata/v4/ODataModel", "sap/ui/model/odata/v4/ValueListType", "sap/ui/test/TestUtils", "sap/ui/thirdparty/URI" ], function (jQuery, SyncPromise, BindingMode, ChangeReason, ClientListBinding, BaseContext, ContextBinding, Filter, MetaModel, PropertyBinding, Sorter, OperationMode, Int64, Raw, AnnotationHelper, Context, _Helper, ODataMetaModel, ODataModel, ValueListType, TestUtils, URI) { /*global QUnit, sinon */ /*eslint max-nested-callbacks: 0, no-loop-func: 0, no-warning-comments: 0 */ "use strict"; // Common := com.sap.vocabularies.Common.v1 // tea_busi := com.sap.gateway.default.iwbep.tea_busi.v0001 // tea_busi_product.v0001 := com.sap.gateway.default.iwbep.tea_busi_product.v0001 // tea_busi_supplier.v0001 := com.sap.gateway.default.iwbep.tea_busi_supplier.v0001 // UI := com.sap.vocabularies.UI.v1 var mMostlyEmptyScope = { "$EntityContainer" : "empty.DefaultContainer", "$Version" : "4.0", "empty." : { "$kind" : "Schema" }, "empty.DefaultContainer" : { "$kind" : "EntityContainer" } }, sODataMetaModel = "sap.ui.model.odata.v4.ODataMetaModel", mProductScope = { "$EntityContainer" : "tea_busi_product.v0001.DefaultContainer", "$Reference" : { "../../../../default/iwbep/tea_busi_supplier/0001/$metadata" : { "$Include" : [ "tea_busi_supplier.v0001." ] } }, "$Version" : "4.0", "tea_busi_product.v0001." 
: { "$kind" : "Schema", "$Annotations" : { // Note: simulate result of _MetadataRequestor#read "tea_busi_product.v0001.Category/CategoryName" : { "@Common.Label" : "CategoryName from tea_busi_product.v0001." } } }, "tea_busi_product.v0001.Category" : { "$kind" : "EntityType", "CategoryName" : { "$kind" : "Property", "$Type" : "Edm.String" } }, "tea_busi_product.v0001.DefaultContainer" : { "$kind" : "EntityContainer" }, "tea_busi_product.v0001.Product" : { "$kind" : "EntityType", "Name" : { "$kind" : "Property", "$Type" : "Edm.String" }, "PRODUCT_2_CATEGORY" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi_product.v0001.Category" }, "PRODUCT_2_SUPPLIER" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi_supplier.v0001.Supplier" } } }, sSampleServiceUrl = "/sap/opu/odata4/sap/zui5_testv4/default/sap/zui5_epm_sample/0002/", mScope = { "$Annotations" : { "name.space.Id" : { "@Common.Label" : "ID" }, "tea_busi.DefaultContainer" : { "@DefaultContainer" : {} }, "tea_busi.DefaultContainer/T€AMS" : { "@T€AMS" : {} }, "tea_busi.TEAM" : { "@Common.Text" : { "$Path" : "Name" }, "@Common.Text@UI.TextArrangement" : { "$EnumMember" : "UI.TextArrangementType/TextLast" }, "@UI.Badge" : { "@Common.Label" : "Label inside", "$Type" : "UI.BadgeType", "HeadLine" : { "$Type" : "UI.DataField", "Value" : { "$Path" : "Name" } }, "Title" : { "$Type" : "UI.DataField", "Value" : { "$Path" : "Team_Id" } } }, "@UI.Badge@Common.Label" : "Best Badge Ever!", "@UI.LineItem" : [{ "@UI.Importance" : { "$EnumMember" : "UI.ImportanceType/High" }, "$Type" : "UI.DataField", "Label" : "Team ID", "Label@Common.Label" : "Team ID's Label", "Value" : { "$Path" : "Team_Id" } }] }, "tea_busi.TEAM/Team_Id" : { "@Common.Label" : "Team ID", "@Common.Text" : { "$Path" : "Name" }, "@Common.Text@UI.TextArrangement" : { "$EnumMember" : "UI.TextArrangementType/TextLast" } }, "tea_busi.Worker" : { "@UI.Facets" : [{ "$Type" : "UI.ReferenceFacet", "Target" : { // term cast "$AnnotationPath" : "@UI.LineItem" } 
}, { "$Type" : "UI.ReferenceFacet", "Target" : { // term cast at navigation property itself "$AnnotationPath" : "EMPLOYEE_2_TEAM@Common.Label" } }, { "$Type" : "UI.ReferenceFacet", "Target" : { // navigation property and term cast "$AnnotationPath" : "EMPLOYEE_2_TEAM/@UI.LineItem" } }, { "$Type" : "UI.ReferenceFacet", "Target" : { // type cast, navigation properties and term cast (at its type) "$AnnotationPath" : "tea_busi.TEAM/TEAM_2_EMPLOYEES/EMPLOYEE_2_TEAM/@UI.LineItem" } }], "@UI.LineItem" : [{ "$Type" : "UI.DataField", "Label" : "Team ID", "Value" : { "$Path" : "EMPLOYEE_2_TEAM/Team_Id" } }] }, "tea_busi.Worker/EMPLOYEE_2_TEAM" : { "@Common.Label" : "Employee's Team" } }, "$EntityContainer" : "tea_busi.DefaultContainer", "empty." : { "$kind" : "Schema" }, "name.space." : { "$kind" : "Schema" }, "tea_busi." : { "$kind" : "Schema", "@Schema" : {} }, "empty.Container" : { "$kind" : "EntityContainer" }, "name.space.BadContainer" : { "$kind" : "EntityContainer", "DanglingActionImport" : { "$kind" : "ActionImport", "$Action" : "not.Found" }, "DanglingFunctionImport" : { "$kind" : "FunctionImport", "$Function" : "not.Found" } }, "name.space.Broken" : { "$kind" : "Term", "$Type" : "not.Found" }, "name.space.BrokenFunction" : [{ "$kind" : "Function", "$ReturnType" : { "$Type" : "not.Found" } }], "name.space.BrokenOverloads" : [{ "$kind" : "Operation" }], "name.space.DerivedPrimitiveFunction" : [{ "$kind" : "Function", "$ReturnType" : { "$Type" : "name.space.Id" } }], "name.space.EmptyOverloads" : [], "name.space.Id" : { "$kind" : "TypeDefinition", "$UnderlyingType" : "Edm.String", "$MaxLength" : 10 }, "name.space.Term" : { // only case with a qualified name and a $Type "$kind" : "Term", "$Type" : "tea_busi.Worker" }, "name.space.OverloadedAction" : [{ "$kind" : "Action", "$IsBound" : true, "$Parameter" : [{ // "$Name" : "_it", "$Type" : "tea_busi.EQUIPMENT" }], "$ReturnType" : { "$Type" : "tea_busi.EQUIPMENT" } }, { "$kind" : "Action", "$IsBound" : true, "$Parameter" 
: [{ // "$Name" : "_it", "$Type" : "tea_busi.TEAM" }], "$ReturnType" : { "$Type" : "tea_busi.TEAM" } }, { // "An unbound action MAY have the same name as a bound action." "$kind" : "Action", "$ReturnType" : { "$Type" : "tea_busi.ComplexType_Salary" } }, { "$kind" : "Action", "$IsBound" : true, "$Parameter" : [{ // "$Name" : "_it", "$Type" : "tea_busi.Worker" }], "$ReturnType" : { "$Type" : "tea_busi.Worker" } }], "name.space.OverloadedFunction" : [{ "$kind" : "Function", "$ReturnType" : { "$Type" : "Edm.String" } }, { "$kind" : "Function", "$ReturnType" : { "$Type" : "Edm.String" } }], "name.space.VoidAction" : [{ "$kind" : "Action" }], "tea_busi.AcChangeManagerOfTeam" : [{ "$kind" : "Action", "$ReturnType" : { "$Type" : "tea_busi.TEAM", "@Common.Label" : "Hail to the Chief" } }], "tea_busi.ComplexType_Salary" : { "$kind" : "ComplexType", "AMOUNT" : { "$kind" : "Property", "$Type" : "Edm.Decimal" }, "CURRENCY" : { "$kind" : "Property", "$Type" : "Edm.String" } }, "tea_busi.ContainedC" : { "$kind" : "EntityType", "$Key" : ["Id"], "Id" : { "$kind" : "Property", "$Type" : "Edm.String" }, "C_2_EMPLOYEE" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi.Worker" }, "C_2_S" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$Type" : "tea_busi.ContainedS" } }, "tea_busi.ContainedS" : { "$kind" : "EntityType", "$Key" : ["Id"], "Id" : { "$kind" : "Property", "$Type" : "Edm.String" }, "S_2_C" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$isCollection" : true, "$Type" : "tea_busi.ContainedC" }, "S_2_EMPLOYEE" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi.Worker" } }, "tea_busi.DefaultContainer" : { "$kind" : "EntityContainer", "ChangeManagerOfTeam" : { "$kind" : "ActionImport", "$Action" : "tea_busi.AcChangeManagerOfTeam" }, "EMPLOYEES" : { "$kind" : "EntitySet", "$NavigationPropertyBinding" : { "EMPLOYEE_2_TEAM" : "T€AMS", "EMPLOYEE_2_EQUIPM€NTS" : "EQUIPM€NTS" }, "$Type" : "tea_busi.Worker" }, "EQUIPM€NTS" : { "$kind" : 
"EntitySet", "$Type" : "tea_busi.EQUIPMENT" }, "GetEmployeeMaxAge" : { "$kind" : "FunctionImport", "$Function" : "tea_busi.FuGetEmployeeMaxAge" }, "Me" : { "$kind" : "Singleton", "$NavigationPropertyBinding" : { "EMPLOYEE_2_TEAM" : "T€AMS", "EMPLOYEE_2_EQUIPM€NTS" : "EQUIPM€NTS" }, "$Type" : "tea_busi.Worker" }, "OverloadedAction" : { "$kind" : "ActionImport", "$Action" : "name.space.OverloadedAction" }, "TEAMS" : { "$kind" : "EntitySet", "$NavigationPropertyBinding" : { "TEAM_2_EMPLOYEES" : "EMPLOYEES", "TEAM_2_CONTAINED_S/S_2_EMPLOYEE" : "EMPLOYEES" }, "$Type" : "tea_busi.TEAM" }, "T€AMS" : { "$kind" : "EntitySet", "$NavigationPropertyBinding" : { "TEAM_2_EMPLOYEES" : "EMPLOYEES" }, "$Type" : "tea_busi.TEAM" }, "VoidAction" : { "$kind" : "ActionImport", "$Action" : "name.space.VoidAction" } }, "tea_busi.EQUIPMENT" : { "$kind" : "EntityType", "$Key" : ["ID"], "ID" : { "$kind" : "Property", "$Type" : "Edm.Int32", "$Nullable" : false } }, "tea_busi.FuGetEmployeeMaxAge" : [{ "$kind" : "Function", "$ReturnType" : { "$Type" : "Edm.Int16" } }], "tea_busi.TEAM" : { "$kind" : "EntityType", "$Key" : ["Team_Id"], "Team_Id" : { "$kind" : "Property", "$Type" : "name.space.Id", "$Nullable" : false, "$MaxLength" : 10 }, "Name" : { "$kind" : "Property", "$Type" : "Edm.String", "$Nullable" : false, "$MaxLength" : 40 }, "TEAM_2_EMPLOYEES" : { "$kind" : "NavigationProperty", "$isCollection" : true, "$OnDelete" : "None", "$OnDelete@Common.Label" : "None of my business", "$ReferentialConstraint" : { "foo" : "bar", "foo@Common.Label" : "Just a Gigolo" }, "$Type" : "tea_busi.Worker" }, "TEAM_2_CONTAINED_S" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$Type" : "tea_busi.ContainedS" }, "TEAM_2_CONTAINED_C" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$isCollection" : true, "$Type" : "tea_busi.ContainedC" }, // Note: "value" is a symbolic name for an operation's return type iff. 
it is // primitive "value" : { "$kind" : "Property", "$Type" : "Edm.String" } }, "tea_busi.Worker" : { "$kind" : "EntityType", "$Key" : ["ID"], "ID" : { "$kind" : "Property", "$Type" : "Edm.String", "$Nullable" : false, "$MaxLength" : 4 }, "AGE" : { "$kind" : "Property", "$Type" : "Edm.Int16", "$Nullable" : false }, "EMPLOYEE_2_CONTAINED_S" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$Type" : "tea_busi.ContainedS" }, "EMPLOYEE_2_EQUIPM€NTS" : { "$kind" : "NavigationProperty", "$isCollection" : true, "$Type" : "tea_busi.EQUIPMENT", "$Nullable" : false }, "EMPLOYEE_2_TEAM" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi.TEAM", "$Nullable" : false }, "SALÃRY" : { "$kind" : "Property", "$Type" : "tea_busi.ComplexType_Salary" } }, "$$Loop" : "$$Loop/", // some endless loop "$$Term" : "name.space.Term" // replacement for any reference to the term }, oContainerData = mScope["tea_busi.DefaultContainer"], aOverloadedAction = mScope["name.space.OverloadedAction"], mSupplierScope = { "$Version" : "4.0", "tea_busi_supplier.v0001." : { "$kind" : "Schema" }, "tea_busi_supplier.v0001.Supplier" : { "$kind" : "EntityType", "Supplier_Name" : { "$kind" : "Property", "$Type" : "Edm.String" } } }, oTeamData = mScope["tea_busi.TEAM"], oTeamLineItem = mScope.$Annotations["tea_busi.TEAM"]["@UI.LineItem"], oWorkerData = mScope["tea_busi.Worker"], mXServiceScope = { "$Version" : "4.0", "$Annotations" : {}, // simulate ODataMetaModel#_mergeAnnotations "$EntityContainer" : "tea_busi.v0001.DefaultContainer", "$Reference" : { // Note: Do not reference tea_busi_supplier directly from here! We want to test the // special case that it is only indirectly referenced. "../../../../default/iwbep/tea_busi_foo/0001/$metadata" : { "$Include" : [ "tea_busi_foo.v0001." ] }, "../../../../default/iwbep/tea_busi_product/0001/$metadata" : { "$Include" : [ "ignore.me.", "tea_busi_product.v0001." 
] }, "/empty/$metadata" : { "$Include" : [ "empty.", "I.still.haven't.found.what.I'm.looking.for." ] } }, "tea_busi.v0001." : { "$kind" : "Schema" }, "tea_busi.v0001.DefaultContainer" : { "$kind" : "EntityContainer", "EQUIPM€NTS" : { "$kind" : "EntitySet", "$Type" : "tea_busi.v0001.EQUIPMENT" } }, "tea_busi.v0001.EQUIPMENT" : { "$kind" : "EntityType", "EQUIPMENT_2_PRODUCT" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi_product.v0001.Product" } } }, aAllScopes = [ mMostlyEmptyScope, mProductScope, mScope, mSupplierScope, mXServiceScope ]; /** * Checks the "get*" and "request*" methods corresponding to the named "fetch*" method, * using the given arguments. * * @param {object} oTestContext * the QUnit "this" object * @param {object} assert * the QUnit "assert" object * @param {string} sMethodName * method name "fetch*" * @param {object[]} aArguments * method arguments * @param {boolean} [bThrow=false] * whether the "get*" method throws if the promise is not fulfilled * @returns {Promise} * the "request*" method's promise */ function checkGetAndRequest(oTestContext, assert, sMethodName, aArguments, bThrow) { var oExpectation, sGetMethodName = sMethodName.replace("fetch", "get"), oMetaModel = oTestContext.oMetaModel, oReason = new Error("rejected"), oRejectedPromise = Promise.reject(oReason), sRequestMethodName = sMethodName.replace("fetch", "request"), oResult = {}, oSyncPromise = SyncPromise.resolve(oRejectedPromise); // resolve... oExpectation = oTestContext.mock(oMetaModel).expects(sMethodName).exactly(4); oExpectation = oExpectation.withExactArgs.apply(oExpectation, aArguments); oExpectation.returns(SyncPromise.resolve(oResult)); // get: fulfilled assert.strictEqual(oMetaModel[sGetMethodName].apply(oMetaModel, aArguments), oResult); // reject... oExpectation.returns(oSyncPromise); oTestContext.mock(Promise).expects("resolve") .withExactArgs(sinon.match.same(oSyncPromise)) .returns(oRejectedPromise); // return any promise (this is not unwrapping!) 
// request (promise still pending!) assert.strictEqual(oMetaModel[sRequestMethodName].apply(oMetaModel, aArguments), oRejectedPromise); // get: pending if (bThrow) { assert.throws(function () { oMetaModel[sGetMethodName].apply(oMetaModel, aArguments); }, new Error("Result pending")); } else { assert.strictEqual(oMetaModel[sGetMethodName].apply(oMetaModel, aArguments), undefined, "pending"); } return oSyncPromise.catch(function () { // get: rejected if (bThrow) { assert.throws(function () { oMetaModel[sGetMethodName].apply(oMetaModel, aArguments); }, oReason); } else { assert.strictEqual(oMetaModel[sGetMethodName].apply(oMetaModel, aArguments), undefined, "rejected"); } }); } /** * Returns a clone, that is a deep copy, of the given object. * * @param {object} o * any serializable object * @returns {object} * a deep copy of <code>o</code> */ function clone(o) { return JSON.parse(JSON.stringify(o)); } /** * Runs the given test for each name/value pair in the given fixture. The name is interpreted * as a path "[<sContextPath>'|']<sMetaPath>" and cut accordingly. The test is called with * an almost resolved sPath (just '|' replaced by '/'). 
* * @param {object} mFixture * map<string, any> * @param {function} fnTest * function(string sPath, any vResult, string sContextPath, string sMetaPath) */ function forEach(mFixture, fnTest) { var sPath; for (sPath in mFixture) { var i = sPath.indexOf("|"), sContextPath = "", sMetaPath = sPath.slice(i + 1), vValue = mFixture[sPath]; if (i >= 0) { sContextPath = sPath.slice(0, i); sPath = sContextPath + "/" + sMetaPath; } fnTest(sPath, vValue, sContextPath, sMetaPath); } } //********************************************************************************************* QUnit.module("sap.ui.model.odata.v4.ODataMetaModel", { // remember copy to ensure test isolation mOriginalScopes : clone(aAllScopes), afterEach : function (assert) { assert.deepEqual(aAllScopes, this.mOriginalScopes, "metadata unchanged"); }, /* * Allow warnings if told to; always suppress debug messages. */ allowWarnings : function (assert, bWarn) { this.mock(jQuery.sap.log).expects("isLoggable").atLeast(1) .withExactArgs(sinon.match.number, sODataMetaModel) .callsFake(function (iLogLevel) { switch (iLogLevel) { case jQuery.sap.log.Level.DEBUG: return false; case jQuery.sap.log.Level.WARNING: return bWarn; default: return true; } }); }, beforeEach : function () { var oMetadataRequestor = { read : function () { throw new Error(); } }, sUrl = "/a/b/c/d/e/$metadata"; this.oLogMock = this.mock(jQuery.sap.log); this.oLogMock.expects("warning").never(); this.oLogMock.expects("error").never(); this.oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl); this.oMetaModelMock = this.mock(this.oMetaModel); this.oModel = { reportError : function () { throw new Error("Unsupported operation"); }, resolve : ODataModel.prototype.resolve }; }, /* * Expect the given debug message with the given path, but only if debug level is on. 
*/ expectDebug : function (bDebug, sMessage, sPath) { this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.DEBUG, sODataMetaModel).returns(bDebug); this.oLogMock.expects("debug").exactly(bDebug ? 1 : 0) .withExactArgs(sMessage, sPath, sODataMetaModel); }, /* * Expects "fetchEntityContainer" to be called at least once on the current meta model, * returning a clone of the given scope. * * @param {object} mScope */ expectFetchEntityContainer : function (mScope) { mScope = clone(mScope); this.oMetaModel.validate("n/a", mScope); // fill mSchema2MetadataUrl! this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(mScope)); } }); //********************************************************************************************* QUnit.test("basics", function (assert) { var sAnnotationUri = "my/annotation.xml", aAnnotationUris = [ sAnnotationUri, "uri2.xml"], oModel = {}, oMetadataRequestor = this.oMetaModel.oRequestor, sUrl = "/~/$metadata", oMetaModel; // code under test assert.strictEqual(ODataMetaModel.prototype.$$valueAsPromise, true); // code under test oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl); assert.ok(oMetaModel instanceof MetaModel); assert.strictEqual(oMetaModel.aAnnotationUris, undefined); assert.ok(oMetaModel.hasOwnProperty("aAnnotationUris"), "own property aAnnotationUris"); assert.strictEqual(oMetaModel.oRequestor, oMetadataRequestor); assert.strictEqual(oMetaModel.sUrl, sUrl); assert.strictEqual(oMetaModel.getDefaultBindingMode(), BindingMode.OneTime); assert.strictEqual(oMetaModel.toString(), "sap.ui.model.odata.v4.ODataMetaModel: /~/$metadata"); // code under test oMetaModel.setDefaultBindingMode(BindingMode.OneWay); assert.strictEqual(oMetaModel.getDefaultBindingMode(), BindingMode.OneWay); // code under test oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl, aAnnotationUris); assert.strictEqual(oMetaModel.aAnnotationUris, aAnnotationUris, "arrays are passed"); // code under test 
oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl, sAnnotationUri); assert.deepEqual(oMetaModel.aAnnotationUris, [sAnnotationUri], "single annotation is wrapped"); // code under test oMetaModel = new ODataMetaModel(null, null, null, oModel); // code under test assert.strictEqual(oMetaModel.getAdapterFactoryModulePath(), "sap/ui/model/odata/v4/meta/ODataAdapterFactory"); }); //********************************************************************************************* QUnit.test("forbidden", function (assert) { assert.throws(function () { //TODO implement this.oMetaModel.bindTree(); }, new Error("Unsupported operation: v4.ODataMetaModel#bindTree")); assert.throws(function () { this.oMetaModel.getOriginalProperty(); }, new Error("Unsupported operation: v4.ODataMetaModel#getOriginalProperty")); assert.throws(function () { //TODO implement this.oMetaModel.isList(); }, new Error("Unsupported operation: v4.ODataMetaModel#isList")); assert.throws(function () { this.oMetaModel.refresh(); }, new Error("Unsupported operation: v4.ODataMetaModel#refresh")); assert.throws(function () { this.oMetaModel.setLegacySyntax(); // argument does not matter! 
}, new Error("Unsupported operation: v4.ODataMetaModel#setLegacySyntax")); assert.throws(function () { this.oMetaModel.setDefaultBindingMode(BindingMode.TwoWay); }); }); //********************************************************************************************* [ undefined, ["/my/annotation.xml"], ["/my/annotation.xml", "/another/annotation.xml"] ].forEach(function (aAnnotationURI) { var title = "fetchEntityContainer - " + JSON.stringify(aAnnotationURI); QUnit.test(title, function (assert) { var oRequestorMock = this.mock(this.oMetaModel.oRequestor), aReadResults, mRootScope = {}, oSyncPromise, that = this; function expectReads(bPrefetch) { oRequestorMock.expects("read") .withExactArgs(that.oMetaModel.sUrl, false, bPrefetch) .returns(Promise.resolve(mRootScope)); aReadResults = []; (aAnnotationURI || []).forEach(function (sAnnotationUrl) { var oAnnotationResult = {}; aReadResults.push(oAnnotationResult); oRequestorMock.expects("read") .withExactArgs(sAnnotationUrl, true, bPrefetch) .returns(Promise.resolve(oAnnotationResult)); }); } this.oMetaModel.aAnnotationUris = aAnnotationURI; this.oMetaModelMock.expects("_mergeAnnotations").never(); expectReads(true); // code under test assert.strictEqual(this.oMetaModel.fetchEntityContainer(true), null); // bPrefetch => no caching expectReads(true); // code under test assert.strictEqual(this.oMetaModel.fetchEntityContainer(true), null); // now test [bPrefetch=false] expectReads(); this.oMetaModelMock.expects("_mergeAnnotations") .withExactArgs(mRootScope, aReadResults); // code under test oSyncPromise = this.oMetaModel.fetchEntityContainer(); // pending assert.strictEqual(oSyncPromise.isPending(), true); // already caching assert.strictEqual(this.oMetaModel.fetchEntityContainer(), oSyncPromise); assert.strictEqual(this.oMetaModel.fetchEntityContainer(true), oSyncPromise, "now bPrefetch makes no difference"); return oSyncPromise.then(function (mRootScope0) { assert.strictEqual(mRootScope0, mRootScope); // still caching 
assert.strictEqual(that.oMetaModel.fetchEntityContainer(), oSyncPromise); }); }); }); //TODO later support "$Extends" : "<13.1.2 EntityContainer Extends>" //********************************************************************************************* QUnit.test("fetchEntityContainer: _mergeAnnotations fails", function (assert) { var oError = new Error(); this.mock(this.oMetaModel.oRequestor).expects("read") .withExactArgs(this.oMetaModel.sUrl, false, undefined) .returns(Promise.resolve({})); this.oMetaModelMock.expects("_mergeAnnotations").throws(oError); return this.oMetaModel.fetchEntityContainer().then(function () { assert.ok(false, "unexpected success"); }, function (oError0) { assert.strictEqual(oError0, oError); }); }); //********************************************************************************************* QUnit.test("getMetaContext", function (assert) { var oMetaContext; this.oMetaModelMock.expects("getMetaPath") .withExactArgs("/Foo/-1/bar") .returns("/Foo/bar"); // code under test oMetaContext = this.oMetaModel.getMetaContext("/Foo/-1/bar"); assert.strictEqual(oMetaContext.getModel(), this.oMetaModel); assert.strictEqual(oMetaContext.getPath(), "/Foo/bar"); }); //********************************************************************************************* QUnit.test("getMetaPath", function (assert) { var sMetaPath = {}, sPath = {}; this.mock(_Helper).expects("getMetaPath") .withExactArgs(sinon.match.same(sPath)).returns(sMetaPath); assert.strictEqual(this.oMetaModel.getMetaPath(sPath), sMetaPath); }); //********************************************************************************************* forEach({ // absolute path "/" : "/", "/foo/bar|/" : "/", // context is ignored // relative path "" : undefined, // w/o context --> important for MetaModel#createBindingContext etc. 
"|foo/bar" : undefined, // w/o context "/|" : "/", "/|foo/bar" : "/foo/bar", "/foo|bar" : "/foo/bar", "/foo/bar|" : "/foo/bar", "/foo/|bar" : "/foo/bar", // trailing slash is preserved "/foo/bar/" : "/foo/bar/", "/foo|bar/" : "/foo/bar/", // relative path that starts with a dot "/foo/bar|./" : "/foo/bar/", "/foo|./bar/" : "/foo/bar/", "/foo/|./bar/" : "/foo/bar/", // annotations "/foo|@bar" : "/foo@bar", "/foo/|@bar" : "/foo/@bar", "/foo|./@bar" : "/foo/@bar", "/foo/|./@bar" : "/foo/@bar", // technical properties "/foo|$kind" : "/foo/$kind", "/foo/|$kind" : "/foo/$kind", "/foo|./$kind" : "/foo/$kind", "/foo/|./$kind" : "/foo/$kind" }, function (sPath, sResolvedPath, sContextPath, sMetaPath) { QUnit.test("resolve: " + sContextPath + " > " + sMetaPath, function (assert) { var oContext = sContextPath && this.oMetaModel.getContext(sContextPath); assert.strictEqual(this.oMetaModel.resolve(sMetaPath, oContext), sResolvedPath); }); }); //TODO make sure that Context objects are only created for absolute paths?! //********************************************************************************************* [".bar", ".@bar", ".$kind"].forEach(function (sPath) { QUnit.test("resolve: unsupported relative path " + sPath, function (assert) { var oContext = this.oMetaModel.getContext("/foo"); assert.raises(function () { this.oMetaModel.resolve(sPath, oContext); }, new Error("Unsupported relative path: " + sPath)); }); }); //********************************************************************************************* QUnit.test("resolve: undefined", function (assert) { assert.strictEqual( this.oMetaModel.resolve(undefined, this.oMetaModel.getContext("/")), "/"); }); //********************************************************************************************* //TODO better map meta model path to pure JSON path (look up inside JsonModel)? // what about @sapui.name then, which requires a literal as expected result? 
// --> we could distinguish "/<path>" from "<literal>" forEach({ // "JSON" drill-down ---------------------------------------------------------------------- "/$EntityContainer" : "tea_busi.DefaultContainer", "/tea_busi./$kind" : "Schema", "/tea_busi.DefaultContainer/$kind" : "EntityContainer", // trailing slash: object vs. name -------------------------------------------------------- "/" : oContainerData, "/$EntityContainer/" : oContainerData, "/T€AMS/" : oTeamData, "/T€AMS/$Type/" : oTeamData, // scope lookup ("17.3 QualifiedName") ---------------------------------------------------- "/$EntityContainer/$kind" : "EntityContainer", "/$EntityContainer/T€AMS/$Type" : "tea_busi.TEAM", "/$EntityContainer/T€AMS/$Type/Team_Id" : oTeamData.Team_Id, // "17.3 QualifiedName", e.g. type cast --------------------------------------------------- "/tea_busi." : mScope["tea_busi."], // access to schema "/tea_busi.DefaultContainer/EMPLOYEES/tea_busi.Worker/AGE" : oWorkerData.AGE, // implicit $Type insertion --------------------------------------------------------------- "/T€AMS/Team_Id" : oTeamData.Team_Id, "/T€AMS/TEAM_2_EMPLOYEES" : oTeamData.TEAM_2_EMPLOYEES, "/T€AMS/TEAM_2_EMPLOYEES/AGE" : oWorkerData.AGE, // scope lookup, then implicit $Type insertion! 
"/$$Term/AGE" : oWorkerData.AGE, // "17.2 SimpleIdentifier": lookup inside current schema child ---------------------------- "/T€AMS" : oContainerData["T€AMS"], "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/" : oWorkerData, "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/$Type" : "tea_busi.Worker", "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/AGE" : oWorkerData.AGE, // operations ----------------------------------------------------------------------------- "/OverloadedAction" : oContainerData["OverloadedAction"], "/OverloadedAction/$Action" : "name.space.OverloadedAction", "/ChangeManagerOfTeam/" : oTeamData, //TODO mScope[mScope["..."][0].$ReturnType.$Type] is where the next OData simple identifier // would live in case of entity/complex type, but we would like to avoid warnings for // primitive types - how to tell the difference? // "/GetEmployeeMaxAge/" : "Edm.Int16", // Note: "value" is a symbolic name for the whole return type iff. it is primitive "/GetEmployeeMaxAge/value" : mScope["tea_busi.FuGetEmployeeMaxAge"][0].$ReturnType, "/GetEmployeeMaxAge/value/$Type" : "Edm.Int16", // path may continue! "/tea_busi.FuGetEmployeeMaxAge/value" : mScope["tea_busi.FuGetEmployeeMaxAge"][0].$ReturnType, "/name.space.DerivedPrimitiveFunction/value" //TODO merge facets of return type and type definition?! : mScope["name.space.DerivedPrimitiveFunction"][0].$ReturnType, "/ChangeManagerOfTeam/value" : oTeamData.value, // action overloads ----------------------------------------------------------------------- //TODO @$ui5.overload: support for split segments? etc. 
"/OverloadedAction/@$ui5.overload" : sinon.match.array.deepEquals([aOverloadedAction[2]]), "/OverloadedAction/@$ui5.overload/0" : aOverloadedAction[2], // Note: trailing slash does not make a difference in "JSON" drill-down "/OverloadedAction/@$ui5.overload/0/$ReturnType/" : aOverloadedAction[2].$ReturnType, "/OverloadedAction/@$ui5.overload/0/$ReturnType/$Type" : "tea_busi.ComplexType_Salary", "/OverloadedAction/" : mScope["tea_busi.ComplexType_Salary"], "/name.space.OverloadedAction" : aOverloadedAction, "/T€AMS/NotFound/name.space.OverloadedAction" : aOverloadedAction, "/name.space.OverloadedAction/1" : aOverloadedAction[1], "/OverloadedAction/$Action/1" : aOverloadedAction[1], "/OverloadedAction/@$ui5.overload/AMOUNT" : mScope["tea_busi.ComplexType_Salary"].AMOUNT, "/OverloadedAction/AMOUNT" : mScope["tea_busi.ComplexType_Salary"].AMOUNT, "/T€AMS/name.space.OverloadedAction/Team_Id" : oTeamData.Team_Id, "/T€AMS/name.space.OverloadedAction/@$ui5.overload" : sinon.match.array.deepEquals([aOverloadedAction[1]]), "/name.space.OverloadedAction/@$ui5.overload" : sinon.match.array.deepEquals([]), // only "Action" and "Function" is expected as $kind, but others are not filtered out! "/name.space.BrokenOverloads" : sinon.match.array.deepEquals(mScope["name.space.BrokenOverloads"]), // annotations ---------------------------------------------------------------------------- "/@DefaultContainer" : mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/tea_busi.DefaultContainer@DefaultContainer" : mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/tea_busi.DefaultContainer/@DefaultContainer" // w/o $Type, slash makes no difference! : mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/$EntityContainer@DefaultContainer" // Note: we could change this : mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/$EntityContainer/@DefaultContainer" // w/o $Type, slash makes no difference! 
: mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/T€AMS/$Type/@UI.LineItem" : oTeamLineItem, "/T€AMS/@UI.LineItem" : oTeamLineItem, "/T€AMS/@UI.LineItem/0/Label" : oTeamLineItem[0].Label, "/T€AMS/@UI.LineItem/0/@UI.Importance" : oTeamLineItem[0]["@UI.Importance"], "/T€AMS@T€AMS" : mScope.$Annotations["tea_busi.DefaultContainer/T€AMS"]["@T€AMS"], "/T€AMS/@Common.Text" : mScope.$Annotations["tea_busi.TEAM"]["@Common.Text"], "/T€AMS/@Common.Text@UI.TextArrangement" : mScope.$Annotations["tea_busi.TEAM"]["@Common.Text@UI.TextArrangement"], "/T€AMS/Team_Id@Common.Text" : mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text"], "/T€AMS/Team_Id@Common.Text@UI.TextArrangement" : mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text@UI.TextArrangement"], "/tea_busi./@Schema" : mScope["tea_busi."]["@Schema"], // inline annotations "/ChangeManagerOfTeam/$Action/0/$ReturnType/@Common.Label" : "Hail to the Chief", "/T€AMS/TEAM_2_EMPLOYEES/$OnDelete@Common.Label" : "None of my business", "/T€AMS/TEAM_2_EMPLOYEES/$ReferentialConstraint/foo@Common.Label" : "Just a Gigolo", "/T€AMS/@UI.LineItem/0/Label@Common.Label" : "Team ID's Label", "/T€AMS/@UI.Badge@Common.Label" : "Best Badge Ever!", // annotation of annotation "/T€AMS/@UI.Badge/@Common.Label" : "Label inside", // annotation of record // "@" to access to all annotations, e.g. 
for iteration "/T€AMS@" : mScope.$Annotations["tea_busi.DefaultContainer/T€AMS"], "/T€AMS/@" : mScope.$Annotations["tea_busi.TEAM"], "/T€AMS/Team_Id@" : mScope.$Annotations["tea_busi.TEAM/Team_Id"], // "14.5.12 Expression edm:Path" // Note: see integration test "{field>Value/$Path@com.sap.vocabularies.Common.v1.Label}" "/T€AMS/@UI.LineItem/0/Value/$Path@Common.Text" : mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text"], "/T€AMS/@UI.LineItem/0/Value/$Path/@Common.Label" : mScope.$Annotations["name.space.Id"]["@Common.Label"], "/EMPLOYEES/@UI.LineItem/0/Value/$Path@Common.Text" : mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text"], // "14.5.2 Expression edm:AnnotationPath" "/EMPLOYEES/@UI.Facets/0/Target/$AnnotationPath/" : mScope.$Annotations["tea_busi.Worker"]["@UI.LineItem"], "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath/" : mScope.$Annotations["tea_busi.Worker/EMPLOYEE_2_TEAM"]["@Common.Label"], "/EMPLOYEES/@UI.Facets/2/Target/$AnnotationPath/" : mScope.$Annotations["tea_busi.TEAM"]["@UI.LineItem"], "/EMPLOYEES/@UI.Facets/3/Target/$AnnotationPath/" : mScope.$Annotations["tea_busi.TEAM"]["@UI.LineItem"], // @sapui.name ---------------------------------------------------------------------------- "/@sapui.name" : "tea_busi.DefaultContainer", "/tea_busi.DefaultContainer@sapui.name" : "tea_busi.DefaultContainer", "/tea_busi.DefaultContainer/@sapui.name" : "tea_busi.DefaultContainer", // no $Type here! 
"/$EntityContainer/@sapui.name" : "tea_busi.DefaultContainer", "/T€AMS@sapui.name" : "T€AMS", "/T€AMS/@sapui.name" : "tea_busi.TEAM", "/T€AMS/Team_Id@sapui.name" : "Team_Id", "/T€AMS/TEAM_2_EMPLOYEES@sapui.name" : "TEAM_2_EMPLOYEES", "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/@sapui.name" : "tea_busi.Worker", "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/AGE@sapui.name" : "AGE", "/T€AMS@T€AMS@sapui.name" : "@T€AMS", "/T€AMS@/@T€AMS@sapui.name" : "@T€AMS", "/T€AMS@T€AMS/@sapui.name" : "@T€AMS", // no $Type inside @T€AMS, / makes no difference! "/T€AMS@/@T€AMS/@sapui.name" : "@T€AMS", // dito "/T€AMS/@UI.LineItem/0/@UI.Importance/@sapui.name" : "@UI.Importance", // in "JSON" mode "/T€AMS/Team_Id@/@Common.Label@sapui.name" : "@Common.Label" // avoid indirection here! }, function (sPath, vResult) { QUnit.test("fetchObject: " + sPath, function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); if (vResult && typeof vResult === "object" && "test" in vResult) { // Sinon.JS matcher assert.ok(vResult.test(oSyncPromise.getResult()), vResult); } else { assert.strictEqual(oSyncPromise.getResult(), vResult); } // self-guard to avoid that a complex right-hand side evaluates to undefined assert.notStrictEqual(vResult, undefined, "use this test for defined results only!"); }); }); //TODO annotations at enum member ".../<10.2.1 Member Name>@..." (Note: "<10.2.2 Member Value>" // might be a string! Avoid indirection!) //TODO special cases where inline and external targeting annotations need to be merged! //TODO support also external targeting from a different schema! //TODO MySchema.MyFunction/MyParameter --> requires search in array?! //TODO $count? 
//TODO "For annotations targeting a property of an entity type or complex type, the path // expression is evaluated starting at the outermost entity type or complex type named in the // Target of the enclosing edm:Annotations element, i.e. an empty path resolves to the // outermost type, and the first segment of a non-empty path MUST be a property or navigation // property of the outermost type, a type cast, or a term cast." --> consequences for us? //********************************************************************************************* [ // "JSON" drill-down ---------------------------------------------------------------------- "/$missing", "/tea_busi.DefaultContainer/$missing", "/tea_busi.DefaultContainer/missing", // "17.2 SimpleIdentifier" treated like any property "/tea_busi.FuGetEmployeeMaxAge/0/tea_busi.FuGetEmployeeMaxAge", // "0" switches to JSON "/tea_busi.TEAM/$Key/this.is.missing", "/tea_busi.Worker/missing", // entity container (see above) treated like any schema child // scope lookup ("17.3 QualifiedName") ---------------------------------------------------- "/$EntityContainer/$missing", "/$EntityContainer/missing", // implicit $Type insertion --------------------------------------------------------------- "/T€AMS/$Key", // avoid $Type insertion for following $ segments "/T€AMS/missing", "/T€AMS/$missing", // annotations ---------------------------------------------------------------------------- "/tea_busi.Worker@missing", "/tea_busi.Worker/@missing", // "@" to access to all annotations, e.g. 
for iteration "/tea_busi.Worker/@/@missing", // operations ----------------------------------------------------------------------------- "/VoidAction/" ].forEach(function (sPath) { QUnit.test("fetchObject: " + sPath + " --> undefined", function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), undefined); }); }); //********************************************************************************************* QUnit.test("fetchObject: Invalid relative path w/o context", function (assert) { var sMetaPath = "some/relative/path", oSyncPromise; this.oLogMock.expects("error").withExactArgs("Invalid relative path w/o context", sMetaPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sMetaPath, null); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), null); }); //********************************************************************************************* ["/empty.Container/@", "/T€AMS/Name@"].forEach(function (sPath) { QUnit.test("fetchObject returns {} (anonymous empty object): " + sPath, function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), {}); // strictEqual would not work! 
}); }); //********************************************************************************************* QUnit.test("fetchObject without $Annotations", function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mMostlyEmptyScope)); // code under test oSyncPromise = this.oMetaModel.fetchObject("/@DefaultContainer"); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), undefined); // strictEqual would not work! }); //TODO if no annotations exist for an external target, avoid {} internally unless "@" is used? //********************************************************************************************* [false, true].forEach(function (bWarn) { forEach({ "/$$Loop/" : "Invalid recursion at /$$Loop", // Invalid segment (warning) ---------------------------------------------------------- "//$Foo" : "Invalid empty segment", "/tea_busi./$Annotations" : "Invalid segment: $Annotations", // entrance forbidden! // Unknown ... ------------------------------------------------------------------------ "/not.Found" : "Unknown qualified name not.Found", "/Me/not.Found" : "Unknown qualified name not.Found", // no "at /.../undefined"! "/not.Found@missing" : "Unknown qualified name not.Found", "/." : "Unknown child . 
of tea_busi.DefaultContainer", "/Foo" : "Unknown child Foo of tea_busi.DefaultContainer", "/$EntityContainer/$kind/" : "Unknown child EntityContainer" + " of tea_busi.DefaultContainer at /$EntityContainer/$kind", // implicit $Action, $Function, $Type insertion "/name.space.BadContainer/DanglingActionImport/" : "Unknown qualified name not.Found" + " at /name.space.BadContainer/DanglingActionImport/$Action", "/name.space.BadContainer/DanglingFunctionImport/" : "Unknown qualified name not.Found" + " at /name.space.BadContainer/DanglingFunctionImport/$Function", "/name.space.Broken/" : "Unknown qualified name not.Found at /name.space.Broken/$Type", "/name.space.BrokenFunction/" : "Unknown qualified name not.Found" + " at /name.space.BrokenFunction/0/$ReturnType/$Type", //TODO align with "/GetEmployeeMaxAge/" : "Edm.Int16" "/GetEmployeeMaxAge/@sapui.name" : "Unknown qualified name Edm.Int16" + " at /tea_busi.FuGetEmployeeMaxAge/0/$ReturnType/$Type", "/GetEmployeeMaxAge/value/@sapui.name" : "Unknown qualified name Edm.Int16" + " at /tea_busi.FuGetEmployeeMaxAge/0/$ReturnType/$Type", // implicit scope lookup "/name.space.Broken/$Type/" : "Unknown qualified name not.Found at /name.space.Broken/$Type", "/tea_busi.DefaultContainer/$kind/@sapui.name" : "Unknown child EntityContainer" + " of tea_busi.DefaultContainer at /tea_busi.DefaultContainer/$kind", // Unsupported path before @sapui.name ------------------------------------------------ "/$EntityContainer@sapui.name" : "Unsupported path before @sapui.name", "/tea_busi.FuGetEmployeeMaxAge/0@sapui.name" : "Unsupported path before @sapui.name", "/tea_busi.TEAM/$Key/not.Found/@sapui.name" : "Unsupported path before @sapui.name", "/GetEmployeeMaxAge/value@sapui.name" : "Unsupported path before @sapui.name", // Unsupported path after @sapui.name ------------------------------------------------- "/@sapui.name/foo" : "Unsupported path after @sapui.name", "/$EntityContainer/T€AMS/@sapui.name/foo" : "Unsupported path after 
@sapui.name", // Unsupported path after @@... ------------------------------------------------------- "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath@@this.is.ignored/foo" : "Unsupported path after @@this.is.ignored", "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath/@@this.is.ignored@foo" : "Unsupported path after @@this.is.ignored", "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath@@this.is.ignored@sapui.name" : "Unsupported path after @@this.is.ignored", // ...is not a function but... -------------------------------------------------------- "/@@sap.ui.model.odata.v4.AnnotationHelper.invalid" : "sap.ui.model.odata.v4.AnnotationHelper.invalid is not a function but: undefined", "/@@sap.ui.model.odata.v4.AnnotationHelper" : "sap.ui.model.odata.v4.AnnotationHelper is not a function but: " + sap.ui.model.odata.v4.AnnotationHelper, // Unsupported overloads -------------------------------------------------------------- "/name.space.EmptyOverloads/" : "Unsupported overloads", "/name.space.OverloadedAction/" : "Unsupported overloads", "/name.space.OverloadedFunction/" : "Unsupported overloads" }, function (sPath, sWarning) { QUnit.test("fetchObject fails: " + sPath + ", warn = " + bWarn, function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel).returns(bWarn); this.oLogMock.expects("warning").exactly(bWarn ? 
1 : 0) .withExactArgs(sWarning, sPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), undefined); }); }); }); //********************************************************************************************* [false, true].forEach(function (bDebug) { forEach({ // Invalid segment (debug) ------------------------------------------------------------ "/$Foo/@bar" : "Invalid segment: @bar", "/$Foo/$Bar" : "Invalid segment: $Bar", "/$Foo/$Bar/$Baz" : "Invalid segment: $Bar", "/$EntityContainer/T€AMS/Team_Id/$MaxLength/." : "Invalid segment: .", "/$EntityContainer/T€AMS/Team_Id/$Nullable/." : "Invalid segment: .", "/$EntityContainer/T€AMS/Team_Id/NotFound/Invalid" : "Invalid segment: Invalid" }, function (sPath, sMessage) { QUnit.test("fetchObject fails: " + sPath + ", debug = " + bDebug, function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.DEBUG, sODataMetaModel).returns(bDebug); this.oLogMock.expects("debug").exactly(bDebug ? 
1 : 0) .withExactArgs(sMessage, sPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), undefined); }); }); }); //********************************************************************************************* [ "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath", "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath/" ].forEach(function (sPath) { QUnit.test("fetchObject: " + sPath + "@@...isMultiple", function (assert) { var oContext, oInput, fnIsMultiple = this.mock(AnnotationHelper).expects("isMultiple"), oResult = {}, oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) // see oInput .returns(SyncPromise.resolve(mScope)); oInput = this.oMetaModel.getObject(sPath); fnIsMultiple .withExactArgs(oInput, sinon.match({ context : sinon.match.object, schemaChildName : "tea_busi.Worker" })).returns(oResult); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath + "@@sap.ui.model.odata.v4.AnnotationHelper.isMultiple"); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), oResult); oContext = fnIsMultiple.args[0][1].context; assert.ok(oContext instanceof BaseContext); assert.strictEqual(oContext.getModel(), this.oMetaModel); assert.strictEqual(oContext.getPath(), sPath); assert.strictEqual(oContext.getObject(), oInput); }); }); //********************************************************************************************* (function () { var sPath, sPathPrefix, mPathPrefix2SchemaChildName = { "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath" : "tea_busi.Worker", "/T€AMS/@UI.LineItem/0/Value/$Path@Common.Label" : "tea_busi.TEAM", "/T€AMS/@UI.LineItem/0/Value/$Path/@Common.Label" : "name.space.Id" }, sSchemaChildName; for (sPathPrefix in mPathPrefix2SchemaChildName) { sPath = sPathPrefix + "@@.computedAnnotation"; sSchemaChildName = 
mPathPrefix2SchemaChildName[sPathPrefix]; QUnit.test("fetchObject: " + sPath, function (assert) { var fnComputedAnnotation, oContext, oInput, oResult = {}, oScope = { computedAnnotation : function () {} }, oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) // see oInput .returns(SyncPromise.resolve(mScope)); oInput = this.oMetaModel.getObject(sPathPrefix); fnComputedAnnotation = this.mock(oScope).expects("computedAnnotation"); fnComputedAnnotation .withExactArgs(oInput, sinon.match({ context : sinon.match.object, schemaChildName : sSchemaChildName })).returns(oResult); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath, null, {scope : oScope}); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), oResult); oContext = fnComputedAnnotation.args[0][1].context; assert.ok(oContext instanceof BaseContext); assert.strictEqual(oContext.getModel(), this.oMetaModel); assert.strictEqual(oContext.getPath(), sPathPrefix); assert.strictEqual(oContext.getObject(), oInput); }); } }()); //********************************************************************************************* [false, true].forEach(function (bWarn) { QUnit.test("fetchObject: " + "...@@... throws", function (assert) { var oError = new Error("This call failed intentionally"), sPath = "/@@sap.ui.model.odata.v4.AnnotationHelper.isMultiple", oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); this.mock(AnnotationHelper).expects("isMultiple") .throws(oError); this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel).returns(bWarn); this.oLogMock.expects("warning").exactly(bWarn ? 
1 : 0).withExactArgs( "Error calling sap.ui.model.odata.v4.AnnotationHelper.isMultiple: " + oError, sPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), undefined); }); }); //********************************************************************************************* [false, true].forEach(function (bDebug) { QUnit.test("fetchObject: cross-service reference, bDebug = " + bDebug, function (assert) { var mClonedProductScope = clone(mProductScope), aPromises = [], oRequestorMock = this.mock(this.oMetaModel.oRequestor), that = this; /* * Expect the given debug message with the given path. */ function expectDebug(sMessage, sPath) { that.expectDebug(bDebug, sMessage, sPath); } /* * Code under test: ODataMetaModel#fetchObject with the given path should yield the * given expected result. */ function codeUnderTest(sPath, vExpectedResult) { aPromises.push(that.oMetaModel.fetchObject(sPath).then(function (vResult) { assert.strictEqual(vResult, vExpectedResult); })); } this.expectFetchEntityContainer(mXServiceScope); oRequestorMock.expects("read") .withExactArgs("/a/default/iwbep/tea_busi_product/0001/$metadata") .returns(Promise.resolve(mClonedProductScope)); oRequestorMock.expects("read") .withExactArgs("/a/default/iwbep/tea_busi_supplier/0001/$metadata") .returns(Promise.resolve(mSupplierScope)); oRequestorMock.expects("read") .withExactArgs("/empty/$metadata") .returns(Promise.resolve(mMostlyEmptyScope)); expectDebug("Namespace tea_busi_product.v0001. 
found in $Include" + " of /a/default/iwbep/tea_busi_product/0001/$metadata" + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name"); expectDebug("Reading /a/default/iwbep/tea_busi_product/0001/$metadata" + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name"); expectDebug("Waiting for tea_busi_product.v0001." + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name"); codeUnderTest("/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name", mClonedProductScope["tea_busi_product.v0001.Product"].Name); expectDebug("Waiting for tea_busi_product.v0001." + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_CATEGORY/CategoryName"); codeUnderTest("/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_CATEGORY/CategoryName", mClonedProductScope["tea_busi_product.v0001.Category"].CategoryName); expectDebug("Waiting for tea_busi_product.v0001.", "/tea_busi_product.v0001.Category/CategoryName"); codeUnderTest("/tea_busi_product.v0001.Category/CategoryName", mClonedProductScope["tea_busi_product.v0001.Category"].CategoryName); expectDebug("Waiting for tea_busi_product.v0001.", "/tea_busi_product.v0001.Category/CategoryName@Common.Label"); codeUnderTest("/tea_busi_product.v0001.Category/CategoryName@Common.Label", "CategoryName from tea_busi_product.v0001."); expectDebug("Waiting for tea_busi_product.v0001." + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); codeUnderTest("/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name", mSupplierScope["tea_busi_supplier.v0001.Supplier"].Supplier_Name); expectDebug("Namespace empty. 
found in $Include of /empty/$metadata", "/empty.DefaultContainer"); expectDebug("Reading /empty/$metadata", "/empty.DefaultContainer"); expectDebug("Waiting for empty.", "/empty.DefaultContainer"); codeUnderTest("/empty.DefaultContainer", mMostlyEmptyScope["empty.DefaultContainer"]); // Note: these are logged asynchronously! expectDebug("Including tea_busi_product.v0001." + " from /a/default/iwbep/tea_busi_product/0001/$metadata" + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name"); expectDebug("Including empty. from /empty/$metadata", "/empty.DefaultContainer"); expectDebug("Namespace tea_busi_supplier.v0001. found in $Include" + " of /a/default/iwbep/tea_busi_supplier/0001/$metadata" + " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); expectDebug("Reading /a/default/iwbep/tea_busi_supplier/0001/$metadata" + " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); expectDebug("Waiting for tea_busi_supplier.v0001." + " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); expectDebug("Including tea_busi_supplier.v0001." + " from /a/default/iwbep/tea_busi_supplier/0001/$metadata" + " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); return Promise.all(aPromises); }); }); //TODO Decision: It is an error if a namespace is referenced multiple times with different URIs. // This should be checked even when load-on-demand is used. // (It should not even be included multiple times with the same URI!) //TODO Check that no namespace is included which is already present! //TODO API to load "transitive closure" //TODO support for sync. 
XML Templating //********************************************************************************************* [false, true].forEach(function (bWarn) { var sTitle = "fetchObject: missing cross-service reference, bWarn = " + bWarn; QUnit.test(sTitle, function (assert) { var sPath = "/not.found", oSyncPromise; this.expectFetchEntityContainer(mMostlyEmptyScope); this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel).returns(bWarn); this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0) .withExactArgs("Unknown qualified name not.found", sPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), undefined); }); }); //********************************************************************************************* [false, true].forEach(function (bWarn) { var sTitle = "fetchObject: referenced metadata does not contain included schema, bWarn = " + bWarn; QUnit.test(sTitle, function (assert) { var sSchemaName = "I.still.haven't.found.what.I'm.looking.for.", sQualifiedName = sSchemaName + "Child", sPath = "/" + sQualifiedName; this.expectFetchEntityContainer(mXServiceScope); this.mock(this.oMetaModel.oRequestor).expects("read") .withExactArgs("/empty/$metadata") .returns(Promise.resolve(mMostlyEmptyScope)); this.allowWarnings(assert, bWarn); this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0) .withExactArgs("/empty/$metadata does not contain " + sSchemaName, sPath, sODataMetaModel); this.oLogMock.expects("warning").exactly(bWarn ? 
1 : 0) .withExactArgs("Unknown qualified name " + sQualifiedName, sPath, sODataMetaModel); // code under test return this.oMetaModel.fetchObject(sPath).then(function (vResult) { assert.deepEqual(vResult, undefined); }); }); }); //********************************************************************************************* [false, true].forEach(function (bWarn) { var sTitle = "fetchObject: cross-service reference, respect $Include; bWarn = " + bWarn; QUnit.test(sTitle, function (assert) { var mScope0 = { "$Version" : "4.0", "$Reference" : { "../../../../default/iwbep/tea_busi_product/0001/$metadata" : { "$Include" : [ "not.found.", "tea_busi_product.v0001.", "tea_busi_supplier.v0001." ] } } }, mReferencedScope = { "$Version" : "4.0", "must.not.be.included." : { "$kind" : "Schema" }, "tea_busi_product.v0001." : { "$kind" : "Schema" }, "tea_busi_supplier.v0001." : { "$kind" : "Schema" } }, oRequestorMock = this.mock(this.oMetaModel.oRequestor), that = this; this.expectFetchEntityContainer(mScope0); oRequestorMock.expects("read") .withExactArgs("/a/default/iwbep/tea_busi_product/0001/$metadata") .returns(Promise.resolve(mReferencedScope)); this.allowWarnings(assert, bWarn); // code under test return this.oMetaModel.fetchObject("/tea_busi_product.v0001.").then(function (vResult) { var oSyncPromise; assert.strictEqual(vResult, mReferencedScope["tea_busi_product.v0001."]); assert.ok(that.oMetaModel.mSchema2MetadataUrl["tea_busi_product.v0001."] ["/a/default/iwbep/tea_busi_product/0001/$metadata"], "document marked as read"); that.oLogMock.expects("warning").exactly(bWarn ? 
1 : 0) .withExactArgs("Unknown qualified name must.not.be.included.", "/must.not.be.included.", sODataMetaModel); assert.strictEqual(that.oMetaModel.getObject("/must.not.be.included."), undefined, "must not include schemata which are not mentioned in edmx:Include"); assert.strictEqual(that.oMetaModel.getObject("/tea_busi_supplier.v0001."), mReferencedScope["tea_busi_supplier.v0001."]); // now check that "not.found." does not trigger another read(), // does finish synchronously and logs a warning that.oLogMock.expects("warning").exactly(bWarn ? 1 : 0) .withExactArgs("/a/default/iwbep/tea_busi_product/0001/$metadata" + " does not contain not.found.", "/not.found.", sODataMetaModel); that.oLogMock.expects("warning").exactly(bWarn ? 1 : 0) .withExactArgs("Unknown qualified name not.found.", "/not.found.", sODataMetaModel); // code under test oSyncPromise = that.oMetaModel.fetchObject("/not.found."); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), undefined); }); }); }); //********************************************************************************************* QUnit.test("fetchObject: cross-service reference - validation failure", function (assert) { var oError = new Error(), mReferencedScope = {}, sUrl = "/a/default/iwbep/tea_busi_product/0001/$metadata"; this.expectFetchEntityContainer(mXServiceScope); this.mock(this.oMetaModel.oRequestor).expects("read").withExactArgs(sUrl) .returns(Promise.resolve(mReferencedScope)); this.oMetaModelMock.expects("validate") .withExactArgs(sUrl, mReferencedScope) .throws(oError); return this.oMetaModel.fetchObject("/tea_busi_product.v0001.Product").then(function () { assert.ok(false); }, function (oError0) { assert.strictEqual(oError0, oError); }); }); //********************************************************************************************* QUnit.test("fetchObject: cross-service reference - document loaded from different URI", function (assert) { var sMessage = "A schema 
cannot span more than one document: schema is referenced by" + " following URLs: /a/default/iwbep/tea_busi_product/0001/$metadata," + " /second/reference", sSchema = "tea_busi_product.v0001."; this.expectFetchEntityContainer(mXServiceScope); this.oLogMock.expects("error") .withExactArgs(sMessage, sSchema, sODataMetaModel); // simulate 2 references for a schema this.oMetaModel.mSchema2MetadataUrl["tea_busi_product.v0001."]["/second/reference"] = false; // code under test return this.oMetaModel.fetchObject("/tea_busi_product.v0001.Product").then(function () { assert.ok(false); }, function (oError0) { assert.strictEqual(oError0.message, sSchema + ": " + sMessage); }); }); //********************************************************************************************* QUnit.test("fetchObject: cross-service reference - duplicate include", function (assert) { var oRequestorMock = this.mock(this.oMetaModel.oRequestor), // root service includes both A and B, A also includes B mScope0 = { "$Version" : "4.0", "$Reference" : { "/A/$metadata" : { "$Include" : [ "A." ] }, "/B/$metadata" : { "$Include" : [ "B." ] } } }, mScopeA = { "$Version" : "4.0", "$Reference" : { "/B/$metadata" : { "$Include" : [ "B.", "B.B." // includes additional namespace from already read document ] } }, "A." : { "$kind" : "Schema" } }, mScopeB = { "$Version" : "4.0", "B." : { "$kind" : "Schema" }, "B.B." : { "$kind" : "Schema" } }, that = this; this.expectFetchEntityContainer(mScope0); oRequestorMock.expects("read").withExactArgs("/A/$metadata") .returns(Promise.resolve(mScopeA)); oRequestorMock.expects("read").withExactArgs("/B/$metadata") .returns(Promise.resolve(mScopeB)); return this.oMetaModel.fetchObject("/B.") .then(function (vResult) { assert.strictEqual(vResult, mScopeB["B."]); // code under test - we must not overwrite our "$ui5.read" promise! return that.oMetaModel.fetchObject("/A.") .then(function (vResult) { assert.strictEqual(vResult, mScopeA["A."]); // Note: must not trigger read() again! 
return that.oMetaModel.fetchObject("/B.B.") .then(function (vResult) { assert.strictEqual(vResult, mScopeB["B.B."]); }); }); }); }); //TODO Implement consistency checks that the same namespace is always included from the same // reference URI, no matter which referencing document. //********************************************************************************************* [undefined, false, true].forEach(function (bSupportReferences) { var sTitle = "fetchObject: cross-service reference - supportReferences: " + bSupportReferences; QUnit.test(sTitle, function (assert) { var mClonedProductScope = clone(mProductScope), oModel = new ODataModel({ // code under test serviceUrl : "/a/b/c/d/e/", supportReferences : bSupportReferences, synchronizationMode : "None" }), sPath = "/tea_busi_product.v0001.Product", sUrl = "/a/default/iwbep/tea_busi_product/0001/$metadata"; this.oMetaModel = oModel.getMetaModel(); this.oMetaModelMock = this.mock(this.oMetaModel); bSupportReferences = bSupportReferences !== false; // default is true! assert.strictEqual(this.oMetaModel.bSupportReferences, bSupportReferences); this.expectFetchEntityContainer(mXServiceScope); this.mock(this.oMetaModel.oRequestor).expects("read") .exactly(bSupportReferences ? 1 : 0) .withExactArgs(sUrl) .returns(Promise.resolve(mClonedProductScope)); this.allowWarnings(assert, true); this.oLogMock.expects("warning").exactly(bSupportReferences ? 0 : 1) .withExactArgs("Unknown qualified name " + sPath.slice(1), sPath, sODataMetaModel); // code under test return this.oMetaModel.fetchObject(sPath).then(function (vResult) { assert.strictEqual(vResult, bSupportReferences ? 
mClonedProductScope["tea_busi_product.v0001.Product"] : undefined); }); }); }); //********************************************************************************************* QUnit.test("getObject, requestObject", function (assert) { return checkGetAndRequest(this, assert, "fetchObject", ["sPath", {/*oContext*/}]); }); //********************************************************************************************* [{ $Type : "Edm.Boolean" },{ $Type : "Edm.Byte" }, { $Type : "Edm.Date" }, { $Type : "Edm.DateTimeOffset" },{ $Precision : 7, $Type : "Edm.DateTimeOffset", __constraints : {precision : 7} }, { $Type : "Edm.Decimal" }, { $Precision : 20, $Scale : 5, $Type : "Edm.Decimal", __constraints : {maximum : "100.00", maximumExclusive : true, minimum : "0.00", precision : 20, scale : 5} }, { $Precision : 20, $Scale : "variable", $Type : "Edm.Decimal", __constraints : {precision : 20, scale : Infinity} }, { $Type : "Edm.Double" }, { $Type : "Edm.Guid" }, { $Type : "Edm.Int16" }, { $Type : "Edm.Int32" }, { $Type : "Edm.Int64" }, { $Type : "Edm.SByte" }, { $Type : "Edm.Single" }, { $Type : "Edm.Stream" }, { $Type : "Edm.String" }, { $MaxLength : 255, $Type : "Edm.String", __constraints : {maxLength : 255} }, { $Type : "Edm.String", __constraints : {isDigitSequence : true} }, { $Type : "Edm.TimeOfDay" }, { $Precision : 3, $Type : "Edm.TimeOfDay", __constraints : {precision : 3} }].forEach(function (oProperty0) { // Note: take care not to modify oProperty0, clone it first! [false, true].forEach(function (bNullable) { // Note: JSON.parse(JSON.stringify(...)) cannot clone Infinity! 
// Part of a fixture-driven forEach (header above this chunk): build a test property from
// oProperty0, optionally forcing $Nullable=false, and verify fetchUI5Type() constraints.
var oProperty = jQuery.extend(true, {}, oProperty0),
	oConstraints = oProperty.__constraints;

// __constraints is fixture-only metadata, not part of the OData property payload
delete oProperty.__constraints;
if (!bNullable) {
	oProperty.$Nullable = false;
	oConstraints = oConstraints || {};
	oConstraints.nullable = false;
}

QUnit.test("fetchUI5Type: " + JSON.stringify(oProperty), function (assert) {
	// Note: just spy on fetchModule() to make sure that the real types are used
	// which check correctness of constraints
	var fnFetchModuleSpy = this.spy(this.oMetaModel, "fetchModule"),
		sPath = "/EMPLOYEES/0/ENTRYDATE",
		oMetaContext = this.oMetaModel.getMetaContext(sPath),
		that = this;

	this.oMetaModelMock.expects("fetchObject").twice()
		.withExactArgs(undefined, oMetaContext)
		.returns(SyncPromise.resolve(oProperty));
	if (oProperty.$Type === "Edm.String") {
		// simulate annotation for strings
		this.oMetaModelMock.expects("fetchObject")
			.withExactArgs("@com.sap.vocabularies.Common.v1.IsDigitSequence", oMetaContext)
			.returns(
				SyncPromise.resolve(oConstraints && oConstraints.isDigitSequence));
	} else if (oProperty.$Type === "Edm.Decimal") {
		// simulate annotation for decimals
		this.oMetaModelMock.expects("fetchObject")
			.withExactArgs("@Org.OData.Validation.V1.Minimum/$Decimal", oMetaContext)
			.returns(
				SyncPromise.resolve(oConstraints && oConstraints.minimum));
		this.oMetaModelMock.expects("fetchObject")
			.withExactArgs(
				"@Org.OData.Validation.V1.Minimum@Org.OData.Validation.V1.Exclusive",
				oMetaContext)
			.returns(
				// NOTE(review): "minimumExlusive" looks like a typo for "minimumExclusive";
				// verify against the __constraints fixture definition (outside this chunk)
				SyncPromise.resolve(oConstraints && oConstraints.minimumExlusive));
		this.oMetaModelMock.expects("fetchObject")
			.withExactArgs("@Org.OData.Validation.V1.Maximum/$Decimal", oMetaContext)
			.returns(
				SyncPromise.resolve(oConstraints && oConstraints.maximum));
		this.oMetaModelMock.expects("fetchObject")
			.withExactArgs(
				"@Org.OData.Validation.V1.Maximum@Org.OData.Validation.V1.Exclusive",
				oMetaContext)
			.returns(
				SyncPromise.resolve(oConstraints && oConstraints.maximumExclusive));
	}

	// code under test
	return this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
		var sExpectedTypeName = "sap.ui.model.odata.type."
				+ oProperty.$Type.slice(4)/*cut off "Edm."*/;

		assert.strictEqual(fnFetchModuleSpy.callCount, 1);
		assert.ok(fnFetchModuleSpy.calledOn(that.oMetaModel));
		assert.ok(fnFetchModuleSpy.calledWithExactly(sExpectedTypeName),
			fnFetchModuleSpy.printf("%C"));
		assert.strictEqual(oType.getName(), sExpectedTypeName);
		assert.deepEqual(oType.oConstraints, oConstraints);
		assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "cached");
	});
});
});
});
});
//TODO later: support for facet DefaultValue?

//*********************************************************************************************
// $count segments must map to the Int64 type and be cached.
QUnit.test("fetchUI5Type: $count", function (assert) {
	var sPath = "/T€AMS/$count",
		oType;

	// code under test
	oType = this.oMetaModel.fetchUI5Type(sPath).getResult();

	assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Int64");
	assert.strictEqual(this.oMetaModel.getUI5Type(sPath), oType, "cached");
});

//*********************************************************************************************
// Collection-valued properties fall back to the Raw type with a single warning;
// a second fetch must reuse the cached type (hence "thrice" expectation on fetchObject:
// once per fetchUI5Type call plus once for the getUI5Type cache check).
QUnit.test("fetchUI5Type: collection", function (assert) {
	var sPath = "/EMPLOYEES/0/foo",
		that = this;

	this.oMetaModelMock.expects("fetchObject").thrice()
		.withExactArgs(undefined, this.oMetaModel.getMetaContext(sPath))
		.returns(SyncPromise.resolve({
			$isCollection : true,
			$Nullable : false, // must not be turned into a constraint for Raw!
			$Type : "Edm.String"
		}));
	this.oLogMock.expects("warning").withExactArgs(
		"Unsupported collection type, using sap.ui.model.odata.type.Raw",
		sPath, sODataMetaModel);

	return Promise.all([
		// code under test
		this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
			assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw");
			assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "cached");
		}),
		// code under test
		this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
			assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw");
		})
	]);
});

//*********************************************************************************************
//TODO make Edm.Duration work with OData V4
// Types without a UI5 counterpart fall back to Raw with a warning.
["acme.Type", "Edm.Duration", "Edm.GeographyPoint"].forEach(function (sQualifiedName) {
	QUnit.test("fetchUI5Type: unsupported type " + sQualifiedName, function (assert) {
		var sPath = "/EMPLOYEES/0/foo",
			that = this;

		this.oMetaModelMock.expects("fetchObject").twice()
			.withExactArgs(undefined, this.oMetaModel.getMetaContext(sPath))
			.returns(SyncPromise.resolve({
				$Nullable : false, // must not be turned into a constraint for Raw!
				$Type : sQualifiedName
			}));
		this.oLogMock.expects("warning").withExactArgs(
			"Unsupported type '" + sQualifiedName + "', using sap.ui.model.odata.type.Raw",
			sPath, sODataMetaModel);

		// code under test
		return this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
			assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw");
			assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "cached");
		});
	});
});

//*********************************************************************************************
// A path without property metadata also falls back to Raw, warning once per fetch.
QUnit.test("fetchUI5Type: invalid path", function (assert) {
	var sPath = "/EMPLOYEES/0/invalid",
		that = this;

	this.oMetaModelMock.expects("fetchObject").twice()
		.withExactArgs(undefined, this.oMetaModel.getMetaContext(sPath))
		.returns(SyncPromise.resolve(/*no property metadata for path*/));
	this.oLogMock.expects("warning").twice().withExactArgs(
		"No metadata for path '" + sPath + "', using sap.ui.model.odata.type.Raw",
		undefined, sODataMetaModel);

	// code under test
	return this.oMetaModel.fetchUI5Type(sPath).then(function (oType) {
		assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw");

		// code under test
		assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "Type is cached");
	});
});

//*********************************************************************************************
// getUI5Type/requestUI5Type are the sync/async facades of fetchUI5Type
// (checkGetAndRequest is a shared helper defined elsewhere in this file).
QUnit.test("getUI5Type, requestUI5Type", function (assert) {
	return checkGetAndRequest(this, assert, "fetchUI5Type", ["sPath"], true);
});

//*********************************************************************************************
// fetchCanonicalPath fixtures: dataPath is the context's data path, canonicalUrl the
// expected result; "requests" lists the fetchValue calls expected for key-predicate
// computation (placeholders "(~n)" stand for predicates delivered via "@$ui5._").
[{ // simple entity from a set
	dataPath : "/TEAMS/0",
	canonicalUrl : "/TEAMS(~1)",
	requests : [{
		entityType : "tea_busi.TEAM",
		predicate : "(~1)"
	}]
}, { // simple entity in transient context
	dataPath : "/TEAMS/-1",
	canonicalUrl : "/TEAMS(~1)",
	requests : [{
		entityType : "tea_busi.TEAM",
		// TODO a transient entity does not necessarily have all key properties, but this is
		// required to create a dependent cache
		predicate : "(~1)"
	}]
}, {
	// simple entity by key predicate
	dataPath : "/TEAMS('4%3D2')",
	canonicalUrl : "/TEAMS('4%3D2')",
	requests : []
}, { // simple singleton
	dataPath : "/Me",
	canonicalUrl : "/Me",
	requests : []
}, { // navigation to root entity
	dataPath : "/TEAMS/0/TEAM_2_EMPLOYEES/1",
	canonicalUrl : "/EMPLOYEES(~1)",
	requests : [{
		entityType : "tea_busi.Worker",
		predicate : "(~1)"
	}]
}, { // navigation to root entity
	dataPath : "/TEAMS('42')/TEAM_2_EMPLOYEES/1",
	canonicalUrl : "/EMPLOYEES(~1)",
	requests : [{
		entityType : "tea_busi.Worker",
		predicate : "(~1)"
	}]
}, { // navigation to root entity with key predicate
	dataPath : "/TEAMS('42')/TEAM_2_EMPLOYEES('23')",
	canonicalUrl : "/EMPLOYEES('23')",
	requests : []
}, { // multiple navigation to root entity
	dataPath : "/TEAMS/0/TEAM_2_EMPLOYEES/1/EMPLOYEE_2_TEAM",
	canonicalUrl : "/T%E2%82%ACAMS(~1)",
	requests : [{
		entityType : "tea_busi.TEAM",
		predicate : "(~1)"
	}]
}, { // navigation from entity set to single contained entity
	dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S",
	canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_S",
	requests : [{
		entityType : "tea_busi.TEAM",
		path : "/TEAMS/0",
		predicate : "(~1)"
	}]
}, { // navigation from singleton to single contained entity
	dataPath : "/Me/EMPLOYEE_2_CONTAINED_S",
	canonicalUrl : "/Me/EMPLOYEE_2_CONTAINED_S",
	requests : []
}, { // navigation to contained entity within a collection
	dataPath : "/TEAMS/0/TEAM_2_CONTAINED_C/1",
	canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_C(~2)",
	requests : [{
		entityType : "tea_busi.TEAM",
		path : "/TEAMS/0",
		predicate : "(~1)"
	}, {
		entityType : "tea_busi.ContainedC",
		path : "/TEAMS/0/TEAM_2_CONTAINED_C/1",
		predicate : "(~2)"
	}]
}, { // navigation to contained entity with a key predicate
	dataPath : "/TEAMS('42')/TEAM_2_CONTAINED_C('foo')",
	canonicalUrl : "/TEAMS('42')/TEAM_2_CONTAINED_C('foo')",
	requests : []
}, { // navigation from contained entity to contained entity
	dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1",
	canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_S/S_2_C(~2)",
	requests : [{
		entityType : "tea_busi.TEAM",
		path : "/TEAMS/0",
		predicate : "(~1)"
	}, {
		entityType : "tea_busi.ContainedC",
		path : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1",
		predicate : "(~2)"
	}]
}, { // navigation from contained to root entity
	// must be appended nevertheless since we only have a type, but no set
	dataPath : "/TEAMS/0/TEAM_2_CONTAINED_C/5/C_2_EMPLOYEE",
	canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_C(~2)/C_2_EMPLOYEE",
	requests : [{
		entityType : "tea_busi.TEAM",
		path : "/TEAMS/0",
		predicate : "(~1)"
	}, {
		entityType : "tea_busi.ContainedC",
		path : "/TEAMS/0/TEAM_2_CONTAINED_C/5",
		predicate : "(~2)"
	}]
}, { // navigation from entity w/ key predicate to contained to root entity
	dataPath : "/TEAMS('42')/TEAM_2_CONTAINED_C/5/C_2_EMPLOYEE",
	canonicalUrl : "/TEAMS('42')/TEAM_2_CONTAINED_C(~1)/C_2_EMPLOYEE",
	requests : [{
		entityType : "tea_busi.ContainedC",
		path : "/TEAMS('42')/TEAM_2_CONTAINED_C/5",
		predicate : "(~1)"
	}]
}, { // decode entity set initially, encode it finally
	dataPath : "/T%E2%82%ACAMS/0",
	canonicalUrl : "/T%E2%82%ACAMS(~1)",
	requests : [{
		entityType : "tea_busi.TEAM",
		predicate : "(~1)"
	}]
}, { // decode navigation property, encode entity set when building sCandidate
	dataPath : "/EMPLOYEES('7')/EMPLOYEE_2_EQUIPM%E2%82%ACNTS(42)",
	canonicalUrl : "/EQUIPM%E2%82%ACNTS(42)",
	requests : []
}].forEach(function (oFixture) {
	QUnit.test("fetchCanonicalPath: " + oFixture.dataPath, function (assert) {
		var oContext = Context.create(this.oModel, undefined, oFixture.dataPath),
			oContextMock = this.mock(oContext),
			oPromise;

		this.oMetaModelMock.expects("getMetaPath").withExactArgs(oFixture.dataPath)
			.returns("metapath");
		this.oMetaModelMock.expects("fetchObject").withExactArgs("metapath")
			.returns(SyncPromise.resolve());
		this.oMetaModelMock.expects("fetchEntityContainer")
			.returns(SyncPromise.resolve(mScope));
		// each fixture request corresponds to one fetchValue call for an entity whose key
		// predicate is read from the "@$ui5._" private annotation
		oFixture.requests.forEach(function (oRequest) {
			var oEntityInstance = {"@$ui5._" : {"predicate" : oRequest.predicate}};

			oContextMock.expects("fetchValue")
				.withExactArgs(oRequest.path || oFixture.dataPath)
				.returns(SyncPromise.resolve(oEntityInstance));
		});

		// code under test
		oPromise = this.oMetaModel.fetchCanonicalPath(oContext);

		assert.ok(!oPromise.isRejected());
		return oPromise.then(function (sCanonicalUrl) {
			assert.strictEqual(sCanonicalUrl, oFixture.canonicalUrl);
		});
	});
});

//*********************************************************************************************
// fetchUpdateData fixtures: "path" is "<context path>|<property path>" (the "|" marks the
// split); fetchPredicates maps entity paths to their types, and "(~i)" placeholders are the
// key predicates delivered for them (i = insertion order).
[{ // simple singleton
	path : "/Me|ID",
	editUrl : "Me"
}, { // simple entity by key predicate
	path : "/TEAMS('42')|Name",
	editUrl : "TEAMS('42')"
}, { // simple entity from a set
	path : "/TEAMS/0|Name",
	fetchPredicates : {
		"/TEAMS/0" : "tea_busi.TEAM"
	},
	editUrl : "TEAMS(~0)"
}, { // simple entity from a set, complex property
	path : "/EMPLOYEES/0|SAL%C3%83RY/CURRENCY",
	fetchPredicates : {
		"/EMPLOYEES/0" : "tea_busi.Worker"
	},
	editUrl : "EMPLOYEES(~0)"
}, { // navigation to root entity
	path : "/TEAMS/0/TEAM_2_EMPLOYEES/1|ID",
	fetchPredicates : {
		"/TEAMS/0/TEAM_2_EMPLOYEES/1" : "tea_busi.Worker"
	},
	editUrl : "EMPLOYEES(~0)"
}, { // navigation to root entity
	path : "/TEAMS('42')/TEAM_2_EMPLOYEES/1|ID",
	fetchPredicates : {
		"/TEAMS('42')/TEAM_2_EMPLOYEES/1" : "tea_busi.Worker"
	},
	editUrl : "EMPLOYEES(~0)"
}, { // navigation to root entity with key predicate
	path : "/TEAMS('42')/TEAM_2_EMPLOYEES('23')|ID",
	editUrl : "EMPLOYEES('23')"
}, { // multiple navigation to root entity
	path : "/TEAMS/0/TEAM_2_EMPLOYEES/1/EMPLOYEE_2_TEAM|Name",
	fetchPredicates : {
		"/TEAMS/0/TEAM_2_EMPLOYEES/1/EMPLOYEE_2_TEAM" : "tea_busi.TEAM"
	},
	editUrl : "T%E2%82%ACAMS(~0)"
}, { // navigation from entity set to single contained entity
	path : "/TEAMS/0/TEAM_2_CONTAINED_S|Id",
	fetchPredicates : {
		"/TEAMS/0" : "tea_busi.TEAM"
	},
	editUrl : "TEAMS(~0)/TEAM_2_CONTAINED_S"
}, { // navigation from singleton to single contained entity
	path : "/Me/EMPLOYEE_2_CONTAINED_S|Id",
	editUrl : "Me/EMPLOYEE_2_CONTAINED_S"
}, { // navigation to contained entity within a collection
	path : "/TEAMS/0/TEAM_2_CONTAINED_C/1|Id",
	fetchPredicates : {
		"/TEAMS/0" : "tea_busi.TEAM",
		"/TEAMS/0/TEAM_2_CONTAINED_C/1" : "tea_busi.ContainedC"
	},
	editUrl : "TEAMS(~0)/TEAM_2_CONTAINED_C(~1)"
}, { // navigation to contained entity with a key predicate
	path : "/TEAMS('42')/TEAM_2_CONTAINED_C('foo')|Id",
	editUrl : "TEAMS('42')/TEAM_2_CONTAINED_C('foo')"
}, { // navigation from contained entity to contained entity
	path : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1|Id",
	fetchPredicates : {
		"/TEAMS/0" : "tea_busi.TEAM",
		"/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1" : "tea_busi.ContainedC"
	},
	editUrl : "TEAMS(~0)/TEAM_2_CONTAINED_S/S_2_C(~1)"
}, { // navigation from contained to root entity, resolved via navigation property binding path
	path : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_EMPLOYEE|ID",
	fetchPredicates : {
		"/TEAMS/0/TEAM_2_CONTAINED_S/S_2_EMPLOYEE" : "tea_busi.Worker"
	},
	editUrl : "EMPLOYEES(~0)"
}, { // navigation from entity w/ key predicate to contained to root entity
	path : "/TEAMS('42')/TEAM_2_CONTAINED_C/5/C_2_EMPLOYEE|ID",
	fetchPredicates : {
		"/TEAMS('42')/TEAM_2_CONTAINED_C/5" : "tea_busi.ContainedC"
	},
	editUrl : "TEAMS('42')/TEAM_2_CONTAINED_C(~0)/C_2_EMPLOYEE"
}, { // decode entity set initially, encode it finally
	path : "/T%E2%82%ACAMS/0|Name",
	fetchPredicates : {
		"/T%E2%82%ACAMS/0" : "tea_busi.TEAM"
	},
	editUrl : "T%E2%82%ACAMS(~0)"
}, { // decode navigation property, encode entity set
	path : "/EMPLOYEES('7')/EMPLOYEE_2_EQUIPM%E2%82%ACNTS(42)|ID",
	editUrl : "EQUIPM%E2%82%ACNTS(42)"
}].forEach(function (oFixture) {
	QUnit.test("fetchUpdateData: " + oFixture.path, function (assert) {
		var i = oFixture.path.indexOf("|"),
			sContextPath = oFixture.path.slice(0, i),
			sPropertyPath = oFixture.path.slice(i + 1),
			oContext = Context.create(this.oModel, undefined, sContextPath),
			oContextMock = this.mock(oContext),
			oPromise,
			that = this;

		this.oMetaModelMock.expects("getMetaPath")
			.withExactArgs(oFixture.path.replace("|", "/")).returns("~");
		this.oMetaModelMock.expects("fetchObject").withExactArgs("~")
			.returns(SyncPromise.resolve(Promise.resolve()).then(function () {
				that.oMetaModelMock.expects("fetchEntityContainer")
					.returns(SyncPromise.resolve(mScope));
				Object.keys(oFixture.fetchPredicates || {}).forEach(function (sPath, i) {
					var oEntityInstance = {"@$ui5._" : {"predicate" : "(~" + i + ")"}};

					// Note: the entity instance is delivered asynchronously
					oContextMock.expects("fetchValue")
						.withExactArgs(sPath)
						.returns(SyncPromise.resolve(Promise.resolve(oEntityInstance)));
				});
			}));

		// code under test
		oPromise = this.oMetaModel.fetchUpdateData(sPropertyPath, oContext);

		assert.ok(!oPromise.isRejected());
		return oPromise.then(function (oResult) {
			assert.strictEqual(oResult.editUrl, oFixture.editUrl);
			assert.strictEqual(oResult.entityPath, sContextPath);
			assert.strictEqual(oResult.propertyPath, sPropertyPath);
		});
	});
});
//TODO support collection properties (-> path containing index not leading to predicate)
//TODO prefer instance annotation at payload for "odata.editLink"?!
//TODO target URLs like "com.sap.gateway.default.iwbep.tea_busi_product.v0001.Container/Products(...)"?
//TODO type casts, operations?
//*********************************************************************************************
// A transient entity ("@$ui5._"/transient) has no edit URL yet: editUrl must be undefined.
QUnit.test("fetchUpdateData: transient entity", function(assert) {
	var oContext = Context.create(this.oModel, undefined, "/TEAMS/-1"),
		sPropertyPath = "Name";

	this.oMetaModelMock.expects("fetchEntityContainer").twice()
		.returns(SyncPromise.resolve(mScope));
	this.mock(oContext).expects("fetchValue").withExactArgs("/TEAMS/-1")
		.returns(SyncPromise.resolve({"@$ui5._" : {"transient" : "update"}}));

	// code under test
	return this.oMetaModel.fetchUpdateData(sPropertyPath, oContext).then(function (oResult) {
		assert.deepEqual(oResult, {
			entityPath : "/TEAMS/-1",
			editUrl : undefined,
			propertyPath : "Name"
		});
	});
});

//*********************************************************************************************
// A rejected fetchObject must propagate its error unchanged through fetchUpdateData.
QUnit.test("fetchUpdateData: fetchObject fails", function(assert) {
	var oModel = this.oModel,
		oContext = {
			getModel : function () { return oModel; }
		},
		oExpectedError = new Error(),
		oMetaModelMock = this.mock(this.oMetaModel),
		sPath = "some/invalid/path/to/a/property";

	this.mock(oModel).expects("resolve")
		.withExactArgs(sPath, sinon.match.same(oContext))
		.returns("~1");
	oMetaModelMock.expects("getMetaPath").withExactArgs("~1").returns("~2");
	oMetaModelMock.expects("fetchObject").withExactArgs("~2")
		.returns(Promise.reject(oExpectedError));

	// code under test
	return this.oMetaModel.fetchUpdateData(sPath, oContext).then(function () {
		assert.ok(false);
	}, function (oError) {
		assert.strictEqual(oError, oExpectedError);
	});
});

//*********************************************************************************************
// Error fixtures: each dataPath must make fetchUpdateData reject with "<dataPath>: <message>";
// "instance" (when present) is the value delivered by fetchValue, an Error instance meaning
// the fetch itself failed.
[{
	dataPath : "/Foo/Bar",
	message : "Not an entity set: Foo",
	warning : "Unknown child Foo of tea_busi.DefaultContainer"
}, {
	dataPath : "/TEAMS/0/Foo/Bar",
	message : "Not a (navigation) property: Foo"
}, {
	dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S",
	instance : undefined,
	message : "No instance to calculate key predicate at /TEAMS/0"
}, {
	dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S",
	instance : {},
	message : "No key predicate known at /TEAMS/0"
}, {
	dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S",
	instance : new Error("failed to load team"),
	message : "failed to load team at /TEAMS/0"
}].forEach(function (oFixture) {
	QUnit.test("fetchUpdateData: " + oFixture.message, function (assert) {
		var oContext = Context.create(this.oModel, undefined, oFixture.dataPath),
			oPromise;

		this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
			.returns(SyncPromise.resolve(mScope));
		if ("instance" in oFixture) {
			this.mock(oContext).expects("fetchValue")
				.returns(oFixture.instance instanceof Error
					? SyncPromise.reject(oFixture.instance)
					: SyncPromise.resolve(oFixture.instance));
		}
		if (oFixture.warning) {
			this.oLogMock.expects("isLoggable")
				.withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel)
				.returns(true);
			this.oLogMock.expects("warning")
				.withExactArgs(oFixture.warning, oFixture.dataPath, sODataMetaModel);
		}
		// the error must also be reported to the model
		this.mock(this.oModel).expects("reportError")
			.withExactArgs(oFixture.message, sODataMetaModel, sinon.match({
				message : oFixture.dataPath + ": " + oFixture.message,
				name : "Error"
			}));

		oPromise = this.oMetaModel.fetchUpdateData("", oContext);

		assert.ok(oPromise.isRejected());
		assert.strictEqual(oPromise.getResult().message,
			oFixture.dataPath + ": " + oFixture.message);
		oPromise.caught(); // avoid "Uncaught (in promise)"
	});
});

//*********************************************************************************************
// fetchCanonicalPath delegates to fetchUpdateData and prefixes the editUrl with "/".
QUnit.test("fetchCanonicalPath: success", function(assert) {
	var oContext = {};

	this.mock(this.oMetaModel).expects("fetchUpdateData")
		.withExactArgs("", sinon.match.same(oContext))
		.returns(SyncPromise.resolve(Promise.resolve({
			editUrl : "edit('URL')",
			propertyPath : ""
		})));

	// code under test
	return this.oMetaModel.fetchCanonicalPath(oContext).then(function (oCanonicalPath) {
		assert.strictEqual(oCanonicalPath, "/edit('URL')");
	});
});
//*********************************************************************************************
// A non-empty propertyPath from fetchUpdateData means the context is not an entity:
// fetchCanonicalPath must reject with a descriptive message.
QUnit.test("fetchCanonicalPath: not an entity", function(assert) {
	var oContext = {
			getPath : function () { return "/TEAMS('4711')/Name"; }
		};

	this.mock(this.oMetaModel).expects("fetchUpdateData")
		.withExactArgs("", sinon.match.same(oContext))
		.returns(SyncPromise.resolve(Promise.resolve({
			entityPath : "/TEAMS('4711')",
			editUrl : "TEAMS('4711')",
			propertyPath : "Name"
		})));

	// code under test
	return this.oMetaModel.fetchCanonicalPath(oContext).then(function () {
		assert.ok(false);
	}, function (oError) {
		assert.strictEqual(oError.message, "Context " + oContext.getPath()
			+ " does not point to an entity. It should be " + "/TEAMS('4711')");
	});
});

//*********************************************************************************************
// Errors from fetchUpdateData are passed through fetchCanonicalPath unchanged.
QUnit.test("fetchCanonicalPath: fetchUpdateData fails", function(assert) {
	var oContext = {},
		oExpectedError = new Error();

	this.mock(this.oMetaModel).expects("fetchUpdateData")
		.withExactArgs("", sinon.match.same(oContext))
		.returns(SyncPromise.resolve(Promise.reject(oExpectedError)));

	// code under test
	return this.oMetaModel.fetchCanonicalPath(oContext).then(function () {
		assert.ok(false);
	}, function (oError) {
		assert.strictEqual(oError, oExpectedError);
	});
});

//*********************************************************************************************
QUnit.test("getProperty = getObject", function (assert) {
	assert.strictEqual(this.oMetaModel.getProperty, this.oMetaModel.getObject);
});

//*********************************************************************************************
// bindProperty returns a read-only ODataMetaPropertyBinding; setting a value must throw.
QUnit.test("bindProperty", function (assert) {
	var oBinding,
		oContext = {},
		mParameters = {},
		sPath = "foo";

	// code under test
	oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);

	assert.ok(oBinding instanceof PropertyBinding);
	assert.ok(oBinding.hasOwnProperty("vValue"));
	assert.strictEqual(oBinding.getContext(), oContext);
	assert.strictEqual(oBinding.getModel(), this.oMetaModel);
	assert.strictEqual(oBinding.getPath(), sPath);
	assert.strictEqual(oBinding.mParameters, mParameters, "mParameters available internally");
	assert.strictEqual(oBinding.getValue(), undefined);

	// code under test: must not call getProperty() again!
	assert.strictEqual(oBinding.getExternalValue(), undefined);

	// code under test
	assert.throws(function () {
		oBinding.setExternalValue("foo");
	}, /Unsupported operation: ODataMetaPropertyBinding#setValue/);
});

//*********************************************************************************************
// Without $$valueAsPromise the value stays undefined until fetchObject resolves,
// then a change event delivers it.
[undefined, {}, {$$valueAsPromise : false}].forEach(function (mParameters, i) {
	QUnit.test("ODataMetaPropertyBinding#checkUpdate: " + i, function (assert) {
		var oBinding,
			oContext = {},
			sPath = "foo",
			oValue = {},
			oPromise = SyncPromise.resolve(Promise.resolve(oValue));

		oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
		this.oMetaModelMock.expects("fetchObject")
			.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
			.returns(oPromise);
		this.mock(oBinding).expects("_fireChange")
			.withExactArgs({reason : ChangeReason.Change});

		// code under test
		oBinding.checkUpdate();

		assert.strictEqual(oBinding.getValue(), undefined);
		oPromise.then(function () {
			assert.strictEqual(oBinding.getValue(), oValue);
		});

		return oPromise;
	});
});

//*********************************************************************************************
// With $$valueAsPromise and a synchronously fulfilled SyncPromise, the value is
// available immediately after checkUpdate().
QUnit.test("ODataMetaPropertyBinding#checkUpdate: $$valueAsPromise=true, sync",
		function (assert) {
	var oBinding,
		oContext = {},
		mParameters = {$$valueAsPromise : true},
		sPath = "foo",
		oValue = {},
		oPromise = SyncPromise.resolve(oValue);

	oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
	this.oMetaModelMock.expects("fetchObject")
		.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
		.returns(oPromise);
	this.mock(oBinding).expects("_fireChange").withExactArgs({reason : ChangeReason.Change});

	// code under test
	oBinding.checkUpdate();

	assert.strictEqual(oBinding.getValue(), oValue, "Value sync");

	return oPromise;
});

//*********************************************************************************************
// No change event when the fetched value equals the current one (vValue preset).
QUnit.test("ODataMetaPropertyBinding#checkUpdate: no event", function (assert) {
	var oBinding,
		oContext = {},
		mParameters = {},
		sPath = "foo",
		oValue = {},
		oPromise = SyncPromise.resolve(Promise.resolve(oValue));

	oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
	oBinding.vValue = oValue;
	this.oMetaModelMock.expects("fetchObject")
		.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
		.returns(oPromise);
	this.mock(oBinding).expects("_fireChange").never();

	// code under test
	oBinding.checkUpdate();

	return oPromise;
});

//*********************************************************************************************
// bForceUpdate=true fires a change event with the given reason even for an unchanged value.
QUnit.test("ODataMetaPropertyBinding#checkUpdate: bForceUpdate, sChangeReason",
		function (assert) {
	var oBinding,
		oContext = {},
		mParameters = {},
		sPath = "foo",
		oValue = {},
		oPromise = SyncPromise.resolve(Promise.resolve(oValue));

	oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
	oBinding.vValue = oValue;
	this.oMetaModelMock.expects("fetchObject")
		.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
		.returns(oPromise);
	this.mock(oBinding).expects("_fireChange").withExactArgs({reason : "Foo"});

	// code under test
	oBinding.checkUpdate(true, "Foo");

	return oPromise;
});

//*********************************************************************************************
// With $$valueAsPromise and an async fetch, two change events fire: first while the
// value is still a pending SyncPromise, then once it is resolved.
QUnit.test("ODataMetaPropertyBinding#checkUpdate: $$valueAsPromise = true", function (assert) {
	var oBinding,
		oContext = {},
		mParameters = {
			$$valueAsPromise : true
		},
		sPath = "foo",
		oValue = {},
		oPromise = SyncPromise.resolve(Promise.resolve(oValue));

	oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters);
	oBinding.vValue = oValue;
	this.oMetaModelMock.expects("fetchObject")
		.withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters))
		.returns(oPromise);
	this.mock(oBinding).expects("_fireChange")
		.withExactArgs({reason : "Foo"})
		.twice()
		.onFirstCall().callsFake(function () {
			assert.ok(oBinding.getValue().isPending(), "Value is still a pending SyncPromise");
		})
		.onSecondCall().callsFake(function () {
			assert.strictEqual(oBinding.getValue(), oValue, "Value resolved");
		});

	// code under test
	oBinding.checkUpdate(false, "Foo");

	assert.ok(oBinding.getValue().isPending(), "Value is a pending SyncPromise");
	return oBinding.getValue().then(function (oResult) {
		assert.strictEqual(oResult, oValue);
		assert.strictEqual(oBinding.getValue(), oValue);
	});
});

//*********************************************************************************************
// setContext only triggers checkUpdate for relative bindings and only when the context
// actually changes.
QUnit.test("ODataMetaPropertyBinding#setContext", function (assert) {
	var oBinding,
		oBindingMock,
		oContext = {};

	oBinding = this.oMetaModel.bindProperty("Foo", oContext);
	oBindingMock = this.mock(oBinding);
	oBindingMock.expects("checkUpdate").never();

	// code under test
	oBinding.setContext(oContext);

	oBindingMock.expects("checkUpdate").withExactArgs(false, ChangeReason.Context);

	// code under test
	oBinding.setContext(undefined);

	assert.strictEqual(oBinding.getContext(), undefined);
	oBinding = this.oMetaModel.bindProperty("/Foo");
	this.mock(oBinding).expects("checkUpdate").never();

	// code under test
	oBinding.setContext(oContext);
});

//*********************************************************************************************
// bindContext: relative vs. absolute paths; change events only fire when the resolved
// path changes (contexts with equal paths count as unchanged).
["ENTRYDATE", "/EMPLOYEES/ENTRYDATE"].forEach(function (sPath) {
	QUnit.test("bindContext: " + sPath, function (assert) {
		var bAbsolutePath = sPath[0] === "/",
			oBinding,
			oBoundContext,
			iChangeCount = 0,
			oContext = this.oMetaModel.getMetaContext("/EMPLOYEES"),
			oContextCopy = this.oMetaModel.getMetaContext("/EMPLOYEES"),
			oNewContext = this.oMetaModel.getMetaContext("/T€AMS");

		// without context
		oBinding = this.oMetaModel.bindContext(sPath, null);

		assert.ok(oBinding instanceof ContextBinding);
		assert.strictEqual(oBinding.getModel(), this.oMetaModel);
		assert.strictEqual(oBinding.getPath(), sPath);
		assert.strictEqual(oBinding.getContext(), null);
		assert.strictEqual(oBinding.isInitial(), true);
		assert.strictEqual(oBinding.getBoundContext(), null);

		// with context
		oBinding = this.oMetaModel.bindContext(sPath, oContextCopy);

		assert.ok(oBinding instanceof ContextBinding);
		assert.strictEqual(oBinding.getModel(), this.oMetaModel);
		assert.strictEqual(oBinding.getPath(), sPath);
		assert.strictEqual(oBinding.getContext(), oContextCopy);
		assert.strictEqual(oBinding.isInitial(), true);
		assert.strictEqual(oBinding.getBoundContext(), null);

		// setContext **********
		oBinding.attachChange(function (oEvent) {
			assert.strictEqual(oEvent.getId(), "change");
			iChangeCount += 1;
		});

		// code under test
		oBinding.setContext(oContext);

		assert.strictEqual(iChangeCount, 0, "still initial");
		assert.strictEqual(oBinding.isInitial(), true);
		assert.strictEqual(oBinding.getBoundContext(), null);
		assert.strictEqual(oBinding.getContext(), oContext);

		// code under test
		oBinding.initialize();

		assert.strictEqual(iChangeCount, 1, "ManagedObject relies on 'change' event!");
		assert.strictEqual(oBinding.isInitial(), false);
		oBoundContext = oBinding.getBoundContext();
		assert.strictEqual(oBoundContext.getModel(), this.oMetaModel);
		assert.strictEqual(oBoundContext.getPath(),
			bAbsolutePath ? sPath : oContext.getPath() + "/" + sPath);

		// code under test - same context
		oBinding.setContext(oContext);

		assert.strictEqual(iChangeCount, 1, "context unchanged");
		assert.strictEqual(oBinding.getBoundContext(), oBoundContext);

		// code under test
		oBinding.setContext(oContextCopy);

		assert.strictEqual(iChangeCount, 1, "context unchanged");
		assert.strictEqual(oBinding.getBoundContext(), oBoundContext);

		// code under test
		// Note: checks equality on resolved path, not simply object identity of context!
		oBinding.setContext(oNewContext);

		if (bAbsolutePath) {
			assert.strictEqual(iChangeCount, 1, "context unchanged");
			assert.strictEqual(oBinding.getBoundContext(), oBoundContext);
		} else {
			assert.strictEqual(iChangeCount, 2, "context changed");
			oBoundContext = oBinding.getBoundContext();
			assert.strictEqual(oBoundContext.getModel(), this.oMetaModel);
			assert.strictEqual(oBoundContext.getPath(), oNewContext.getPath() + "/" + sPath);
		}

		// code under test
		oBinding.setContext(null);

		if (bAbsolutePath) {
			assert.strictEqual(iChangeCount, 1, "context unchanged");
			assert.strictEqual(oBinding.getBoundContext(), oBoundContext);
		} else {
			assert.strictEqual(iChangeCount, 3, "context changed");
			assert.strictEqual(oBinding.isInitial(), false);
			assert.strictEqual(oBinding.getBoundContext(), null);
		}
	});
});

//*********************************************************************************************
// bindList wires sorters and filters into a ClientListBinding subclass.
QUnit.test("bindList", function (assert) {
	var oBinding,
		oContext = this.oMetaModel.getContext("/EMPLOYEES"),
		aFilters = [],
		sPath = "@",
		aSorters = [];

	// avoid request to backend during initialization
	this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());

	// code under test
	oBinding = this.oMetaModel.bindList(sPath, oContext, aSorters, aFilters);

	assert.ok(oBinding instanceof ClientListBinding);
	assert.strictEqual(oBinding.getModel(), this.oMetaModel);
	assert.strictEqual(oBinding.getPath(), sPath);
	assert.strictEqual(oBinding.getContext(), oContext);
	assert.strictEqual(oBinding.aSorters, aSorters);
	assert.strictEqual(oBinding.aApplicationFilters, aFilters);
});

//*********************************************************************************************
// setContexts stores the list and refreshes indices, filter, sort and length.
QUnit.test("ODataMetaListBinding#setContexts", function (assert) {
	var oBinding,
		oBindingMock,
		oContext = this.oMetaModel.getContext("/EMPLOYEES"),
		aContexts = [],
		sPath = "path";

	// avoid request to backend during initialization
	this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());

	oBinding = this.oMetaModel.bindList(sPath, oContext);
	oBindingMock = this.mock(oBinding);
	oBindingMock.expects("updateIndices").withExactArgs();
	oBindingMock.expects("applyFilter").withExactArgs();
	oBindingMock.expects("applySort").withExactArgs();
	oBindingMock.expects("_getLength").withExactArgs().returns(42);

	// code under test
	oBinding.setContexts(aContexts);

	assert.strictEqual(oBinding.oList, aContexts);
	assert.strictEqual(oBinding.iLength, 42);
});

//*********************************************************************************************
// update with a synchronously available context list: no change event is fired.
QUnit.test("ODataMetaListBinding#update (sync)", function (assert) {
	var oBinding,
		oBindingMock,
		oContext = this.oMetaModel.getContext("/EMPLOYEES"),
		aContexts = [{}],
		sPath = "path";

	// avoid request to backend during initialization
	this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());

	oBinding = this.oMetaModel.bindList(sPath, oContext);
	oBindingMock = this.mock(oBinding);
	oBindingMock.expects("fetchContexts").withExactArgs()
		.returns(SyncPromise.resolve(aContexts));
	oBindingMock.expects("setContexts").withExactArgs(sinon.match.same(aContexts));
	oBindingMock.expects("_fireChange").never();

	// code under test
	oBinding.update();
});

//*********************************************************************************************
// update with an async context list: contexts are set to [] first, then replaced and a
// change event fired once fetchContexts resolves.
QUnit.test("ODataMetaListBinding#update (async)", function (assert) {
	var oBinding,
		oBindingMock,
		oContext = this.oMetaModel.getContext("/EMPLOYEES"),
		aContexts = [{}],
		sPath = "path",
		oFetchPromise = SyncPromise.resolve(Promise.resolve()).then(function () {
			// This is expected to happen after the promise is resolved
			oBindingMock.expects("setContexts").withExactArgs(sinon.match.same(aContexts));
			oBindingMock.expects("_fireChange").withExactArgs({reason : ChangeReason.Change});

			return aContexts;
		});

	// avoid request to backend during initialization
	this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());

	oBinding = this.oMetaModel.bindList(sPath, oContext);
	oBindingMock = this.mock(oBinding);
	oBindingMock.expects("fetchContexts").withExactArgs().returns(oFetchPromise);
	oBindingMock.expects("setContexts").withExactArgs([]);
	oBindingMock.expects("_fireChange").never(); // initially

	// code under test
	oBinding.update();

	return oFetchPromise;
});

//*********************************************************************************************
// checkUpdate always calls update, but fires a change event only on the first call or
// when forced.
QUnit.test("ODataMetaListBinding#checkUpdate", function (assert) {
	var oBinding,
		oBindingMock,
		oContext = this.oMetaModel.getContext("/"),
		sPath = "";

	// avoid request to backend during initialization
	this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve());

	oBinding = this.oMetaModel.bindList(sPath, oContext);
	oBindingMock = this.mock(oBinding);
	this.mock(oBinding).expects("update").thrice().callsFake(function () {
		this.oList = [{/*a context*/}];
	});
	oBindingMock.expects("_fireChange").withExactArgs({reason : ChangeReason.Change});

	// code under test
	oBinding.checkUpdate();

	// code under test: The second call must call update, but not fire an event
	oBinding.checkUpdate();

	oBindingMock.expects("_fireChange").withExactArgs({reason : ChangeReason.Change});

	// code under test: Must fire a change event
	oBinding.checkUpdate(true);
});

//*********************************************************************************************
// getContexts honors start/length and the model's size limit; sort/filter work without
// firing "sort"/"filter" events; extended change detection is ignored.
QUnit.test("ODataMetaListBinding#getContexts, getCurrentContexts", function (assert) {
	var oBinding,
		oMetaModel = this.oMetaModel, // instead of "that = this"
		oContext = oMetaModel.getMetaContext("/EMPLOYEES"),
		sPath = "";

	// asserts that the binding's contexts have the given paths (relative to /EMPLOYEES/)
	function assertContextPaths(aContexts, aPaths) {
		assert.notOk("diff" in aContexts, "extended change detection is ignored");
		assert.deepEqual(aContexts.map(function (oContext) {
			assert.strictEqual(oContext.getModel(), oMetaModel);
			return oContext.getPath().replace("/EMPLOYEES/", "");
		}), aPaths);
		assert.deepEqual(oBinding.getCurrentContexts(), aContexts);
	}

	this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
		.returns(SyncPromise.resolve(mScope));
	oBinding = oMetaModel.bindList(sPath, oContext);

	// code under test: should be ignored
	oBinding.enableExtendedChangeDetection();

	assertContextPaths(oBinding.getContexts(0, 2), ["ID", "AGE"]);
	assertContextPaths(oBinding.getContexts(1, 2), ["AGE", "EMPLOYEE_2_CONTAINED_S"]);
	assertContextPaths(oBinding.getContexts(), ["ID", "AGE", "EMPLOYEE_2_CONTAINED_S",
		"EMPLOYEE_2_EQUIPM€NTS", "EMPLOYEE_2_TEAM", "SALÃRY"]);
	assertContextPaths(oBinding.getContexts(0, 10), ["ID", "AGE", "EMPLOYEE_2_CONTAINED_S",
		"EMPLOYEE_2_EQUIPM€NTS", "EMPLOYEE_2_TEAM", "SALÃRY"]);

	oMetaModel.setSizeLimit(2);
	assertContextPaths(oBinding.getContexts(), ["ID", "AGE"]);

	oBinding.attachEvent("sort", function () {
		assert.ok(false, "unexpected sort event");
	});

	oMetaModel.setSizeLimit(100);
	oBinding.sort(new Sorter("@sapui.name"));
	assertContextPaths(oBinding.getContexts(), ["AGE", "EMPLOYEE_2_CONTAINED_S",
		"EMPLOYEE_2_EQUIPM€NTS", "EMPLOYEE_2_TEAM", "ID", "SALÃRY"]);

	oBinding.attachEvent("filter", function () {
		assert.ok(false, "unexpected filter event");
	});

	oBinding.filter(new Filter("$kind", "EQ", "Property"));
	assertContextPaths(oBinding.getContexts(), ["AGE", "ID", "SALÃRY"]);
});

//*********************************************************************************************
// fetchContexts fixtures: combinations of contextPath and metaPath with the expected list
// of context paths; "warning" is [message, details] for the unknown-child case.
[{
	contextPath : undefined,
	metaPath : "@",
	result : []
}, {
	// <template:repeat list="{entitySet>}" ...>
	// Iterate all OData path segments, i.e. (navigation) properties.
	// Implicit $Type insertion happens here!
	//TODO support for $BaseType
	contextPath : "/EMPLOYEES",
	metaPath : "",
	result : [
		"/EMPLOYEES/ID",
		"/EMPLOYEES/AGE",
		"/EMPLOYEES/EMPLOYEE_2_CONTAINED_S",
		"/EMPLOYEES/EMPLOYEE_2_EQUIPM€NTS",
		"/EMPLOYEES/EMPLOYEE_2_TEAM",
		"/EMPLOYEES/SALÃRY"
	]
}, {
	// <template:repeat list="{meta>EMPLOYEES}" ...>
	// same as before, but with non-empty path
	contextPath : "/",
	metaPath : "EMPLOYEES",
	result : [
		"/EMPLOYEES/ID",
		"/EMPLOYEES/AGE",
		"/EMPLOYEES/EMPLOYEE_2_CONTAINED_S",
		"/EMPLOYEES/EMPLOYEE_2_EQUIPM€NTS",
		"/EMPLOYEES/EMPLOYEE_2_TEAM",
		"/EMPLOYEES/SALÃRY"
	]
}, {
	// <template:repeat list="{meta>/}" ...>
	// Iterate all OData path segments, i.e. entity sets and imports.
	// Implicit scope lookup happens here!
	metaPath : "/",
	result :[
		"/ChangeManagerOfTeam",
		"/EMPLOYEES",
		"/EQUIPM€NTS",
		"/GetEmployeeMaxAge",
		"/Me",
		"/OverloadedAction",
		"/TEAMS",
		"/T€AMS",
		"/VoidAction"
	]
}, {
	// <template:repeat list="{property>@}" ...>
	// Iterate all external targeting annotations.
	contextPath : "/T€AMS/Team_Id",
	metaPath : "@",
	result : [
		"/T€AMS/Team_Id@Common.Label",
		"/T€AMS/Team_Id@Common.Text",
		"/T€AMS/Team_Id@Common.Text@UI.TextArrangement"
	]
}, {
	// <template:repeat list="{property>@}" ...>
	// Iterate all external targeting annotations.
	contextPath : "/T€AMS/Name",
	metaPath : "@",
	result : []
}, {
	// <template:repeat list="{field>./@}" ...>
	// Iterate all inline annotations.
	contextPath : "/T€AMS/$Type/@UI.LineItem/0",
	metaPath : "./@",
	result : [
		"/T€AMS/$Type/@UI.LineItem/0/@UI.Importance"
	]
}, {
	// <template:repeat list="{at>}" ...>
	// Iterate all inline annotations (edge case with empty relative path).
	contextPath : "/T€AMS/$Type/@UI.LineItem/0/@",
	metaPath : "",
	result : [
		"/T€AMS/$Type/@UI.LineItem/0/@UI.Importance"
	]
}, {
	contextPath : undefined,
	metaPath : "/Unknown",
	result : [],
	warning : ["Unknown child Unknown of tea_busi.DefaultContainer", "/Unknown/"]
}].forEach(function (oFixture) {
	var sPath = oFixture.contextPath
			? oFixture.contextPath + "|"/*make cut more visible*/ + oFixture.metaPath
			: oFixture.metaPath;

	QUnit.test("ODataMetaListBinding#fetchContexts (sync): " + sPath, function (assert) {
		var oBinding,
			oMetaModel = this.oMetaModel, // instead of "that = this"
			oContext = oFixture.contextPath && oMetaModel.getContext(oFixture.contextPath);

		if (oFixture.warning) {
			// Note that _getContexts is called twice in this test: once from bindList via the
			// constructor, once directly from the test
			this.oLogMock.expects("isLoggable").twice()
				.withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel)
				.returns(true);
			this.oLogMock.expects("warning").twice()
				.withExactArgs(oFixture.warning[0], oFixture.warning[1], sODataMetaModel);
		}
		this.oMetaModelMock.expects("fetchEntityContainer").atLeast(0)
			.returns(SyncPromise.resolve(mScope));
		oBinding = this.oMetaModel.bindList(oFixture.metaPath, oContext);

		// code under test
		assert.deepEqual(oBinding.fetchContexts().getResult().map(function (oContext) {
			assert.strictEqual(oContext.getModel(), oMetaModel);
			return oContext.getPath();
		}), oFixture.result);
	});
});

//*********************************************************************************************
// fetchContexts with asynchronously delivered metadata: one context per object key.
QUnit.test("ODataMetaListBinding#fetchContexts (async)", function (assert) {
	var oBinding,
		oMetaModel = this.oMetaModel,
		sPath = "/foo";

	// Note that fetchObject is called twice in this test: once from bindList via the
	// constructor, once from fetchContexts
	this.oMetaModelMock.expects("fetchObject").twice()
		.withExactArgs(sPath + "/")
		.returns(SyncPromise.resolve(Promise.resolve({bar: "", baz: ""})));
	oBinding = this.oMetaModel.bindList(sPath);

	return oBinding.fetchContexts().then(function (oResult) {
		assert.deepEqual(oResult.map(function (oContext) {
			assert.strictEqual(oContext.getModel(), oMetaModel);
			return oContext.getPath();
		}), ["/foo/bar", "/foo/baz"]);
	});
});
//TODO iterate mix of inline and external targeting annotations
//TODO iterate annotations like "foo@..."
for our special cases, e.g. annotations of annotation //********************************************************************************************* QUnit.test("events", function (assert) { assert.throws(function () { this.oMetaModel.attachParseError(); }, new Error("Unsupported event 'parseError': v4.ODataMetaModel#attachEvent")); assert.throws(function () { this.oMetaModel.attachRequestCompleted(); }, new Error("Unsupported event 'requestCompleted': v4.ODataMetaModel#attachEvent")); assert.throws(function () { this.oMetaModel.attachRequestFailed(); }, new Error("Unsupported event 'requestFailed': v4.ODataMetaModel#attachEvent")); assert.throws(function () { this.oMetaModel.attachRequestSent(); }, new Error("Unsupported event 'requestSent': v4.ODataMetaModel#attachEvent")); }); //********************************************************************************************* QUnit.test("validate: mSchema2MetadataUrl", function (assert) { var mScope = { "$Version" : "4.0", "$Reference" : { "/A/$metadata" : { "$Include" : [ "A.", "A.A." ] }, "/B/$metadata" : { "$Include" : [ "B.", "B.B." ] }, "/C/$metadata" : { "$Include" : ["C."] }, "../../../../default/iwbep/tea_busi_product/0001/$metadata" : { "$Include" : [ "tea_busi_product." ] } } }, sUrl = "/~/$metadata"; assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {}); // simulate a previous reference to a schema with the _same_ reference URI --> allowed! 
this.oMetaModel.mSchema2MetadataUrl["A."] = {"/A/$metadata" : false}; // simulate a previous reference to a schema with the _different_ reference URI // --> allowed as long as the document is not yet read (and will never be read) this.oMetaModel.mSchema2MetadataUrl["B.B."] = {"/B/V2/$metadata" : false}; // simulate a previous reference to a schema with the _same_ reference URI, already loaded this.oMetaModel.mSchema2MetadataUrl["C."] = {"/C/$metadata" : true}; // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mScope), mScope); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, { "A." : {"/A/$metadata" : false}, "A.A." : {"/A/$metadata" : false}, "B." : {"/B/$metadata" : false}, "B.B." : { "/B/$metadata" : false, "/B/V2/$metadata" : false }, "C." : {"/C/$metadata" : true}, "tea_busi_product." : {"/a/default/iwbep/tea_busi_product/0001/$metadata" : false} }); }); //********************************************************************************************* QUnit.test("getLastModified", function (assert) { var mEmptyScope = { "$Version" : "4.0" }, mNewScope = { "$Version" : "4.0", "$Date" : "Tue, 18 Apr 2017 14:40:29 GMT" }, iNow = Date.now(), mOldScope = { "$Version" : "4.0", "$Date" : "Tue, 18 Apr 2017 14:40:29 GMT", // $LastModified wins! "$LastModified" : "Fri, 07 Apr 2017 11:21:50 GMT" }, mOldScopeClone = clone(mOldScope), sUrl = "/~/$metadata"; // Note: in real life, each URL is read at most once! 
// code under test (together with c'tor) assert.strictEqual(this.oMetaModel.getLastModified().getTime(), 0, "initial value"); // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mOldScope), mOldScope); assert.strictEqual(this.oMetaModel.getLastModified().toISOString(), "2017-04-07T11:21:50.000Z", "old $LastModified is used"); assert.notOk("$LastModified" in mOldScope); // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mNewScope), mNewScope); assert.strictEqual(this.oMetaModel.getLastModified().toISOString(), "2017-04-18T14:40:29.000Z", "new $Date is used"); assert.notOk("$Date" in mNewScope); // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mOldScopeClone), mOldScopeClone); assert.strictEqual(this.oMetaModel.getLastModified().toISOString(), "2017-04-18T14:40:29.000Z", "new $Date wins, old $LastModified is ignored"); assert.notOk("$LastModified" in mOldScopeClone); // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mEmptyScope), mEmptyScope); assert.ok(this.oMetaModel.getLastModified().getTime() >= iNow, "missing $Date/$LastModified is like 'now': " + this.oMetaModel.getLastModified()); }); //********************************************************************************************* QUnit.test("getETags", function (assert) { var sETag = 'W/"..."', mETags, that = this; function codeUnderTest(sUrl, mScope) { // code under test assert.strictEqual(that.oMetaModel.validate(sUrl, mScope), mScope); assert.notOk("$ETag" in mScope); assert.notOk("$LastModified" in mScope); } // code under test (together with c'tor) assert.deepEqual(this.oMetaModel.getETags(), {}, "initial value"); codeUnderTest("/~/A", { "$Version" : "4.0", "$LastModified" : "Fri, 07 Apr 2017 11:21:50 GMT" }); codeUnderTest("/~/B", { "$Version" : "4.0", "$LastModified" : "Tue, 18 Apr 2017 14:40:29 GMT" }); codeUnderTest("/~/C", { "$Version" : "4.0" }); codeUnderTest("/~/D", { "$Version" : "4.0", "$ETag" : sETag }); // code under test 
mETags = this.oMetaModel.getETags(); assert.deepEqual(mETags, { "/~/A" : new Date(Date.UTC(2017, 3, 7, 11, 21, 50)), "/~/B" : new Date(Date.UTC(2017, 3, 18, 14, 40, 29)), "/~/C" : null, "/~/D" : sETag // wins over null! }); }); //********************************************************************************************* [{ message : "Unsupported IncludeAnnotations", scope : { "$Version" : "4.0", "$Reference" : { "/A/$metadata" : { "$Include" : [ "A." ] }, "/B/$metadata" : { "$IncludeAnnotations" : [{ "$TermNamespace" : "com.sap.vocabularies.Common.v1" }] } } } }, { message : "A schema cannot span more than one document: tea_busi." + " - is both included and defined", scope : { "$Version" : "4.0", "$Reference" : { "/B/$metadata" : { "$Include" : [ "foo.", "tea_busi." ] } }, "tea_busi." : { "$kind" : "Schema" } } }, { message : "A schema cannot span more than one document: existing." + " - expected reference URI /B/v1/$metadata but instead saw /B/v2/$metadata", scope : { "$Version" : "4.0", "$Reference" : { "/A/$metadata" : { "$Include" : [ "foo.", "bar." ] }, "/B/v2/$metadata" : { "$Include" : [ "baz.", "existing." ] } } } }].forEach(function (oFixture) { [false, true].forEach(function (bSupportReferences) { var sMessage = oFixture.message, sTitle = "validate: " + sMessage + ", supportReferences: " + bSupportReferences; QUnit.test(sTitle, function (assert) { var sUrl = "/~/$metadata", that = this; function codeUnderTest() { var oResult = that.oMetaModel.validate(sUrl, oFixture.scope); assert.strictEqual(oResult, oFixture.scope); } this.oMetaModel.bSupportReferences = bSupportReferences; // simulate a schema that has been loaded or referenced before this.oMetaModel.mSchema2MetadataUrl = { // simulate schema that is already read "existing." 
: {"/B/v1/$metadata" : true} }; if (bSupportReferences) { this.oLogMock.expects("error") .withExactArgs(sMessage, sUrl, sODataMetaModel); } if (bSupportReferences) { assert.throws(codeUnderTest, new Error(sUrl + ": " + sMessage)); } else { codeUnderTest(); } }); }); }); //********************************************************************************************* QUnit.test("_mergeAnnotations: without annotation files", function (assert) { // Note: target elements have been omitted for brevity var mExpectedAnnotations = { "same.target" : { "@Common.Description" : "", "@Common.Label" : { "old" : true // Note: no aggregation of properties here! }, "@Common.Text" : "" }, "another.target" : { "@Common.Label" : "" } }, mScope = { "A." : { "$kind" : "Schema", "$Annotations" : { "same.target" : { "@Common.Label" : { "old" : true }, "@Common.Text" : "" } } }, "B." : { "$kind" : "Schema", "$Annotations" : { "same.target" : { "@Common.Description" : "", "@Common.Label" : { // illegal overwrite within $metadata, ignored! "new" : true } }, "another.target" : { "@Common.Label" : "" } } }, "B.B" : {} }; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, mScope); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {}); // code under test this.oMetaModel._mergeAnnotations(mScope, []); assert.deepEqual(mScope.$Annotations, mExpectedAnnotations, "$Annotations have been shifted and merged from schemas to root"); assert.notOk("$Annotations" in mScope["A."], "$Annotations removed from schema"); assert.notOk("$Annotations" in mScope["B."], "$Annotations removed from schema"); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, { "A." : {"/a/b/c/d/e/$metadata" : false}, "B." 
: {"/a/b/c/d/e/$metadata" : false} }); }); //********************************************************************************************* QUnit.test("_mergeAnnotations: validation failure for $metadata", function (assert) { var oError = new Error(), mScope = {}; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, mScope) .throws(oError); assert.throws(function () { // code under test this.oMetaModel._mergeAnnotations(mScope, []); }, oError); }); //********************************************************************************************* QUnit.test("_mergeAnnotations: validation failure in annotation file", function (assert) { var oError = new Error(), mScope = {}, mAnnotationScope1 = {}, mAnnotationScope2 = {}; this.oMetaModel.aAnnotationUris = ["n/a", "/my/annotation.xml"]; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, mScope); this.oMetaModelMock.expects("validate") .withExactArgs("n/a", mAnnotationScope1); this.oMetaModelMock.expects("validate") .withExactArgs("/my/annotation.xml", mAnnotationScope2) .throws(oError); assert.throws(function () { // code under test this.oMetaModel._mergeAnnotations(mScope, [mAnnotationScope1, mAnnotationScope2]); }, oError); }); //********************************************************************************************* QUnit.test("_mergeAnnotations: with annotation files (legacy)", function (assert) { var sNamespace = "com.sap.gateway.default.iwbep.tea_busi.v0001.", sWorker = sNamespace + "Worker/", sBasicSalaryCurr = sWorker + "SALARY/BASIC_SALARY_CURR", sBasicSalaryCurr2 = "another.schema.2.SALARY/BASIC_SALARY_CURR", sBonusCurr = sWorker + "SALARY/BONUS_CURR", sCommonLabel = "@com.sap.vocabularies.Common.v1.Label", sCommonQuickInfo = "@com.sap.vocabularies.Common.v1.QuickInfo", sCommonText = "@com.sap.vocabularies.Common.v1.Text", sBaseUrl = "/" + window.location.pathname.split("/")[1] + "/test-resources/sap/ui/core/qunit/odata/v4/data/", oMetadata = 
jQuery.sap.sjax({url : sBaseUrl + "metadata.json", dataType : 'json'}).data, oExpectedResult = clone(oMetadata), oAnnotation = jQuery.sap.sjax({ url : sBaseUrl + "legacy_annotations.json", dataType : 'json' }).data, oAnnotationCopy = clone(oAnnotation); // the examples are unrealistic and only need to work in 'legacy mode' this.oMetaModel.bSupportReferences = false; this.oMetaModel.aAnnotationUris = ["n/a"]; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, oMetadata); this.oMetaModelMock.expects("validate") .withExactArgs("n/a", oAnnotation); oExpectedResult.$Annotations = oMetadata[sNamespace].$Annotations; delete oExpectedResult[sNamespace].$Annotations; // all entries with $kind are merged oExpectedResult["my.schema.2.FuGetEmployeeMaxAge"] = oAnnotationCopy["my.schema.2.FuGetEmployeeMaxAge"]; oExpectedResult["my.schema.2.Entity"] = oAnnotationCopy["my.schema.2.Entity"]; oExpectedResult["my.schema.2.DefaultContainer"] = oAnnotationCopy["my.schema.2.DefaultContainer"]; oExpectedResult["my.schema.2."] = oAnnotationCopy["my.schema.2."]; oExpectedResult["another.schema.2."] = oAnnotationCopy["another.schema.2."]; // update annotations oExpectedResult.$Annotations[sBasicSalaryCurr][sCommonLabel] = oAnnotationCopy["my.schema.2."].$Annotations[sBasicSalaryCurr][sCommonLabel]; oExpectedResult.$Annotations[sBasicSalaryCurr][sCommonQuickInfo] = oAnnotationCopy["my.schema.2."].$Annotations[sBasicSalaryCurr][sCommonQuickInfo]; oExpectedResult.$Annotations[sBonusCurr][sCommonText] = oAnnotationCopy["my.schema.2."].$Annotations[sBonusCurr][sCommonText]; oExpectedResult.$Annotations[sBasicSalaryCurr2] = oAnnotationCopy["another.schema.2."].$Annotations[sBasicSalaryCurr2]; delete oExpectedResult["my.schema.2."].$Annotations; delete oExpectedResult["another.schema.2."].$Annotations; // code under test this.oMetaModel._mergeAnnotations(oMetadata, [oAnnotation]); assert.deepEqual(oMetadata, oExpectedResult, "merged metadata as expected"); }); 
//********************************************************************************************* QUnit.test("_mergeAnnotations: with annotation files", function (assert) { var mScope0 = { "$EntityContainer" : "tea_busi.DefaultContainer", "$Reference" : { "../../../../default/iwbep/tea_busi_foo/0001/$metadata" : { "$Include" : [ "tea_busi_foo.v0001." ] } }, "$Version" : "4.0", "tea_busi." : { "$kind" : "Schema", "$Annotations" : { "tea_busi.DefaultContainer" : { "@A" : "from $metadata", "@B" : "from $metadata", "@C" : "from $metadata" }, "tea_busi.TEAM" : { "@D" : ["from $metadata"], "@E" : ["from $metadata"], "@F" : ["from $metadata"] } } }, "tea_busi.DefaultContainer" : { "$kind" : "EntityContainer" }, "tea_busi.EQUIPMENT" : { "$kind" : "EntityType" }, "tea_busi.TEAM" : { "$kind" : "EntityType" }, "tea_busi.Worker" : { "$kind" : "EntityType" } }, mScope1 = { "$Version" : "4.0", "tea_busi_foo.v0001." : { "$kind" : "Schema", "$Annotations" : { "tea_busi_foo.v0001.Product/Name" : { "@Common.Label" : "from $metadata" } } }, "tea_busi_foo.v0001.Product" : { "$kind" : "EntityType", "Name" : { "$kind" : "Property", "$Type" : "Edm.String" } } }, mAnnotationScope1 = { "$Version" : "4.0", "foo." : { "$kind" : "Schema", "$Annotations" : { "tea_busi.DefaultContainer" : { "@B" : "from annotation #1", "@C" : "from annotation #1" }, "tea_busi.TEAM" : { "@E" : ["from annotation #1"], "@F" : ["from annotation #1"] }, "tea_busi.Worker" : { "@From.Annotation" : { "$Type" : "some.Record", "Label" : "from annotation #1" }, "@From.Annotation1" : "from annotation #1" } } } }, mAnnotationScope2 = { "$Version" : "4.0", "bar." 
: { "$kind" : "Schema", "$Annotations" : { "tea_busi.DefaultContainer" : { "@C" : "from annotation #2" }, "tea_busi.EQUIPMENT" : { "@From.Annotation2" : "from annotation #2" }, "tea_busi.TEAM" : { "@F" : ["from annotation #2"] }, "tea_busi.Worker" : { "@From.Annotation" : { "$Type" : "some.Record", "Value" : "from annotation #2" } }, "tea_busi_foo.v0001.Product/Name" : { "@Common.Label" : "from annotation #2" } } } }, mExpectedScope = { "$Annotations" : { "tea_busi.DefaultContainer" : { "@A" : "from $metadata", "@B" : "from annotation #1", "@C" : "from annotation #2" }, "tea_busi.EQUIPMENT" : { "@From.Annotation2" : "from annotation #2" }, "tea_busi.TEAM" : { // Note: no aggregation of array elements here! "@D" : ["from $metadata"], "@E" : ["from annotation #1"], "@F" : ["from annotation #2"] }, "tea_busi.Worker" : { "@From.Annotation" : { "$Type" : "some.Record", // Note: no "Label" here! "Value" : "from annotation #2" }, "@From.Annotation1" : "from annotation #1" }, "tea_busi_foo.v0001.Product/Name" : { "@Common.Label" : "from annotation #2" } }, "$EntityContainer" : "tea_busi.DefaultContainer", "$Reference" : { "../../../../default/iwbep/tea_busi_foo/0001/$metadata" : { "$Include" : [ "tea_busi_foo.v0001." ] } }, "$Version" : "4.0", "bar." : { "$kind" : "Schema" }, "foo." : { "$kind" : "Schema" }, "tea_busi." 
: { "$kind" : "Schema" }, "tea_busi.DefaultContainer" : { "$kind" : "EntityContainer" }, "tea_busi.EQUIPMENT" : { "$kind" : "EntityType" }, "tea_busi.TEAM" : { "$kind" : "EntityType" }, "tea_busi.Worker" : { "$kind" : "EntityType" } }; this.oMetaModel.aAnnotationUris = ["/URI/1", "/URI/2"]; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, mScope0); this.oMetaModelMock.expects("validate") .withExactArgs("/URI/1", mAnnotationScope1); this.oMetaModelMock.expects("validate") .withExactArgs("/URI/2", mAnnotationScope2); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {}); // code under test this.oMetaModel._mergeAnnotations(mScope0, [mAnnotationScope1, mAnnotationScope2]); assert.deepEqual(mScope0, mExpectedScope); assert.strictEqual(mScope0["tea_busi."].$Annotations, undefined); assert.strictEqual(mAnnotationScope1["foo."].$Annotations, undefined); assert.strictEqual(mAnnotationScope2["bar."].$Annotations, undefined); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, { "bar." : {"/URI/2" : false}, "foo." : {"/URI/1" : false}, "tea_busi." 
: {"/a/b/c/d/e/$metadata" : false} }); // prepare to load "cross-service reference" // simulate #validate of mScope0 this.oMetaModel.mSchema2MetadataUrl["tea_busi_foo.v0001."] = {"/a/default/iwbep/tea_busi_foo/0001/$metadata" : false}; this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(mScope0)); this.mock(this.oMetaModel.oRequestor).expects("read") .withExactArgs("/a/default/iwbep/tea_busi_foo/0001/$metadata") .returns(Promise.resolve(mScope1)); this.oMetaModelMock.expects("validate") .withExactArgs("/a/default/iwbep/tea_busi_foo/0001/$metadata", mScope1) .returns(mScope1); // code under test return this.oMetaModel.fetchObject("/tea_busi_foo.v0001.Product/Name@Common.Label") .then(function (sLabel) { assert.strictEqual(sLabel, "from annotation #2", "not overwritten by $metadata"); }); }); //********************************************************************************************* QUnit.test("_mergeAnnotations - error (legacy)", function (assert) { var oAnnotation1 = { "tea_busi.NewType1" : { "$kind" : "EntityType" } }, oAnnotation2 = { "tea_busi.NewType2" : { "$kind" : "EntityType" }, "tea_busi.ExistingType" : { "$kind" : "EntityType" } }, sMessage = "A schema cannot span more than one document: tea_busi.ExistingType", oMetadata = { "tea_busi.ExistingType" : { "$kind" : "EntityType" } }; this.oMetaModel.aAnnotationUris = ["n/a", "/my/annotation.xml"]; // legacy behavior: $Version is not checked, tea_busi.NewType2 is allowed this.oMetaModel.bSupportReferences = false; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, oMetadata); this.oMetaModelMock.expects("validate") .withExactArgs("n/a", oAnnotation1); this.oMetaModelMock.expects("validate") .withExactArgs("/my/annotation.xml", oAnnotation2); this.oLogMock.expects("error") .withExactArgs(sMessage, "/my/annotation.xml", sODataMetaModel); assert.throws(function () { // code under test this.oMetaModel._mergeAnnotations(oMetadata, [oAnnotation1, 
oAnnotation2]); }, new Error("/my/annotation.xml: " + sMessage)); }); //********************************************************************************************* QUnit.test("_mergeAnnotations - a schema cannot span more than one document", function (assert) { var oAnnotation = { "$Version" : "4.0", "tea_busi." : { "$kind" : "Schema" } }, sMessage = "A schema cannot span more than one document: tea_busi.", oMetadata = { "$Version" : "4.0", "tea_busi." : { "$kind" : "Schema" } }; this.oMetaModel.aAnnotationUris = ["n/a", "/my/annotation.xml"]; this.oLogMock.expects("error") .withExactArgs(sMessage, "/my/annotation.xml", sODataMetaModel); assert.throws(function () { // code under test this.oMetaModel._mergeAnnotations(oMetadata, [{"$Version" : "4.0"}, oAnnotation]); }, new Error("/my/annotation.xml: " + sMessage)); } ); //********************************************************************************************* QUnit.test("getOrCreateValueListModel", function (assert) { var oModel = new ODataModel({ serviceUrl : "/Foo/DataService/", synchronizationMode : "None" }), oMetaModel = oModel.getMetaModel(), oValueListModel; oModel.oRequestor.mHeaders["X-CSRF-Token"] = "xyz"; // code under test oValueListModel = oMetaModel.getOrCreateValueListModel("../ValueListService/$metadata"); assert.ok(oValueListModel instanceof ODataModel); assert.strictEqual(oValueListModel.sServiceUrl, "/Foo/ValueListService/"); assert.strictEqual(oValueListModel.getDefaultBindingMode(), BindingMode.OneWay); assert.strictEqual(oValueListModel.sOperationMode, OperationMode.Server); assert.strictEqual(oValueListModel.oRequestor.mHeaders["X-CSRF-Token"], "xyz"); // code under test assert.strictEqual(oMetaModel.getOrCreateValueListModel("/Foo/ValueListService/$metadata"), oValueListModel); // code under test assert.strictEqual(oValueListModel.getMetaModel() .getOrCreateValueListModel("/Foo/ValueListService/$metadata"), oValueListModel); // code under test 
assert.strictEqual(oValueListModel.getMetaModel().getOrCreateValueListModel("$metadata"), oValueListModel); oModel = new ODataModel({ serviceUrl : "/Foo/DataService2/", synchronizationMode : "None" }); // code under test - even a totally different model gets the very same value list model assert.strictEqual(oModel.getMetaModel() .getOrCreateValueListModel("../ValueListService/$metadata"), oValueListModel); }); //********************************************************************************************* QUnit.test("getOrCreateValueListModel: relative data service URL", function (assert) { var sRelativePath = "../../../DataService/", sAbsolutePath = new URI(sRelativePath).absoluteTo(document.baseURI).pathname().toString(), oModel = new ODataModel({ serviceUrl : sRelativePath, synchronizationMode : "None" }), oValueListModel; // code under test oValueListModel = oModel.getMetaModel() .getOrCreateValueListModel("../ValueListService/$metadata"); assert.strictEqual(oValueListModel.sServiceUrl, new URI("../ValueListService/").absoluteTo(sAbsolutePath).toString()); }); //********************************************************************************************* QUnit.test("fetchValueListType: unknown property", function (assert) { var oContext = {}, sPath = "/Products('HT-1000')/Foo"; this.oMetaModelMock.expects("getMetaContext").withExactArgs(sPath).returns(oContext); this.oMetaModelMock.expects("fetchObject") .withExactArgs(undefined, sinon.match.same(oContext)) .returns(Promise.resolve()); // code under test return this.oMetaModel.fetchValueListType(sPath).then(function () { assert.ok(false); }, function (oError) { assert.ok(oError.message, "No metadata for " + sPath); }); }); //********************************************************************************************* [{ mAnnotations : { "@some.other.Annotation" : true }, sValueListType : ValueListType.None }, { mAnnotations : { "@com.sap.vocabularies.Common.v1.ValueListReferences" : [], 
// NOTE(review): this fragment continues a fixture array (opened above this chunk) driving
// "fetchValueListType" tests: each fixture pairs property annotations with the expected
// ValueListType result.
	"@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : true
	},
	sValueListType : ValueListType.Fixed
}, {
	mAnnotations : {
		"@com.sap.vocabularies.Common.v1.ValueListReferences" : []
	},
	sValueListType : ValueListType.Standard
}, {
	mAnnotations : {
		"@com.sap.vocabularies.Common.v1.ValueListReferences#foo" : [],
		"@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : false
	},
	sValueListType : ValueListType.Standard
}, {
	mAnnotations : {
		"@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : {},
		"@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : false
	},
	sValueListType : ValueListType.Standard
}].forEach(function (oFixture) {
	// fetchValueListType derives the ValueListType from the property's "@" annotations;
	// getMetaContext/fetchObject/getObject are mocked so only the classification logic runs.
	QUnit.test("fetchValueListType: " + oFixture.sValueListType, function (assert) {
		var oContext = {},
			sPropertyPath = "/ProductList('HT-1000')/Status";

		this.oMetaModelMock.expects("getMetaContext")
			.withExactArgs(sPropertyPath).returns(oContext);
		this.oMetaModelMock.expects("fetchObject")
			.withExactArgs(undefined, sinon.match.same(oContext))
			.returns(SyncPromise.resolve({}));
		this.oMetaModelMock.expects("getObject")
			.withExactArgs("@", sinon.match.same(oContext))
			.returns(oFixture.mAnnotations);

		// code under test
		this.oMetaModel.fetchValueListType(sPropertyPath).then(function (sValueListType) {
			assert.strictEqual(sValueListType, oFixture.sValueListType);
		});
	});
});

//*********************************************************************************************
// getValueListType/requestValueListType are sync/async facades of fetchValueListType;
// checkGetAndRequest (helper defined elsewhere in this file) verifies the delegation.
QUnit.test("getValueListType, requestValueListType", function (assert) {
	return checkGetAndRequest(this, assert, "fetchValueListType", ["sPath"], true);
});

//*********************************************************************************************
// fetchValueListMappings collects ValueListMapping annotations for the given property from
// the value list service's $metadata, keyed by qualifier ("" for the default mapping).
QUnit.test("fetchValueListMappings: success", function (assert) {
	var oModel = new ODataModel({
			serviceUrl : "/Foo/DataService/",
			synchronizationMode : "None"
		}),
		oMetaModelMock = this.mock(oModel.getMetaModel()),
		oDefaultMapping = {
			"CollectionPath" : "VH_Category1Set",
			"Parameters" : [{"p1" : "foo"}]
		},
		oFooMapping = {
			"CollectionPath" : "VH_Category2Set",
			"Parameters" : [{"p2" : "bar"}]
		},
		oProperty = {},
		oValueListMetadata = {
			"$Annotations" : {
				"zui5_epm_sample.Product/Category" : {
					"@com.sap.vocabularies.Common.v1.ValueListMapping" : oDefaultMapping,
					"@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : oFooMapping
				},
				// targets other than the requested property must be ignored
				"some.other.Target" : {}
			}
		},
		// stub value list model: only getMetaModel().fetchEntityContainer() is used
		oValueListModel = {
			getMetaModel : function () {
				return {
					fetchEntityContainer : function () {
						return Promise.resolve(oValueListMetadata);
					}
				};
			}
		};

	oMetaModelMock.expects("getObject")
		.withExactArgs("/zui5_epm_sample.Product/Category")
		.returns(oProperty);

	// code under test
	return oModel.getMetaModel()
		.fetchValueListMappings(oValueListModel, "zui5_epm_sample", oProperty)
		.then(function (oValueListMappings) {
			assert.deepEqual(oValueListMappings, {
				"" : oDefaultMapping,
				"foo" : oFooMapping
			});
		});
});

//*********************************************************************************************
// Error cases: annotations in the value list service that target the data service's
// namespace unexpectedly, carry foreign terms, or are missing entirely must reject.
[{
	annotations : {
		"zui5_epm_sample.Product/CurrencyCode/type.cast" : true
	},
	error : "Unexpected annotation target 'zui5_epm_sample.Product/CurrencyCode/type.cast' "
		+ "with namespace of data service in /Foo/ValueListService"
}, {
	annotations : {
		"zui5_epm_sample.Product/Category" : {
			"@some.other.Term" : true
		}
	},
	error : "Unexpected annotation 'some.other.Term' for target "
		+ "'zui5_epm_sample.Product/Category' with namespace of data service "
		+ "in /Foo/ValueListService"
}, {
	annotations : {},
	error : "No annotation 'com.sap.vocabularies.Common.v1.ValueListMapping' "
		+ "in /Foo/ValueListService"
}].forEach(function (oFixture) {
	QUnit.test("fetchValueListMappings: " + oFixture.error, function (assert) {
		var oModel = new ODataModel({
				serviceUrl : "/Foo/DataService/",
				synchronizationMode : "None"
			}),
			oMetaModel = oModel.getMetaModel(),
			oMetaModelMock = this.mock(oMetaModel),
			oProperty = {},
			oValueListMetadata = {
				"$Annotations" : oFixture.annotations
			},
			oValueListModel = {
				getMetaModel : function () {
					return {
						fetchEntityContainer : function () {
							return Promise.resolve(oValueListMetadata);
						}
					};
				},
				sServiceUrl : "/Foo/ValueListService"
			},
			sTarget = Object.keys(oFixture.annotations)[0];

		// only the matching property target resolves; anything else yields undefined
		oMetaModelMock.expects("getObject").atLeast(0)
			.withExactArgs("/" + sTarget)
			.returns(sTarget === "zui5_epm_sample.Product/Category" ? oProperty : undefined);

		// code under test
		return oMetaModel
			.fetchValueListMappings(oValueListModel, "zui5_epm_sample", oProperty)
			.then(function () {
				assert.ok(false);
			}, function (oError) {
				assert.strictEqual(oError.message, oFixture.error);
			});
	});
});

//*********************************************************************************************
// When the value list model IS the data model, mappings are read from the model's own
// metadata (no separate service round trip).
QUnit.test("fetchValueListMappings: value list model is data model", function (assert) {
	var oModel = new ODataModel({
			serviceUrl : "/Foo/DataService/",
			synchronizationMode : "None"
		}),
		oMetaModelMock = this.mock(oModel.getMetaModel()),
		oMapping = {
			"CollectionPath" : "VH_CountrySet",
			"Parameters" : [{"p1" : "foo"}]
		},
		oProperty = {
			"$kind" : "Property"
		},
		oMetadata = {
			"$EntityContainer" : "value_list.Container",
			"value_list.VH_BusinessPartner" : {
				"$kind" : "Entity",
				"Country" : oProperty
			},
			"$Annotations" : {
				// value list on value list
				"value_list.VH_BusinessPartner/Country" : {
					"@com.sap.vocabularies.Common.v1.Label" : "Country",
					"@com.sap.vocabularies.Common.v1.ValueListMapping" : oMapping
				},
				"value_list.VH_BusinessPartner/Foo" : {/* some other field w/ value list*/}
			}
		};

	oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
		.returns(SyncPromise.resolve(oMetadata));

	// code under test
	return oModel.getMetaModel()
		.fetchValueListMappings(oModel, "value_list", oProperty)
		.then(function (oValueListMappings) {
			assert.deepEqual(oValueListMappings, {
				"" : oMapping
			});
		});
});

//*********************************************************************************************
// requestValueListInfo rejects when the property is unknown or lacks ValueListReferences.
// mScope is the shared metadata fixture defined earlier in this file.
[{
	sPropertyPath : "/EMPLOYEES/unknown",
	sExpectedError : "No metadata"
}, {
	sPropertyPath : "/EMPLOYEES/AGE",
	sExpectedError : "No annotation 'com.sap.vocabularies.Common.v1.ValueListReferences'"
}].forEach(function (oFixture) {
	QUnit.test("requestValueListInfo: " + oFixture.sExpectedError, function (assert) {
		var oModel = new ODataModel({
				serviceUrl : "/~/",
				synchronizationMode : "None"
			});

		this.mock(oModel.getMetaModel()).expects("fetchEntityContainer").atLeast(1)
			.returns(SyncPromise.resolve(mScope));

		// code under test
		return oModel.getMetaModel().requestValueListInfo(oFixture.sPropertyPath)
			.then(function () {
				assert.ok(false);
			}, function (oError) {
				assert.strictEqual(oError.message,
					oFixture.sExpectedError + " for " + oFixture.sPropertyPath);
			});
	});
});

//*********************************************************************************************
// Mappings from several referenced value list services are merged; a qualifier occurring
// twice across services (bDuplicate) must reject with a descriptive error.
[false, true].forEach(function (bDuplicate) {
	QUnit.test("requestValueListInfo: duplicate=" + bDuplicate, function (assert) {
		var sMappingUrl1 = "../ValueListService1/$metadata",
			sMappingUrl2 = "../ValueListService2/$metadata",
			sMappingUrlBar = "../ValueListServiceBar/$metadata",
			oModel = new ODataModel({
				serviceUrl : "/Foo/DataService/",
				synchronizationMode : "None"
			}),
			oMetaModelMock = this.mock(oModel.getMetaModel()),
			oProperty = {
				"$kind" : "Property"
			},
			sPropertyPath = "/ProductList('HT-1000')/Category",
			oMetadata = {
				"$EntityContainer" : "zui5_epm_sample.Container",
				"zui5_epm_sample.Product" : {
					"$kind" : "Entity",
					"Category" : oProperty
				},
				"$Annotations" : {
					"zui5_epm_sample.Product/Category" : {
						"@com.sap.vocabularies.Common.v1.ValueListReferences"
							: [sMappingUrl1, sMappingUrl2],
						"@com.sap.vocabularies.Common.v1.ValueListReferences#bar"
							: [sMappingUrlBar],
						// annotations of annotations and foreign terms must be ignored
						"@com.sap.vocabularies.Common.v1.ValueListReferences#bar@an.Annotation"
							: true,
						"@some.other.Annotation" : true
					}
				},
				"zui5_epm_sample.Container" : {
					"ProductList" : {
						"$kind" : "EntitySet",
						"$Type" : "zui5_epm_sample.Product"
					}
				}
			},
			oValueListMappings1 = {
				"" : {CollectionPath : ""}
			},
			oValueListMappings2 = {
				"foo" : {CollectionPath : "foo"}
			},
			oValueListMappingsBar = {},
			oValueListModel1 = {sServiceUrl : sMappingUrl1},
			oValueListModel2 = {sServiceUrl : sMappingUrl2},
			oValueListModelBar = {sServiceUrl : sMappingUrlBar};

		// qualifier "" collides with oValueListMappings1 in the duplicate case
		oValueListMappingsBar[bDuplicate ? "" : "bar"] = {CollectionPath : "bar"};
		oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
			.returns(SyncPromise.resolve(oMetadata));
		oMetaModelMock.expects("getOrCreateValueListModel")
			.withExactArgs(sMappingUrl1)
			.returns(oValueListModel1);
		oMetaModelMock.expects("fetchValueListMappings")
			.withExactArgs(sinon.match.same(oValueListModel1), "zui5_epm_sample",
				sinon.match.same(oProperty))
			.returns(Promise.resolve(oValueListMappings1));
		oMetaModelMock.expects("getOrCreateValueListModel")
			.withExactArgs(sMappingUrl2)
			.returns(oValueListModel2);
		oMetaModelMock.expects("fetchValueListMappings")
			.withExactArgs(sinon.match.same(oValueListModel2), "zui5_epm_sample",
				sinon.match.same(oProperty))
			.returns(Promise.resolve(oValueListMappings2));
		oMetaModelMock.expects("getOrCreateValueListModel")
			.withExactArgs(sMappingUrlBar)
			.returns(oValueListModelBar);
		oMetaModelMock.expects("fetchValueListMappings")
			.withExactArgs(sinon.match.same(oValueListModelBar), "zui5_epm_sample",
				sinon.match.same(oProperty))
			.returns(SyncPromise.resolve(oValueListMappingsBar));

		// code under test
		return oModel.getMetaModel()
			.requestValueListInfo(sPropertyPath)
			.then(function (oResult) {
				assert.ok(!bDuplicate);
				assert.deepEqual(oResult, {
					"" : {
						$model : oValueListModel1,
						CollectionPath : ""
					},
					"foo" : {
						$model : oValueListModel2,
						CollectionPath : "foo"
					},
					"bar" : {
						$model : oValueListModelBar,
						CollectionPath : "bar"
					}
				});
			}, function (oError) {
				assert.ok(bDuplicate);
				assert.strictEqual(oError.message,
					"Annotations 'com.sap.vocabularies.Common.v1.ValueListMapping' with "
						+ "identical qualifier '' for property " + sPropertyPath + " in "
						+ sMappingUrlBar + " and " + sMappingUrl1);
			});
	});
});

//*********************************************************************************************
// ValueListMapping annotations inside the data service itself: $model must be the same
// model instance, and the original mapping objects must not be polluted with $model.
QUnit.test("requestValueListInfo: same model w/o reference", function (assert) {
	var oProperty = {
			"$kind" : "Property"
		},
		oValueListMappingFoo = {CollectionPath : "foo"},
		oMetadata = {
			"$EntityContainer" : "value_list.Container",
			"value_list.Container" : {
				"$kind" : "EntityContainer",
				"VH_BusinessPartnerSet" : {
					"$kind" : "EntitySet",
					"$Type" : "value_list.VH_BusinessPartner"
				}
			},
			"value_list.VH_BusinessPartner" : {
				"$kind" : "Entity",
				"Country" : oProperty
			},
			"$Annotations" : {
				"value_list.VH_BusinessPartner/Country" : {
					"@com.sap.vocabularies.Common.v1.ValueListMapping#foo"
						: oValueListMappingFoo,
					"@com.sap.vocabularies.Common.v1.ValueListMapping#bar"
						: {CollectionPath : "bar"}
				}
			}
		},
		oModel = new ODataModel({
			serviceUrl : "/Foo/ValueListService/",
			synchronizationMode : "None"
		}),
		oMetaModelMock = this.mock(oModel.getMetaModel()),
		sPropertyPath = "/VH_BusinessPartnerSet('0100000000')/Country";

	oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
		.returns(SyncPromise.resolve(oMetadata));

	// code under test
	return oModel.getMetaModel().requestValueListInfo(sPropertyPath).then(function (oResult) {
		assert.strictEqual(oResult.foo.$model, oModel);
		assert.strictEqual(oResult.bar.$model, oModel);
		// the annotation object itself must stay untouched (result is a copy)
		assert.notOk("$model" in oValueListMappingFoo);
		delete oResult.foo.$model;
		delete oResult.bar.$model;
		assert.deepEqual(oResult, {
			"foo" : {CollectionPath : "foo"},
			"bar" : {CollectionPath : "bar"}
		});
	});
});

//*********************************************************************************************
// ValueListWithFixedValues : a single mapping is returned under the "" key regardless of
// its qualifier; more than one mapping alongside the fixed-values flag must reject.
[false, true].forEach(function (bDuplicate) {
	var sTitle = "requestValueListInfo: fixed values: duplicate=" + bDuplicate;

	QUnit.test(sTitle, function (assert) {
		var oValueListMapping = {CollectionPath : "foo"},
			oAnnotations = {
				"@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : true,
				"@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : oValueListMapping
			},
			oMetadata = {
				"$EntityContainer" : "value_list.Container",
				"value_list.Container" : {
					"$kind" : "EntityContainer",
					"VH_BusinessPartnerSet" : {
						"$kind" : "EntitySet",
						"$Type" : "value_list.VH_BusinessPartner"
					}
				},
				"value_list.VH_BusinessPartner" : {
					"$kind" : "Entity",
					"Country" : {}
				},
				"$Annotations" : {
					"value_list.VH_BusinessPartner/Country" : oAnnotations
				}
			},
			oModel = new ODataModel({
				serviceUrl : "/Foo/ValueListService/",
				synchronizationMode : "None"
			}),
			sPropertyPath = "/VH_BusinessPartnerSet('42')/Country";

		if (bDuplicate) {
			oAnnotations["@com.sap.vocabularies.Common.v1.ValueListMapping#bar"] = {};
		}
		this.mock(oModel.getMetaModel()).expects("fetchEntityContainer").atLeast(1)
			.returns(SyncPromise.resolve(oMetadata));

		// code under test
		return oModel.getMetaModel().requestValueListInfo(sPropertyPath)
			.then(function (oResult) {
				assert.notOk(bDuplicate);
				assert.strictEqual(oResult[""].$model, oModel);
				delete oResult[""].$model;
				assert.deepEqual(oResult, {
					"" : {CollectionPath : "foo"}
				});
			}, function (oError) {
				assert.ok(bDuplicate);
				assert.strictEqual(oError.message, "Annotation "
					+ "'com.sap.vocabularies.Common.v1.ValueListWithFixedValues' but multiple "
					+ "'com.sap.vocabularies.Common.v1.ValueListMapping' for property "
					+ sPropertyPath);
			});
	});
});

// *********************************************************************************************
// The annotated property lives in a schema pulled in via a cross-service $Reference; both
// $metadata documents are served through the mocked requestor.
QUnit.test("requestValueListInfo: property in cross-service reference", function (assert) {
	var sMappingUrl = "../ValueListService/$metadata",
		oModel = new ODataModel({
			serviceUrl : "/Foo/DataService/",
			synchronizationMode : "None"
		}),
		oMetaModelMock = this.mock(oModel.getMetaModel()),
		oProperty = {
			"$kind" : "Property"
		},
		oMetadata = {
			"$Version" : "4.0",
			"$Reference" : {
				"/Foo/EpmSample/$metadata" : {
					"$Include" : ["zui5_epm_sample."]
				}
			},
			"$EntityContainer" : "base.Container",
			"base.Container" : {
				"BusinessPartnerList" : {
					"$kind" : "EntitySet",
					"$Type" : "base.BusinessPartner"
				}
			},
			"base.BusinessPartner" : {
				"$kind" : "EntityType",
				"BP_2_PRODUCT" : {
					"$kind" : "NavigationProperty",
					"$Type" : "zui5_epm_sample.Product"
				}
			}
		},
		oMetadataProduct = {
			"$Version" : "4.0",
			"zui5_epm_sample.Product" : {
				"$kind" : "Entity",
				"Category" : oProperty
			},
			"zui5_epm_sample." : {
				"$kind" : "Schema",
				"$Annotations" : {
					"zui5_epm_sample.Product/Category" : {
						"@com.sap.vocabularies.Common.v1.ValueListReferences" : [sMappingUrl]
					}
				}
			}
		},
		sPropertyPath = "/BusinessPartnerList('0100000000')/BP_2_PRODUCT('HT-1000')/Category",
		oRequestorMock = this.mock(oModel.oMetaModel.oRequestor),
		oValueListMappings = {
			"" : {CollectionPath : ""}
		},
		oValueListModel = {sServiceUrl : sMappingUrl};

	oRequestorMock.expects("read").withExactArgs("/Foo/DataService/$metadata", false, undefined)
		.returns(Promise.resolve(oMetadata));
	oRequestorMock.expects("read").withExactArgs("/Foo/EpmSample/$metadata")
		.returns(Promise.resolve(oMetadataProduct));
	oMetaModelMock.expects("getOrCreateValueListModel")
		.withExactArgs(sMappingUrl)
		.returns(oValueListModel);
	oMetaModelMock.expects("fetchValueListMappings")
		.withExactArgs(sinon.match.same(oValueListModel), "zui5_epm_sample",
			sinon.match.same(oProperty))
		.returns(Promise.resolve(oValueListMappings));

	// code under test
	return oModel.getMetaModel().requestValueListInfo(sPropertyPath).then(function (oResult) {
		assert.deepEqual(oResult, {
			"" : {
				$model : oValueListModel,
				CollectionPath : ""
			}
		});
	});
});

// *********************************************************************************************
// A qualifier defined both locally (data service) and in a referenced value list service is
// a conflict and must reject.
QUnit.test("requestValueListInfo: same qualifier in reference and local", function (assert) {
	var sMappingUrl = "../ValueListService/$metadata",
		oProperty = {
			"$kind" : "Property"
		},
		oMetadata = {
			"$EntityContainer" : "zui5_epm_sample.Container",
			"zui5_epm_sample.Container" : {
				"$kind" : "EntityContainer",
				"ProductList" : {
					"$kind" : "EntitySet",
					"$Type" : "zui5_epm_sample.Product"
				}
			},
			"zui5_epm_sample.Product" : {
				"$kind" : "Entity",
				"Category" : oProperty
			},
			"$Annotations" : {
				"zui5_epm_sample.Product/Category" : {
					"@com.sap.vocabularies.Common.v1.ValueListReferences" : [sMappingUrl],
					"@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : {}
				}
			}
		},
		oModel = new ODataModel({
			serviceUrl : "/Foo/ValueListService/",
			synchronizationMode : "None"
		}),
		oMetaModelMock = this.mock(oModel.getMetaModel()),
		sPropertyPath = "/ProductList('HT-1000')/Category",
		oValueListModel = {};

	oMetaModelMock.expects("fetchEntityContainer").atLeast(1)
		.returns(SyncPromise.resolve(oMetadata));
	oMetaModelMock.expects("getOrCreateValueListModel")
		.withExactArgs(sMappingUrl)
		.returns(oValueListModel);
	oMetaModelMock.expects("fetchValueListMappings")
		.withExactArgs(sinon.match.same(oValueListModel), "zui5_epm_sample",
			sinon.match.same(oProperty))
		.returns(Promise.resolve({"foo" : {}}));

	// code under test
	return oModel.getMetaModel().requestValueListInfo(sPropertyPath).then(function () {
		assert.ok(false);
	}, function (oError) {
		assert.strictEqual(oError.message,
			"Annotations 'com.sap.vocabularies.Common.v1.ValueListMapping' with identical "
				+ "qualifier 'foo' for property " + sPropertyPath
				+ " in " + oModel.sServiceUrl + "$metadata and " + sMappingUrl);
	});
});

// *********************************************************************************************
// fetchModule resolves synchronously when sap.ui.require already knows the module.
QUnit.test("fetchModule: synchronously", function (assert) {
	var vModule = {};

	this.mock(sap.ui).expects("require")
		.withExactArgs("sap/ui/model/odata/type/Int")
		.returns(vModule); // requested module already loaded

	// code under test
	assert.strictEqual(this.oMetaModel.fetchModule("sap.ui.model.odata.type.Int").getResult(),
		vModule);
});

// *********************************************************************************************
// fetchModule falls back to the async sap.ui.require([...], callback) form otherwise.
QUnit.test("fetchModule, asynchronous", function (assert) {
	var vModule = {},
		sModuleName = "sap/ui/model/odata/type/Int64",
		oSapUiMock = this.mock(sap.ui);

	oSapUiMock.expects("require")
		.withExactArgs(sModuleName)
		.returns(undefined); // requested module not yet loaded
	oSapUiMock.expects("require")
		.withExactArgs([sModuleName], sinon.match.func)
		.callsArgWithAsync(1, vModule);

	// code under test
	return this.oMetaModel.fetchModule("sap.ui.model.odata.type.Int64")
		.then(function (oResult) {
			assert.strictEqual(oResult, vModule);
		});
});

//*********************************************************************************************
// Integration tests against the real (proxied) sample OData service; only run when
// TestUtils reports a real back end.
if (TestUtils.isRealOData()) {
	//*****************************************************************************************
	QUnit.test("getValueListType, requestValueListInfo: realOData", function (assert) {
		var sPath = new URI(TestUtils.proxy(sSampleServiceUrl))
				.absoluteTo(window.location.pathname).toString(),
			oModel = new ODataModel({
				serviceUrl : sPath,
				synchronizationMode : "None"
			}),
			oMetaModel = oModel.getMetaModel(),
			sPropertyPath = "/ProductList('HT-1000')/Category";

		return oMetaModel.requestObject("/ProductList/").then(function () {
			assert.strictEqual(oMetaModel.getValueListType(
					"/com.sap.gateway.default.zui5_epm_sample.v0002.Contact/Sex"),
				ValueListType.Fixed);
			assert.strictEqual(oMetaModel.getValueListType(sPropertyPath),
				ValueListType.Standard);
			return oMetaModel.requestValueListInfo(sPropertyPath).then(function (oResult) {
				var oValueListInfo = oResult[""];

				assert.strictEqual(oValueListInfo.CollectionPath, "H_EPM_PD_CATS_SH_Set");
			});
		});
	});

	//*****************************************************************************************
	// Follows the value list chain: data service -> value list service -> its own value list.
	QUnit.test("requestValueListInfo: same model w/o reference, realOData", function (assert) {
		var oModel = new ODataModel({
				serviceUrl : TestUtils.proxy(sSampleServiceUrl),
				synchronizationMode : "None"
			}),
			oMetaModel = oModel.getMetaModel(),
			sPropertyPath = "/ProductList/0/CurrencyCode",
			oValueListMetaModel;

		return oMetaModel.requestObject("/ProductList/").then(function () {
			// value list in the data service
			assert.strictEqual(oMetaModel.getValueListType(sPropertyPath),
				ValueListType.Standard);
			return oMetaModel.requestValueListInfo(sPropertyPath);
		}).then(function (oValueListInfo) {
			var sPropertyPath2 = "/H_TCURC_SH_Set/1/WAERS";

			// value list in the value list service
			oValueListMetaModel = oValueListInfo[""].$model.getMetaModel();
			assert.strictEqual(oValueListMetaModel.getValueListType(sPropertyPath2),
				ValueListType.Standard);
			assert.strictEqual(oValueListInfo[""].CollectionPath, "H_TCURC_SH_Set");
			return oValueListMetaModel.requestValueListInfo(sPropertyPath2);
		}).then(function (oValueListInfo) {
			assert.strictEqual(oValueListInfo[""].$model.getMetaModel(), oValueListMetaModel);
			assert.strictEqual(oValueListInfo[""].CollectionPath, "TCURC_CT_Set");
		});
	});
}
});
//TODO getContext vs. createBindingContext; map of "singletons" vs. memory leak
// NOTE(review): the following lines are a repository-metadata artifact fused onto the file
// tail during extraction — not part of the test code. Preserved here as comments:
// repo: cschuff/openui5
// path: src/sap.ui.core/test/sap/ui/core/qunit/odata/v4/ODataMetaModel.qunit.js
// language: JavaScript; license: apache-2.0; size: 161,426