text
stringlengths
2
1.04M
meta
dict
<?php class Encapsulator { public static function toTextField($_text) { return "<textarea>$_text</textarea>"; } public static function toInputField($_text) { return "<input type='text' value='$_text'>"; } public static function toDatePicker($_text) { return "<input type='date' value='$_text'>"; } } ?>
{ "content_hash": "e8e955d4f3bc75f6fba450de019528f6", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 46, "avg_line_length": 21.4, "alnum_prop": 0.6510903426791277, "repo_name": "ogrady/SuperInfBros", "id": "19d89c0d2d451ecb6244b057345e3a30aaa1b985", "size": "321", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cls/cls.Encapsulator.php", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "35631" }, { "name": "PHP", "bytes": "19374" } ], "symlink_target": "" }
<?php namespace Ekino\HalClient\Deserialization { use Ekino\HalClient\Deserialization\Construction\ProxyObjectConstruction; use Ekino\HalClient\Deserialization\Handler\ArrayCollectionHandler; use Ekino\HalClient\Deserialization\Handler\DateHandler; use Ekino\HalClient\Deserialization\ResourceDeserializationVisitor; use Ekino\HalClient\HttpClient\HttpResponse; use Ekino\HalClient\Resource; use JMS\Serializer\Naming\CamelCaseNamingStrategy; use JMS\Serializer\SerializerBuilder; use Doctrine\Common\Collections\ArrayCollection; class DeserializationTest extends \PHPUnit_Framework_TestCase { /** * @return Resource */ public function getResource() { $client = $this->getMock('Ekino\HalClient\HttpClient\HttpClientInterface'); $client->expects($this->exactly(1))->method('get')->will($this->returnCallback(function($url) { if ($url == '/users/1') { return new HttpResponse(200, array( 'Content-Type' => 'application/hal+json' ), json_encode(array( 'name' => 'Thomas Rabaix', 'email' => 'thomas.rabaix@ekino.com' ))); } })); $resource = new Resource($client, array( 'name' => 'Salut', ), array( 'fragments' => array('href' => '/document/1/fragments'), 'author' => array('href' => '/users/1') ), array( 'fragments' => array( array( 'type' => 'test', 'settings' => array( 'color' => 'red' ) ), array( 'type' => 'image', 'settings' => array( 'url' => 'http://dummyimage.com/600x400/000/fff' ) ) ) )); return $resource; } public function testMapping() { $resource = $this->getResource(); $object = Builder::build()->deserialize($resource, 'Ekino\HalClient\Deserialization\Article', 'hal'); $this->assertEquals('Salut', $object->getName()); $fragments = $object->getFragments(); $this->assertCount(2, $fragments); $this->assertEquals($fragments[0]->getType(), 'test'); $this->assertEquals($fragments[1]->getType(), 'image'); $this->assertNotNull($object->getAuthor()); $this->assertEquals($object->getAuthor()->getEmail(), 'thomas.rabaix@ekino.com'); } public function testWithValidProxy() { 
$serializerBuilder = Builder::get(false) ->setObjectConstructor($constructor = new ProxyObjectConstruction()); // todo, inject proxy handler $serializerBuilder->setObjectConstructor($constructor); $resource = $this->getResource(); $serializer = $serializerBuilder->build(); $constructor->setSerializer($serializer); $object = $serializer->deserialize($resource, 'Ekino\HalClient\Deserialization\Article', 'hal'); $this->assertInstanceOf('Proxy\Ekino\HalClient\Deserialization\Article', $object); $this->assertInstanceOf('Ekino\HalClient\Deserialization\Article', $object); $this->assertEquals('Salut', $object->getName()); $fragments = $object->getFragments(); $this->assertCount(2, $fragments); $this->assertEquals($fragments[0]->getType(), 'test'); $this->assertEquals($fragments[1]->getType(), 'image'); $this->assertInstanceOf('Proxy\Ekino\HalClient\Deserialization\Author', $object->getAuthor()); $this->assertInstanceOf('Ekino\HalClient\Deserialization\Author', $object->getAuthor()); $this->assertEquals($object->getAuthor()->getEmail(), 'thomas.rabaix@ekino.com'); } } } namespace Proxy\Ekino\HalClient\Deserialization { use Ekino\HalClient\Proxy\HalResourceEntity; use Ekino\HalClient\Proxy\HalResourceEntityInterface; use Ekino\HalClient\Resource; class Article extends \Ekino\HalClient\Deserialization\Article implements HalResourceEntityInterface { use HalResourceEntity; /** * @return Author */ public function getAuthor() { if (!$this->author && !$this->halIsLoaded('author')) { $this->halLoaded('author'); $resource = $this->getHalResource()->get('author'); if ($resource instanceof Resource) { $this->author = $this->getHalSerializer()->deserialize($resource, 'Ekino\HalClient\Deserialization\Author', 'hal'); } } return $this->author; } } class Author extends \Ekino\HalClient\Deserialization\Author implements HalResourceEntityInterface { use HalResourceEntity; } } namespace Ekino\HalClient\Deserialization { use JMS\Serializer\Annotation as Serializer; class Article { /** * 
@Serializer\Type("string") * * @var string */ protected $name; /** * @Serializer\Type("Doctrine\Common\Collections\ArrayCollection<Ekino\HalClient\Deserialization\Fragment>") * * @var array */ protected $fragments; /** * @param array $author */ public function setAuthor(Author $author) { $this->author = $author; } /** * @return array */ public function getAuthor() { return $this->author; } /** * @Serializer\Type("Ekino\HalClient\Deserialization\Author") * * @var array */ protected $author; /** * @param array $fragments */ public function setFragments(ArrayCollection $fragments) { $this->fragments = $fragments; } /** * @return array */ public function getFragments() { return $this->fragments; } /** * @param string $name */ public function setName($name) { $this->name = $name; } /** * @return string */ public function getName() { return $this->name; } } class Fragment { /** * @Serializer\Type("string") * * @var string */ protected $type; /** * @Serializer\Type("array") * * @var array */ protected $settings; /** * @param array $settings */ public function setSettings($settings) { $this->settings = $settings; } /** * @return array */ public function getSettings() { return $this->settings; } /** * @param string $type */ public function setType($type) { $this->type = $type; } /** * @return string */ public function getType() { return $this->type; } } class Author { /** * @Serializer\Type("string") * * @var string */ protected $name; /** * @Serializer\Type("string") * * @var string */ protected $email; /** * @param string $email */ public function setEmail($email) { $this->email = $email; } /** * @return string */ public function getEmail() { return $this->email; } /** * @param string $name */ public function setName($name) { $this->name = $name; } /** * @return string */ public function getName() { return $this->name; } } }
{ "content_hash": "8901a6612fad2f8839493b0ad58e02f7", "timestamp": "", "source": "github", "line_count": 317, "max_line_length": 135, "avg_line_length": 26.820189274447948, "alnum_prop": 0.49376617266525524, "repo_name": "ekino/php-hal-client", "id": "d1ae1cf88064fd61d0e9dc83cf9b606f1f41cce2", "size": "8705", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/HalClient/Deserialization/DeserializationTest.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "64847" } ], "symlink_target": "" }
package com.intellij.openapi.roots.ui.configuration; import com.intellij.facet.impl.ProjectFacetsConfigurator; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.DataProvider; import com.intellij.openapi.actionSystem.LangDataKeys; import com.intellij.openapi.components.ServiceKt; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.extensions.ExtensionPointName; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleConfigurationEditor; import com.intellij.openapi.module.impl.ModuleConfigurationStateImpl; import com.intellij.openapi.options.Configurable; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.options.ModuleConfigurableEP; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ModifiableRootModel; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.ModuleRootModel; import com.intellij.openapi.roots.OrderEntry; import com.intellij.openapi.roots.impl.ModuleRootManagerImpl; import com.intellij.openapi.roots.impl.libraries.LibraryEx; import com.intellij.openapi.roots.impl.libraries.LibraryTableBase; import com.intellij.openapi.roots.libraries.Library; import com.intellij.openapi.roots.libraries.LibraryTable; import com.intellij.ui.navigation.History; import com.intellij.ui.navigation.Place; import com.intellij.util.EventDispatcher; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.*; import java.util.List; /** * @author Eugene Zhuravlev * Date: Oct 4, 2003 * Time: 6:29:56 PM */ 
@SuppressWarnings({"AssignmentToStaticFieldFromInstanceMethod"}) public abstract class ModuleEditor implements Place.Navigator, Disposable { private static final Logger LOG = Logger.getInstance(ModuleEditor.class); private static final ExtensionPointName<ModuleConfigurableEP> MODULE_CONFIGURABLES = ExtensionPointName.create("com.intellij.moduleConfigurable"); public static final String SELECTED_EDITOR_NAME = "selectedEditor"; private final Project myProject; private JPanel myGenericSettingsPanel; private ModifiableRootModel myModifiableRootModel; // important: in order to correctly update OrderEntries UI use corresponding proxy for the model private final ModulesProvider myModulesProvider; private String myName; private final Module myModule; protected final List<ModuleConfigurationEditor> myEditors = new ArrayList<ModuleConfigurationEditor>(); private ModifiableRootModel myModifiableRootModelProxy; private final EventDispatcher<ChangeListener> myEventDispatcher = EventDispatcher.create(ChangeListener.class); @NonNls private static final String METHOD_COMMIT = "commit"; private boolean myEditorsInitialized; protected History myHistory; public ModuleEditor(Project project, ModulesProvider modulesProvider, @NotNull Module module) { myProject = project; myModulesProvider = modulesProvider; myModule = module; myName = module.getName(); } public void init(History history) { myHistory = history; for (ModuleConfigurationEditor each : myEditors) { if (each instanceof ModuleElementsEditor) { ((ModuleElementsEditor)each).setHistory(myHistory); } } restoreSelectedEditor(); } public abstract ProjectFacetsConfigurator getFacetsConfigurator(); protected abstract JComponent createCenterPanel(); @Nullable public abstract ModuleConfigurationEditor getSelectedEditor(); public abstract void selectEditor(String displayName); protected abstract void restoreSelectedEditor(); @Nullable public abstract ModuleConfigurationEditor getEditor(@NotNull String displayName); protected abstract 
void disposeCenterPanel(); public interface ChangeListener extends EventListener { void moduleStateChanged(ModifiableRootModel moduleRootModel); } public void addChangeListener(ChangeListener listener) { myEventDispatcher.addListener(listener); } public void removeChangeListener(ChangeListener listener) { myEventDispatcher.removeListener(listener); } @Nullable public Module getModule() { final Module[] all = myModulesProvider.getModules(); for (Module each : all) { if (each == myModule) return myModule; } return myModulesProvider.getModule(myName); } public ModifiableRootModel getModifiableRootModel() { if (myModifiableRootModel == null) { final Module module = getModule(); if (module != null) { myModifiableRootModel = ((ModuleRootManagerImpl)ModuleRootManager.getInstance(module)).getModifiableModel(new UIRootConfigurationAccessor(myProject)); } } return myModifiableRootModel; } public OrderEntry[] getOrderEntries() { if (myModifiableRootModel == null) { // do not clone all model if not necessary return ModuleRootManager.getInstance(getModule()).getOrderEntries(); } else { return myModifiableRootModel.getOrderEntries(); } } public ModifiableRootModel getModifiableRootModelProxy() { if (myModifiableRootModelProxy == null) { final ModifiableRootModel rootModel = getModifiableRootModel(); if (rootModel != null) { myModifiableRootModelProxy = (ModifiableRootModel)Proxy.newProxyInstance( getClass().getClassLoader(), new Class[]{ModifiableRootModel.class}, new ModifiableRootModelInvocationHandler(rootModel) ); } } return myModifiableRootModelProxy; } public ModuleRootModel getRootModel() { if (myModifiableRootModel != null) { return getModifiableRootModelProxy(); } return ModuleRootManager.getInstance(myModule); } public boolean isModified() { for (ModuleConfigurationEditor moduleElementsEditor : myEditors) { if (moduleElementsEditor.isModified()) { return true; } } return false; } private void createEditors(@Nullable Module module) { if (module == null) return; 
ModuleConfigurationState state = createModuleConfigurationState(); for (ModuleConfigurationEditorProvider provider : collectProviders(module)) { ModuleConfigurationEditor[] editors = provider.createEditors(state); if (editors.length > 0 && provider instanceof ModuleConfigurationEditorProviderEx && ((ModuleConfigurationEditorProviderEx)provider).isCompleteEditorSet()) { myEditors.clear(); ContainerUtil.addAll(myEditors, editors); break; } else { ContainerUtil.addAll(myEditors, editors); } } for (Configurable moduleConfigurable : ServiceKt.getComponents(module, Configurable.class)) { reportDeprecatedModuleEditor(moduleConfigurable.getClass()); myEditors.add(new ModuleConfigurableWrapper(moduleConfigurable)); } for(ModuleConfigurableEP extension : module.getExtensions(MODULE_CONFIGURABLES)) { if (extension.canCreateConfigurable()) { Configurable configurable = extension.createConfigurable(); if (configurable != null) { reportDeprecatedModuleEditor(configurable.getClass()); myEditors.add(new ModuleConfigurableWrapper(configurable)); } } } } private static Set<Class<?>> ourReportedDeprecatedClasses = new HashSet<Class<?>>(); private static void reportDeprecatedModuleEditor(Class<?> aClass) { if (ourReportedDeprecatedClasses.add(aClass)) { LOG.warn(aClass.getName() + " uses deprecated way to register itself as a module editor. 
" + ModuleConfigurationEditorProvider.class.getName() + " extension point should be used instead"); } } private static ModuleConfigurationEditorProvider[] collectProviders(@NotNull Module module) { List<ModuleConfigurationEditorProvider> result = new ArrayList<ModuleConfigurationEditorProvider>(); result.addAll(ServiceKt.getComponents(module, ModuleConfigurationEditorProvider.class)); for (ModuleConfigurationEditorProvider component : result) { reportDeprecatedModuleEditor(component.getClass()); } ContainerUtil.addAll(result, Extensions.getExtensions(ModuleConfigurationEditorProvider.EP_NAME, module)); return result.toArray(new ModuleConfigurationEditorProvider[result.size()]); } public ModuleConfigurationState createModuleConfigurationState() { return new ModuleConfigurationStateImpl(myProject, myModulesProvider) { @Override public ModifiableRootModel getRootModel() { return getModifiableRootModelProxy(); } @Override public FacetsProvider getFacetsProvider() { return getFacetsConfigurator(); } }; } private JPanel createPanel() { getModifiableRootModel(); //initialize model if needed getModifiableRootModelProxy(); myGenericSettingsPanel = new ModuleEditorPanel(); createEditors(getModule()); final JComponent component = createCenterPanel(); myGenericSettingsPanel.add(component, BorderLayout.CENTER); myEditorsInitialized = true; return myGenericSettingsPanel; } public JPanel getPanel() { if (myGenericSettingsPanel == null) { myGenericSettingsPanel = createPanel(); } return myGenericSettingsPanel; } public void moduleCountChanged() { updateOrderEntriesInEditors(false); } private void updateOrderEntriesInEditors(boolean forceInitEditors) { if (getModule() != null) { //module with attached module libraries was deleted if (myEditorsInitialized || forceInitEditors) { getPanel(); //init editor if needed for (final ModuleConfigurationEditor myEditor : myEditors) { myEditor.moduleStateChanged(); } } 
myEventDispatcher.getMulticaster().moduleStateChanged(getModifiableRootModelProxy()); } } public void updateCompilerOutputPathChanged(String baseUrl, String moduleName){ if (myGenericSettingsPanel == null) return; //wasn't initialized yet for (final ModuleConfigurationEditor myEditor : myEditors) { if (myEditor instanceof ModuleElementsEditor) { ((ModuleElementsEditor)myEditor).moduleCompileOutputChanged(baseUrl, moduleName); } } } @Override public void dispose() { try { for (final ModuleConfigurationEditor myEditor : myEditors) { myEditor.disposeUIResources(); } myEditors.clear(); disposeCenterPanel(); if (myModifiableRootModel != null) { myModifiableRootModel.dispose(); } myGenericSettingsPanel = null; } finally { myModifiableRootModel = null; myModifiableRootModelProxy = null; } } public ModifiableRootModel apply() throws ConfigurationException { try { for (ModuleConfigurationEditor editor : myEditors) { editor.saveData(); editor.apply(); } return myModifiableRootModel; } finally { myModifiableRootModel = null; myModifiableRootModelProxy = null; } } public void canApply() throws ConfigurationException { for (ModuleConfigurationEditor editor : myEditors) { if (editor instanceof ModuleElementsEditor) { ((ModuleElementsEditor)editor).canApply(); } } } public String getName() { return myName; } private class ModifiableRootModelInvocationHandler implements InvocationHandler, ProxyDelegateAccessor { private final ModifiableRootModel myDelegateModel; @NonNls private final Set<String> myCheckedNames = new HashSet<String>( Arrays.asList("addOrderEntry", "addLibraryEntry", "addInvalidLibrary", "addModuleOrderEntry", "addInvalidModuleEntry", "removeOrderEntry", "setSdk", "inheritSdk", "inheritCompilerOutputPath", "setExcludeOutput", "replaceEntryOfType", "rearrangeOrderEntries")); ModifiableRootModelInvocationHandler(ModifiableRootModel model) { myDelegateModel = model; } @Override public Object invoke(Object object, Method method, Object[] params) throws Throwable { final 
boolean needUpdate = myCheckedNames.contains(method.getName()); try { final Object result = method.invoke(myDelegateModel, unwrapParams(params)); if (result instanceof LibraryTable) { return Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{LibraryTable.class}, new LibraryTableInvocationHandler((LibraryTable)result)); } return result; } catch (InvocationTargetException e) { throw e.getCause(); } finally { if (needUpdate) { updateOrderEntriesInEditors(true); } } } @Override public Object getDelegate() { return myDelegateModel; } } private class LibraryTableInvocationHandler implements InvocationHandler, ProxyDelegateAccessor { private final LibraryTable myDelegateTable; @NonNls private final Set<String> myCheckedNames = new HashSet<String>(Arrays.asList("removeLibrary" /*,"createLibrary"*/)); LibraryTableInvocationHandler(LibraryTable table) { myDelegateTable = table; } @Override public Object invoke(Object object, Method method, Object[] params) throws Throwable { final boolean needUpdate = myCheckedNames.contains(method.getName()); try { final Object result = method.invoke(myDelegateTable, unwrapParams(params)); if (result instanceof Library) { return Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{result instanceof LibraryEx ? LibraryEx.class : Library.class}, new LibraryInvocationHandler((Library)result)); } else if (result instanceof LibraryTable.ModifiableModel) { return Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{LibraryTableBase.ModifiableModel.class}, new LibraryTableModelInvocationHandler((LibraryTable.ModifiableModel)result)); } if (result instanceof Library[]) { Library[] libraries = (Library[])result; for (int idx = 0; idx < libraries.length; idx++) { Library library = libraries[idx]; libraries[idx] = (Library)Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{library instanceof LibraryEx ? 
LibraryEx.class : Library.class}, new LibraryInvocationHandler(library)); } } return result; } catch (InvocationTargetException e) { throw e.getCause(); } finally { if (needUpdate) { updateOrderEntriesInEditors(true); } } } @Override public Object getDelegate() { return myDelegateTable; } } private class LibraryInvocationHandler implements InvocationHandler, ProxyDelegateAccessor { private final Library myDelegateLibrary; LibraryInvocationHandler(Library delegateLibrary) { myDelegateLibrary = delegateLibrary; } @Override public Object invoke(Object object, Method method, Object[] params) throws Throwable { try { final Object result = method.invoke(myDelegateLibrary, unwrapParams(params)); if (result instanceof LibraryEx.ModifiableModelEx) { return Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{LibraryEx.ModifiableModelEx.class}, new LibraryModifiableModelInvocationHandler((LibraryEx.ModifiableModelEx)result)); } return result; } catch (InvocationTargetException e) { throw e.getCause(); } } @Override public Object getDelegate() { return myDelegateLibrary; } } private class LibraryModifiableModelInvocationHandler implements InvocationHandler, ProxyDelegateAccessor { private final Library.ModifiableModel myDelegateModel; LibraryModifiableModelInvocationHandler(Library.ModifiableModel delegateModel) { myDelegateModel = delegateModel; } @Override public Object invoke(Object object, Method method, Object[] params) throws Throwable { final boolean needUpdate = METHOD_COMMIT.equals(method.getName()); try { return method.invoke(myDelegateModel, unwrapParams(params)); } catch (InvocationTargetException e) { throw e.getCause(); } finally { if (needUpdate) { updateOrderEntriesInEditors(true); } } } @Override public Object getDelegate() { return myDelegateModel; } } private class LibraryTableModelInvocationHandler implements InvocationHandler, ProxyDelegateAccessor { private final LibraryTable.ModifiableModel myDelegateModel; 
LibraryTableModelInvocationHandler(LibraryTable.ModifiableModel delegateModel) { myDelegateModel = delegateModel; } @Override public Object invoke(Object object, Method method, Object[] params) throws Throwable { final boolean needUpdate = METHOD_COMMIT.equals(method.getName()); try { Object result = method.invoke(myDelegateModel, unwrapParams(params)); if (result instanceof Library[]) { Library[] libraries = (Library[])result; for (int idx = 0; idx < libraries.length; idx++) { Library library = libraries[idx]; libraries[idx] = (Library)Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{LibraryEx.class}, new LibraryInvocationHandler(library)); } } if (result instanceof Library) { result = Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{LibraryEx.class}, new LibraryInvocationHandler((Library)result)); } return result; } catch (InvocationTargetException e) { throw e.getCause(); } finally { if (needUpdate) { updateOrderEntriesInEditors(true); } } } @Override public Object getDelegate() { return myDelegateModel; } } public interface ProxyDelegateAccessor { Object getDelegate(); } private static Object[] unwrapParams(Object[] params) { if (params == null || params.length == 0) { return params; } final Object[] unwrappedParams = new Object[params.length]; for (int idx = 0; idx < params.length; idx++) { Object param = params[idx]; if (param != null && Proxy.isProxyClass(param.getClass())) { final InvocationHandler invocationHandler = Proxy.getInvocationHandler(param); if (invocationHandler instanceof ProxyDelegateAccessor) { param = ((ProxyDelegateAccessor)invocationHandler).getDelegate(); } } unwrappedParams[idx] = param; } return unwrappedParams; } @Nullable public String getHelpTopic() { if (myEditors.isEmpty()) { return null; } final ModuleConfigurationEditor selectedEditor = getSelectedEditor(); return selectedEditor != null ? 
selectedEditor.getHelpTopic() : null; } public void setModuleName(final String name) { myName = name; } private class ModuleEditorPanel extends JPanel implements DataProvider{ public ModuleEditorPanel() { super(new BorderLayout()); } @Override public Object getData(String dataId) { if (LangDataKeys.MODULE_CONTEXT.is(dataId)) { return getModule(); } return null; } } @Override public void setHistory(final History history) { } }
{ "content_hash": "25c022a9c89fd425f7df2adf9d903529", "timestamp": "", "source": "github", "line_count": 577, "max_line_length": 194, "avg_line_length": 34.7209705372617, "alnum_prop": 0.7071977638015374, "repo_name": "lucafavatella/intellij-community", "id": "f5129a9f14670a573dca087fc23acab10f1995d6", "size": "20634", "binary": false, "copies": "4", "ref": "refs/heads/cli-wip", "path": "java/idea-ui/src/com/intellij/openapi/roots/ui/configuration/ModuleEditor.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "AMPL", "bytes": "20665" }, { "name": "AspectJ", "bytes": "182" }, { "name": "Batchfile", "bytes": "63896" }, { "name": "C", "bytes": "214817" }, { "name": "C#", "bytes": "1538" }, { "name": "C++", "bytes": "191650" }, { "name": "CSS", "bytes": "195482" }, { "name": "CoffeeScript", "bytes": "1759" }, { "name": "Cucumber", "bytes": "14382" }, { "name": "Erlang", "bytes": "10" }, { "name": "Groff", "bytes": "35232" }, { "name": "Groovy", "bytes": "2470587" }, { "name": "HLSL", "bytes": "57" }, { "name": "HTML", "bytes": "1804175" }, { "name": "J", "bytes": "5050" }, { "name": "Java", "bytes": "155219443" }, { "name": "JavaScript", "bytes": "562259" }, { "name": "Jupyter Notebook", "bytes": "92629" }, { "name": "Kotlin", "bytes": "1454397" }, { "name": "Lex", "bytes": "179878" }, { "name": "Makefile", "bytes": "2352" }, { "name": "NSIS", "bytes": "53411" }, { "name": "Objective-C", "bytes": "27183" }, { "name": "Perl", "bytes": "903" }, { "name": "Perl6", "bytes": "26" }, { "name": "Protocol Buffer", "bytes": "6570" }, { "name": "Python", "bytes": "23630760" }, { "name": "Ruby", "bytes": "1213" }, { "name": "Scala", "bytes": "11698" }, { "name": "Shell", "bytes": "70829" }, { "name": "Smalltalk", "bytes": "64" }, { "name": "TeX", "bytes": "25473" }, { "name": "TypeScript", "bytes": "9469" }, { "name": "XSLT", "bytes": "113040" } ], "symlink_target": "" }
package com.hazelcast.nio; import com.hazelcast.version.Version; /** * An instance which supports versioning. * <p> * It may be any version (cluster version, node version, custom version, etc.), * that is up to the implementer. * * @since 3.8 */ public interface VersionAware { /** * @return the version or {@code Version.UNKNOWN} if version is unknown to the object */ Version getVersion(); /** * If the serializer supports versioning it may set the version to use for * the serialization on this object. * * @param version version to set */ void setVersion(Version version); }
{ "content_hash": "8edafc3615b0f06e5379f61e4da99843", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 89, "avg_line_length": 22.17241379310345, "alnum_prop": 0.6609642301710731, "repo_name": "mesutcelik/hazelcast", "id": "8c4d5d416209d018fb347ca363f3119d7c0f0206", "size": "1268", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "hazelcast/src/main/java/com/hazelcast/nio/VersionAware.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1261" }, { "name": "C", "bytes": "353" }, { "name": "Java", "bytes": "39634706" }, { "name": "Shell", "bytes": "29479" } ], "symlink_target": "" }
(function (factoryFn) { if (typeof define === 'function' && define.amd) define(['lodash', 'dagre'], factoryFn); else if (typeof module === 'object' && module.exports) module.exports = factoryFn(require('lodash'), require('dagre')); else this.nomnoml = factoryFn(_, dagre); })(function (_, dagre) { var skanaar = skanaar || {} skanaar.Canvas = function (canvas, callbacks){ var ctx = canvas.getContext('2d'); var mousePos = { x: 0, y: 0 } var twopi = 2*3.1416 function mouseEventToPos(event){ var e = canvas return { x: event.clientX - e.getBoundingClientRect().left - e.clientLeft + e.scrollLeft, y: event.clientY - e.getBoundingClientRect().top - e.clientTop + e.scrollTop } } if (callbacks) { canvas.addEventListener('mousedown', function (event){ if (callbacks.mousedown) callbacks.mousedown(mouseEventToPos(event)) }) canvas.addEventListener('mouseup', function (event){ if (callbacks.mouseup) callbacks.mouseup(mouseEventToPos(event)) }) canvas.addEventListener('mousemove', function (event){ mousePos = mouseEventToPos(event) if (callbacks.mousemove) callbacks.mousemove(mouseEventToPos(event)) }) } var chainable = { stroke: function (){ ctx.stroke() return chainable }, fill: function (){ ctx.fill() return chainable }, fillAndStroke: function (){ ctx.fill() ctx.stroke() return chainable } } function color255(r, g, b, a){ var optionalAlpha = a === undefined ? 1 : a var comps = [Math.floor(r), Math.floor(g), Math.floor(b), optionalAlpha] return 'rgba('+ comps.join() +')' } function tracePath(path, offset, s){ s = s === undefined ? 
1 : s offset = offset || {x:0, y:0} ctx.beginPath() ctx.moveTo(offset.x + s*path[0].x, offset.y + s*path[0].y) for(var i=1, len=path.length; i<len; i++) ctx.lineTo(offset.x + s*path[i].x, offset.y + s*path[i].y) return chainable } return { mousePos: function (){ return mousePos }, width: function (){ return canvas.width }, height: function (){ return canvas.height }, ctx: ctx, background: function (r, g, b){ ctx.fillStyle = color255(r, g, b) ctx.fillRect (0, 0, canvas.width, canvas.height) }, clear: function (){ ctx.clearRect(0, 0, canvas.width, canvas.height) }, circle: function (x, y, r){ ctx.beginPath() if (arguments.length === 2) ctx.arc(x.x, x.y, y, 0, twopi) else ctx.arc(x, y, r, 0, twopi) return chainable }, ellipse: function (center, rx, ry, start, stop){ if (start === undefined) start = 0 if (stop === undefined) stop = twopi ctx.beginPath() ctx.save() ctx.translate(center.x, center.y) ctx.scale(1, ry/rx) ctx.arc(0, 0, rx/2, start, stop) ctx.restore() return chainable }, arc: function (x, y, r, start, stop){ ctx.beginPath() ctx.moveTo(x,y) ctx.arc(x, y, r, start, stop) return chainable }, roundRect: function (x, y, w, h, r){ ctx.beginPath() ctx.moveTo(x+r, y) ctx.arcTo(x+w, y, x+w, y+r, r) ctx.lineTo(x+w, y+h-r) ctx.arcTo(x+w, y+h, x+w-r, y+h, r) ctx.lineTo(x+r, y+h) ctx.arcTo(x, y+h, x, y+h-r, r) ctx.lineTo(x, y+r) ctx.arcTo(x, y, x+r, y, r) ctx.closePath() return chainable }, rect: function (x, y, w, h){ ctx.beginPath() ctx.moveTo(x, y) ctx.lineTo(x+w, y) ctx.lineTo(x+w, y+h) ctx.lineTo(x, y+h) ctx.closePath() return chainable }, path: tracePath, circuit: function (path, offset, s){ tracePath(path, offset, s) ctx.closePath() return chainable }, colorNorm: function (r, g, b, a){ return color255(255*r, 255*g, 255*b, a) }, color255: color255, colorObjHSL: function (hue, sat, lit){ function component(v){ var x = Math.cos(6.283*v)/2 + 0.5 return lit*(1-sat + sat*x*x) } return { r: component(hue), g: component(hue-1/3), b: component(hue+1/3) } }, 
radialGradient: function (x, y, r1, r2, colors){ var grad = ctx.createRadialGradient(x, y, r1, x, y, r2) for(var key in colors) if (colors.hasOwnProperty(key)) grad.addColorStop(key, colors[key]) return grad }, font: function (f){ ctx.font = f }, fillStyle: function (s){ ctx.fillStyle = s }, strokeStyle: function (s){ ctx.strokeStyle = s }, textAlign: function (a){ ctx.textAlign = a }, lineCap: function (cap){ ctx.lineCap = cap }, lineJoin: function (join){ ctx.lineJoin = join }, lineWidth: function (w){ ctx.lineWidth = w }, arcTo: function (){ return ctx.arcTo.apply( ctx, arguments) }, beginPath: function (){ return ctx.beginPath.apply( ctx, arguments) }, fillText: function (){ return ctx.fillText.apply( ctx, arguments) }, lineTo: function (){ return ctx.lineTo.apply( ctx, arguments) }, measureText: function (){ return ctx.measureText.apply(ctx, arguments) }, moveTo: function (){ return ctx.moveTo.apply( ctx, arguments) }, restore: function (){ return ctx.restore.apply( ctx, arguments) }, save: function (){ return ctx.save.apply( ctx, arguments) }, scale: function (){ return ctx.scale.apply( ctx, arguments) }, setLineDash: function (){ return ctx.setLineDash.apply(ctx, arguments) }, stroke: function (){ return ctx.stroke.apply( ctx, arguments) }, translate: function (){ return ctx.translate.apply( ctx, arguments) } } }; ; var skanaar = skanaar || {} skanaar.sum = function sum(list, plucker){ var transform = { 'undefined': _.identity, 'string': function (obj){ return obj[plucker] }, 'number': function (obj){ return obj[plucker] }, 'function': plucker }[typeof plucker] for(var i=0, summation=0, len=list.length; i<len; i++) summation += transform(list[i]) return summation } skanaar.hasSubstring = function hasSubstring(haystack, needle){ if (needle === '') return true if (!haystack) return false return haystack.indexOf(needle) !== -1 } skanaar.format = function format(template /* variadic params */){ var parts = Array.prototype.slice.call(arguments, 1) return 
_.flatten(_.zip(template.split('#'), parts)).join('') }; var skanaar = skanaar || {}; skanaar.vector = { dist: function (a,b){ return skanaar.vector.mag(skanaar.vector.diff(a,b)) }, add: function (a,b){ return { x: a.x + b.x, y: a.y + b.y } }, diff: function (a,b){ return { x: a.x - b.x, y: a.y - b.y } }, mult: function (v,factor){ return { x: factor*v.x, y: factor*v.y } }, mag: function (v){ return Math.sqrt(v.x*v.x + v.y*v.y) }, normalize: function (v){ return skanaar.vector.mult(v, 1/skanaar.vector.mag(v)) }, rot: function (a){ return { x: a.y, y: -a.x } } }; ; var skanaar = skanaar || {} skanaar.Svg = function (globalStyle){ var initialState = { x: 0, y: 0, stroke: 'none', fill: 'none', textAlign: 'left' } var states = [initialState] var elements = [] function Element(name, attr, content) { attr.style = attr.style || '' return { name: name, attr: attr, content: content || undefined, stroke: function (){ this.attr.style += 'stroke:'+lastDefined('stroke')+';fill:none;'; return this }, fill: function (){ this.attr.style += 'stroke:none; fill:'+lastDefined('fill')+';'; return this }, fillAndStroke: function (){ this.attr.style += 'stroke:'+lastDefined('stroke')+';fill:'+lastDefined('fill')+';'; return this } } } function State(dx, dy){ return { x: dx, y: dy, stroke: null, fill: null, textAlign: null } } function trans(coord, axis){ states.forEach(function (t){ coord += t[axis] }) return coord } function tX(coord){ return Math.round(10*trans(coord, 'x'))/10 } function tY(coord){ return Math.round(10*trans(coord, 'y'))/10 } function lastDefined(property){ for(var i=states.length-1; i>=0; i--) if (states[i][property]) return states[i][property] return undefined } function last(list){ return list[list.length-1] } function tracePath(path, offset, s){ s = s === undefined ? 1 : s offset = offset || {x:0, y:0} var d = path.map(function (e, i){ return (i ? 
'L' : 'M') + tX(offset.x + s*e.x) + ' ' + tY(offset.y + s*e.y) }).join(' ') return newElement('path', { d: d }) } function newElement(type, attr, content) { var element = Element(type, attr, content) elements.push(element) return element } return { width: function (){ return elements.width }, height: function (){ return elements.height }, background: function (/*r, g, b*/){}, clear: function (){}, circle: function (x, y, r){ var attr = (arguments.length === 2) ? {r: y, cx: tX(x.x), cy: tY(x.y)} : {r: r, cx: tX(x), cy: tY(y)} var element = Element('circle', attr) elements.push(element) return element }, ellipse: function (center, w, h /*, start, stop*/){ return newElement('ellipse', { cx: tX(center.x), cy: tY(center.y), rx: w/2, ry: h/2 }) }, arc: function (x, y, r /*, start, stop*/){ return newElement('ellipse', { cx: tX(x), cy: tY(y), rx: r, ry: r }) }, roundRect: function (x, y, w, h, r){ return newElement('rect', { x: tX(x), y: tY(y), rx: r, ry: r, height: h, width: w }) }, rect: function (x, y, w, h){ return newElement('rect', { x: tX(x), y: tY(y), height: h, width: w }) }, path: tracePath, circuit: function (path, offset, s){ var element = tracePath(path, offset, s) element.attr.d += ' Z' return element }, font: function (font){ last(states).font = font; }, strokeStyle: function (stroke){ last(states).stroke = stroke }, fillStyle: function (fill){ last(states).fill = fill }, arcTo: function (x1, y1, x2, y2){ last(elements).attr.d += ('L'+tX(x1)+' '+tY(y1)+' L'+tX(x2)+' '+tY(y2)+' ') }, beginPath: function (){ return newElement('path', {d:''}) }, fillText: function (text, x, y){ if (lastDefined('textAlign') === 'center') x -= this.measureText(text).width/2 return newElement('text', { x: tX(x), y: tY(y) }, text) }, lineCap: function (cap){ globalStyle += ';stroke-linecap:'+cap }, lineJoin: function (join){ globalStyle += ';stroke-linejoin:'+join }, lineTo: function (x, y){ last(elements).attr.d += ('L' + tX(x) + ' ' + tY(y) + ' ') }, lineWidth: function (w){ 
globalStyle += ';stroke-width:'+w}, measureText: function (s){ return { width: s.length * 8.5 } }, moveTo: function (x, y){ last(elements).attr.d += ('M' + tX(x) + ' ' + tY(y) + ' ') }, restore: function (){ states.pop() }, save: function (){ states.push(State(0, 0)) }, scale: function (){}, setLineDash: function (){}, stroke: function (){ last(elements).stroke() }, textAlign: function (a){ last(states).textAlign = a }, translate: function (dx, dy){ last(states).x += dx last(states).y += dy }, serialize: function (_attributes){ var attrs = _attributes || {}; attrs.version = attrs.version || '1.1'; attrs.baseProfile = attrs.baseProfile || 'full'; attrs.width = attrs.width || '100%'; attrs.height = attrs.height || '100%'; attrs.xmlns = attrs.xmlns || 'http://www.w3.org/2000/svg'; attrs['xmlns:xlink'] = attrs['xmlns:xlink'] || 'http://www.w3.org/1999/xlink'; attrs['xmlns:ev'] = attrs['xmlns:ev'] || 'http://www.w3.org/2001/xml-events'; attrs.style = attrs.style || lastDefined('font') + ';' + globalStyle; function toAttr(obj){ function toKeyValue(key){ return key + '="' + obj[key] + '"' } return Object.keys(obj).map(toKeyValue).join(' ') } function toHtml(e){ return '<'+e.name+' '+toAttr(e.attr)+'>'+(e.content || '')+'</'+e.name+'>' } var innerSvg = elements.map(toHtml).join('\n') return toHtml(Element('svg', attrs, innerSvg)) } } }; ; /* parser generated by jison 0.4.13 */ /* Returns a Parser object of the following structure: Parser: { yy: {} } Parser.prototype: { yy: {}, trace: function(), symbols_: {associative list: name ==> number}, terminals_: {associative list: number ==> name}, productions_: [...], performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$), table: [...], defaultActions: {...}, parseError: function(str, hash), parse: function(input), lexer: { EOF: 1, parseError: function(str, hash), setInput: function(input), input: function(), unput: function(str), more: function(), less: function(n), pastInput: function(), upcomingInput: 
function(), showPosition: function(), test_match: function(regex_match_array, rule_index), next: function(), lex: function(), begin: function(condition), popState: function(), _currentRules: function(), topState: function(), pushState: function(condition), options: { ranges: boolean (optional: true ==> token location info will include a .range[] member) flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match) backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code) }, performAction: function(yy, yy_, $avoiding_name_collisions, YY_START), rules: [...], conditions: {associative list: name ==> set}, } } token location info (@$, _$, etc.): { first_line: n, last_line: n, first_column: n, last_column: n, range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based) } the parseError function receives a 'hash' object with these members for lexer and parser errors: { text: (matched text) token: (the produced terminal token, if any) line: (yylineno) } while parser (grammar) errors will also provide these members, i.e. 
parser errors deliver a superset of attributes: { loc: (yylloc) expected: (string describing the set of expected tokens) recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error) } */ var nomnomlCoreParser = (function(){ var parser = {trace: function trace() { }, yy: {}, symbols_: {"error":2,"root":3,"compartment":4,"EOF":5,"slot":6,"IDENT":7,"class":8,"association":9,"SEP":10,"parts":11,"|":12,"[":13,"]":14,"$accept":0,"$end":1}, terminals_: {2:"error",5:"EOF",7:"IDENT",10:"SEP",12:"|",13:"[",14:"]"}, productions_: [0,[3,2],[6,1],[6,1],[6,1],[4,1],[4,3],[11,1],[11,3],[11,2],[9,3],[8,3]], performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */ /**/) { /* this == yyval */ var $0 = $$.length - 1; switch (yystate) { case 1: return $$[$0-1] break; case 2:this.$ = $$[$0].trim().replace(/\\(\[|\]|\|)/g, '$'+'1'); break; case 3:this.$ = $$[$0]; break; case 4:this.$ = $$[$0]; break; case 5:this.$ = [$$[$0]]; break; case 6:this.$ = $$[$0-2].concat($$[$0]); break; case 7:this.$ = [$$[$0]]; break; case 8:this.$ = $$[$0-2].concat([$$[$0]]); break; case 9:this.$ = $$[$0-1].concat([[]]); break; case 10: var t = $$[$0-1].trim().replace(/\\(\[|\]|\|)/g, '$'+'1').match('^(.*?)([<:o+]*-/?-*[:o+>]*)(.*)$'); this.$ = {assoc:t[2], start:$$[$0-2], end:$$[$0], startLabel:t[1].trim(), endLabel:t[3].trim()}; break; case 11: var type = 'CLASS'; var id = $$[$0-1][0][0]; var typeMatch = $$[$0-1][0][0].match('<([a-z]*)>(.*)'); if (typeMatch) { type = typeMatch[1].toUpperCase(); id = typeMatch[2].trim(); } $$[$0-1][0][0] = id; this.$ = {type:type, id:id, parts:$$[$0-1]}; break; } }, table: 
[{3:1,4:2,6:3,7:[1,4],8:5,9:6,13:[1,7]},{1:[3]},{5:[1,8],10:[1,9]},{5:[2,5],10:[2,5],12:[2,5],14:[2,5]},{5:[2,2],10:[2,2],12:[2,2],14:[2,2]},{5:[2,3],7:[1,10],10:[2,3],12:[2,3],14:[2,3]},{5:[2,4],10:[2,4],12:[2,4],14:[2,4]},{4:12,6:3,7:[1,4],8:5,9:6,11:11,13:[1,7]},{1:[2,1]},{6:13,7:[1,4],8:5,9:6,13:[1,7]},{8:14,13:[1,7]},{12:[1,16],14:[1,15]},{10:[1,9],12:[2,7],14:[2,7]},{5:[2,6],10:[2,6],12:[2,6],14:[2,6]},{5:[2,10],10:[2,10],12:[2,10],14:[2,10]},{5:[2,11],7:[2,11],10:[2,11],12:[2,11],14:[2,11]},{4:17,6:3,7:[1,4],8:5,9:6,12:[2,9],13:[1,7],14:[2,9]},{10:[1,9],12:[2,8],14:[2,8]}], defaultActions: {8:[2,1]}, parseError: function parseError(str, hash) { if (hash.recoverable) { this.trace(str); } else { throw new Error(str); } }, parse: function parse(input) { var self = this, stack = [0], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1; var args = lstack.slice.call(arguments, 1); this.lexer.setInput(input); this.lexer.yy = this.yy; this.yy.lexer = this.lexer; this.yy.parser = this; if (typeof this.lexer.yylloc == 'undefined') { this.lexer.yylloc = {}; } var yyloc = this.lexer.yylloc; lstack.push(yyloc); var ranges = this.lexer.options && this.lexer.options.ranges; if (typeof this.yy.parseError === 'function') { this.parseError = this.yy.parseError; } else { this.parseError = Object.getPrototypeOf(this).parseError; } function popStack(n) { stack.length = stack.length - 2 * n; vstack.length = vstack.length - n; lstack.length = lstack.length - n; } function lex() { var token; token = self.lexer.lex() || EOF; if (typeof token !== 'number') { token = self.symbols_[token] || token; } return token; } var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected; while (true) { state = stack[stack.length - 1]; if (this.defaultActions[state]) { action = this.defaultActions[state]; } else { if (symbol === null || typeof symbol == 'undefined') { symbol = lex(); } action = 
table[state] && table[state][symbol]; } if (typeof action === 'undefined' || !action.length || !action[0]) { var errStr = ''; expected = []; for (p in table[state]) { if (this.terminals_[p] && p > TERROR) { expected.push('\'' + this.terminals_[p] + '\''); } } if (this.lexer.showPosition) { errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + this.lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\''; } else { errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\''); } this.parseError(errStr, { text: this.lexer.match, token: this.terminals_[symbol] || symbol, line: this.lexer.yylineno, loc: yyloc, expected: expected }); } if (action[0] instanceof Array && action.length > 1) { throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol); } switch (action[0]) { case 1: stack.push(symbol); vstack.push(this.lexer.yytext); lstack.push(this.lexer.yylloc); stack.push(action[1]); symbol = null; if (!preErrorSymbol) { yyleng = this.lexer.yyleng; yytext = this.lexer.yytext; yylineno = this.lexer.yylineno; yyloc = this.lexer.yylloc; if (recovering > 0) { recovering--; } } else { symbol = preErrorSymbol; preErrorSymbol = null; } break; case 2: len = this.productions_[action[1]][1]; yyval.$ = vstack[vstack.length - len]; yyval._$ = { first_line: lstack[lstack.length - (len || 1)].first_line, last_line: lstack[lstack.length - 1].last_line, first_column: lstack[lstack.length - (len || 1)].first_column, last_column: lstack[lstack.length - 1].last_column }; if (ranges) { yyval._$.range = [ lstack[lstack.length - (len || 1)].range[0], lstack[lstack.length - 1].range[1] ]; } r = this.performAction.apply(yyval, [ yytext, yyleng, yylineno, this.yy, action[1], vstack, lstack ].concat(args)); if (typeof r !== 'undefined') { return r; } if (len) { stack = stack.slice(0, -1 * len * 2); 
vstack = vstack.slice(0, -1 * len); lstack = lstack.slice(0, -1 * len); } stack.push(this.productions_[action[1]][0]); vstack.push(yyval.$); lstack.push(yyval._$); newState = table[stack[stack.length - 2]][stack[stack.length - 1]]; stack.push(newState); break; case 3: return true; } } return true; }}; /* generated by jison-lex 0.2.1 */ var lexer = (function(){ var lexer = { EOF:1, parseError:function parseError(str, hash) { if (this.yy.parser) { this.yy.parser.parseError(str, hash); } else { throw new Error(str); } }, // resets the lexer, sets new input setInput:function (input) { this._input = input; this._more = this._backtrack = this.done = false; this.yylineno = this.yyleng = 0; this.yytext = this.matched = this.match = ''; this.conditionStack = ['INITIAL']; this.yylloc = { first_line: 1, first_column: 0, last_line: 1, last_column: 0 }; if (this.options.ranges) { this.yylloc.range = [0,0]; } this.offset = 0; return this; }, // consumes and returns one char from the input input:function () { var ch = this._input[0]; this.yytext += ch; this.yyleng++; this.offset++; this.match += ch; this.matched += ch; var lines = ch.match(/(?:\r\n?|\n).*/g); if (lines) { this.yylineno++; this.yylloc.last_line++; } else { this.yylloc.last_column++; } if (this.options.ranges) { this.yylloc.range[1]++; } this._input = this._input.slice(1); return ch; }, // unshifts one char (or a string) into the input unput:function (ch) { var len = ch.length; var lines = ch.split(/(?:\r\n?|\n)/g); this._input = ch + this._input; this.yytext = this.yytext.substr(0, this.yytext.length - len - 1); //this.yyleng -= len; this.offset -= len; var oldLines = this.match.split(/(?:\r\n?|\n)/g); this.match = this.match.substr(0, this.match.length - 1); this.matched = this.matched.substr(0, this.matched.length - 1); if (lines.length - 1) { this.yylineno -= lines.length - 1; } var r = this.yylloc.range; this.yylloc = { first_line: this.yylloc.first_line, last_line: this.yylineno + 1, first_column: 
this.yylloc.first_column, last_column: lines ? (lines.length === oldLines.length ? this.yylloc.first_column : 0) + oldLines[oldLines.length - lines.length].length - lines[0].length : this.yylloc.first_column - len }; if (this.options.ranges) { this.yylloc.range = [r[0], r[0] + this.yyleng - len]; } this.yyleng = this.yytext.length; return this; }, // When called from action, caches matched text and appends it on next action more:function () { this._more = true; return this; }, // When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead. reject:function () { if (this.options.backtrack_lexer) { this._backtrack = true; } else { return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), { text: "", token: null, line: this.yylineno }); } return this; }, // retain first n characters of the match less:function (n) { this.unput(this.match.slice(n)); }, // displays already matched input, i.e. for error messages pastInput:function () { var past = this.matched.substr(0, this.matched.length - this.match.length); return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, ""); }, // displays upcoming input, i.e. for error messages upcomingInput:function () { var next = this.match; if (next.length < 20) { next += this._input.substr(0, 20-next.length); } return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, ""); }, // displays the character position where the lexing error occurred, i.e. 
for error messages showPosition:function () { var pre = this.pastInput(); var c = new Array(pre.length + 1).join("-"); return pre + this.upcomingInput() + "\n" + c + "^"; }, // test the lexed token: return FALSE when not a match, otherwise return token test_match:function (match, indexed_rule) { var token, lines, backup; if (this.options.backtrack_lexer) { // save context backup = { yylineno: this.yylineno, yylloc: { first_line: this.yylloc.first_line, last_line: this.last_line, first_column: this.yylloc.first_column, last_column: this.yylloc.last_column }, yytext: this.yytext, match: this.match, matches: this.matches, matched: this.matched, yyleng: this.yyleng, offset: this.offset, _more: this._more, _input: this._input, yy: this.yy, conditionStack: this.conditionStack.slice(0), done: this.done }; if (this.options.ranges) { backup.yylloc.range = this.yylloc.range.slice(0); } } lines = match[0].match(/(?:\r\n?|\n).*/g); if (lines) { this.yylineno += lines.length; } this.yylloc = { first_line: this.yylloc.last_line, last_line: this.yylineno + 1, first_column: this.yylloc.last_column, last_column: lines ? lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length : this.yylloc.last_column + match[0].length }; this.yytext += match[0]; this.match += match[0]; this.matches = match; this.yyleng = this.yytext.length; if (this.options.ranges) { this.yylloc.range = [this.offset, this.offset += this.yyleng]; } this._more = false; this._backtrack = false; this._input = this._input.slice(match[0].length); this.matched += match[0]; token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]); if (this.done && this._input) { this.done = false; } if (token) { return token; } else if (this._backtrack) { // recover context for (var k in backup) { this[k] = backup[k]; } return false; // rule action called reject() implying the next rule should be tested instead. 
} return false; }, // return next match in input next:function () { if (this.done) { return this.EOF; } if (!this._input) { this.done = true; } var token, match, tempMatch, index; if (!this._more) { this.yytext = ''; this.match = ''; } var rules = this._currentRules(); for (var i = 0; i < rules.length; i++) { tempMatch = this._input.match(this.rules[rules[i]]); if (tempMatch && (!match || tempMatch[0].length > match[0].length)) { match = tempMatch; index = i; if (this.options.backtrack_lexer) { token = this.test_match(tempMatch, rules[i]); if (token !== false) { return token; } else if (this._backtrack) { match = false; continue; // rule action called reject() implying a rule MISmatch. } else { // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace) return false; } } else if (!this.options.flex) { break; } } } if (match) { token = this.test_match(match, rules[index]); if (token !== false) { return token; } // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace) return false; } if (this._input === "") { return this.EOF; } else { return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. 
Unrecognized text.\n' + this.showPosition(), { text: "", token: null, line: this.yylineno }); } }, // return next match that has a token lex:function lex() { var r = this.next(); if (r) { return r; } else { return this.lex(); } }, // activates a new lexer condition state (pushes the new lexer condition state onto the condition stack) begin:function begin(condition) { this.conditionStack.push(condition); }, // pop the previously active lexer condition state off the condition stack popState:function popState() { var n = this.conditionStack.length - 1; if (n > 0) { return this.conditionStack.pop(); } else { return this.conditionStack[0]; } }, // produce the lexer rule set which is active for the currently active lexer condition state _currentRules:function _currentRules() { if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) { return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules; } else { return this.conditions["INITIAL"].rules; } }, // return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available topState:function topState(n) { n = this.conditionStack.length - 1 - Math.abs(n || 0); if (n >= 0) { return this.conditionStack[n]; } else { return "INITIAL"; } }, // alias for begin(condition) pushState:function pushState(condition) { this.begin(condition); }, // return the number of states currently on the stack stateStackSize:function stateStackSize() { return this.conditionStack.length; }, options: {}, performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START /**/) { var YYSTATE=YY_START; switch($avoiding_name_collisions) { case 0:return 12 break; case 1:return 7 break; case 2:return 13 break; case 3:return 14 break; case 4:return 10 break; case 5:return 5 break; case 6:return 'INVALID' break; } }, rules: [/^(?:\s*\|\s*)/,/^(?:(\\(\[|\]|\|)|[^\]\[|;\n])+)/,/^(?:\[)/,/^(?:\s*\])/,/^(?:[ ]*(;|\n)+[ 
]*)/,/^(?:$)/,/^(?:.)/], conditions: {"INITIAL":{"rules":[0,1,2,3,4,5,6],"inclusive":true}} }; return lexer; })(); parser.lexer = lexer; function Parser () { this.yy = {}; } Parser.prototype = parser;parser.Parser = Parser; return new Parser; })(); if (typeof require !== 'undefined' && typeof exports !== 'undefined') { exports.parser = nomnomlCoreParser; exports.Parser = nomnomlCoreParser.Parser; exports.parse = function () { return nomnomlCoreParser.parse.apply(nomnomlCoreParser, arguments); }; exports.main = function commonjsMain(args) { if (!args[1]) { console.log('Usage: '+args[0]+' FILE'); process.exit(1); } var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8"); return exports.parser.parse(source); }; if (typeof module !== 'undefined' && require.main === module) { exports.main(process.argv.slice(1)); } }; var nomnoml = nomnoml || {} nomnoml.parse = function (source){ function onlyCompilables(line){ var ok = line[0] !== '#' && line.substring(0,2) !== '//' return ok ? 
line : '' } var isDirective = function (line){ return line.text[0] === '#' } var lines = source.split('\n').map(function (s, i){ return {text: s.trim(), index: i } }) var pureDirectives = _.filter(lines, isDirective) var directives = _.object(pureDirectives.map(function (line){ try { var tokens = line.text.substring(1).split(':') return [tokens[0].trim(), tokens[1].trim()] } catch (e) { throw new Error('line ' + (line.index + 1)) } })) var pureDiagramCode = _.map(_.pluck(lines, 'text'), onlyCompilables).join('\n').trim() var ast = nomnoml.transformParseIntoSyntaxTree(nomnoml.intermediateParse(pureDiagramCode)) ast.directives = directives return ast } nomnoml.intermediateParse = function (source){ return nomnomlCoreParser.parse(source) } nomnoml.transformParseIntoSyntaxTree = function (entity){ var relationId = 0 function transformCompartment(parts){ var lines = [] var rawClassifiers = [] var relations = [] _.each(parts, function (p){ if (typeof p === 'string') lines.push(p) if (p.assoc){ // is a relation rawClassifiers.push(p.start) rawClassifiers.push(p.end) relations.push({ id: relationId++, assoc: p.assoc, start: p.start.parts[0][0], end: p.end.parts[0][0], startLabel: p.startLabel, endLabel: p.endLabel }) } if (p.parts){ // is a classifier rawClassifiers.push(p) } }) var allClassifiers = _.map(rawClassifiers, transformItem) var noDuplicates = _.map(_.groupBy(allClassifiers, 'name'), function (cList){ return _.max(cList, function (c){ return c.compartments.length }) }) return nomnoml.Compartment(lines, noDuplicates, relations) } function transformItem(entity){ if (typeof entity === 'string') return entity if (_.isArray(entity)) return transformCompartment(entity) if (entity.parts){ var compartments = _.map(entity.parts, transformCompartment) return nomnoml.Classifier(entity.type, entity.id, compartments) } return undefined } return transformItem(entity) }; var nomnoml = nomnoml || {} nomnoml.styles = { ABSTRACT: { center: 1, bold: 0, underline: 0, italic: 1, 
dashed: 0, empty: 0, hull: 'auto', visual: 'class' }, ACTOR: { center: 1, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'actor' }, CHOICE: { center: 1, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'rhomb' }, CLASS: { center: 1, bold: 1, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'class' }, DATABASE: { center: 1, bold: 1, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'database' }, END: { center: 1, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 1, hull: 'icon', visual: 'end' }, FRAME: { center: 0, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'frame' }, HIDDEN: { center: 1, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 1, hull: 'empty', visual: 'hidden' }, INPUT: { center: 1, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'input' }, INSTANCE: { center: 1, bold: 0, underline: 1, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'class' }, LABEL: { center: 0, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'none' }, NOTE: { center: 0, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'note' }, PACKAGE: { center: 0, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'package' }, RECEIVER: { center: 0, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'receiver' }, REFERENCE:{ center: 1, bold: 0, underline: 0, italic: 0, dashed: 1, empty: 0, hull: 'auto', visual: 'class' }, SENDER: { center: 0, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'sender' }, START: { center: 1, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 1, hull: 'icon', visual: 'start' }, STATE: { center: 1, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 'auto', visual: 'roundrect' }, USECASE: { center: 1, bold: 0, underline: 0, italic: 0, dashed: 0, empty: 0, hull: 
'auto', visual: 'ellipse' }, } nomnoml.visualizers = { actor : function (node, x, y, padding, config, g) { var a = padding/2 var yp = y + a/2 var actorCenter = {x: node.x, y: yp-a} g.circle(actorCenter, a).fillAndStroke() g.path([ {x: node.x, y: yp}, {x: node.x, y: yp+2*a} ]).stroke() g.path([ {x: node.x-a, y: yp+a}, {x: node.x+a, y: yp+a} ]).stroke() g.path([ {x: node.x-a, y: yp+a+padding}, {x: node.x , y: yp+padding}, {x: node.x+a, y: yp+a+padding} ]).stroke() }, class : function (node, x, y, padding, config, g) { g.rect(x, y, node.width, node.height).fillAndStroke() }, database : function (node, x, y, padding, config, g) { var cy = y-padding/2 var pi = 3.1416 g.rect(x, y, node.width, node.height).fill() g.path([{x: x, y: cy}, {x: x, y: cy+node.height}]).stroke() g.path([ {x: x+node.width, y: cy}, {x: x+node.width, y: cy+node.height}]).stroke() g.ellipse({x: node.x, y: cy}, node.width, padding*1.5).fillAndStroke() g.ellipse({x: node.x, y: cy+node.height}, node.width, padding*1.5, 0, pi) .fillAndStroke() }, ellipse : function (node, x, y, padding, config, g) { g.ellipse({x: node.x, y: node.y}, node.width, node.height).fillAndStroke() }, end : function (node, x, y, padding, config, g) { g.circle(node.x, y+node.height/2, node.height/3).fillAndStroke() g.fillStyle(config.stroke) g.circle(node.x, y+node.height/2, node.height/3-padding/2).fill() }, frame : function (node, x, y, padding, config, g) { g.rect(x, y, node.width, node.height).fillAndStroke() }, hidden : function (node, x, y, padding, config, g) { }, input : function (node, x, y, padding, config, g) { g.circuit([ {x:x+padding, y:y}, {x:x+node.width, y:y}, {x:x+node.width-padding, y:y+node.height}, {x:x, y:y+node.height} ]).fillAndStroke() }, none : function (node, x, y, padding, config, g) { }, note : function (node, x, y, padding, config, g) { g.circuit([ {x: x, y: y}, {x: x+node.width-padding, y: y}, {x: x+node.width, y: y+padding}, {x: x+node.width, y: y+node.height}, {x: x, y: y+node.height}, {x: x, y: y} 
]).fillAndStroke() g.path([ {x: x+node.width-padding, y: y}, {x: x+node.width-padding, y: y+padding}, {x: x+node.width, y: y+padding} ]).stroke() }, package : function (node, x, y, padding, config, g) { var headHeight = node.compartments[0].height g.rect(x, y+headHeight, node.width, node.height-headHeight).fillAndStroke() var w = g.measureText(node.name).width + 2*padding g.circuit([ {x:x, y:y+headHeight}, {x:x, y:y}, {x:x+w, y:y}, {x:x+w, y:y+headHeight} ]).fillAndStroke() }, receiver : function (node, x, y, padding, config, g) { g.circuit([ {x: x, y: y}, {x: x+node.width+padding, y: y}, {x: x+node.width-padding, y: y+node.height/2}, {x: x+node.width+padding, y: y+node.height}, {x: x, y: y+node.height} ]).fillAndStroke() }, rhomb : function (node, x, y, padding, config, g) { g.circuit([ {x:node.x, y:y - padding}, {x:x+node.width + padding, y:node.y}, {x:node.x, y:y+node.height + padding}, {x:x - padding, y:node.y} ]).fillAndStroke() }, roundrect : function (node, x, y, padding, config, g) { var r = Math.min(padding*2*config.leading, node.height/2) g.roundRect(x, y, node.width, node.height, r).fillAndStroke() }, sender : function (node, x, y, padding, config, g) { g.circuit([ {x: x, y: y}, {x: x+node.width-padding, y: y}, {x: x+node.width+padding, y: y+node.height/2}, {x: x+node.width-padding, y: y+node.height}, {x: x, y: y+node.height} ]).fillAndStroke() }, start : function (node, x, y, padding, config, g) { g.fillStyle(config.stroke) g.circle(node.x, y+node.height/2, node.height/2.5).fill() }, }; var nomnoml = nomnoml || {} nomnoml.Classifier = function (type, name, compartments){ return { type: type, name: name, compartments: compartments } } nomnoml.Compartment = function (lines, nodes, relations){ return { lines: lines, nodes: nodes, relations: relations } } nomnoml.layout = function (measurer, config, ast){ function runDagre(input){ return dagre.layout() .rankSep(config.spacing) .nodeSep(config.spacing) .edgeSep(config.spacing) .rankDir(config.direction) 
.run(input) } function measureLines(lines, fontWeight){ if (!lines.length) return { width: 0, height: config.padding } measurer.setFont(config, fontWeight) return { width: Math.round(_.max(_.map(lines, measurer.textWidth)) + 2*config.padding), height: Math.round(measurer.textHeight() * lines.length + 2*config.padding) } } function layoutCompartment(c, compartmentIndex){ var textSize = measureLines(c.lines, compartmentIndex ? 'normal' : 'bold') c.width = textSize.width c.height = textSize.height if (!c.nodes.length && !c.relations.length) return _.each(c.nodes, layoutClassifier) var g = new dagre.Digraph() _.each(c.nodes, function (e){ g.addNode(e.name, { width: e.width, height: e.height }) }) _.each(c.relations, function (r){ g.addEdge(r.id, r.start, r.end) }) var dLayout = runDagre(g) var rels = _.indexBy(c.relations, 'id') var nodes = _.indexBy(c.nodes, 'name') function toPoint(o){ return {x:o.x, y:o.y} } dLayout.eachNode(function(u, value) { nodes[u].x = value.x nodes[u].y = value.y }) dLayout.eachEdge(function(e, u, v, value) { var start = nodes[u], end = nodes[v] rels[e].path = _.map(_.flatten([start, value.points, end]), toPoint) }) var graph = dLayout.graph() var graphHeight = graph.height ? graph.height + 2*config.gutter : 0 var graphWidth = graph.width ? 
graph.width + 2*config.gutter : 0 c.width = Math.max(textSize.width, graphWidth) + 2*config.padding c.height = textSize.height + graphHeight + config.padding } function layoutClassifier(clas){ var style = config.styles[clas.type] || nomnoml.styles.CLASS if (style.hull == 'icon'){ clas.width = config.fontSize * 2.5 clas.height = config.fontSize * 2.5 return } if (style.hull === 'empty'){ clas.width = 0 clas.height = 0 return } _.each(clas.compartments, layoutCompartment) clas.width = _.max(_.pluck(clas.compartments, 'width')) clas.height = skanaar.sum(clas.compartments, 'height') clas.x = clas.width/2 clas.y = clas.height/2 _.each(clas.compartments, function(co){ co.width = clas.width }) } layoutCompartment(ast) return ast } ; var nomnoml = nomnoml || {} nomnoml.render = function (graphics, config, compartment, setFont){ var padding = config.padding var g = graphics var vm = skanaar.vector function renderCompartment(compartment, style, level){ g.save() g.translate(padding, padding) g.fillStyle(config.stroke) _.each(compartment.lines, function (text, i){ g.textAlign(style.center ? 'center' : 'left') var x = style.center ? 
compartment.width/2 - padding : 0 var y = (0.5+(i+0.5)*config.leading)*config.fontSize if (text){ g.fillText(text, x, y) } if (style.underline){ var w = g.measureText(text).width y += Math.round(config.fontSize * 0.2)+0.5 g.path([{x:x-w/2, y:y}, {x:x+w/2, y:y}]).stroke() g.lineWidth = config.lineWidth } }) g.translate(config.gutter, config.gutter) _.each(compartment.relations, function (r){ renderRelation(r, compartment) }) _.each(compartment.nodes, function (n){ renderNode(n, level) }) g.restore() } function renderNode(node, level){ var x = Math.round(node.x-node.width/2) var y = Math.round(node.y-node.height/2) var style = config.styles[node.type] || nomnoml.styles.CLASS g.fillStyle(style.fill || config.fill[level] || _.last(config.fill)) if (style.dashed){ var dash = Math.max(4, 2*config.lineWidth) g.setLineDash([dash, dash]) } var drawNode = nomnoml.visualizers[style.visual] || nomnoml.visualizers.class drawNode(node, x, y, padding, config, g) g.setLineDash([]) var yDivider = (style.visual === 'actor' ? y + padding*3/4 : y) _.each(node.compartments, function (part, i){ var s = i > 0 ? {} : style; // only style node title if (s.empty) return g.save() g.translate(x, yDivider) setFont(config, s.bold ? 
'bold' : 'normal', s.italic) renderCompartment(part, s, level+1) g.restore() if (i+1 === node.compartments.length) return yDivider += part.height if (style.visual === 'frame' && i === 0){ var w = g.measureText(node.name).width+part.height/2+padding g.path([ {x:x, y:yDivider}, {x:x+w-part.height/2, y:yDivider}, {x:x+w, y:yDivider-part.height/2}, {x:x+w, y:yDivider-part.height} ]).stroke() } else g.path([{x:x, y:yDivider}, {x:x+node.width, y:yDivider}]).stroke() }) } function strokePath(p){ if (config.edges === 'rounded'){ var radius = config.spacing * config.bendSize g.beginPath() g.moveTo(p[0].x, p[0].y) for (var i = 1; i < p.length-1; i++){ g.arcTo(p[i].x, p[i].y, p[i+1].x, p[i+1].y, radius) } g.lineTo(_.last(p).x, _.last(p).y) g.stroke() } else g.path(p).stroke() } var empty = false, filled = true, diamond = true function renderRelation(r, compartment){ var startNode = _.findWhere(compartment.nodes, {name:r.start}) var endNode = _.findWhere(compartment.nodes, {name:r.end}) var start = rectIntersection(r.path[1], _.first(r.path), startNode) var end = rectIntersection(r.path[r.path.length-2], _.last(r.path), endNode) var path = _.flatten([start, _.tail(_.initial(r.path)), end]) var fontSize = config.fontSize g.fillStyle(config.stroke) setFont(config, 'normal') var textW = g.measureText(r.endLabel).width var labelX = config.direction === 'LR' ? 
-padding-textW : padding if (r.startLabel) g.fillText(r.startLabel, start.x+padding, start.y+padding+fontSize) if (r.endLabel) g.fillText(r.endLabel, end.x+labelX, end.y-padding) if (r.assoc !== '-/-'){ if (g.setLineDash && skanaar.hasSubstring(r.assoc, '--')){ var dash = Math.max(4, 2*config.lineWidth) g.setLineDash([dash, dash]) strokePath(path) g.setLineDash([]) } else strokePath(path) } function drawArrowEnd(id, path, end){ if (id === '>' || id === '<') drawArrow(path, filled, end) else if (id === ':>' || id === '<:') drawArrow(path, empty, end) else if (id === '+') drawArrow(path, filled, end, diamond) else if (id === 'o') drawArrow(path, empty, end, diamond) } var tokens = r.assoc.split('-') drawArrowEnd(_.last(tokens), path, end) drawArrowEnd(_.first(tokens), path.reverse(), start) } function rectIntersection(p1, p2, rect) { if (rect.width || rect.height) { var xBound = rect.width/2 + config.edgeMargin; var yBound = rect.height/2 + config.edgeMargin; var delta = vm.diff(p1, p2); var t; if (delta.x && delta.y) { t = Math.min(Math.abs(xBound/delta.x), Math.abs(yBound/delta.y)); } else { t = Math.abs(delta.x ? xBound/delta.x : yBound/delta.y); } return vm.add(p2, vm.mult(delta, t)); } return p2; } function drawArrow(path, isOpen, arrowPoint, diamond){ var size = (config.spacing - 2*config.edgeMargin) * config.arrowSize / 30 var v = vm.diff(path[path.length-2], _.last(path)) var nv = vm.normalize(v) function getArrowBase(s){ return vm.add(arrowPoint, vm.mult(nv, s*size)) } var arrowBase = getArrowBase(diamond ? 7 : 10) var t = vm.rot(nv) var arrowButt = (diamond) ? getArrowBase(14) : (isOpen && !config.fillArrows) ? getArrowBase(5) : arrowBase var arrow = [ vm.add(arrowBase, vm.mult(t, 4*size)), arrowButt, vm.add(arrowBase, vm.mult(t, -4*size)), arrowPoint ] g.fillStyle(isOpen ? 
config.stroke : config.fill[0]) g.circuit(arrow).fillAndStroke() } function snapToPixels(){ if (config.lineWidth % 2 === 1) g.translate(0.5, 0.5) } g.clear() setFont(config, 'bold') g.save() g.lineWidth(config.lineWidth) g.lineJoin('round') g.lineCap('round') g.strokeStyle(config.stroke) g.scale(config.zoom, config.zoom) snapToPixels() renderCompartment(compartment, {}, 0) g.restore() } ; var nomnoml = nomnoml || {}; (function () { 'use strict'; function getConfig(d) { var userStyles = {} _.each(d, function (styleDef, key){ if (key[0] != '.') return userStyles[key.substring(1).toUpperCase()] = { center: _.contains(styleDef, 'center'), bold: _.contains(styleDef, 'bold'), underline: _.contains(styleDef, 'underline'), italic: _.contains(styleDef, 'italic'), dashed: _.contains(styleDef, 'dashed'), empty: _.contains(styleDef, 'empty'), fill: _.last(styleDef.match('fill=([^ ]*)')), visual: _.last(styleDef.match('visual=([^ ]*)')) || 'class' } }) return { arrowSize: +d.arrowSize || 1, bendSize: +d.bendSize || 0.3, direction: { down: 'TB', right: 'LR' }[d.direction] || 'TB', gutter: +d.gutter || 5, edgeMargin: (+d.edgeMargin) || 0, edges: { hard: 'hard', rounded: 'rounded' }[d.edges] || 'rounded', fill: (d.fill || '#eee8d5;#fdf6e3;#eee8d5;#fdf6e3').split(';'), fillArrows: d.fillArrows === 'true', font: d.font || 'Calibri', fontSize: (+d.fontSize) || 12, leading: (+d.leading) || 1.25, lineWidth: (+d.lineWidth) || 3, padding: (+d.padding) || 8, spacing: (+d.spacing) || 40, stroke: d.stroke || '#33322E', title: d.title || 'nomnoml', zoom: +d.zoom || 1, styles: _.extend({}, nomnoml.styles, userStyles) }; } function fitCanvasSize(canvas, rect, zoom) { canvas.width = rect.width * zoom; canvas.height = rect.height * zoom; } function setFont(config, isBold, isItalic, graphics) { var style = (isBold === 'bold' ? 
'bold' : '') if (isItalic) style = 'italic ' + style var defaultFont = 'Helvetica, sans-serif' var font = skanaar.format('# #pt #, #', style, config.fontSize, config.font, defaultFont) graphics.font(font) } function parseAndRender(code, graphics, canvas, scale) { var ast = nomnoml.parse(code); var config = getConfig(ast.directives); var measurer = { setFont: function (a, b, c) { setFont(a, b, c, graphics); }, textWidth: function (s) { return graphics.measureText(s).width }, textHeight: function () { return config.leading * config.fontSize } }; var layout = nomnoml.layout(measurer, config, ast); fitCanvasSize(canvas, layout, config.zoom * scale); config.zoom *= scale; nomnoml.render(graphics, config, layout, measurer.setFont); return { config: config }; } nomnoml.draw = function (canvas, code, scale) { return parseAndRender(code, skanaar.Canvas(canvas), canvas, scale || 1) }; nomnoml.renderSvg = function (code) { var ast = nomnoml.parse(code) var config = getConfig(ast.directives) var skCanvas = skanaar.Svg('') function setFont(config, isBold, isItalic) { var style = (isBold === 'bold' ? 'bold' : '') if (isItalic) style = 'italic ' + style var defFont = 'Helvetica, sans-serif' var template = 'font-weight:#; font-size:#pt; font-family:\'#\', #' var font = skanaar.format(template, style, config.fontSize, config.font, defFont) skCanvas.font(font) } var measurer = { setFont: function (a, b, c) { setFont(a, b, c, skCanvas); }, textWidth: function (s) { return skCanvas.measureText(s).width }, textHeight: function () { return config.leading * config.fontSize } }; var layout = nomnoml.layout(measurer, config, ast) nomnoml.render(skCanvas, config, layout, measurer.setFont) return skCanvas.serialize({ width: layout.width, height: layout.height }) }; })(); ; return nomnoml; });
{ "content_hash": "d2ae9449995757fec374228516c5d35e", "timestamp": "", "source": "github", "line_count": 1618, "max_line_length": 594, "avg_line_length": 33.53955500618047, "alnum_prop": 0.5729448836309359, "repo_name": "prantlf/nomnoml", "id": "1bbe57ffd16e17613dce2f3a08d2499322e5c4ae", "size": "54267", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dist/nomnoml.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "20128" }, { "name": "HTML", "bytes": "16466" }, { "name": "JavaScript", "bytes": "134613" }, { "name": "Yacc", "bytes": "1493" } ], "symlink_target": "" }
require 'rails_helper'

# Feature spec covering the participant-invitation acceptance flow.
feature 'Participant Invitations' do
  # The signed-in person who will accept the invitation.
  let(:user) { create(:person) }
  # An invitation that grants the 'organizer' role on acceptance.
  let(:invitation) { create(:participant_invitation, role: 'organizer') }

  before { login_user(user) }

  context "User has received a participant invitation" do
    it "can accept the invitation" do
      # Acceptance happens by visiting the acceptance URL, which is keyed by
      # the invitation's slug and token.
      visit accept_participant_invitation_path(invitation.slug, invitation.token)

      # Success is signalled by a confirmation message plus the 'Organize'
      # text appearing on the page — NOTE(review): presumably 'Organize' comes
      # from navigation shown only to organizers; confirm against the layout.
      expect(page).to have_text('You successfully accepted the invitation')
      expect(page).to have_text('Organize')
    end
  end
end
{ "content_hash": "3ed069656b5a27cf4bd7641c01b068f8", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 81, "avg_line_length": 30.41176470588235, "alnum_prop": 0.7156673114119922, "repo_name": "rubyaustralia/cfp-app", "id": "0238ec5b34a257fe36906ff4a9b395a98a3aaf60", "size": "517", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/features/participant_invitation_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "9991" }, { "name": "HTML", "bytes": "65367" }, { "name": "JavaScript", "bytes": "6640" }, { "name": "Ruby", "bytes": "252847" } ], "symlink_target": "" }
const EMPTY = ''; const NEWLINE = '\n'; const SLASH = '/'; const SPACE = ' '; const identity = x => x; const getLongest = items => items.reduce((a, b) => (a.length > b.length ? a : b)); module.exports.indented = (str, indent = 2) => str.split(`${NEWLINE}`).join(`${NEWLINE}${SPACE.repeat(indent)}`); module.exports.bulleted = strs => strs.map(str => `• ${str}`).join(NEWLINE); module.exports.inlineList = strs => strs.reduce((acc, str, index) => [acc, str].join(index === strs.length - 1 ? ' & ' : ', ')); const padding = (module.exports.padding = (str, len, char = SPACE) => char.repeat(len > str.length ? len - str.length : 0)); module.exports.padLeft = (str, len, char = SPACE) => padding(str, len, char) + String(str); const padRight = (module.exports.padRight = (str, len, char = SPACE) => String(str) + padding(str, len, char)); module.exports.listPairs = (x, style = identity) => { const keys = Object.keys(x); const longest = getLongest(keys).length; return keys .map(key => { return `${style(padRight(key, longest))} ${x[key]}`; }) .join(NEWLINE); }; module.exports.zipTemplateLiterals = (literals, numSpacesBetween) => { const literalsLines = literals.map(literal => literal.split(NEWLINE)); return literalsLines[0].reduce( (memo, _, index) => [memo, NEWLINE].join(literalsLines.map(lines => lines[index]).join(SPACE.repeat(numSpacesBetween))), EMPTY ); }; module.exports.styleLastSegment = (str, style = identity, separator = SLASH) => { return str .split(separator) .map((segment, index, segments) => { return index === segments.length - 1 ? style(segment) : segment; }) .join(separator); }; /** * Return a sluggified version of the string * * @param {string} input - The string to convert * @returns {string} */ module.exports.sluggify = input => input .toLowerCase() .replace(/\s/g, '-') .replace(/[^0-9a-z\-\_]/g, '');
{ "content_hash": "3b0e387cd2b03a4ce3eb4c2507a05294", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 113, "avg_line_length": 30.3125, "alnum_prop": 0.6216494845360825, "repo_name": "abcnews/aunty", "id": "738b1e26584e0ca4095b7bad0b6f8ab5cbdcd013", "size": "1942", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/utils/text.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "892" }, { "name": "JavaScript", "bytes": "95298" }, { "name": "SCSS", "bytes": "1015" }, { "name": "Svelte", "bytes": "2610" }, { "name": "TypeScript", "bytes": "18687" } ], "symlink_target": "" }
<!doctype html>
<html class="no-js" lang="">
<head>
  <meta charset="utf-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <title>Something's Brewing Beer Swap</title>
  <meta name="description" content="Something's Brewing Beer Swap, an app for organising beer swaps.">
  <link rel="shortcut icon" href="images/favicon.ico" type="image/x-icon">
  <link rel="icon" type="image/png" href="images/favicon-32x32.png" sizes="32x32">
  <link rel="icon" type="image/png" href="images/favicon-96x96.png" sizes="96x96">
  <link rel="icon" type="image/png" href="images/favicon-16x16.png" sizes="16x16">
  <!-- Use minimum-scale=1 to enable GPU rasterization -->
  <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=0, maximum-scale=1, minimum-scale=1" >
  <link rel="stylesheet" type="text/css" href="main.css">
</head>
<body>
  <div id="app"></div>
  <!-- This script adds the Roboto font to our project. For more detail go to this site: http://www.google.com/fonts#UsePlace:use/Collection:Roboto:400,300,500 -->
  <script> var WebFontConfig = { google: { families: [ 'Roboto:400,300,500:latin' ] } }; (function() { var wf = document.createElement('script'); wf.src = 'https://ajax.googleapis.com/ajax/libs/webfont/1/webfont.js'; wf.type = 'text/javascript'; wf.async = 'true'; var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(wf, s); })(); </script>
  <script src="app.js"></script>
</body>
</html>
{ "content_hash": "890024118b67db14304c251724af86e9", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 166, "avg_line_length": 39.625, "alnum_prop": 0.6271293375394322, "repo_name": "lolamastro/beerswap", "id": "9dc6b822e854d4d2dd2bad29cb7750ffe9b779d4", "size": "1585", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/www/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "207" }, { "name": "HTML", "bytes": "1585" }, { "name": "JavaScript", "bytes": "48551" } ], "symlink_target": "" }
<?xml version="1.0" ?>
<!--
 Licensed to the Apache Software Foundation (ASF) under one or more
 contributor license agreements.  See the NOTICE file distributed with
 this work for additional information regarding copyright ownership.
 The ASF licenses this file to You under the Apache License, Version 2.0
 (the "License"); you may not use this file except in compliance with
 the License.  You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
-->

<!-- Minimal test solrconfig exercising a custom index deletion policy. -->
<config>
  <luceneMatchVersion>${tests.luceneMatchVersion:LUCENE_CURRENT}</luceneMatchVersion>
  <directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
  <indexConfig>
    <!-- Test-only deletion policy; var1/var2 are init args, presumably so a
         test can verify they are passed through to FakeDeletionPolicy —
         confirm against the FakeDeletionPolicy test class. -->
    <deletionPolicy class="org.apache.solr.core.FakeDeletionPolicy">
      <str name="var1">value1</str>
      <str name="var2">value2</str>
    </deletionPolicy>
  </indexConfig>
  <requestHandler name="standard" class="solr.StandardRequestHandler"></requestHandler>
</config>
{ "content_hash": "5d0516be3511469211ba47c281e7b23b", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 103, "avg_line_length": 41.40625, "alnum_prop": 0.7456603773584906, "repo_name": "terrancesnyder/solr-analytics", "id": "9b96ea4a445e188cb2bae3725c773f6dba76279b", "size": "1325", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "solr/core/src/test-files/solr/collection1/conf/solrconfig-delpolicy2.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "13898" }, { "name": "Java", "bytes": "31968690" }, { "name": "JavaScript", "bytes": "1221046" }, { "name": "Perl", "bytes": "81566" }, { "name": "Python", "bytes": "179898" }, { "name": "Shell", "bytes": "19867" } ], "symlink_target": "" }
// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package misc

import (
	"fmt"
	"path"
	"time"

	"github.com/coreos/mantle/kola/register"
	"github.com/coreos/mantle/platform"
	"github.com/coreos/mantle/util"

	"github.com/coreos/mantle/Godeps/_workspace/src/github.com/coreos/coreos-cloudinit/config"
	"github.com/coreos/mantle/Godeps/_workspace/src/github.com/coreos/pkg/capnslog"
)

var (
	plog = capnslog.NewPackageLogger("github.com/coreos/mantle", "kola/tests/misc")

	// nfsserverconf is the cloud-config for the server machine: it starts
	// the NFS daemons and exports /tmp read-only to all clients.
	nfsserverconf = config.CloudConfig{
		CoreOS: config.CoreOS{
			Units: []config.Unit{
				config.Unit{
					Name:    "rpc-statd.service",
					Command: "start",
				},
				config.Unit{
					Name:    "rpc-mountd.service",
					Command: "start",
				},
				config.Unit{
					Name:    "nfsd.service",
					Command: "start",
				},
			},
		},
		WriteFiles: []config.File{
			config.File{
				Content: "/tmp *(ro,insecure,all_squash,no_subtree_check,fsid=0)",
				Path:    "/etc/exports",
			},
		},
		Hostname: "nfs1",
	}

	// mounttmpl is a systemd mount-unit template for the client; the format
	// verbs are filled by fmt.Sprintf with the server IP (%s) and the NFS
	// protocol version (%d).
	mounttmpl = `[Unit]
Description=NFS Client
After=network-online.target
Requires=network-online.target
After=rpc-statd.service
Requires=rpc-statd.service

[Mount]
What=%s:/tmp
Where=/mnt
Type=nfs
Options=defaults,noexec,nfsvers=%d
`
)

func init() {
	register.Register(&register.Test{
		Run:         NFSv3,
		ClusterSize: 0,
		Name:        "linux.nfs.v3",
		Platforms:   []string{"qemu", "aws"},
		UserData:    `#cloud-config`,
	})
	register.Register(&register.Test{
		Run:         NFSv4,
		ClusterSize: 0,
		Name:        "linux.nfs.v4",
		Platforms:   []string{"qemu", "aws"},
		UserData:    `#cloud-config`,
	})
}

// testNFS boots an NFS server machine exporting /tmp, creates a file on it,
// then boots a client machine that mounts the export at /mnt using the given
// NFS protocol version, and verifies the server-side file is visible through
// the mount.
func testNFS(c platform.TestCluster, nfsversion int) error {
	m1, err := c.NewMachine(nfsserverconf.String())
	if err != nil {
		return fmt.Errorf("Cluster.NewMachine: %s", err)
	}

	defer m1.Destroy()

	plog.Info("NFS server booted.")

	/* poke a file in /tmp */
	tmp, err := m1.SSH("mktemp")
	if err != nil {
		return fmt.Errorf("Machine.SSH: %s", err)
	}

	plog.Infof("Test file %q created on server.", tmp)

	// Client cloud-config: a mount unit rendered from mounttmpl pointing at
	// the server's private IP.
	c2 := config.CloudConfig{
		CoreOS: config.CoreOS{
			Units: []config.Unit{
				config.Unit{
					Name:    "mnt.mount",
					Command: "start",
					Content: fmt.Sprintf(mounttmpl, m1.PrivateIP(), nfsversion),
				},
			},
		},
		Hostname: "nfs2",
	}

	m2, err := c.NewMachine(c2.String())
	if err != nil {
		return fmt.Errorf("Cluster.NewMachine: %s", err)
	}

	defer m2.Destroy()

	plog.Info("NFS client booted.")

	plog.Info("Waiting for NFS mount on client...")

	// Poll systemd until the mount unit reports active (mounting can lag
	// behind boot while rpc-statd and the network come up).
	checkmount := func() error {
		status, err := m2.SSH("systemctl is-active mnt.mount")
		if err != nil || string(status) != "active" {
			return fmt.Errorf("mnt.mount status is %q: %v", status, err)
		}

		plog.Info("Got NFS mount.")
		return nil
	}

	if err = util.Retry(10, 3*time.Second, checkmount); err != nil {
		return err
	}

	// The server-side temp file must be visible through the client mount.
	_, err = m2.SSH(fmt.Sprintf("stat /mnt/%s", path.Base(string(tmp))))
	if err != nil {
		return fmt.Errorf("file %q does not exist", tmp)
	}

	return nil
}

// Test that the kernel NFS server and client work within CoreOS.
func NFSv3(c platform.TestCluster) error {
	return testNFS(c, 3)
}

// Test that NFSv4 without security works on CoreOS.
func NFSv4(c platform.TestCluster) error {
	return testNFS(c, 4)
}
{ "content_hash": "1070908f145197745c4085ba82fa7351", "timestamp": "", "source": "github", "line_count": 163, "max_line_length": 91, "avg_line_length": 23.141104294478527, "alnum_prop": 0.6593319194061505, "repo_name": "mjg59/mantle", "id": "391d027edbada7cdeccd01b4c3245c4caee43bb0", "size": "3772", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "kola/tests/misc/nfs.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "561381" }, { "name": "Shell", "bytes": "2214" } ], "symlink_target": "" }
package org.jclouds.ec2.binders; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import javax.inject.Singleton; import org.jclouds.aws.util.AWSUtils; import org.jclouds.http.HttpRequest; import org.jclouds.rest.Binder; import com.google.common.collect.Multimap; /** * Binds the Multimap to form parameters for filtering. * * <pre> * https://ec2.amazonaws.com/?Action=DescribeTags * &Filter.1.Name=resource-type * &Filter.1.Value.1=instance * &Filter.2.Name=key * &Filter.2.Value.1=stack * &Filter.3.Name=value * &Filter.3.Value.1=Test * &Filter.3.Value.2=Production * &AUTHPARAMS * </pre> */ @Singleton public class BindFiltersToIndexedFormParams implements Binder { @Override public <R extends HttpRequest> R bindToRequest(R request, Object input) { checkArgument(checkNotNull(input, "input") instanceof Multimap, "this binder is only valid for Multimap"); @SuppressWarnings("unchecked") Multimap<String, String> filters = (Multimap<String, String>) input; return AWSUtils.indexMultimapToFormValuesWithPrefix(request, "Filter", "Name", "Value", filters); } }
{ "content_hash": "eab50da46511eada9ef85706770482f3", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 112, "avg_line_length": 29.95, "alnum_prop": 0.7445742904841403, "repo_name": "yanzhijun/jclouds-aliyun", "id": "5478d64542d34872206fe514ca1c13b69d75d091", "size": "1999", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "apis/ec2/src/main/java/org/jclouds/ec2/binders/BindFiltersToIndexedFormParams.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "12999" }, { "name": "CSS", "bytes": "10692" }, { "name": "Clojure", "bytes": "99051" }, { "name": "Emacs Lisp", "bytes": "852" }, { "name": "HTML", "bytes": "381689" }, { "name": "Java", "bytes": "19478047" }, { "name": "JavaScript", "bytes": "7110" }, { "name": "Shell", "bytes": "121121" } ], "symlink_target": "" }
package org.hl7.fhir.dstu2016may.hapi.validation;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.ConceptValidationOptions;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.rest.api.Constants;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.dstu2016may.formats.IParser;
import org.hl7.fhir.dstu2016may.formats.ParserType;
import org.hl7.fhir.dstu2016may.model.CodeSystem;
import org.hl7.fhir.dstu2016may.model.CodeSystem.ConceptDefinitionComponent;
import org.hl7.fhir.dstu2016may.model.CodeType;
import org.hl7.fhir.dstu2016may.model.CodeableConcept;
import org.hl7.fhir.dstu2016may.model.Coding;
import org.hl7.fhir.dstu2016may.model.ConceptMap;
import org.hl7.fhir.dstu2016may.model.OperationOutcome;
import org.hl7.fhir.dstu2016may.model.Resource;
import org.hl7.fhir.dstu2016may.model.ResourceType;
import org.hl7.fhir.dstu2016may.model.StructureDefinition;
import org.hl7.fhir.dstu2016may.model.ValueSet;
import org.hl7.fhir.dstu2016may.model.ValueSet.ConceptReferenceComponent;
import org.hl7.fhir.dstu2016may.model.ValueSet.ConceptSetComponent;
import org.hl7.fhir.dstu2016may.model.ValueSet.ValueSetExpansionComponent;
import org.hl7.fhir.dstu2016may.model.ValueSet.ValueSetExpansionContainsComponent;
import org.hl7.fhir.dstu2016may.terminologies.ValueSetExpander.ValueSetExpansionOutcome;
import org.hl7.fhir.dstu2016may.utils.INarrativeGenerator;
import org.hl7.fhir.dstu2016may.utils.IWorkerContext;
import org.hl7.fhir.utilities.i18n.I18nBase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * Adapter exposing HAPI's {@link IValidationSupport} through the DSTU2.1
 * {@link IWorkerContext} interface used by the structure validator. Many
 * IWorkerContext operations are not needed for validation and simply throw
 * {@link UnsupportedOperationException}.
 */
public final class HapiWorkerContext extends I18nBase implements IWorkerContext {
	private final FhirContext myCtx;
	// Per-instance cache of resources fetched by URI.
	// NOTE(review): the cache is unbounded and never invalidated — confirm
	// instances are short-lived enough for this to be safe.
	private Map<String, Resource> myFetchedResourceCache = new HashMap<>();
	private IValidationSupport myValidationSupport;

	/**
	 * @param theCtx the FHIR context (must not be null)
	 * @param theValidationSupport backing terminology/profile services (must not be null)
	 */
	public HapiWorkerContext(FhirContext theCtx, IValidationSupport theValidationSupport) {
		Validate.notNull(theCtx, "theCtx must not be null");
		Validate.notNull(theValidationSupport, "theValidationSupport must not be null");
		myCtx = theCtx;
		myValidationSupport = theValidationSupport;

		setValidationMessageLanguage(getLocale());
	}

	@Override
	public List<StructureDefinition> allStructures() {
		return myValidationSupport.fetchAllStructureDefinitions();
	}

	@Override
	public CodeSystem fetchCodeSystem(String theSystem) {
		if (myValidationSupport == null) {
			return null;
		} else {
			return (CodeSystem) myValidationSupport.fetchCodeSystem(theSystem);
		}
	}

	/**
	 * Fetches a resource by URI, consulting the local cache first; only
	 * non-null lookups are cached.
	 */
	@Override
	public <T extends Resource> T fetchResource(Class<T> theClass, String theUri) {
		if (myValidationSupport == null) {
			return null;
		} else {
			@SuppressWarnings("unchecked")
			T retVal = (T) myFetchedResourceCache.get(theUri);
			if (retVal == null) {
				retVal = myValidationSupport.fetchResource(theClass, theUri);
				if (retVal != null) {
					myFetchedResourceCache.put(theUri, retVal);
				}
			}
			return retVal;
		}
	}

	@Override
	public List<ConceptMap> findMapsForSource(String theUrl) {
		throw new UnsupportedOperationException();
	}

	@Override
	public String getAbbreviation(String theName) {
		throw new UnsupportedOperationException();
	}

	@Override
	public IParser getParser(ParserType theType) {
		throw new UnsupportedOperationException();
	}

	@Override
	public IParser getParser(String theType) {
		throw new UnsupportedOperationException();
	}

	/** Returns all resource type names, sorted alphabetically. */
	@Override
	public List<String> getResourceNames() {
		List<String> result = new ArrayList<>();
		for (ResourceType next : ResourceType.values()) {
			result.add(next.name());
		}
		Collections.sort(result);
		return result;
	}

	@Override
	public <T extends Resource> boolean hasResource(Class<T> theClass_, String theUri) {
		throw new UnsupportedOperationException();
	}

	@Override
	public IParser newJsonParser() {
		throw new UnsupportedOperationException();
	}

	@Override
	public IParser newXmlParser() {
		throw new UnsupportedOperationException();
	}

	@Override
	public INarrativeGenerator getNarrativeGenerator(String theS, String theS1) {
		throw new UnsupportedOperationException();
	}

	@Override
	public String oid2Uri(String theCode) {
		throw new UnsupportedOperationException();
	}

	/** Resolves a core type's StructureDefinition by canonical URL. */
	@Override
	public StructureDefinition fetchTypeDefinition(String typeName) {
		return fetchResource(org.hl7.fhir.dstu2016may.model.StructureDefinition.class, "http://hl7.org/fhir/StructureDefinition/" + typeName);
	}

	@Override
	public boolean supportsSystem(String theSystem) {
		if (myValidationSupport == null) {
			return false;
		} else {
			return myValidationSupport.isCodeSystemSupported(new ValidationSupportContext(myValidationSupport), theSystem);
		}
	}

	@Override
	public Set<String> typeTails() {
		return new HashSet<>(Arrays.asList("Integer", "UnsignedInt", "PositiveInt", "Decimal", "DateTime", "Date", "Time", "Instant", "String", "Uri", "Oid", "Uuid", "Id", "Boolean", "Code", "Markdown", "Base64Binary", "Coding", "CodeableConcept", "Attachment", "Identifier", "Quantity", "SampledData", "Range", "Period", "Ratio", "HumanName", "Address", "ContactPoint", "Timing", "Reference", "Annotation", "Signature", "Meta"));
	}

	/**
	 * Validates each Coding in turn; returns the first successful result, or
	 * an empty (not-OK) result when none validates.
	 */
	@Override
	public ValidationResult validateCode(CodeableConcept theCode, ValueSet theVs) {
		for (Coding next : theCode.getCoding()) {
			ValidationResult retVal = validateCode(next, theVs);
			if (retVal.isOk()) {
				return retVal;
			}
		}

		return new ValidationResult(null, null);
	}

	@Override
	public ValidationResult validateCode(Coding theCode, ValueSet theVs) {
		String system = theCode.getSystem();
		String code = theCode.getCode();
		String display = theCode.getDisplay();
		return validateCode(system, code, display, theVs);
	}

	/**
	 * Delegates to the validation support module; maps its result onto an
	 * IWorkerContext ValidationResult (null when the backend returns none).
	 */
	@Override
	public ValidationResult validateCode(String theSystem, String theCode, String theDisplay) {
		IValidationSupport.CodeValidationResult result = myValidationSupport.validateCode(new ValidationSupportContext(myValidationSupport), new ConceptValidationOptions(), theSystem, theCode, theDisplay, null);
		if (result == null) {
			return null;
		}

		OperationOutcome.IssueSeverity severity = null;
		if (result.getSeverity() != null) {
			severity = OperationOutcome.IssueSeverity.fromCode(result.getSeverityCode());
		}

		ConceptDefinitionComponent definition = result.getCode() != null ? new ConceptDefinitionComponent().setCode(result.getCode()) : null;
		return new ValidationResult(severity, result.getMessage(), definition);
	}

	@Override
	public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ConceptSetComponent theVsi) {
		throw new UnsupportedOperationException();
	}

	/**
	 * Validates a code against a ValueSet: first by scanning the compose
	 * includes directly, then by expanding the ValueSet and matching the
	 * expansion (honouring the CodeSystem's case sensitivity).
	 *
	 * NOTE(review): the fallback calls this class's own expandVS(ValueSet,
	 * boolean), which throws UnsupportedOperationException — so any ValueSet
	 * other than the hardcoded "languages" one whose code is not found in the
	 * compose appears to throw rather than return an error result; confirm
	 * this is the intended behaviour for callers.
	 */
	@Override
	public ValidationResult validateCode(String theSystem, String theCode, String theDisplay, ValueSet theVs) {

		// Fast path: the code is listed explicitly in a matching include.
		if (theVs != null && isNotBlank(theCode)) {
			for (ConceptSetComponent next : theVs.getCompose().getInclude()) {
				if (isBlank(theSystem) || theSystem.equals(next.getSystem())) {
					for (ConceptReferenceComponent nextCode : next.getConcept()) {
						if (theCode.equals(nextCode.getCode())) {
							CodeType code = new CodeType(theCode);
							return new ValidationResult(new ConceptDefinitionComponent(code));
						}
					}
				}
			}
		}

		// Determine case sensitivity from the CodeSystem (defaults to true).
		boolean caseSensitive = true;
		if (isNotBlank(theSystem)) {
			CodeSystem system = fetchCodeSystem(theSystem);
			if (system == null) {
				return new ValidationResult(OperationOutcome.IssueSeverity.INFORMATION, "Code " + Constants.codeSystemWithDefaultDescription(theSystem) + "/" + theCode + " was not validated because the code system is not present");
			}

			if (system.hasCaseSensitive()) {
				caseSensitive = system.getCaseSensitive();
			}
		}

		String wantCode = theCode;
		if (!caseSensitive) {
			wantCode = wantCode.toUpperCase();
		}

		ValueSetExpansionOutcome expandedValueSet = null;

		/*
		 * The following valueset is a special case, since the BCP codesystem is very difficult to expand
		 */
		if (theVs != null && "http://hl7.org/fhir/ValueSet/languages".equals(theVs.getUrl())) {
			// Build a synthetic expansion directly from the compose includes.
			ValueSet expansion = new ValueSet();
			for (ConceptSetComponent nextInclude : theVs.getCompose().getInclude()) {
				for (ConceptReferenceComponent nextConcept : nextInclude.getConcept()) {
					expansion.getExpansion().addContains().setCode(nextConcept.getCode()).setDisplay(nextConcept.getDisplay());
				}
			}
			expandedValueSet = new ValueSetExpansionOutcome(expansion);
		}

		if (expandedValueSet == null) {
			expandedValueSet = expandVS(theVs, true);
		}

		// Match against the expansion, case-folding when required.
		for (ValueSetExpansionContainsComponent next : expandedValueSet.getValueset().getExpansion().getContains()) {
			String nextCode = next.getCode();
			if (!caseSensitive) {
				nextCode = nextCode.toUpperCase();
			}

			if (nextCode.equals(wantCode)) {
				if (theSystem == null || next.getSystem().equals(theSystem)) {
					ConceptDefinitionComponent definition = new ConceptDefinitionComponent();
					definition.setCode(next.getCode());
					definition.setDisplay(next.getDisplay());
					return new ValidationResult(definition);
				}
			}
		}

		return new ValidationResult(OperationOutcome.IssueSeverity.ERROR, "Unknown code[" + theCode + "] in system[" + Constants.codeSystemWithDefaultDescription(theSystem) + "]");
	}

	@Override
	public ValueSetExpansionOutcome expandVS(ValueSet theSource, boolean theCacheOk) {
		throw new UnsupportedOperationException();
	}

	@Override
	public ValueSetExpansionComponent expandVS(ConceptSetComponent theInc) {
		throw new UnsupportedOperationException();
	}
}
{ "content_hash": "3812c721c45550619d3c7435465fa110", "timestamp": "", "source": "github", "line_count": 287, "max_line_length": 219, "avg_line_length": 34.153310104529616, "alnum_prop": 0.7596408896143644, "repo_name": "SingingTree/hapi-fhir", "id": "f1c2fe2bb0f8fd7f9c9fcf5bfc1b222d6c97f2ce", "size": "9802", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "hapi-fhir-structures-dstu2.1/src/main/java/org/hl7/fhir/dstu2016may/hapi/validation/HapiWorkerContext.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "3861" }, { "name": "CSS", "bytes": "7305" }, { "name": "Dockerfile", "bytes": "130" }, { "name": "GAP", "bytes": "25037" }, { "name": "HTML", "bytes": "244839" }, { "name": "Java", "bytes": "23821620" }, { "name": "JavaScript", "bytes": "31583" }, { "name": "Kotlin", "bytes": "3972" }, { "name": "Ruby", "bytes": "230674" }, { "name": "Shell", "bytes": "46254" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Reflection;
using Glimpse.Core.Message;

namespace Glimpse.AspNet.Message
{
    /// <summary>
    /// Timed diagnostic message raised when an ASP.NET route constraint is
    /// evaluated, capturing the constraint instance, the route values it was
    /// tested against, and whether the constraint matched.
    /// </summary>
    public class ProcessConstraintMessage : ITimedMessage, ISourceMessage
    {
        /// <param name="routeHashCode">Hash code identifying the route being processed.</param>
        /// <param name="constraintHashCode">Hash code identifying the evaluated constraint.</param>
        /// <param name="isMatch">Whether the constraint matched the supplied values.</param>
        /// <param name="parameterName">Name of the route parameter the constraint applies to.</param>
        /// <param name="constraint">The constraint object or pattern that was evaluated.</param>
        /// <param name="values">Route values the constraint was evaluated against.</param>
        /// <param name="routeDirection">Whether the route was matched for an incoming request or URL generation.</param>
        public ProcessConstraintMessage(int routeHashCode, int constraintHashCode, bool isMatch, string parameterName, object constraint, IDictionary<string, object> values, System.Web.Routing.RouteDirection routeDirection)
        {
            // BUG FIX: Id was declared with a private setter but never assigned,
            // so every instance reported Guid.Empty. Each message now gets a
            // unique identifier at construction time.
            Id = Guid.NewGuid();

            RouteHashCode = routeHashCode;
            ConstraintHashCode = constraintHashCode;
            IsMatch = isMatch;
            ParameterName = parameterName;
            Constraint = constraint;
            Values = values;
            RouteDirection = routeDirection;
        }

        // Unique identifier for this message instance.
        public Guid Id { get; private set; }

        // Timing data (populated externally via the ITimedMessage contract).
        public TimeSpan Offset { get; set; }

        public TimeSpan Duration { get; set; }

        public DateTime StartTime { get; set; }

        // Source of the message (populated externally via the ISourceMessage contract).
        public Type ExecutedType { get; set; }

        public MethodInfo ExecutedMethod { get; set; }

        // Hash code identifying the route being processed.
        public int RouteHashCode { get; private set; }

        // Hash code identifying the evaluated constraint.
        public int ConstraintHashCode { get; private set; }

        // True when the constraint matched the supplied route values.
        public bool IsMatch { get; private set; }

        // Route values the constraint was evaluated against.
        public IDictionary<string, object> Values { get; private set; }

        // Name of the route parameter the constraint applies to.
        public string ParameterName { get; private set; }

        // The constraint object or pattern that was evaluated.
        public object Constraint { get; private set; }

        // Incoming request matching vs. URL generation.
        public System.Web.Routing.RouteDirection RouteDirection { get; private set; }
    }
}
{ "content_hash": "ca85ebd3dba5ca48d7bdc9ba4054864e", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 224, "avg_line_length": 33.8936170212766, "alnum_prop": 0.6177024482109228, "repo_name": "rho24/Glimpse", "id": "8692b756a749c37f4a8b4b53df5d21a5295b8f8b", "size": "1595", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "source/Glimpse.AspNet/Message/ProcessConstraintMessage.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "67657" }, { "name": "C#", "bytes": "2515350" }, { "name": "CSS", "bytes": "53931" }, { "name": "HTML", "bytes": "18174" }, { "name": "JavaScript", "bytes": "195850" }, { "name": "Pascal", "bytes": "1143" }, { "name": "PowerShell", "bytes": "32016" }, { "name": "Shell", "bytes": "398" } ], "symlink_target": "" }
#!/bin/bash
# Bootstrap a local Python virtualenv without requiring a system-wide
# virtualenv install: download a pinned virtualenv release, unpack it into
# ./.bootstrap, create an environment, and install the project requirements.
#
# Abort on the first failed command and on failures inside pipelines, so a
# broken download or extraction cannot lead to activating a half-built env.
# NOTE: -u is deliberately omitted — old virtualenv `activate` scripts read
# possibly-unset variables such as PS1 and would trip an unbound-variable error.
set -e
set -o pipefail

VIRT='https://pypi.python.org/packages/source/v/virtualenv/virtualenv-12.1.1.tar.gz'
BOOTSTRAP='.bootstrap'

mkdir -p "$BOOTSTRAP"
wget "$VIRT" -O "$BOOTSTRAP/virtualenv.tar.gz"
tar xfv "$BOOTSTRAP/virtualenv.tar.gz" -C "$BOOTSTRAP/"

# The tarball unpacks to a versioned directory (virtualenv-12.1.1/); the glob
# must stay unquoted so the shell expands it.
./"$BOOTSTRAP"/virtualenv*/virtualenv.py "$BOOTSTRAP/virt"

# Build tools needed so pip can compile C extensions from requirements.txt.
sudo apt-get install -y gcc python-dev

source "$BOOTSTRAP/virt/bin/activate"
pip install -r requirements.txt
{ "content_hash": "fe3280c2948d7a698f2f896c632a26ef", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 84, "avg_line_length": 28.53846153846154, "alnum_prop": 0.7654986522911051, "repo_name": "aaronschif/machine_state", "id": "bc6c6769f9cfa59c3c7333da5fe709e3fd2b3bd3", "size": "388", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "virtualenv_bootstrap.sh", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "3215" }, { "name": "Shell", "bytes": "388" }, { "name": "VimL", "bytes": "101" } ], "symlink_target": "" }
//---THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT EDIT---// //To change this file, alter the niftools/docsys/gen_niflib.py Python script. #include <string> #include <iostream> #include "../../include/NIF_IO.h" #include "../../include/gen/enums.h" #include "../../include/gen/enums_intl.h" using namespace std; namespace Niflib { //--ForceType--// void NifStream( ForceType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = ForceType(temp); } void NifStream( ForceType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, ForceType const & val ) { switch ( val ) { case FORCE_PLANAR: return out << "FORCE_PLANAR"; case FORCE_SPHERICAL: return out << "FORCE_SPHERICAL"; case FORCE_UNKNOWN: return out << "FORCE_UNKNOWN"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--EffectShaderControlledColor--// void NifStream( EffectShaderControlledColor & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = EffectShaderControlledColor(temp); } void NifStream( EffectShaderControlledColor const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, EffectShaderControlledColor const & val ) { switch ( val ) { case ESCC_EMISSIVE_COLOR: return out << "ESCC_EMISSIVE_COLOR"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--CollisionMode--// void NifStream( CollisionMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = CollisionMode(temp); } void NifStream( CollisionMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, CollisionMode const & val ) { switch ( val ) { case CM_USE_OBB: return out << "CM_USE_OBB"; case CM_USE_TRI: return out << "CM_USE_TRI"; case CM_USE_ABV: return out << "CM_USE_ABV"; case CM_NOTEST: return out << "CM_NOTEST"; case CM_USE_NIBOUND: return out << "CM_USE_NIBOUND"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--DeactivatorType--// void NifStream( DeactivatorType & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = DeactivatorType(temp); } void NifStream( DeactivatorType const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, DeactivatorType const & val ) { switch ( val ) { case DEACTIVATOR_INVALID: return out << "DEACTIVATOR_INVALID"; case DEACTIVATOR_NEVER: return out << "DEACTIVATOR_NEVER"; case DEACTIVATOR_SPATIAL: return out << "DEACTIVATOR_SPATIAL"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--hkResponseType--// void NifStream( hkResponseType & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = hkResponseType(temp); } void NifStream( hkResponseType const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, hkResponseType const & val ) { switch ( val ) { case RESPONSE_INVALID: return out << "RESPONSE_INVALID"; case RESPONSE_SIMPLE_CONTACT: return out << "RESPONSE_SIMPLE_CONTACT"; case RESPONSE_REPORTING: return out << "RESPONSE_REPORTING"; case RESPONSE_NONE: return out << "RESPONSE_NONE"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--TexTransform--// void NifStream( TexTransform & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = TexTransform(temp); } void NifStream( TexTransform const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, TexTransform const & val ) { switch ( val ) { case TT_TRANSLATE_U: return out << "TT_TRANSLATE_U"; case TT_TRANSLATE_V: return out << "TT_TRANSLATE_V"; case TT_ROTATE: return out << "TT_ROTATE"; case TT_SCALE_U: return out << "TT_SCALE_U"; case TT_SCALE_V: return out << "TT_SCALE_V"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--HavokMaterial--// void NifStream( HavokMaterial & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = HavokMaterial(temp); } void NifStream( HavokMaterial const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, HavokMaterial const & val ) { switch ( val ) { case HAV_MAT_STONE: return out << "HAV_MAT_STONE"; case HAV_MAT_CLOTH: return out << "HAV_MAT_CLOTH"; case HAV_MAT_DIRT: return out << "HAV_MAT_DIRT"; case HAV_MAT_GLASS: return out << "HAV_MAT_GLASS"; case HAV_MAT_GRASS: return out << "HAV_MAT_GRASS"; case HAV_MAT_METAL: return out << "HAV_MAT_METAL"; case HAV_MAT_ORGANIC: return out << "HAV_MAT_ORGANIC"; case HAV_MAT_SKIN: return out << "HAV_MAT_SKIN"; case HAV_MAT_WATER: return out << "HAV_MAT_WATER"; case HAV_MAT_WOOD: return out << "HAV_MAT_WOOD"; case HAV_MAT_HEAVY_STONE: return out << "HAV_MAT_HEAVY_STONE"; case HAV_MAT_HEAVY_METAL: return out << "HAV_MAT_HEAVY_METAL"; case HAV_MAT_HEAVY_WOOD: return out << "HAV_MAT_HEAVY_WOOD"; case HAV_MAT_CHAIN: return out << "HAV_MAT_CHAIN"; case HAV_MAT_SNOW: return out << "HAV_MAT_SNOW"; case HAV_MAT_STONE_STAIRS: return out << "HAV_MAT_STONE_STAIRS"; case HAV_MAT_CLOTH_STAIRS: return out << "HAV_MAT_CLOTH_STAIRS"; case HAV_MAT_DIRT_STAIRS: return out << "HAV_MAT_DIRT_STAIRS"; case HAV_MAT_GLASS_STAIRS: return out << "HAV_MAT_GLASS_STAIRS"; case HAV_MAT_GRASS_STAIRS: return out << "HAV_MAT_GRASS_STAIRS"; case HAV_MAT_METAL_STAIRS: return out << "HAV_MAT_METAL_STAIRS"; case HAV_MAT_ORGANIC_STAIRS: return out << "HAV_MAT_ORGANIC_STAIRS"; case HAV_MAT_SKIN_STAIRS: return out << "HAV_MAT_SKIN_STAIRS"; case HAV_MAT_WATER_STAIRS: return out << "HAV_MAT_WATER_STAIRS"; case HAV_MAT_WOOD_STAIRS: return out << "HAV_MAT_WOOD_STAIRS"; case HAV_MAT_HEAVY_STONE_STAIRS: return out << "HAV_MAT_HEAVY_STONE_STAIRS"; case HAV_MAT_HEAVY_METAL_STAIRS: return out << 
"HAV_MAT_HEAVY_METAL_STAIRS"; case HAV_MAT_HEAVY_WOOD_STAIRS: return out << "HAV_MAT_HEAVY_WOOD_STAIRS"; case HAV_MAT_CHAIN_STAIRS: return out << "HAV_MAT_CHAIN_STAIRS"; case HAV_MAT_SNOW_STAIRS: return out << "HAV_MAT_SNOW_STAIRS"; case HAV_MAT_ELEVATOR: return out << "HAV_MAT_ELEVATOR"; case HAV_MAT_RUBBER: return out << "HAV_MAT_RUBBER"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--EmitFrom--// void NifStream( EmitFrom & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = EmitFrom(temp); } void NifStream( EmitFrom const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, EmitFrom const & val ) { switch ( val ) { case EMIT_FROM_VERTICES: return out << "EMIT_FROM_VERTICES"; case EMIT_FROM_FACE_CENTER: return out << "EMIT_FROM_FACE_CENTER"; case EMIT_FROM_EDGE_CENTER: return out << "EMIT_FROM_EDGE_CENTER"; case EMIT_FROM_FACE_SURFACE: return out << "EMIT_FROM_FACE_SURFACE"; case EMIT_FROM_EDGE_SURFACE: return out << "EMIT_FROM_EDGE_SURFACE"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--EndianType--// void NifStream( EndianType & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = EndianType(temp); } void NifStream( EndianType const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, EndianType const & val ) { switch ( val ) { case ENDIAN_BIG: return out << "ENDIAN_BIG"; case ENDIAN_LITTLE: return out << "ENDIAN_LITTLE"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--VelocityType--// void NifStream( VelocityType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = VelocityType(temp); } void NifStream( VelocityType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, VelocityType const & val ) { switch ( val ) { case VELOCITY_USE_NORMALS: return out << "VELOCITY_USE_NORMALS"; case VELOCITY_USE_RANDOM: return out << "VELOCITY_USE_RANDOM"; case VELOCITY_USE_DIRECTION: return out << "VELOCITY_USE_DIRECTION"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--AnimationType--// void NifStream( AnimationType & val, istream& in, const NifInfo & info ) { unsigned short temp; NifStream( temp, in, info ); val = AnimationType(temp); } void NifStream( AnimationType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned short)(val), out, info ); } ostream & operator<<( ostream & out, AnimationType const & val ) { switch ( val ) { case SIT: return out << "Sit"; case SLEEP: return out << "Sleep"; case LEAN: return out << "Lean"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--DataStreamUsage--// void NifStream( DataStreamUsage & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = DataStreamUsage(temp); } void NifStream( DataStreamUsage const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, DataStreamUsage const & val ) { switch ( val ) { case USAGE_VERTEX_INDEX: return out << "USAGE_VERTEX_INDEX"; case USAGE_VERTEX: return out << "USAGE_VERTEX"; case USAGE_SHADER_CONSTANT: return out << "USAGE_SHADER_CONSTANT"; case USAGE_USER: return out << "USAGE_USER"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--StencilCompareMode--// void NifStream( StencilCompareMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = StencilCompareMode(temp); } void NifStream( StencilCompareMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, StencilCompareMode const & val ) { switch ( val ) { case TEST_NEVER: return out << "TEST_NEVER"; case TEST_LESS: return out << "TEST_LESS"; case TEST_EQUAL: return out << "TEST_EQUAL"; case TEST_LESS_EQUAL: return out << "TEST_LESS_EQUAL"; case TEST_GREATER: return out << "TEST_GREATER"; case TEST_NOT_EQUAL: return out << "TEST_NOT_EQUAL"; case TEST_GREATER_EQUAL: return out << "TEST_GREATER_EQUAL"; case TEST_ALWAYS: return out << "TEST_ALWAYS"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--EffectShaderControlledVariable--// void NifStream( EffectShaderControlledVariable & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = EffectShaderControlledVariable(temp); } void NifStream( EffectShaderControlledVariable const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, EffectShaderControlledVariable const & val ) { switch ( val ) { case ESCV_EMISSIVEMULTIPLE: return out << "ESCV_EmissiveMultiple"; case ESCV_FALLOFF_START_ANGLE: return out << "ESCV_FALLOFF_START_ANGLE"; case ESCV_FALLOFF_STOP_ANGLE: return out << "ESCV_FALLOFF_STOP_ANGLE"; case ESCV_FALLOFF_START_OPACITY: return out << "ESCV_FALLOFF_START_OPACITY"; case ESCV_FALLOFF_STOP_OPACITY: return out << "ESCV_FALLOFF_STOP_OPACITY"; case ESCV_ALPHA_TRANSPARENCY: return out << "ESCV_ALPHA_TRANSPARENCY"; case ESCV_U_OFFSET: return out << "ESCV_U_OFFSET"; case ESCV_U_SCALE: return out << "ESCV_U_SCALE"; case ESCV_V_OFFSET: return out << "ESCV_V_OFFSET"; case ESCV_V_SCALE: return out 
<< "ESCV_V_SCALE"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--TexFilterMode--// void NifStream( TexFilterMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = TexFilterMode(temp); } void NifStream( TexFilterMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, TexFilterMode const & val ) { switch ( val ) { case FILTER_NEAREST: return out << "FILTER_NEAREST"; case FILTER_BILERP: return out << "FILTER_BILERP"; case FILTER_TRILERP: return out << "FILTER_TRILERP"; case FILTER_NEAREST_MIPNEAREST: return out << "FILTER_NEAREST_MIPNEAREST"; case FILTER_NEAREST_MIPLERP: return out << "FILTER_NEAREST_MIPLERP"; case FILTER_BILERP_MIPNEAREST: return out << "FILTER_BILERP_MIPNEAREST"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--SymmetryType--// void NifStream( SymmetryType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = SymmetryType(temp); } void NifStream( SymmetryType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, SymmetryType const & val ) { switch ( val ) { case SPHERICAL_SYMMETRY: return out << "SPHERICAL_SYMMETRY"; case CYLINDRICAL_SYMMETRY: return out << "CYLINDRICAL_SYMMETRY"; case PLANAR_SYMMETRY: return out << "PLANAR_SYMMETRY"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--LightMode--// void NifStream( LightMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = LightMode(temp); } void NifStream( LightMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, LightMode const & val ) { switch ( val ) { case LIGHT_MODE_EMISSIVE: return out << "LIGHT_MODE_EMISSIVE"; case LIGHT_MODE_EMI_AMB_DIF: return out << "LIGHT_MODE_EMI_AMB_DIF"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--MipMapFormat--// void NifStream( MipMapFormat & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = MipMapFormat(temp); } void NifStream( MipMapFormat const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, MipMapFormat const & val ) { switch ( val ) { case MIP_FMT_NO: return out << "MIP_FMT_NO"; case MIP_FMT_YES: return out << "MIP_FMT_YES"; case MIP_FMT_DEFAULT: return out << "MIP_FMT_DEFAULT"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--StencilAction--// void NifStream( StencilAction & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = StencilAction(temp); } void NifStream( StencilAction const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, StencilAction const & val ) { switch ( val ) { case ACTION_KEEP: return out << "ACTION_KEEP"; case ACTION_ZERO: return out << "ACTION_ZERO"; case ACTION_REPLACE: return out << "ACTION_REPLACE"; case ACTION_INCREMENT: return out << "ACTION_INCREMENT"; case ACTION_DECREMENT: return out << "ACTION_DECREMENT"; case ACTION_INVERT: return out << "ACTION_INVERT"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--SyncPoint--// void NifStream( SyncPoint & val, istream& in, const NifInfo & info ) { unsigned short temp; NifStream( temp, in, info ); val = SyncPoint(temp); } void NifStream( SyncPoint const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned short)(val), out, info ); } ostream & operator<<( ostream & out, SyncPoint const & val ) { switch ( val ) { case SYNC_ANY: return out << "SYNC_ANY"; case SYNC_UPDATE: return out << "SYNC_UPDATE"; case SYNC_POST_UPDATE: return out << "SYNC_POST_UPDATE"; case SYNC_VISIBLE: return out << "SYNC_VISIBLE"; case SYNC_RENDER: return out << "SYNC_RENDER"; case SYNC_PHYSICS_SIMULATE: return out << "SYNC_PHYSICS_SIMULATE"; case SYNC_PHYSICS_COMPLETED: return out << "SYNC_PHYSICS_COMPLETED"; case SYNC_REFLECTIONS: return out << "SYNC_REFLECTIONS"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--TexType--// void NifStream( TexType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = TexType(temp); } void NifStream( TexType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, TexType const & val ) { switch ( val ) { case BASE_MAP: return out << "BASE_MAP"; case DARK_MAP: return out << "DARK_MAP"; case DETAIL_MAP: return out << "DETAIL_MAP"; case GLOSS_MAP: return out << "GLOSS_MAP"; case GLOW_MAP: return out << "GLOW_MAP"; case BUMP_MAP: return out << "BUMP_MAP"; case NORMAL_MAP: return out << "NORMAL_MAP"; case UNKNOWN2_MAP: return out << "UNKNOWN2_MAP"; case DECAL_0_MAP: return out << "DECAL_0_MAP"; case DECAL_1_MAP: return out << "DECAL_1_MAP"; case DECAL_2_MAP: return out << "DECAL_2_MAP"; case DECAL_3_MAP: return out << "DECAL_3_MAP"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--LightingShaderControlledVariable--// void NifStream( LightingShaderControlledVariable & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = LightingShaderControlledVariable(temp); } void NifStream( LightingShaderControlledVariable const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, LightingShaderControlledVariable const & val ) { switch ( val ) { case LSCV_UNKNOWN1: return out << "LSCV_UNKNOWN1"; case LSCV_ENVIRONMENTMAPSCALE: return out << "LSCV_ENVIRONMENTMAPSCALE"; case LSCV_GLOSSINESS: return out << "LSCV_GLOSSINESS"; case LSCV_SPECULAR_STRENGTH: return out << "LSCV_SPECULAR_STRENGTH"; case LSCV_EMISSIVE_MULTIPLE: return out << "LSCV_EMISSIVE_MULTIPLE"; case LSCV_ALPHA: return out << "LSCV_ALPHA"; case LSCV_U_OFFSET: return out << "LSCV_U_OFFSET"; case LSCV_U_SCALE: return out << "LSCV_U_SCALE"; case LSCV_V_OFFSET: return out << "LSCV_V_OFFSET"; case LSCV_V_SCALE: return out << "LSCV_V_SCALE"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--BSDismemberBodyPartType--// void NifStream( BSDismemberBodyPartType & val, istream& in, const NifInfo & info ) { unsigned short temp; NifStream( temp, in, info ); val = BSDismemberBodyPartType(temp); } void NifStream( BSDismemberBodyPartType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned short)(val), out, info ); } ostream & operator<<( ostream & out, BSDismemberBodyPartType const & val ) { switch ( val ) { case BP_TORSO: return out << "BP_TORSO"; case BP_HEAD: return out << "BP_HEAD"; case BP_HEAD2: return out << "BP_HEAD2"; case BP_LEFTARM: return out << "BP_LEFTARM"; case BP_LEFTARM2: return out << "BP_LEFTARM2"; case BP_RIGHTARM: return out << "BP_RIGHTARM"; case BP_RIGHTARM2: return out << "BP_RIGHTARM2"; case BP_LEFTLEG: return out << "BP_LEFTLEG"; case BP_LEFTLEG2: return out << "BP_LEFTLEG2"; case BP_LEFTLEG3: return out << "BP_LEFTLEG3"; case BP_RIGHTLEG: return out << "BP_RIGHTLEG"; case BP_RIGHTLEG2: return out << "BP_RIGHTLEG2"; case BP_RIGHTLEG3: return out << "BP_RIGHTLEG3"; case BP_BRAIN: return out << "BP_BRAIN"; case SBP_30_HEAD: return out << "SBP_30_HEAD"; case SBP_31_HAIR: return out << "SBP_31_HAIR"; case SBP_32_BODY: return out << "SBP_32_BODY"; case SBP_33_HANDS: return out << "SBP_33_HANDS"; case SBP_34_FOREARMS: return out << "SBP_34_FOREARMS"; case SBP_35_AMULET: return out << "SBP_35_AMULET"; case SBP_36_RING: return out << "SBP_36_RING"; case SBP_37_FEET: return out << "SBP_37_FEET"; case SBP_38_CALVES: return out << "SBP_38_CALVES"; case SBP_39_SHIELD: return out << "SBP_39_SHIELD"; case SBP_40_TAIL: return out << "SBP_40_TAIL"; case SBP_41_LONGHAIR: return out << "SBP_41_LONGHAIR"; case SBP_42_CIRCLET: return out << "SBP_42_CIRCLET"; case SBP_43_EARS: return out << "SBP_43_EARS"; case SBP_44_DRAGON_BLOODHEAD_OR_MOD_MOUTH: return out << "SBP_44_DRAGON_BLOODHEAD_OR_MOD_MOUTH"; case SBP_45_DRAGON_BLOODWINGL_OR_MOD_NECK: return out << "SBP_45_DRAGON_BLOODWINGL_OR_MOD_NECK"; case 
SBP_46_DRAGON_BLOODWINGR_OR_MOD_CHEST_PRIMARY: return out << "SBP_46_DRAGON_BLOODWINGR_OR_MOD_CHEST_PRIMARY"; case SBP_47_DRAGON_BLOODTAIL_OR_MOD_BACK: return out << "SBP_47_DRAGON_BLOODTAIL_OR_MOD_BACK"; case SBP_48_MOD_MISC1: return out << "SBP_48_MOD_MISC1"; case SBP_49_MOD_PELVIS_PRIMARY: return out << "SBP_49_MOD_PELVIS_PRIMARY"; case SBP_50_DECAPITATEDHEAD: return out << "SBP_50_DECAPITATEDHEAD"; case SBP_51_DECAPITATE: return out << "SBP_51_DECAPITATE"; case SBP_52_MOD_PELVIS_SECONDARY: return out << "SBP_52_MOD_PELVIS_SECONDARY"; case SBP_53_MOD_LEG_RIGHT: return out << "SBP_53_MOD_LEG_RIGHT"; case SBP_54_MOD_LEG_LEFT: return out << "SBP_54_MOD_LEG_LEFT"; case SBP_55_MOD_FACE_JEWELRY: return out << "SBP_55_MOD_FACE_JEWELRY"; case SBP_56_MOD_CHEST_SECONDARY: return out << "SBP_56_MOD_CHEST_SECONDARY"; case SBP_57_MOD_SHOULDER: return out << "SBP_57_MOD_SHOULDER"; case SBP_58_MOD_ARM_LEFT: return out << "SBP_58_MOD_ARM_LEFT"; case SBP_59_MOD_ARM_RIGHT: return out << "SBP_59_MOD_ARM_RIGHT"; case SBP_60_MOD_MISC2: return out << "SBP_60_MOD_MISC2"; case SBP_61_FX01: return out << "SBP_61_FX01"; case BP_SECTIONCAP_HEAD: return out << "BP_SECTIONCAP_HEAD"; case BP_SECTIONCAP_HEAD2: return out << "BP_SECTIONCAP_HEAD2"; case BP_SECTIONCAP_LEFTARM: return out << "BP_SECTIONCAP_LEFTARM"; case BP_SECTIONCAP_LEFTARM2: return out << "BP_SECTIONCAP_LEFTARM2"; case BP_SECTIONCAP_RIGHTARM: return out << "BP_SECTIONCAP_RIGHTARM"; case BP_SECTIONCAP_RIGHTARM2: return out << "BP_SECTIONCAP_RIGHTARM2"; case BP_SECTIONCAP_LEFTLEG: return out << "BP_SECTIONCAP_LEFTLEG"; case BP_SECTIONCAP_LEFTLEG2: return out << "BP_SECTIONCAP_LEFTLEG2"; case BP_SECTIONCAP_LEFTLEG3: return out << "BP_SECTIONCAP_LEFTLEG3"; case BP_SECTIONCAP_RIGHTLEG: return out << "BP_SECTIONCAP_RIGHTLEG"; case BP_SECTIONCAP_RIGHTLEG2: return out << "BP_SECTIONCAP_RIGHTLEG2"; case BP_SECTIONCAP_RIGHTLEG3: return out << "BP_SECTIONCAP_RIGHTLEG3"; case BP_SECTIONCAP_BRAIN: return out << "BP_SECTIONCAP_BRAIN"; case 
SBP_130_HEAD: return out << "SBP_130_HEAD"; case SBP_131_HAIR: return out << "SBP_131_HAIR"; case SBP_141_LONGHAIR: return out << "SBP_141_LONGHAIR"; case SBP_142_CIRCLET: return out << "SBP_142_CIRCLET"; case SBP_143_EARS: return out << "SBP_143_EARS"; case SBP_150_DECAPITATEDHEAD: return out << "SBP_150_DECAPITATEDHEAD"; case BP_TORSOCAP_HEAD: return out << "BP_TORSOCAP_HEAD"; case BP_TORSOCAP_HEAD2: return out << "BP_TORSOCAP_HEAD2"; case BP_TORSOCAP_LEFTARM: return out << "BP_TORSOCAP_LEFTARM"; case BP_TORSOCAP_LEFTARM2: return out << "BP_TORSOCAP_LEFTARM2"; case BP_TORSOCAP_RIGHTARM: return out << "BP_TORSOCAP_RIGHTARM"; case BP_TORSOCAP_RIGHTARM2: return out << "BP_TORSOCAP_RIGHTARM2"; case BP_TORSOCAP_LEFTLEG: return out << "BP_TORSOCAP_LEFTLEG"; case BP_TORSOCAP_LEFTLEG2: return out << "BP_TORSOCAP_LEFTLEG2"; case BP_TORSOCAP_LEFTLEG3: return out << "BP_TORSOCAP_LEFTLEG3"; case BP_TORSOCAP_RIGHTLEG: return out << "BP_TORSOCAP_RIGHTLEG"; case BP_TORSOCAP_RIGHTLEG2: return out << "BP_TORSOCAP_RIGHTLEG2"; case BP_TORSOCAP_RIGHTLEG3: return out << "BP_TORSOCAP_RIGHTLEG3"; case BP_TORSOCAP_BRAIN: return out << "BP_TORSOCAP_BRAIN"; case SBP_230_HEAD: return out << "SBP_230_HEAD"; case BP_TORSOSECTION_HEAD: return out << "BP_TORSOSECTION_HEAD"; case BP_TORSOSECTION_HEAD2: return out << "BP_TORSOSECTION_HEAD2"; case BP_TORSOSECTION_LEFTARM: return out << "BP_TORSOSECTION_LEFTARM"; case BP_TORSOSECTION_LEFTARM2: return out << "BP_TORSOSECTION_LEFTARM2"; case BP_TORSOSECTION_RIGHTARM: return out << "BP_TORSOSECTION_RIGHTARM"; case BP_TORSOSECTION_RIGHTARM2: return out << "BP_TORSOSECTION_RIGHTARM2"; case BP_TORSOSECTION_LEFTLEG: return out << "BP_TORSOSECTION_LEFTLEG"; case BP_TORSOSECTION_LEFTLEG2: return out << "BP_TORSOSECTION_LEFTLEG2"; case BP_TORSOSECTION_LEFTLEG3: return out << "BP_TORSOSECTION_LEFTLEG3"; case BP_TORSOSECTION_RIGHTLEG: return out << "BP_TORSOSECTION_RIGHTLEG"; case BP_TORSOSECTION_RIGHTLEG2: return out << "BP_TORSOSECTION_RIGHTLEG2"; case 
BP_TORSOSECTION_RIGHTLEG3: return out << "BP_TORSOSECTION_RIGHTLEG3"; case BP_TORSOSECTION_BRAIN: return out << "BP_TORSOSECTION_BRAIN"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--MoppDataBuildType--// void NifStream( MoppDataBuildType & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = MoppDataBuildType(temp); } void NifStream( MoppDataBuildType const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, MoppDataBuildType const & val ) { switch ( val ) { case BUILT_WITH_CHUNK_SUBDIVISION: return out << "BUILT_WITH_CHUNK_SUBDIVISION"; case BUILT_WITHOUT_CHUNK_SUBDIVISION: return out << "BUILT_WITHOUT_CHUNK_SUBDIVISION"; case BUILD_NOT_SET: return out << "BUILD_NOT_SET"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--ChannelType--// void NifStream( ChannelType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = ChannelType(temp); } void NifStream( ChannelType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, ChannelType const & val ) { switch ( val ) { case CHNL_RED: return out << "CHNL_RED"; case CHNL_GREEN: return out << "CHNL_GREEN"; case CHNL_BLUE: return out << "CHNL_BLUE"; case CHNL_ALPHA: return out << "CHNL_ALPHA"; case CHNL_COMPRESSED: return out << "CHNL_COMPRESSED"; case CHNL_INDEX: return out << "CHNL_INDEX"; case CHNL_EMPTY: return out << "CHNL_EMPTY"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--SkyObjectType--// void NifStream( SkyObjectType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = SkyObjectType(temp); } void NifStream( SkyObjectType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, SkyObjectType const & val ) { switch ( val ) { case BSSM_SKY_TEXTURE: return out << "BSSM_SKY_TEXTURE"; case BSSM_SKY_SUNGLARE: return out << "BSSM_SKY_SUNGLARE"; case BSSM_SKY: return out << "BSSM_SKY"; case BSSM_SKY_CLOUDS: return out << "BSSM_SKY_CLOUDS"; case BSSM_SKY_STARS: return out << "BSSM_SKY_STARS"; case BSSM_SKY_MOON_STARS_MASK: return out << "BSSM_SKY_MOON_STARS_MASK"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--OblivionLayer--// void NifStream( OblivionLayer & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = OblivionLayer(temp); } void NifStream( OblivionLayer const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, OblivionLayer const & val ) { switch ( val ) { case OL_UNIDENTIFIED: return out << "OL_UNIDENTIFIED"; case OL_STATIC: return out << "OL_STATIC"; case OL_ANIM_STATIC: return out << "OL_ANIM_STATIC"; case OL_TRANSPARENT: return out << "OL_TRANSPARENT"; case OL_CLUTTER: return out << "OL_CLUTTER"; case OL_WEAPON: return out << "OL_WEAPON"; case OL_PROJECTILE: return out << "OL_PROJECTILE"; case OL_SPELL: return out << "OL_SPELL"; case OL_BIPED: return out << "OL_BIPED"; case OL_TREES: return out << "OL_TREES"; case OL_PROPS: return out << "OL_PROPS"; case OL_WATER: return out << "OL_WATER"; case OL_TRIGGER: return out << "OL_TRIGGER"; case OL_TERRAIN: return out << "OL_TERRAIN"; case OL_TRAP: return out << "OL_TRAP"; case OL_NONCOLLIDABLE: return out << "OL_NONCOLLIDABLE"; case OL_CLOUD_TRAP: return out << "OL_CLOUD_TRAP"; case OL_GROUND: 
return out << "OL_GROUND"; case OL_PORTAL: return out << "OL_PORTAL"; case OL_STAIRS: return out << "OL_STAIRS"; case OL_CHAR_CONTROLLER: return out << "OL_CHAR_CONTROLLER"; case OL_AVOID_BOX: return out << "OL_AVOID_BOX"; case OL_UNKNOWN1: return out << "OL_UNKNOWN1"; case OL_UNKNOWN2: return out << "OL_UNKNOWN2"; case OL_CAMERA_PICK: return out << "OL_CAMERA_PICK"; case OL_ITEM_PICK: return out << "OL_ITEM_PICK"; case OL_LINE_OF_SIGHT: return out << "OL_LINE_OF_SIGHT"; case OL_PATH_PICK: return out << "OL_PATH_PICK"; case OL_CUSTOM_PICK_1: return out << "OL_CUSTOM_PICK_1"; case OL_CUSTOM_PICK_2: return out << "OL_CUSTOM_PICK_2"; case OL_SPELL_EXPLOSION: return out << "OL_SPELL_EXPLOSION"; case OL_DROPPING_PICK: return out << "OL_DROPPING_PICK"; case OL_OTHER: return out << "OL_OTHER"; case OL_HEAD: return out << "OL_HEAD"; case OL_BODY: return out << "OL_BODY"; case OL_SPINE1: return out << "OL_SPINE1"; case OL_SPINE2: return out << "OL_SPINE2"; case OL_L_UPPER_ARM: return out << "OL_L_UPPER_ARM"; case OL_L_FOREARM: return out << "OL_L_FOREARM"; case OL_L_HAND: return out << "OL_L_HAND"; case OL_L_THIGH: return out << "OL_L_THIGH"; case OL_L_CALF: return out << "OL_L_CALF"; case OL_L_FOOT: return out << "OL_L_FOOT"; case OL_R_UPPER_ARM: return out << "OL_R_UPPER_ARM"; case OL_R_FOREARM: return out << "OL_R_FOREARM"; case OL_R_HAND: return out << "OL_R_HAND"; case OL_R_THIGH: return out << "OL_R_THIGH"; case OL_R_CALF: return out << "OL_R_CALF"; case OL_R_FOOT: return out << "OL_R_FOOT"; case OL_TAIL: return out << "OL_TAIL"; case OL_SIDE_WEAPON: return out << "OL_SIDE_WEAPON"; case OL_SHIELD: return out << "OL_SHIELD"; case OL_QUIVER: return out << "OL_QUIVER"; case OL_BACK_WEAPON: return out << "OL_BACK_WEAPON"; case OL_BACK_WEAPON2: return out << "OL_BACK_WEAPON2"; case OL_PONYTAIL: return out << "OL_PONYTAIL"; case OL_WING: return out << "OL_WING"; case OL_NULL: return out << "OL_NULL"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--PSLoopBehavior--// void NifStream( PSLoopBehavior & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = PSLoopBehavior(temp); } void NifStream( PSLoopBehavior const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, PSLoopBehavior const & val ) { switch ( val ) { case PS_LOOP_CLAMP_BIRTH: return out << "PS_LOOP_CLAMP_BIRTH"; case PS_LOOP_CLAMP_DEATH: return out << "PS_LOOP_CLAMP_DEATH"; case PS_LOOP_AGESCALE: return out << "PS_LOOP_AGESCALE"; case PS_LOOP_LOOP: return out << "PS_LOOP_LOOP"; case PS_LOOP_REFLECT: return out << "PS_LOOP_REFLECT"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--SolverDeactivation--// void NifStream( SolverDeactivation & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = SolverDeactivation(temp); } void NifStream( SolverDeactivation const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, SolverDeactivation const & val ) { switch ( val ) { case SOLVER_DEACTIVATION_INVALID: return out << "SOLVER_DEACTIVATION_INVALID"; case SOLVER_DEACTIVATION_OFF: return out << "SOLVER_DEACTIVATION_OFF"; case SOLVER_DEACTIVATION_LOW: return out << "SOLVER_DEACTIVATION_LOW"; case SOLVER_DEACTIVATION_MEDIUM: return out << "SOLVER_DEACTIVATION_MEDIUM"; case SOLVER_DEACTIVATION_HIGH: return out << "SOLVER_DEACTIVATION_HIGH"; case SOLVER_DEACTIVATION_MAX: return out << "SOLVER_DEACTIVATION_MAX"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--MeshPrimitiveType--// void NifStream( MeshPrimitiveType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = MeshPrimitiveType(temp); } void NifStream( MeshPrimitiveType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, MeshPrimitiveType const & val ) { switch ( val ) { case MESH_PRIMITIVE_TRIANGLES: return out << "MESH_PRIMITIVE_TRIANGLES"; case MESH_PRIMITIVE_TRISTRIPS: return out << "MESH_PRIMITIVE_TRISTRIPS"; case MESH_PRIMITIVE_LINESTRIPS: return out << "MESH_PRIMITIVE_LINESTRIPS"; case MESH_PRIMITIVE_QUADS: return out << "MESH_PRIMITIVE_QUADS"; case MESH_PRIMITIVE_POINTS: return out << "MESH_PRIMITIVE_POINTS"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--FaceDrawMode--// void NifStream( FaceDrawMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = FaceDrawMode(temp); } void NifStream( FaceDrawMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, FaceDrawMode const & val ) { switch ( val ) { case DRAW_CCW_OR_BOTH: return out << "DRAW_CCW_OR_BOTH"; case DRAW_CCW: return out << "DRAW_CCW"; case DRAW_CW: return out << "DRAW_CW"; case DRAW_BOTH: return out << "DRAW_BOTH"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--AlphaFormat--// void NifStream( AlphaFormat & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = AlphaFormat(temp); } void NifStream( AlphaFormat const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, AlphaFormat const & val ) { switch ( val ) { case ALPHA_NONE: return out << "ALPHA_NONE"; case ALPHA_BINARY: return out << "ALPHA_BINARY"; case ALPHA_SMOOTH: return out << "ALPHA_SMOOTH"; case ALPHA_DEFAULT: return out << "ALPHA_DEFAULT"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--BoundVolumeType--// void NifStream( BoundVolumeType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = BoundVolumeType(temp); } void NifStream( BoundVolumeType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, BoundVolumeType const & val ) { switch ( val ) { case BASE_BV: return out << "BASE_BV"; case SPHERE_BV: return out << "SPHERE_BV"; case BOX_BV: return out << "BOX_BV"; case CAPSULE_BV: return out << "CAPSULE_BV"; case UNION_BV: return out << "UNION_BV"; case HALFSPACE_BV: return out << "HALFSPACE_BV"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--EffectType--// void NifStream( EffectType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = EffectType(temp); } void NifStream( EffectType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, EffectType const & val ) { switch ( val ) { case EFFECT_PROJECTED_LIGHT: return out << "EFFECT_PROJECTED_LIGHT"; case EFFECT_PROJECTED_SHADOW: return out << "EFFECT_PROJECTED_SHADOW"; case EFFECT_ENVIRONMENT_MAP: return out << "EFFECT_ENVIRONMENT_MAP"; case EFFECT_FOG_MAP: return out << "EFFECT_FOG_MAP"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--KeyType--// void NifStream( KeyType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = KeyType(temp); } void NifStream( KeyType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, KeyType const & val ) { switch ( val ) { case LINEAR_KEY: return out << "LINEAR_KEY"; case QUADRATIC_KEY: return out << "QUADRATIC_KEY"; case TBC_KEY: return out << "TBC_KEY"; case XYZ_ROTATION_KEY: return out << "XYZ_ROTATION_KEY"; case CONST_KEY: return out << "CONST_KEY"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--VertMode--// void NifStream( VertMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = VertMode(temp); } void NifStream( VertMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, VertMode const & val ) { switch ( val ) { case VERT_MODE_SRC_IGNORE: return out << "VERT_MODE_SRC_IGNORE"; case VERT_MODE_SRC_EMISSIVE: return out << "VERT_MODE_SRC_EMISSIVE"; case VERT_MODE_SRC_AMB_DIF: return out << "VERT_MODE_SRC_AMB_DIF"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--SortingMode--// void NifStream( SortingMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = SortingMode(temp); } void NifStream( SortingMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, SortingMode const & val ) { switch ( val ) { case SORTING_INHERIT: return out << "SORTING_INHERIT"; case SORTING_OFF: return out << "SORTING_OFF"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--CoordGenType--// void NifStream( CoordGenType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = CoordGenType(temp); } void NifStream( CoordGenType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, CoordGenType const & val ) { switch ( val ) { case CG_WORLD_PARALLEL: return out << "CG_WORLD_PARALLEL"; case CG_WORLD_PERSPECTIVE: return out << "CG_WORLD_PERSPECTIVE"; case CG_SPHERE_MAP: return out << "CG_SPHERE_MAP"; case CG_SPECULAR_CUBE_MAP: return out << "CG_SPECULAR_CUBE_MAP"; case CG_DIFFUSE_CUBE_MAP: return out << "CG_DIFFUSE_CUBE_MAP"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--BSShaderType--// void NifStream( BSShaderType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = BSShaderType(temp); } void NifStream( BSShaderType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, BSShaderType const & val ) { switch ( val ) { case SHADER_TALL_GRASS: return out << "SHADER_TALL_GRASS"; case SHADER_DEFAULT: return out << "SHADER_DEFAULT"; case SHADER_SKY: return out << "SHADER_SKY"; case SHADER_SKIN: return out << "SHADER_SKIN"; case SHADER_WATER: return out << "SHADER_WATER"; case SHADER_LIGHTING30: return out << "SHADER_LIGHTING30"; case SHADER_TILE: return out << "SHADER_TILE"; case SHADER_NOLIGHTING: return out << "SHADER_NOLIGHTING"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--ApplyMode--// void NifStream( ApplyMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = ApplyMode(temp); } void NifStream( ApplyMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, ApplyMode const & val ) { switch ( val ) { case APPLY_REPLACE: return out << "APPLY_REPLACE"; case APPLY_DECAL: return out << "APPLY_DECAL"; case APPLY_MODULATE: return out << "APPLY_MODULATE"; case APPLY_HILIGHT: return out << "APPLY_HILIGHT"; case APPLY_HILIGHT2: return out << "APPLY_HILIGHT2"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--MotionSystem--// void NifStream( MotionSystem & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = MotionSystem(temp); } void NifStream( MotionSystem const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, MotionSystem const & val ) { switch ( val ) { case MO_SYS_INVALID: return out << "MO_SYS_INVALID"; case MO_SYS_DYNAMIC: return out << "MO_SYS_DYNAMIC"; case MO_SYS_SPHERE: return out << "MO_SYS_SPHERE"; case MO_SYS_SPHERE_INERTIA: return out << "MO_SYS_SPHERE_INERTIA"; case MO_SYS_BOX: return out << "MO_SYS_BOX"; case MO_SYS_BOX_STABILIZED: return out << "MO_SYS_BOX_STABILIZED"; case MO_SYS_KEYFRAMED: return out << "MO_SYS_KEYFRAMED"; case MO_SYS_FIXED: return out << "MO_SYS_FIXED"; case MO_SYS_THIN_BOX: return out << "MO_SYS_THIN_BOX"; case MO_SYS_CHARACTER: return out << "MO_SYS_CHARACTER"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--BillboardMode--// void NifStream( BillboardMode & val, istream& in, const NifInfo & info ) { unsigned short temp; NifStream( temp, in, info ); val = BillboardMode(temp); } void NifStream( BillboardMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned short)(val), out, info ); } ostream & operator<<( ostream & out, BillboardMode const & val ) { switch ( val ) { case ALWAYS_FACE_CAMERA: return out << "ALWAYS_FACE_CAMERA"; case ROTATE_ABOUT_UP: return out << "ROTATE_ABOUT_UP"; case RIGID_FACE_CAMERA: return out << "RIGID_FACE_CAMERA"; case ALWAYS_FACE_CENTER: return out << "ALWAYS_FACE_CENTER"; case RIGID_FACE_CENTER: return out << "RIGID_FACE_CENTER"; case ROTATE_ABOUT_UP2: return out << "ROTATE_ABOUT_UP2"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--ZCompareMode--// void NifStream( ZCompareMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = ZCompareMode(temp); } void NifStream( ZCompareMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, ZCompareMode const & val ) { switch ( val ) { case ZCOMP_ALWAYS: return out << "ZCOMP_ALWAYS"; case ZCOMP_LESS: return out << "ZCOMP_LESS"; case ZCOMP_EQUAL: return out << "ZCOMP_EQUAL"; case ZCOMP_LESS_EQUAL: return out << "ZCOMP_LESS_EQUAL"; case ZCOMP_GREATER: return out << "ZCOMP_GREATER"; case ZCOMP_NOT_EQUAL: return out << "ZCOMP_NOT_EQUAL"; case ZCOMP_GREATER_EQUAL: return out << "ZCOMP_GREATER_EQUAL"; case ZCOMP_NEVER: return out << "ZCOMP_NEVER"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--TargetColor--// void NifStream( TargetColor & val, istream& in, const NifInfo & info ) { unsigned short temp; NifStream( temp, in, info ); val = TargetColor(temp); } void NifStream( TargetColor const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned short)(val), out, info ); } ostream & operator<<( ostream & out, TargetColor const & val ) { switch ( val ) { case TC_AMBIENT: return out << "TC_AMBIENT"; case TC_DIFFUSE: return out << "TC_DIFFUSE"; case TC_SPECULAR: return out << "TC_SPECULAR"; case TC_SELF_ILLUM: return out << "TC_SELF_ILLUM"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--SkyrimHavokMaterial--// void NifStream( SkyrimHavokMaterial & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = SkyrimHavokMaterial(temp); } void NifStream( SkyrimHavokMaterial const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, SkyrimHavokMaterial const & val ) { switch ( val ) { case SKY_HAV_MAT_LIGHT_WOOD: return out << "SKY_HAV_MAT_LIGHT_WOOD"; case SKY_HAV_MAT_BROKEN_STONE: return out << "SKY_HAV_MAT_BROKEN_STONE"; case SKY_HAV_MAT_SNOW: return out << "SKY_HAV_MAT_SNOW"; case SKY_HAV_MAT_GRAVEL: return out << "SKY_HAV_MAT_GRAVEL"; case SKY_HAV_MAT_MATERIAL_CHAIN_METAL: return out << "SKY_HAV_MAT_MATERIAL_CHAIN_METAL"; case SKY_HAV_MAT_BOTTLE: return out << "SKY_HAV_MAT_BOTTLE"; case SKY_HAV_MAT_WOOD: return out << "SKY_HAV_MAT_WOOD"; case SKY_HAV_MAT_SKIN: return out << "SKY_HAV_MAT_SKIN"; case SKY_HAV_MAT_BARREL: return out << "SKY_HAV_MAT_BARREL"; case SKY_HAV_MAT_MATERIAL_CERAMIC_MEDIUM: return out << "SKY_HAV_MAT_MATERIAL_CERAMIC_MEDIUM"; case SKY_HAV_MAT_MATERIAL_BASKET: return out << "SKY_HAV_MAT_MATERIAL_BASKET"; case SKY_HAV_MAT_ICE: return out << "SKY_HAV_MAT_ICE"; case SKY_HAV_MAT_STAIRS_STONE: return out << "SKY_HAV_MAT_STAIRS_STONE"; case SKY_HAV_MAT_MATERIAL_BLADE_1HAND: return out << "SKY_HAV_MAT_MATERIAL_BLADE_1HAND"; case SKY_HAV_MAT_WATER: return out << "SKY_HAV_MAT_WATER"; case SKY_HAV_MAT_UNKNOWN_1028101969: return out << "SKY_HAV_MAT_UNKNOWN_1028101969"; case SKY_HAV_MAT_MATERIAL_BOOK: return out << "SKY_HAV_MAT_MATERIAL_BOOK"; case SKY_HAV_MAT_MATERIAL_CARPET: return out << "SKY_HAV_MAT_MATERIAL_CARPET"; case SKY_HAV_MAT_SOLID_METAL: return out << "SKY_HAV_MAT_SOLID_METAL"; case SKY_HAV_MAT_MATERIAL_AXE_1HAND: return out << "SKY_HAV_MAT_MATERIAL_AXE_1HAND"; case SKY_HAV_MAT_UNKNOWN_1440721808: return out << "SKY_HAV_MAT_UNKNOWN_1440721808"; case SKY_HAV_MAT_STAIRS_WOOD: 
return out << "SKY_HAV_MAT_STAIRS_WOOD"; case SKY_HAV_MAT_MUD: return out << "SKY_HAV_MAT_MUD"; case SKY_HAV_MAT_MATERIAL_BOULDER_SMALL: return out << "SKY_HAV_MAT_MATERIAL_BOULDER_SMALL"; case SKY_HAV_MAT_STAIRS_SNOW: return out << "SKY_HAV_MAT_STAIRS_SNOW"; case SKY_HAV_MAT_HEAVY_STONE: return out << "SKY_HAV_MAT_HEAVY_STONE"; case SKY_HAV_MAT_UNKNOWN_1574477864: return out << "SKY_HAV_MAT_UNKNOWN_1574477864"; case SKY_HAV_MAT_UNKNOWN_1591009235: return out << "SKY_HAV_MAT_UNKNOWN_1591009235"; case SKY_HAV_MAT_MATERIAL_BOWS_STAVES: return out << "SKY_HAV_MAT_MATERIAL_BOWS_STAVES"; case SKY_HAV_MAT_MATERIAL_WOOD_AS_STAIRS: return out << "SKY_HAV_MAT_MATERIAL_WOOD_AS_STAIRS"; case SKY_HAV_MAT_GRASS: return out << "SKY_HAV_MAT_GRASS"; case SKY_HAV_MAT_MATERIAL_BOULDER_LARGE: return out << "SKY_HAV_MAT_MATERIAL_BOULDER_LARGE"; case SKY_HAV_MAT_MATERIAL_STONE_AS_STAIRS: return out << "SKY_HAV_MAT_MATERIAL_STONE_AS_STAIRS"; case SKY_HAV_MAT_MATERIAL_BLADE_2HAND: return out << "SKY_HAV_MAT_MATERIAL_BLADE_2HAND"; case SKY_HAV_MAT_MATERIAL_BOTTLE_SMALL: return out << "SKY_HAV_MAT_MATERIAL_BOTTLE_SMALL"; case SKY_HAV_MAT_SAND: return out << "SKY_HAV_MAT_SAND"; case SKY_HAV_MAT_HEAVY_METAL: return out << "SKY_HAV_MAT_HEAVY_METAL"; case SKY_HAV_MAT_DRAGON: return out << "SKY_HAV_MAT_DRAGON"; case SKY_HAV_MAT_MATERIAL_BLADE_1HAND_SMALL: return out << "SKY_HAV_MAT_MATERIAL_BLADE_1HAND_SMALL"; case SKY_HAV_MAT_MATERIAL_SKIN_SMALL: return out << "SKY_HAV_MAT_MATERIAL_SKIN_SMALL"; case SKY_HAV_MAT_STAIRS_BROKEN_STONE: return out << "SKY_HAV_MAT_STAIRS_BROKEN_STONE"; case SKY_HAV_MAT_MATERIAL_SKIN_LARGE: return out << "SKY_HAV_MAT_MATERIAL_SKIN_LARGE"; case SKY_HAV_MAT_ORGANIC: return out << "SKY_HAV_MAT_ORGANIC"; case SKY_HAV_MAT_MATERIAL_BONE: return out << "SKY_HAV_MAT_MATERIAL_BONE"; case SKY_HAV_MAT_HEAVY_WOOD: return out << "SKY_HAV_MAT_HEAVY_WOOD"; case SKY_HAV_MAT_MATERIAL_CHAIN: return out << "SKY_HAV_MAT_MATERIAL_CHAIN"; case SKY_HAV_MAT_DIRT: return out << 
"SKY_HAV_MAT_DIRT"; case SKY_HAV_MAT_MATERIAL_ARMOR_LIGHT: return out << "SKY_HAV_MAT_MATERIAL_ARMOR_LIGHT"; case SKY_HAV_MAT_MATERIAL_SHIELD_LIGHT: return out << "SKY_HAV_MAT_MATERIAL_SHIELD_LIGHT"; case SKY_HAV_MAT_MATERIAL_COIN: return out << "SKY_HAV_MAT_MATERIAL_COIN"; case SKY_HAV_MAT_MATERIAL_SHIELD_HEAVY: return out << "SKY_HAV_MAT_MATERIAL_SHIELD_HEAVY"; case SKY_HAV_MAT_MATERIAL_ARMOR_HEAVY: return out << "SKY_HAV_MAT_MATERIAL_ARMOR_HEAVY"; case SKY_HAV_MAT_MATERIAL_ARROW: return out << "SKY_HAV_MAT_MATERIAL_ARROW"; case SKY_HAV_MAT_GLASS: return out << "SKY_HAV_MAT_GLASS"; case SKY_HAV_MAT_STONE: return out << "SKY_HAV_MAT_STONE"; case SKY_HAV_MAT_CLOTH: return out << "SKY_HAV_MAT_CLOTH"; case SKY_HAV_MAT_MATERIAL_BLUNT_2HAND: return out << "SKY_HAV_MAT_MATERIAL_BLUNT_2HAND"; case SKY_HAV_MAT_MATERIAL_BOULDER_MEDIUM: return out << "SKY_HAV_MAT_MATERIAL_BOULDER_MEDIUM"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--ChannelConvention--// void NifStream( ChannelConvention & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = ChannelConvention(temp); } void NifStream( ChannelConvention const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, ChannelConvention const & val ) { switch ( val ) { case CC_FIXED: return out << "CC_FIXED"; case CC_INDEX: return out << "CC_INDEX"; case CC_COMPRESSED: return out << "CC_COMPRESSED"; case CC_EMPTY: return out << "CC_EMPTY"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--ImageType--// void NifStream( ImageType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = ImageType(temp); } void NifStream( ImageType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, ImageType const & val ) { switch ( val ) { case RGB: return out << "RGB"; case RGBA: return out << "RGBA"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--LightingShaderControlledColor--// void NifStream( LightingShaderControlledColor & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = LightingShaderControlledColor(temp); } void NifStream( LightingShaderControlledColor const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, LightingShaderControlledColor const & val ) { switch ( val ) { case LSCC_SPECULAR_COLOR: return out << "LSCC_SPECULAR_COLOR"; case LSCC_EMISSIVE_COLOR: return out << "LSCC_EMISSIVE_COLOR"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--PixelLayout--// void NifStream( PixelLayout & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = PixelLayout(temp); } void NifStream( PixelLayout const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, PixelLayout const & val ) { switch ( val ) { case PIX_LAY_PALETTISED: return out << "PIX_LAY_PALETTISED"; case PIX_LAY_HIGH_COLOR_16: return out << "PIX_LAY_HIGH_COLOR_16"; case PIX_LAY_TRUE_COLOR_32: return out << "PIX_LAY_TRUE_COLOR_32"; case PIX_LAY_COMPRESSED: return out << "PIX_LAY_COMPRESSED"; case PIX_LAY_BUMPMAP: return out << "PIX_LAY_BUMPMAP"; case PIX_LAY_PALETTISED_4: return out << "PIX_LAY_PALETTISED_4"; case PIX_LAY_DEFAULT: return out << "PIX_LAY_DEFAULT"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--ConsistencyType--// void NifStream( ConsistencyType & val, istream& in, const NifInfo & info ) { unsigned short temp; NifStream( temp, in, info ); val = ConsistencyType(temp); } void NifStream( ConsistencyType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned short)(val), out, info ); } ostream & operator<<( ostream & out, ConsistencyType const & val ) { switch ( val ) { case CT_MUTABLE: return out << "CT_MUTABLE"; case CT_STATIC: return out << "CT_STATIC"; case CT_VOLATILE: return out << "CT_VOLATILE"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--TexClampMode--// void NifStream( TexClampMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = TexClampMode(temp); } void NifStream( TexClampMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, TexClampMode const & val ) { switch ( val ) { case CLAMP_S_CLAMP_T: return out << "CLAMP_S_CLAMP_T"; case CLAMP_S_WRAP_T: return out << "CLAMP_S_WRAP_T"; case WRAP_S_CLAMP_T: return out << "WRAP_S_CLAMP_T"; case WRAP_S_WRAP_T: return out << "WRAP_S_WRAP_T"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--ComponentFormat--// void NifStream( ComponentFormat & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = ComponentFormat(temp); } void NifStream( ComponentFormat const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, ComponentFormat const & val ) { switch ( val ) { case F_UNKNOWN: return out << "F_UNKNOWN"; case F_INT8_1: return out << "F_INT8_1"; case F_INT8_2: return out << "F_INT8_2"; case F_INT8_3: return out << "F_INT8_3"; case F_INT8_4: return out << "F_INT8_4"; case F_UINT8_1: return out << "F_UINT8_1"; case F_UINT8_2: return out << "F_UINT8_2"; case F_UINT8_3: return out << "F_UINT8_3"; case F_UINT8_4: return out << "F_UINT8_4"; case F_NORMINT8_1: return out << "F_NORMINT8_1"; case F_NORMINT8_2: return out << "F_NORMINT8_2"; case F_NORMINT8_3: return out << "F_NORMINT8_3"; case F_NORMINT8_4: return out << "F_NORMINT8_4"; case F_NORMUINT8_1: return out << "F_NORMUINT8_1"; case F_NORMUINT8_2: return out << "F_NORMUINT8_2"; case F_NORMUINT8_3: return out << "F_NORMUINT8_3"; case F_NORMUINT8_4: return out << "F_NORMUINT8_4"; case F_INT16_1: return out << "F_INT16_1"; case F_INT16_2: return out << "F_INT16_2"; case F_INT16_3: return out << 
"F_INT16_3"; case F_INT16_4: return out << "F_INT16_4"; case F_UINT16_1: return out << "F_UINT16_1"; case F_UINT16_2: return out << "F_UINT16_2"; case F_UINT16_3: return out << "F_UINT16_3"; case F_UINT16_4: return out << "F_UINT16_4"; case F_NORMINT16_1: return out << "F_NORMINT16_1"; case F_NORMINT16_2: return out << "F_NORMINT16_2"; case F_NORMINT16_3: return out << "F_NORMINT16_3"; case F_NORMINT16_4: return out << "F_NORMINT16_4"; case F_NORMUINT16_1: return out << "F_NORMUINT16_1"; case F_NORMUINT16_2: return out << "F_NORMUINT16_2"; case F_NORMUINT16_3: return out << "F_NORMUINT16_3"; case F_NORMUINT16_4: return out << "F_NORMUINT16_4"; case F_INT32_1: return out << "F_INT32_1"; case F_INT32_2: return out << "F_INT32_2"; case F_INT32_3: return out << "F_INT32_3"; case F_INT32_4: return out << "F_INT32_4"; case F_UINT32_1: return out << "F_UINT32_1"; case F_UINT32_2: return out << "F_UINT32_2"; case F_UINT32_3: return out << "F_UINT32_3"; case F_UINT32_4: return out << "F_UINT32_4"; case F_NORMINT32_1: return out << "F_NORMINT32_1"; case F_NORMINT32_2: return out << "F_NORMINT32_2"; case F_NORMINT32_3: return out << "F_NORMINT32_3"; case F_NORMINT32_4: return out << "F_NORMINT32_4"; case F_NORMUINT32_1: return out << "F_NORMUINT32_1"; case F_NORMUINT32_2: return out << "F_NORMUINT32_2"; case F_NORMUINT32_3: return out << "F_NORMUINT32_3"; case F_NORMUINT32_4: return out << "F_NORMUINT32_4"; case F_FLOAT16_1: return out << "F_FLOAT16_1"; case F_FLOAT16_2: return out << "F_FLOAT16_2"; case F_FLOAT16_3: return out << "F_FLOAT16_3"; case F_FLOAT16_4: return out << "F_FLOAT16_4"; case F_FLOAT32_1: return out << "F_FLOAT32_1"; case F_FLOAT32_2: return out << "F_FLOAT32_2"; case F_FLOAT32_3: return out << "F_FLOAT32_3"; case F_FLOAT32_4: return out << "F_FLOAT32_4"; case F_UINT_10_10_10_L1: return out << "F_UINT_10_10_10_L1"; case F_NORMINT_10_10_10_L1: return out << "F_NORMINT_10_10_10_L1"; case F_NORMINT_11_11_10: return out << "F_NORMINT_11_11_10"; case 
F_NORMUINT8_4_BGRA: return out << "F_NORMUINT8_4_BGRA"; case F_NORMINT_10_10_10_2: return out << "F_NORMINT_10_10_10_2"; case F_UINT_10_10_10_2: return out << "F_UINT_10_10_10_2"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--MotionQuality--// void NifStream( MotionQuality & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = MotionQuality(temp); } void NifStream( MotionQuality const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, MotionQuality const & val ) { switch ( val ) { case MO_QUAL_INVALID: return out << "MO_QUAL_INVALID"; case MO_QUAL_FIXED: return out << "MO_QUAL_FIXED"; case MO_QUAL_KEYFRAMED: return out << "MO_QUAL_KEYFRAMED"; case MO_QUAL_DEBRIS: return out << "MO_QUAL_DEBRIS"; case MO_QUAL_MOVING: return out << "MO_QUAL_MOVING"; case MO_QUAL_CRITICAL: return out << "MO_QUAL_CRITICAL"; case MO_QUAL_BULLET: return out << "MO_QUAL_BULLET"; case MO_QUAL_USER: return out << "MO_QUAL_USER"; case MO_QUAL_CHARACTER: return out << "MO_QUAL_CHARACTER"; case MO_QUAL_KEYFRAMED_REPORT: return out << "MO_QUAL_KEYFRAMED_REPORT"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--CloningBehavior--// void NifStream( CloningBehavior & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = CloningBehavior(temp); } void NifStream( CloningBehavior const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, CloningBehavior const & val ) { switch ( val ) { case CLONING_SHARE: return out << "CLONING_SHARE"; case CLONING_COPY: return out << "CLONING_COPY"; case CLONING_BLANK_COPY: return out << "CLONING_BLANK_COPY"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--PropagationMode--// void NifStream( PropagationMode & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = PropagationMode(temp); } void NifStream( PropagationMode const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, PropagationMode const & val ) { switch ( val ) { case PROPAGATE_ON_SUCCESS: return out << "PROPAGATE_ON_SUCCESS"; case PROPAGATE_ON_FAILURE: return out << "PROPAGATE_ON_FAILURE"; case PROPAGATE_ALWAYS: return out << "PROPAGATE_ALWAYS"; case PROPAGATE_NEVER: return out << "PROPAGATE_NEVER"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--PixelFormat--// void NifStream( PixelFormat & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = PixelFormat(temp); } void NifStream( PixelFormat const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, PixelFormat const & val ) { switch ( val ) { case PX_FMT_RGB8: return out << "PX_FMT_RGB8"; case PX_FMT_RGBA8: return out << "PX_FMT_RGBA8"; case PX_FMT_PAL8: return out << "PX_FMT_PAL8"; case PX_FMT_DXT1: return out << "PX_FMT_DXT1"; case PX_FMT_DXT5: return out << "PX_FMT_DXT5"; case PX_FMT_DXT5_ALT: return out << "PX_FMT_DXT5_ALT"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--CycleType--// void NifStream( CycleType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = CycleType(temp); } void NifStream( CycleType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, CycleType const & val ) { switch ( val ) { case CYCLE_LOOP: return out << "CYCLE_LOOP"; case CYCLE_REVERSE: return out << "CYCLE_REVERSE"; case CYCLE_CLAMP: return out << "CYCLE_CLAMP"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--FieldType--// void NifStream( FieldType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = FieldType(temp); } void NifStream( FieldType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, FieldType const & val ) { switch ( val ) { case FIELD_WIND: return out << "FIELD_WIND"; case FIELD_POINT: return out << "FIELD_POINT"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--BSLightingShaderPropertyShaderType--// void NifStream( BSLightingShaderPropertyShaderType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = BSLightingShaderPropertyShaderType(temp); } void NifStream( BSLightingShaderPropertyShaderType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, BSLightingShaderPropertyShaderType const & val ) { switch ( val ) { case LSPST_DEFAULT: return out << "LSPST_Default"; case LSPST_ENVIRONMENT_MAP: return out << "LSPST_Environment Map"; case LSPST_GLOW_SHADER: return out << "LSPST_Glow Shader"; case LSPST_HEIGHTMAP: return out << "LSPST_Heightmap"; case LSPST_FACE_TINT: return out << "LSPST_Face Tint"; case LSPST_SKIN_TINT: return out << "LSPST_Skin Tint"; case LSPST_HAIR_TINT: return out << "LSPST_Hair Tint"; case LSPST_PARALLAX_OCC_MATERIAL: return out << "LSPST_Parallax Occ Material"; case LSPST_WORLD_MULTITEXTURE: return out << "LSPST_World Multitexture"; case LSPST_WORLDMAP1: return out << "LSPST_WorldMap1"; case LSPST_UNKNOWN_10: return out << "LSPST_Unknown 10"; case LSPST_MULTILAYER_PARALLAX: return out << "LSPST_MultiLayer Parallax"; case LSPST_UNKNOWN_12: return out << "LSPST_Unknown 12"; case LSPST_WORLDMAP2: return out << "LSPST_WorldMap2"; case LSPST_SPARKLE_SNOW: return out << "LSPST_Sparkle Snow"; case LSPST_WORLDMAP3: return out << "LSPST_WorldMap3"; case LSPST_EYE_ENVMAP: return out << "LSPST_Eye Envmap"; case LSPST_UNKNOWN_17: return out << "LSPST_Unknown 17"; case LSPST_WORLDMAP4: return out << "LSPST_WorldMap4"; case LSPST_WORLD_LOD_MULTITEXTURE: return out << "LSPST_World LOD Multitexture"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--DecayType--// void NifStream( DecayType & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = DecayType(temp); } void NifStream( DecayType const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, DecayType const & val ) { switch ( val ) { case DECAY_NONE: return out << "DECAY_NONE"; case DECAY_LINEAR: return out << "DECAY_LINEAR"; case DECAY_EXPONENTIAL: return out << "DECAY_EXPONENTIAL"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--SkyrimLayer--// void NifStream( SkyrimLayer & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = SkyrimLayer(temp); } void NifStream( SkyrimLayer const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, SkyrimLayer const & val ) { switch ( val ) { case SKYL_UNIDENTIFIED: return out << "SKYL_UNIDENTIFIED"; case SKYL_STATIC: return out << "SKYL_STATIC"; case SKYL_ANIMSTATIC: return out << "SKYL_ANIMSTATIC"; case SKYL_TRANSPARENT: return out << "SKYL_TRANSPARENT"; case SKYL_CLUTTER: return out << "SKYL_CLUTTER"; case SKYL_WEAPON: return out << "SKYL_WEAPON"; case SKYL_PROJECTILE: return out << "SKYL_PROJECTILE"; case SKYL_SPELL: return out << "SKYL_SPELL"; case SKYL_BIPED: return out << "SKYL_BIPED"; case SKYL_TREES: return out << "SKYL_TREES"; case SKYL_PROPS: return out << "SKYL_PROPS"; case SKYL_WATER: return out << "SKYL_WATER"; case SKYL_TRIGGER: return out << "SKYL_TRIGGER"; case SKYL_TERRAIN: return out << "SKYL_TERRAIN"; case SKYL_TRAP: return out << "SKYL_TRAP"; case SKYL_NONCOLLIDABLE: return out << "SKYL_NONCOLLIDABLE"; case SKYL_CLOUD_TRAP: return out << "SKYL_CLOUD_TRAP"; case SKYL_GROUND: return out << "SKYL_GROUND"; case SKYL_PORTAL: return out << "SKYL_PORTAL"; case SKYL_DEBRIS_SMALL: return out << "SKYL_DEBRIS_SMALL"; case 
SKYL_DEBRIS_LARGE: return out << "SKYL_DEBRIS_LARGE"; case SKYL_ACOUSTIC_SPACE: return out << "SKYL_ACOUSTIC_SPACE"; case SKYL_ACTORZONE: return out << "SKYL_ACTORZONE"; case SKYL_PROJECTILEZONE: return out << "SKYL_PROJECTILEZONE"; case SKYL_GASTRAP: return out << "SKYL_GASTRAP"; case SKYL_SHELLCASING: return out << "SKYL_SHELLCASING"; case SKYL_TRANSPARENT_SMALL: return out << "SKYL_TRANSPARENT_SMALL"; case SKYL_INVISIBLE_WALL: return out << "SKYL_INVISIBLE_WALL"; case SKYL_TRANSPARENT_SMALL_ANIM: return out << "SKYL_TRANSPARENT_SMALL_ANIM"; case SKYL_WARD: return out << "SKYL_WARD"; case SKYL_CHARCONTROLLER: return out << "SKYL_CHARCONTROLLER"; case SKYL_STAIRHELPER: return out << "SKYL_STAIRHELPER"; case SKYL_DEADBIP: return out << "SKYL_DEADBIP"; case SKYL_BIPED_NO_CC: return out << "SKYL_BIPED_NO_CC"; case SKYL_AVOIDBOX: return out << "SKYL_AVOIDBOX"; case SKYL_COLLISIONBOX: return out << "SKYL_COLLISIONBOX"; case SKYL_CAMERASHPERE: return out << "SKYL_CAMERASHPERE"; case SKYL_DOORDETECTION: return out << "SKYL_DOORDETECTION"; case SKYL_CONEPROJECTILE: return out << "SKYL_CONEPROJECTILE"; case SKYL_CAMERAPICK: return out << "SKYL_CAMERAPICK"; case SKYL_ITEMPICK: return out << "SKYL_ITEMPICK"; case SKYL_LINEOFSIGHT: return out << "SKYL_LINEOFSIGHT"; case SKYL_PATHPICK: return out << "SKYL_PATHPICK"; case SKYL_CUSTOMPICK1: return out << "SKYL_CUSTOMPICK1"; case SKYL_CUSTOMPICK2: return out << "SKYL_CUSTOMPICK2"; case SKYL_SPELLEXPLOSION: return out << "SKYL_SPELLEXPLOSION"; case SKYL_DROPPINGPICK: return out << "SKYL_DROPPINGPICK"; case SKYL_NULL: return out << "SKYL_NULL"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--SkyrimWaterShaderFlags--// void NifStream( SkyrimWaterShaderFlags & val, istream& in, const NifInfo & info ) { byte temp; NifStream( temp, in, info ); val = SkyrimWaterShaderFlags(temp); } void NifStream( SkyrimWaterShaderFlags const & val, ostream& out, const NifInfo & info ) { NifStream( (byte)(val), out, info ); } ostream & operator<<( ostream & out, SkyrimWaterShaderFlags const & val ) { switch ( val ) { case SWSF1_UNKNOWN0: return out << "SWSF1_UNKNOWN0"; case SWSF1_BYPASS_REFRACTION_MAP: return out << "SWSF1_Bypass_Refraction_Map"; case SWSF1_WATER_TOGGLE: return out << "SWSF1_Water_Toggle"; case SWSF1_UNKNOWN3: return out << "SWSF1_UNKNOWN3"; case SWSF1_UNKNOWN4: return out << "SWSF1_UNKNOWN4"; case SWSF1_UNKNOWN5: return out << "SWSF1_UNKNOWN5"; case SWSF1_HIGHLIGHT_LAYER_TOGGLE: return out << "SWSF1_Highlight_Layer_Toggle"; case SWSF1_ENABLED: return out << "SWSF1_Enabled"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--SkyrimShaderPropertyFlags1--// void NifStream( SkyrimShaderPropertyFlags1 & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = SkyrimShaderPropertyFlags1(temp); } void NifStream( SkyrimShaderPropertyFlags1 const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, SkyrimShaderPropertyFlags1 const & val ) { switch ( val ) { case SLSF1_SPECULAR: return out << "SLSF1_Specular"; case SLSF1_SKINNED: return out << "SLSF1_Skinned"; case SLSF1_TEMP_REFRACTION: return out << "SLSF1_Temp_Refraction"; case SLSF1_VERTEX_ALPHA: return out << "SLSF1_Vertex_Alpha"; case SLSF1_GREYSCALE_TO_PALETTECOLOR: return out << "SLSF1_Greyscale_To_PaletteColor"; case SLSF1_GREYSCALE_TO_PALETTEALPHA: return out << "SLSF1_Greyscale_To_PaletteAlpha"; case SLSF1_USE_FALLOFF: return out << "SLSF1_Use_Falloff"; case SLSF1_ENVIRONMENT_MAPPING: return out << "SLSF1_Environment_Mapping"; case 
SLSF1_RECIEVE_SHADOWS: return out << "SLSF1_Recieve_Shadows"; case SLSF1_CAST_SHADOWS: return out << "SLSF1_Cast_Shadows"; case SLSF1_FACEGEN_DETAIL_MAP: return out << "SLSF1_Facegen_Detail_Map"; case SLSF1_PARALLAX: return out << "SLSF1_Parallax"; case SLSF1_MODEL_SPACE_NORMALS: return out << "SLSF1_Model_Space_Normals"; case SLSF1_NON_PROJECTIVE_SHADOWS: return out << "SLSF1_Non_Projective_Shadows"; case SLSF1_LANDSCAPE: return out << "SLSF1_Landscape"; case SLSF1_REFRACTION: return out << "SLSF1_Refraction"; case SLSF1_FIRE_REFRACTION: return out << "SLSF1_Fire_Refraction"; case SLSF1_EYE_ENVIRONMENT_MAPPING: return out << "SLSF1_Eye_Environment_Mapping"; case SLSF1_HAIR_SOFT_LIGHTING: return out << "SLSF1_Hair_Soft_Lighting"; case SLSF1_SCREENDOOR_ALPHA_FADE: return out << "SLSF1_Screendoor_Alpha_Fade"; case SLSF1_LOCALMAP_HIDE_SECRET: return out << "SLSF1_Localmap_Hide_Secret"; case SLSF1_FACEGEN_RGB_TINT: return out << "SLSF1_FaceGen_RGB_Tint"; case SLSF1_OWN_EMIT: return out << "SLSF1_Own_Emit"; case SLSF1_PROJECTED_UV: return out << "SLSF1_Projected_UV"; case SLSF1_MULTIPLE_TEXTURES: return out << "SLSF1_Multiple_Textures"; case SLSF1_REMAPPABLE_TEXTURES: return out << "SLSF1_Remappable_Textures"; case SLSF1_DECAL: return out << "SLSF1_Decal"; case SLSF1_DYNAMIC_DECAL: return out << "SLSF1_Dynamic_Decal"; case SLSF1_PARALLAX_OCCLUSION: return out << "SLSF1_Parallax_Occlusion"; case SLSF1_EXTERNAL_EMITTANCE: return out << "SLSF1_External_Emittance"; case SLSF1_SOFT_EFFECT: return out << "SLSF1_Soft_Effect"; case SLSF1_ZBUFFER_TEST: return out << "SLSF1_ZBuffer_Test"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--BSShaderFlags--// void NifStream( BSShaderFlags & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = BSShaderFlags(temp); } void NifStream( BSShaderFlags const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, BSShaderFlags const & val ) { switch ( val ) { case SF_SPECULAR: return out << "SF_Specular"; case SF_SKINNED: return out << "SF_Skinned"; case SF_LOWDETAIL: return out << "SF_LowDetail"; case SF_VERTEX_ALPHA: return out << "SF_Vertex_Alpha"; case SF_UNKNOWN_1: return out << "SF_Unknown_1"; case SF_SINGLE_PASS: return out << "SF_Single_Pass"; case SF_EMPTY: return out << "SF_Empty"; case SF_ENVIRONMENT_MAPPING: return out << "SF_Environment_Mapping"; case SF_ALPHA_TEXTURE: return out << "SF_Alpha_Texture"; case SF_UNKNOWN_2: return out << "SF_Unknown_2"; case SF_FACEGEN: return out << "SF_FaceGen"; case SF_PARALLAX_SHADER_INDEX_15: return out << "SF_Parallax_Shader_Index_15"; case SF_UNKNOWN_3: return out << "SF_Unknown_3"; case SF_NON_PROJECTIVE_SHADOWS: return out << "SF_Non_Projective_Shadows"; case SF_UNKNOWN_4: return out << "SF_Unknown_4"; case SF_REFRACTION: return out << "SF_Refraction"; case SF_FIRE_REFRACTION: return out << "SF_Fire_Refraction"; case SF_EYE_ENVIRONMENT_MAPPING: return out << "SF_Eye_Environment_Mapping"; case SF_HAIR: return out << "SF_Hair"; case SF_DYNAMIC_ALPHA: return out << "SF_Dynamic_Alpha"; case SF_LOCALMAP_HIDE_SECRET: return out << "SF_Localmap_Hide_Secret"; case SF_WINDOW_ENVIRONMENT_MAPPING: return out << "SF_Window_Environment_Mapping"; case SF_TREE_BILLBOARD: return out << "SF_Tree_Billboard"; case SF_SHADOW_FRUSTUM: return out << "SF_Shadow_Frustum"; case SF_MULTIPLE_TEXTURES: return out << "SF_Multiple_Textures"; case SF_REMAPPABLE_TEXTURES: return out << "SF_Remappable_Textures"; case SF_DECAL_SINGLE_PASS: return out << "SF_Decal_Single_Pass"; case 
SF_DYNAMIC_DECAL_SINGLE_PASS: return out << "SF_Dynamic_Decal_Single_Pass"; case SF_PARALLAX_OCCULSION: return out << "SF_Parallax_Occulsion"; case SF_EXTERNAL_EMITTANCE: return out << "SF_External_Emittance"; case SF_SHADOW_MAP: return out << "SF_Shadow_Map"; case SF_ZBUFFER_TEST: return out << "SF_ZBuffer_Test"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--FurnitureEntryPoints--// void NifStream( FurnitureEntryPoints & val, istream& in, const NifInfo & info ) { unsigned short temp; NifStream( temp, in, info ); val = FurnitureEntryPoints(temp); } void NifStream( FurnitureEntryPoints const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned short)(val), out, info ); } ostream & operator<<( ostream & out, FurnitureEntryPoints const & val ) { switch ( val ) { case FRONT: return out << "Front"; case BEHIND: return out << "Behind"; case RIGHT: return out << "Right"; case LEFT: return out << "Left"; case UP: return out << "Up"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--BSPartFlag--// void NifStream( BSPartFlag & val, istream& in, const NifInfo & info ) { unsigned short temp; NifStream( temp, in, info ); val = BSPartFlag(temp); } void NifStream( BSPartFlag const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned short)(val), out, info ); } ostream & operator<<( ostream & out, BSPartFlag const & val ) { switch ( val ) { case PF_EDITOR_VISIBLE: return out << "PF_EDITOR_VISIBLE"; case PF_START_NET_BONESET: return out << "PF_START_NET_BONESET"; default: return out << "Invalid Value! 
- " << (unsigned int)(val); } } //--DataStreamAccess--// void NifStream( DataStreamAccess & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = DataStreamAccess(temp); } void NifStream( DataStreamAccess const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, DataStreamAccess const & val ) { switch ( val ) { case CPU_READ: return out << "CPU Read"; case CPU_WRITE_STATIC: return out << "CPU Write Static"; case CPU_WRITE_MUTABLE: return out << "CPU Write Mutable"; case CPU_WRITE_VOLATILE: return out << "CPU Write Volatile"; case GPU_READ: return out << "GPU Read"; case GPU_WRITE: return out << "GPU Write"; case CPU_WRITE_STATIC_INITITIALIZED: return out << "CPU Write Static Inititialized"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--SkyrimShaderPropertyFlags2--// void NifStream( SkyrimShaderPropertyFlags2 & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = SkyrimShaderPropertyFlags2(temp); } void NifStream( SkyrimShaderPropertyFlags2 const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, SkyrimShaderPropertyFlags2 const & val ) { switch ( val ) { case SLSF2_ZBUFFER_WRITE: return out << "SLSF2_ZBuffer_Write"; case SLSF2_LOD_LANDSCAPE: return out << "SLSF2_LOD_Landscape"; case SLSF2_LOD_OBJECTS: return out << "SLSF2_LOD_Objects"; case SLSF2_NO_FADE: return out << "SLSF2_No_Fade"; case SLSF2_DOUBLE_SIDED: return out << "SLSF2_Double_Sided"; case SLSF2_VERTEX_COLORS: return out << "SLSF2_Vertex_Colors"; case SLSF2_GLOW_MAP: return out << "SLSF2_Glow_Map"; case SLSF2_ASSUME_SHADOWMASK: return out << "SLSF2_Assume_Shadowmask"; case SLSF2_PACKED_TANGENT: return out << "SLSF2_Packed_Tangent"; case SLSF2_MULTI_INDEX_SNOW: return out << "SLSF2_Multi_Index_Snow"; case SLSF2_VERTEX_LIGHTING: return 
out << "SLSF2_Vertex_Lighting"; case SLSF2_UNIFORM_SCALE: return out << "SLSF2_Uniform_Scale"; case SLSF2_FIT_SLOPE: return out << "SLSF2_Fit_Slope"; case SLSF2_BILLBOARD: return out << "SLSF2_Billboard"; case SLSF2_NO_LOD_LAND_BLEND: return out << "SLSF2_No_LOD_Land_Blend"; case SLSF2_ENVMAP_LIGHT_FADE: return out << "SLSF2_EnvMap_Light_Fade"; case SLSF2_WIREFRAME: return out << "SLSF2_Wireframe"; case SLSF2_WEAPON_BLOOD: return out << "SLSF2_Weapon_Blood"; case SLSF2_HIDE_ON_LOCAL_MAP: return out << "SLSF2_Hide_On_Local_Map"; case SLSF2_PREMULT_ALPHA: return out << "SLSF2_Premult_Alpha"; case SLSF2_CLOUD_LOD: return out << "SLSF2_Cloud_LOD"; case SLSF2_ANISOTROPIC_LIGHTING: return out << "SLSF2_Anisotropic_Lighting"; case SLSF2_NO_TRANSPARENCY_MULTISAMPLING: return out << "SLSF2_No_Transparency_Multisampling"; case SLSF2_UNUSED01: return out << "SLSF2_Unused01"; case SLSF2_MULTI_LAYER_PARALLAX: return out << "SLSF2_Multi_Layer_Parallax"; case SLSF2_SOFT_LIGHTING: return out << "SLSF2_Soft_Lighting"; case SLSF2_RIM_LIGHTING: return out << "SLSF2_Rim_Lighting"; case SLSF2_BACK_LIGHTING: return out << "SLSF2_Back_Lighting"; case SLSF2_UNUSED02: return out << "SLSF2_Unused02"; case SLSF2_TREE_ANIM: return out << "SLSF2_Tree_Anim"; case SLSF2_EFFECT_LIGHTING: return out << "SLSF2_Effect_Lighting"; case SLSF2_HD_LOD_OBJECTS: return out << "SLSF2_HD_LOD_Objects"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } //--BSSegmentFlags--// void NifStream( BSSegmentFlags & val, istream& in, const NifInfo & info ) { unsigned int temp; NifStream( temp, in, info ); val = BSSegmentFlags(temp); } void NifStream( BSSegmentFlags const & val, ostream& out, const NifInfo & info ) { NifStream( (unsigned int)(val), out, info ); } ostream & operator<<( ostream & out, BSSegmentFlags const & val ) { switch ( val ) { case BSSEG_WATER: return out << "BSSEG_WATER"; default: return out << "Invalid Value! - " << (unsigned int)(val); } } }
{ "content_hash": "14c42e331bfe23c1e1a340a9994f5990", "timestamp": "", "source": "github", "line_count": 2084, "max_line_length": 116, "avg_line_length": 37.4457773512476, "alnum_prop": 0.6801132795981394, "repo_name": "figment/niflib", "id": "1a88b3078b1380362bc0e100f24e4ae6e26975b2", "size": "78151", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/gen/enums.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "393" }, { "name": "C++", "bytes": "5559387" }, { "name": "CMake", "bytes": "13336" }, { "name": "HTML", "bytes": "485" }, { "name": "Objective-C", "bytes": "10426" } ], "symlink_target": "" }
package org.apache.tez.dag.app.dag; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.tez.common.counters.TezCounters; import org.apache.tez.dag.api.client.DAGStatusBuilder; import org.apache.tez.dag.api.client.StatusGetOpts; import org.apache.tez.dag.api.client.VertexStatusBuilder; import org.apache.tez.dag.api.records.DAGProtos.DAGPlan; import org.apache.tez.dag.history.HistoryEvent; import org.apache.tez.dag.records.TezDAGID; import org.apache.tez.dag.records.TezVertexID; /** * Main interface to interact with the job. */ public interface DAG { TezDAGID getID(); String getName(); DAGState getState(); DAGReport getReport(); /** * Get all the counters of this DAG. This includes job-counters aggregated * together with the counters of each task. This creates a clone of the * Counters, so use this judiciously. * @return job-counters and aggregate task-counters */ TezCounters getAllCounters(); /** * Get Vertex by vertex name */ Vertex getVertex(String vertexName); Map<TezVertexID,Vertex> getVertices(); Vertex getVertex(TezVertexID vertexId); List<String> getDiagnostics(); int getTotalVertices(); int getSuccessfulVertices(); float getProgress(); boolean isUber(); String getUserName(); Configuration getConf(); DAGPlan getJobPlan(); DAGStatusBuilder getDAGStatus(Set<StatusGetOpts> statusOptions); VertexStatusBuilder getVertexStatus(String vertexName, Set<StatusGetOpts> statusOptions); boolean isComplete(); /** * @return the ACLs for this job for each type of JobACL given. */ Map<ApplicationAccessType, String> getJobACLs(); boolean checkAccess(UserGroupInformation callerUGI, ApplicationAccessType jobOperation); Credentials getCredentials(); UserGroupInformation getDagUGI(); DAGState restoreFromEvent(HistoryEvent historyEvent); }
{ "content_hash": "da9542f557c019915058ee4a6c591446", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 90, "avg_line_length": 28.91891891891892, "alnum_prop": 0.7542056074766356, "repo_name": "apache/incubator-tez", "id": "45fb50a629cebc5f50e39fe4a0407bfda934ff3f", "size": "2930", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "tez-dag/src/main/java/org/apache/tez/dag/app/dag/DAG.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "4705240" }, { "name": "Python", "bytes": "3444" }, { "name": "TeX", "bytes": "19322" } ], "symlink_target": "" }
using System; using System.Reflection; using MongoDB.Configuration.Mapping.Auto; using MongoDB.Configuration.Mapping.Conventions; using MongoDB.Util; namespace MongoDB.Configuration.Builders { /// <summary> /// /// </summary> public class AutoMappingProfileBuilder { private readonly AutoMappingProfile _profile; /// <summary> /// Initializes a new instance of the <see cref="AutoMappingProfileBuilder"/> class. /// </summary> /// <param name="profile">The profile.</param> internal AutoMappingProfileBuilder(AutoMappingProfile profile) { if (profile == null) throw new ArgumentNullException("profile"); _profile = profile; } /// <summary> /// Aliaseses the are camel cased. /// </summary> /// <returns></returns> public AutoMappingProfileBuilder AliasesAreCamelCased() { _profile.Conventions.AliasConvention = new DelegateAliasConvention(m => Inflector.ToCamelCase(m.Name)); return this; } /// <summary> /// Aliaseses the are. /// </summary> /// <param name="alias">The alias.</param> /// <returns></returns> public AutoMappingProfileBuilder AliasesAre(Func<MemberInfo, string> alias) { _profile.Conventions.AliasConvention = new DelegateAliasConvention(alias); return this; } /// <summary> /// Collectionses the are named. /// </summary> /// <param name="collectionName">Name of the collection.</param> /// <returns></returns> public AutoMappingProfileBuilder CollectionsAreNamed(Func<Type, string> collectionName) { _profile.Conventions.CollectionNameConvention = new DelegateCollectionNameConvention(collectionName); return this; } /// <summary> /// Collections the names are camel cased. /// </summary> /// <returns></returns> public AutoMappingProfileBuilder CollectionNamesAreCamelCased() { _profile.Conventions.CollectionNameConvention = new DelegateCollectionNameConvention(t => Inflector.ToCamelCase(t.Name)); return this; } /// <summary> /// Collections the names are camel cased and plural. 
/// </summary> /// <returns></returns> public AutoMappingProfileBuilder CollectionNamesAreCamelCasedAndPlural() { _profile.Conventions.CollectionNameConvention = new DelegateCollectionNameConvention(t => Inflector.MakePlural(Inflector.ToCamelCase(t.Name))); return this; } /// <summary> /// Conventionses the are. /// </summary> /// <param name="conventions">The conventions.</param> /// <returns></returns> public AutoMappingProfileBuilder ConventionsAre(ConventionProfile conventions) { _profile.Conventions = conventions; return this; } /// <summary> /// Discriminators the aliases are. /// </summary> /// <param name="discriminatorAlias">The discriminator alias.</param> /// <returns></returns> public AutoMappingProfileBuilder DiscriminatorAliasesAre(Func<Type, string> discriminatorAlias) { _profile.Conventions.DiscriminatorAliasConvention = new DelegateDiscriminatorAliasConvention(discriminatorAlias); return this; } /// <summary> /// Discriminators the values are. /// </summary> /// <param name="discriminator">The discriminator.</param> /// <returns></returns> public AutoMappingProfileBuilder DiscriminatorValuesAre(Func<Type, object> discriminator) { _profile.Conventions.DiscriminatorConvention = new DelegateDiscriminatorConvention(discriminator); return this; } /// <summary> /// Extendeds the properties are. /// </summary> /// <param name="extendedProperty">The extended property.</param> /// <returns></returns> public AutoMappingProfileBuilder ExtendedPropertiesAre(Func<MemberInfo, bool> extendedProperty) { _profile.Conventions.ExtendedPropertiesConvention = new DelegateExtendedPropertiesConvention(extendedProperty); return this; } /// <summary> /// Extendeds the properties are. 
/// </summary> /// <param name="extendedProperty">The extended property.</param> /// <param name="memberTypes">The member types.</param> /// <param name="bindingFlags">The binding flags.</param> /// <returns></returns> public AutoMappingProfileBuilder ExtendedPropertiesAre(Func<MemberInfo, bool> extendedProperty, MemberTypes memberTypes, BindingFlags bindingFlags) { _profile.Conventions.ExtendedPropertiesConvention = new DelegateExtendedPropertiesConvention(extendedProperty, memberTypes, bindingFlags); return this; } /// <summary> /// Extendeds the properties are named. /// </summary> /// <param name="name">The name.</param> /// <returns></returns> public AutoMappingProfileBuilder ExtendedPropertiesAreNamed(string name) { _profile.Conventions.ExtendedPropertiesConvention = new DelegateExtendedPropertiesConvention(m => m.Name == name); return this; } /// <summary> /// Extendeds the properties are named. /// </summary> /// <param name="name">The name.</param> /// <param name="memberTypes">The member types.</param> /// <param name="bindingFlags">The binding flags.</param> /// <returns></returns> public AutoMappingProfileBuilder ExtendedPropertiesAreNamed(string name, MemberTypes memberTypes, BindingFlags bindingFlags) { _profile.Conventions.ExtendedPropertiesConvention = new DelegateExtendedPropertiesConvention(m => m.Name == name, memberTypes, bindingFlags); return this; } /// <summary> /// Finds the members with. /// </summary> /// <param name="memberFinder">The member finder.</param> /// <returns></returns> public AutoMappingProfileBuilder FindMembersWith(IMemberFinder memberFinder) { _profile.MemberFinder = memberFinder; return this; } /// <summary> /// Idses the are. /// </summary> /// <param name="id">The id.</param> /// <returns></returns> public AutoMappingProfileBuilder IdsAre(Func<MemberInfo, bool> id) { _profile.Conventions.IdConvention = new DelegateIdConvention(id); return this; } /// <summary> /// Idses the are. 
/// </summary> /// <param name="id">The id.</param> /// <param name="memberTypes">The member types.</param> /// <param name="bindingFlags">The binding flags.</param> /// <returns></returns> public AutoMappingProfileBuilder IdsAre(Func<MemberInfo, bool> id, MemberTypes memberTypes, BindingFlags bindingFlags) { _profile.Conventions.IdConvention = new DelegateIdConvention(id, memberTypes, bindingFlags); return this; } /// <summary> /// Idses the are named. /// </summary> /// <param name="name">The name.</param> /// <returns></returns> public AutoMappingProfileBuilder IdsAreNamed(string name) { _profile.Conventions.IdConvention = new DelegateIdConvention(m => m.Name == name); return this; } /// <summary> /// Idses the are named. /// </summary> /// <param name="name">The name.</param> /// <param name="memberTypes">The member types.</param> /// <param name="bindingFlags">The binding flags.</param> /// <returns></returns> public AutoMappingProfileBuilder IdsAreNamed(string name, MemberTypes memberTypes, BindingFlags bindingFlags) { _profile.Conventions.IdConvention = new DelegateIdConvention(m => m.Name == name, memberTypes, bindingFlags); return this; } /// <summary> /// Subs the classes are. /// </summary> /// <param name="isSubClass">The is sub class.</param> /// <returns></returns> public AutoMappingProfileBuilder SubClassesAre(Func<Type, bool> isSubClass) { _profile.IsSubClassDelegate = isSubClass; return this; } /// <summary> /// Uses the collection adapter convention. /// </summary> /// <param name="collectionAdapterConvention">The collection adapter convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseCollectionAdapterConvention(ICollectionAdapterConvention collectionAdapterConvention) { _profile.Conventions.CollectionAdapterConvention = collectionAdapterConvention; return this; } /// <summary> /// Uses the collection name convention. 
/// </summary> /// <param name="collectionNameConvention">The collection name convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseCollectionNameConvention(ICollectionNameConvention collectionNameConvention) { _profile.Conventions.CollectionNameConvention = collectionNameConvention; return this; } /// <summary> /// Uses the default value convention. /// </summary> /// <param name="defaultValueConvention">The default value convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseDefaultValueConvention(IDefaultValueConvention defaultValueConvention) { _profile.Conventions.DefaultValueConvention = defaultValueConvention; return this; } /// <summary> /// Uses the discriminator alias convention. /// </summary> /// <param name="discriminatorAliasConvention">The discriminator alias convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseDiscriminatorAliasConvention(IDiscriminatorAliasConvention discriminatorAliasConvention) { _profile.Conventions.DiscriminatorAliasConvention = discriminatorAliasConvention; return this; } /// <summary> /// Uses the discriminator convention. /// </summary> /// <param name="discriminatorConvention">The discriminator convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseDiscriminatorConvention(IDiscriminatorConvention discriminatorConvention) { _profile.Conventions.DiscriminatorConvention = discriminatorConvention; return this; } /// <summary> /// Uses the extended properties convention. /// </summary> /// <param name="extendedPropertiesConvention">The extended properties convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseExtendedPropertiesConvention(IExtendedPropertiesConvention extendedPropertiesConvention) { _profile.Conventions.ExtendedPropertiesConvention = extendedPropertiesConvention; return this; } /// <summary> /// Uses the id convention. 
/// </summary> /// <param name="idConvention">The id convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseIdConvention(IIdConvention idConvention) { _profile.Conventions.IdConvention = idConvention; return this; } /// <summary> /// Uses the id generator convention. /// </summary> /// <param name="idGeneratorConvention">The id generator convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseIdGeneratorConvention(IIdGeneratorConvention idGeneratorConvention) { _profile.Conventions.IdGeneratorConvention = idGeneratorConvention; return this; } /// <summary> /// Uses the id unsaved value convention. /// </summary> /// <param name="idUnsavedValueConvention">The id unsaved value convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseIdUnsavedValueConvention(IIdUnsavedValueConvention idUnsavedValueConvention) { _profile.Conventions.IdUnsavedValueConvention = idUnsavedValueConvention; return this; } /// <summary> /// Uses the member alias convention. /// </summary> /// <param name="aliasConvention">The alias convention.</param> /// <returns></returns> public AutoMappingProfileBuilder UseMemberAliasConvention(IAliasConvention aliasConvention) { _profile.Conventions.AliasConvention = aliasConvention; return this; } } }
{ "content_hash": "02205bd588846e8ba24e32f1d1a72b97", "timestamp": "", "source": "github", "line_count": 342, "max_line_length": 155, "avg_line_length": 40.06140350877193, "alnum_prop": 0.6025837530107292, "repo_name": "zh-huan/mongodb", "id": "f1ece0f0cc2b52133e2d5b048f1d9274b70a22d0", "size": "13703", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "source/MongoDB/Configuration/Builders/AutoMappingProfileBuilder.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "1305070" } ], "symlink_target": "" }
<project name="org.eclipse.paho.client.mqttv3.internal.traceformat" default="full"> <taskdef resource="net/sf/antcontrib/antcontrib.properties"/> <property name="source.folder" value="./" /> <property name="output.folder" value="./target/work" /> <property name="ship.folder" value="./target/ship" /> <!-- classpath.folder = ship.folder of org.eclipse.paho.client.mqttv3 build.xml --> <property name="classpath.folder" value="../org.eclipse.paho.client.mqttv3/target/ship" /> <property name="client.release.version" value="0.9.0" /> <property name="bundleVersion" value="0.9.0" /> <property name="bundleVendor" value="Eclipse.org" /> <property name="build.level" value="LYYMMDD" /> <property name="paho-client-v3-trace-jar" value="org.eclipse.paho.client.mqttv3.trace.jar" /> <property name="paho-client-v3-trace-source-jar" value="org.eclipse.paho.client.mqttv3.tracesource.jar" /> <property name="javac_target" value="1.2" /> <property name="javac_source" value="1.2" /> <path id="classpath"> <fileset dir="${classpath.folder}"> <include name="*.jar" /> </fileset> </path> <pathconvert pathsep="${line.separator}| |-- " property="formatted.classpath" refid="classpath" /> <echo message="|-- classpath:" /> <echo message="| |-- ${formatted.classpath}" /> <target name="updateManifest" description="Update manifest file."> <manifest file="${folder}/META-INF/MANIFEST.MF" mode="update"> <attribute name="Bundle-Version" value="${bundleVersion}" /> <attribute name="Bundle-Vendor" value="${bundleVendor}" /> <attribute name="Bundle-ManifestVersion" value="2" /> <attribute name="Bundle-ClassPath" value="${bundle_classpath}" /> <attribute name="Implementation-Version" value="${client.release.version}" /> <attribute name="Build-Level" value="${build.level}" /> </manifest> <if><isset property="jar.copyright"/> <then> <manifest file="${folder}/META-INF/MANIFEST.MF" mode="update"> <attribute name="Bundle-Copyright" value="${jar.copyright}" /> </manifest> </then> </if> </target> <target 
name="compile"> <mkdir dir="${output.folder}/bin" /> <mkdir dir="${output.folder}/src" /> <copy overwrite="true" todir="${output.folder}/src"> <fileset dir="${source.folder}/src"> <exclude name="**/TestTrace.java" /> </fileset> <fileset dir="${source.folder}/src"> <include name="**/TracePoint.java" /> </fileset> </copy> <javac srcdir="${output.folder}/src" destdir="${output.folder}/bin" source="${javac_source}" target="${javac_target}" debug="true" includeantruntime="false"> <classpath refid="classpath" /> </javac> <delete file="${output.folder}/bin/org/eclipse/paho/client/mqttv3/internal/trace/TracePointExtractor.class" /> <mkdir dir="${output.folder}/bin/META-INF" /> <antcall target="updateManifest"> <param name="folder" value="${output.folder}/bin" /> <param name="bundle_classpath" value="." /> </antcall> </target> <target name="package"> <mkdir dir="${ship.folder}" /> <jar jarfile="${ship.folder}/${paho-client-v3-trace-jar}" basedir="${output.folder}/bin" manifest="${output.folder}/bin/META-INF/MANIFEST.MF" > </jar> <jar jarfile="${ship.folder}/${paho-client-v3-trace-source-jar}" basedir="${source.folder}/src"> </jar> </target> <target name="clean"> <delete dir="${output.folder}" /> <delete file="${ship.folder}/${paho-client-v3-trace-jar}" /> <delete file="${ship.folder}/${paho-client-v3-trace-source-jar}" /> </target> <target name="full" depends="compile,package" /> </project>
{ "content_hash": "c2b96aec388aafa343e3f045e4a9ff74", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 112, "avg_line_length": 36.21, "alnum_prop": 0.6575531621099144, "repo_name": "tuckervento/usTwo", "id": "1741a38f6ee3f897157e6bce33556b0c72c0e452", "size": "3621", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MQTT/org.eclipse.paho.client.mqttv3.internal.traceformat/build.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "1560" }, { "name": "Java", "bytes": "916780" } ], "symlink_target": "" }
author: slowe comments: true date: 2014-07-31 09:00:00+00:00 layout: post slug: installing-the-docker-plugin-for-heat title: Installing the Docker Plugin for Heat wordpress_id: 3480 categories: Tutorial tags: - Automation - Docker - OpenStack - Linux - Virtualization --- In this post, I'll share with you how I installed the Docker plugin for OpenStack Heat, so that Heat is able to orchestrate the creation of Docker containers in an OpenStack environment. I'm publishing this because I found [the default instructions](https://github.com/openstack/heat/tree/stable/icehouse/contrib/docker/docker) to be a bit too vague to be helpful. By sharing my experience, I hope that others interested in using Docker in their OpenStack environment will benefit. Here are the steps I used to make the Docker plugin work with Heat. These steps assume you are using Ubuntu and already have OpenStack Heat installed and working correctly: 1. If you are using the packaged version of Heat (in other words, you are installing Heat via a method like `apt-get install` on Ubuntu), then you'll want to use the "stable/icehouse" branch that contains the Docker container. In this case, you _don't_ want to use master---it won't work (either the plugin won't load or the Heat engine service won't start). Download a ZIP copy of the correct branch of Heat from GitHub (for "stable/icehouse", see [here](https://github.com/openstack/heat/tree/stable/icehouse)). 2. Extract the `contrib/docker` folder from the downloaded ZIP copy of Heat. 3. Delete the `contrib/docker/docker/tests` directory; in my testing, the plugin failed to load if you leave this directory present in the plugin. 4. Copy the `contrib/docker` folder to your OpenStack controller somewhere. On my controller, I chose to put it into an existing `/var/lib/heat` directory. When you're done, you should have a `docker` directory in your chosen destination, and that directory should container another subdirectory named `docker`. 
For example, on my system, the full path to the plugin was `/var/lib/heat/docker/docker`. Make note of the full path. 5. In the top-level `docker` folder, run `pip install -r requirements.txt`. Note that you might need to do an `apt-get install python-pip` first. This will install the docker-py Python module, which is required by the Docker plugin. 6. Modify your Heat configuration file (typically found at `/etc/heat/heat.conf`) and add the full path of the Docker plugin to the `plugin_dirs` setting. If you used `/var/lib/heat` as the base directory for the plugin, then the full path should be `/var/lib/heat/docker/docker`. 7. Restart the Heat engine (via something like `sudo service heat-engine restart` or similar). 8. Run `heat resource-type-list` and verify that DockerInc::Docker::Container is listed in the results. If not, verify that you have the correct path to the plugin specified in the Heat configuration file, and verify that you used the correct branch of the Docker plugin ("stable/icehouse" if you are using packaged versions of OpenStack). Review the Heat log files for any errors if the resource type still isn't listed. Assuming you were successful, then you are ready to start deploying Docker containers via Heat. Stay tuned for an example Heat template that shows how to deploy a Docker container. Until then, feel free to share any corrections, clarifications, or questions in the comments below.
{ "content_hash": "efcb40100d10441d9b1c7a7e6f1014ce", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 513, "avg_line_length": 92.37837837837837, "alnum_prop": 0.7814511410181393, "repo_name": "lowescott/lowescott.github.io", "id": "39fe56d6ab3a62dd77f72089786f301721ca7ca6", "size": "3422", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2014-07-31-installing-the-docker-plugin-for-heat.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "133971" }, { "name": "HTML", "bytes": "10990" }, { "name": "Ruby", "bytes": "48" } ], "symlink_target": "" }
package jscompiler.jsobject; public class JsUndefined extends JsObjectBase { @Override public int getCode() { return UNDEFINED; } @Override public Object getRealValue() { return "undefined"; } }
{ "content_hash": "5eab8d3fca49cfc2f4c7a6c23a56ee2b", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 47, "avg_line_length": 13.933333333333334, "alnum_prop": 0.7272727272727273, "repo_name": "geecodemonkeys/jscompiler", "id": "fd58a9f735f5e1482debb1f65b76a685812cbb64", "size": "209", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/jscompiler/jsobject/JsUndefined.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "205531" }, { "name": "JavaScript", "bytes": "1640" } ], "symlink_target": "" }
using System; using NUnit.Framework; using OpenSim.Data.Tests; using log4net; using System.Reflection; using OpenSim.Tests.Common; namespace OpenSim.Data.NHibernate.Tests { [TestFixture, DatabaseTest] public class NHibernateMsSqlAssetTest : BasicAssetTest { private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); public string file; public NHibernateManager database; [TestFixtureSetUp] public void Init() { SuperInit(); // If we manage to connect to the database with the user // and password above it is our test database, and run // these tests. If anything goes wrong, ignore these // tests. try { string connect = "MsSql2005Dialect;SqlClientDriver;Data Source=127.0.0.1;Network Library=DBMSSOCN;Initial Catalog=opensim-nunit;User ID=opensim-nunit;Password=opensim-nunit"; db = new NHibernateAssetData(); db.Initialise(connect); database = ((NHibernateAssetData)db).Manager; } catch (Exception e) { m_log.Error(e.ToString()); Assert.Ignore(); } } [TestFixtureTearDown] public void Cleanup() { if (db != null) { db.Dispose(); } if (database != null) { database.DropSchema(); } } } }
{ "content_hash": "7be8eb2da65f68a1df5939d662bc40f8", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 190, "avg_line_length": 28.375, "alnum_prop": 0.5500314663310258, "repo_name": "zekizeki/agentservice", "id": "89c956766d034967861f4b1dee480238f95476d7", "size": "3206", "binary": false, "copies": "1", "ref": "refs/heads/august27merge", "path": "OpenSim/Data/NHibernate/Tests/NHibernateMsSqlAssetTest.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C#", "bytes": "14779415" }, { "name": "JavaScript", "bytes": "556" }, { "name": "PHP", "bytes": "1640" }, { "name": "Perl", "bytes": "5068" }, { "name": "Python", "bytes": "5053" }, { "name": "Shell", "bytes": "2450" } ], "symlink_target": "" }
// // UIView+MRView.h // test // // Created by 刘入徵 on 2017/4/15. // Copyright © 2017年 Mix_Reality. All rights reserved. // #import <UIKit/UIKit.h> @interface UIView (MRView) /** 添加轻击手势 Add a tap gesture With UIView */ - (void)tapActionWithBlock:(void (^)(void))block; @property (nonatomic) CGFloat left; ///< Shortcut for frame.origin.x. @property (nonatomic) CGFloat top; ///< Shortcut for frame.origin.y @property (nonatomic) CGFloat right; ///< Shortcut for frame.origin.x + frame.size.width @property (nonatomic) CGFloat bottom; ///< Shortcut for frame.origin.y + frame.size.height @property (nonatomic) CGFloat width; ///< Shortcut for frame.size.width. @property (nonatomic) CGFloat height; ///< Shortcut for frame.size.height. @property (nonatomic) CGFloat centerX; ///< Shortcut for center.x @property (nonatomic) CGFloat centerY; ///< Shortcut for center.y @property (nonatomic) CGPoint origin; ///< Shortcut for frame.origin. @property (nonatomic) CGSize size; ///< Shortcut for frame.size. @end
{ "content_hash": "bab7b9555a8fbcef99740d85ad8d7c79", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 95, "avg_line_length": 35.86666666666667, "alnum_prop": 0.6756505576208178, "repo_name": "MrReality/MRCommon", "id": "6a25463b33817bf94f75b723e1d9928b5e8c60a1", "size": "1097", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "Example/ChangeView(切换栏)/ChangeView(切换栏)/BaseClass/MRCommon/MRCategory/UIView/UIView+MRView.h", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "470890" }, { "name": "Objective-C", "bytes": "3069408" }, { "name": "Ruby", "bytes": "589" }, { "name": "Swift", "bytes": "20091" } ], "symlink_target": "" }
======= Summary ======= One key element of systems administration that is often overlooked is that end users are the reason systems administrators exist. Don't go the BOFH route and terminate every user who causes an alert to go off. Work with users to understand what they're trying to accomplish and see how your environment can better assist them in achieving their goals. Meet your users needs by organizing your users into projects, applying policies, managing quotas, and working with them.
{ "content_hash": "2882f72bf6b0a0a686d73015dd7f4f9a", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 72, "avg_line_length": 45.27272727272727, "alnum_prop": 0.7911646586345381, "repo_name": "AlekhyaMallina-Vedams/openstack-manuals", "id": "8c142ca605add5a2dc0194ed14cdd78f35e5b900", "size": "498", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/ops-guide/source/ops_projects_users_summary.rst", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "26828" }, { "name": "CSS", "bytes": "121997" }, { "name": "HTML", "bytes": "111435" }, { "name": "JavaScript", "bytes": "25447" }, { "name": "Python", "bytes": "9775" } ], "symlink_target": "" }
// Copyright (c) 2009 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/views/notifications/balloon_view_host.h" #include "base/string_util.h" #include "chrome/browser/browser_list.h" #include "chrome/browser/notifications/balloon.h" #include "chrome/browser/notifications/notification.h" #include "chrome/browser/profile.h" #include "chrome/browser/renderer_host/render_view_host.h" #include "chrome/browser/renderer_host/render_widget_host_view.h" #if defined(OS_WIN) #include "chrome/browser/renderer_host/render_widget_host_view_win.h" #endif #include "chrome/browser/renderer_host/site_instance.h" #include "chrome/common/notification_service.h" #include "chrome/common/notification_type.h" #include "chrome/common/render_messages.h" #include "chrome/common/renderer_preferences.h" #include "views/widget/widget.h" #include "views/widget/widget_win.h" BalloonViewHost::BalloonViewHost(Balloon* balloon) : balloon_(balloon), site_instance_(SiteInstance::CreateSiteInstance(balloon->profile())), render_view_host_(NULL), should_notify_on_disconnect_(false), initialized_(false) { DCHECK(balloon_); } void BalloonViewHost::Shutdown() { if (render_view_host_) { render_view_host_->Shutdown(); render_view_host_ = NULL; } } WebPreferences BalloonViewHost::GetWebkitPrefs() { WebPreferences prefs; prefs.allow_scripts_to_close_windows = true; return prefs; } void BalloonViewHost::Close(RenderViewHost* render_view_host) { balloon_->CloseByScript(); } void BalloonViewHost::RenderViewCreated(RenderViewHost* render_view_host) { render_view_host->Send(new ViewMsg_EnablePreferredSizeChangedMode( render_view_host->routing_id())); } void BalloonViewHost::RendererReady(RenderViewHost* /* render_view_host */) { should_notify_on_disconnect_ = true; NotificationService::current()->Notify( NotificationType::NOTIFY_BALLOON_CONNECTED, Source<Balloon>(balloon_), 
NotificationService::NoDetails()); } void BalloonViewHost::RendererGone(RenderViewHost* /* render_view_host */) { if (!should_notify_on_disconnect_) return; should_notify_on_disconnect_ = false; NotificationService::current()->Notify( NotificationType::NOTIFY_BALLOON_DISCONNECTED, Source<Balloon>(balloon_), NotificationService::NoDetails()); } // RenderViewHostDelegate::View methods implemented to allow links to // open pages in new tabs. void BalloonViewHost::CreateNewWindow(int route_id) { delegate_view_helper_.CreateNewWindow( route_id, balloon_->profile(), site_instance_.get(), DOMUIFactory::GetDOMUIType(balloon_->notification().content_url()), NULL); } void BalloonViewHost::ShowCreatedWindow(int route_id, WindowOpenDisposition disposition, const gfx::Rect& initial_pos, bool user_gesture, const GURL& creator_url) { // Don't allow pop-ups from notifications. if (disposition == NEW_POPUP) return; TabContents* contents = delegate_view_helper_.GetCreatedWindow(route_id); if (contents) { Browser* browser = BrowserList::GetLastActive(); browser->AddTabContents(contents, disposition, initial_pos, user_gesture); } } void BalloonViewHost::UpdatePreferredSize(const gfx::Size& new_size) { balloon_->SetContentPreferredSize(new_size); } void BalloonViewHost::Init(gfx::NativeView parent_hwnd) { DCHECK(!render_view_host_) << "BalloonViewHost already initialized."; RenderViewHost* rvh = new RenderViewHost(site_instance_.get(), this, MSG_ROUTING_NONE); render_view_host_ = rvh; // Pointer is owned by the RVH. RenderWidgetHostView* view = RenderWidgetHostView::CreateViewForWidget(rvh); rvh->set_view(view); // TODO(johnnyg): http://crbug.com/23954. Need a cross-platform solution. #if defined(OS_WIN) RenderWidgetHostViewWin* view_win = static_cast<RenderWidgetHostViewWin*>(view); // Create the HWND. HWND hwnd = view_win->Create(parent_hwnd); view_win->ShowWindow(SW_SHOW); Attach(hwnd); #else NOTIMPLEMENTED(); #endif // Start up the renderer and point it at the balloon contents URL. 
rvh->CreateRenderView(GetProfile()->GetRequestContext()); rvh->NavigateToURL(balloon_->notification().content_url()); initialized_ = true; } void BalloonViewHost::ViewHierarchyChanged(bool is_add, views::View* parent, views::View* child) { NativeViewHost::ViewHierarchyChanged(is_add, parent, child); if (is_add && GetWidget() && !initialized_) Init(GetWidget()->GetNativeView()); }
{ "content_hash": "34169cd434969c7095d6794fab8fd85d", "timestamp": "", "source": "github", "line_count": 135, "max_line_length": 80, "avg_line_length": 35.629629629629626, "alnum_prop": 0.7014553014553014, "repo_name": "rwatson/chromium-capsicum", "id": "e825399939a227a99bd0da9825218560baa7dd28", "size": "4810", "binary": false, "copies": "1", "ref": "refs/heads/chromium-capsicum", "path": "chrome/browser/views/notifications/balloon_view_host.cc", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
using System; using System.Net.Security; using System.Net.Sockets; using System.Security.Authentication; using System.Security.Cryptography.X509Certificates; namespace Thrift.Transport { /// <summary> /// SSL Socket Wrapper class /// </summary> public class TTLSSocket : TStreamTransport { /// <summary> /// Internal TCP Client /// </summary> private TcpClient client; /// <summary> /// The host /// </summary> private string host; /// <summary> /// The port /// </summary> private int port; /// <summary> /// The timeout for the connection /// </summary> private int timeout; /// <summary> /// Internal SSL Stream for IO /// </summary> private SslStream secureStream; /// <summary> /// Defines wheter or not this socket is a server socket<br/> /// This is used for the TLS-authentication /// </summary> private bool isServer; /// <summary> /// The certificate /// </summary> private X509Certificate certificate; /// <summary> /// User defined certificate validator. /// </summary> private RemoteCertificateValidationCallback certValidator; /// <summary> /// The function to determine which certificate to use. /// </summary> private LocalCertificateSelectionCallback localCertificateSelectionCallback; /// <summary> /// The SslProtocols value that represents the protocol used for authentication.SSL protocols to be used. /// </summary> private readonly SslProtocols sslProtocols; /// <summary> /// Initializes a new instance of the <see cref="TTLSSocket"/> class. 
/// </summary> /// <param name="client">An already created TCP-client</param> /// <param name="certificate">The certificate.</param> /// <param name="isServer">if set to <c>true</c> [is server].</param> /// <param name="certValidator">User defined cert validator.</param> /// <param name="localCertificateSelectionCallback">The callback to select which certificate to use.</param> /// <param name="sslProtocols">The SslProtocols value that represents the protocol used for authentication.</param> public TTLSSocket( TcpClient client, X509Certificate certificate, bool isServer = false, RemoteCertificateValidationCallback certValidator = null, LocalCertificateSelectionCallback localCertificateSelectionCallback = null, // TODO: Enable Tls11 and Tls12 (TLS 1.1 and 1.2) by default once we start using .NET 4.5+. SslProtocols sslProtocols = SslProtocols.Tls) { this.client = client; this.certificate = certificate; this.certValidator = certValidator; this.localCertificateSelectionCallback = localCertificateSelectionCallback; this.sslProtocols = sslProtocols; this.isServer = isServer; if (isServer && certificate == null) { throw new ArgumentException("TTLSSocket needs certificate to be used for server", "certificate"); } if (IsOpen) { base.inputStream = client.GetStream(); base.outputStream = client.GetStream(); } } /// <summary> /// Initializes a new instance of the <see cref="TTLSSocket"/> class. 
/// </summary> /// <param name="host">The host, where the socket should connect to.</param> /// <param name="port">The port.</param> /// <param name="certificatePath">The certificate path.</param> /// <param name="certValidator">User defined cert validator.</param> /// <param name="localCertificateSelectionCallback">The callback to select which certificate to use.</param> /// <param name="sslProtocols">The SslProtocols value that represents the protocol used for authentication.</param> public TTLSSocket( string host, int port, string certificatePath, RemoteCertificateValidationCallback certValidator = null, LocalCertificateSelectionCallback localCertificateSelectionCallback = null, SslProtocols sslProtocols = SslProtocols.Tls) : this(host, port, 0, X509Certificate.CreateFromCertFile(certificatePath), certValidator, localCertificateSelectionCallback, sslProtocols) { } /// <summary> /// Initializes a new instance of the <see cref="TTLSSocket"/> class. /// </summary> /// <param name="host">The host, where the socket should connect to.</param> /// <param name="port">The port.</param> /// <param name="certificate">The certificate.</param> /// <param name="certValidator">User defined cert validator.</param> /// <param name="localCertificateSelectionCallback">The callback to select which certificate to use.</param> /// <param name="sslProtocols">The SslProtocols value that represents the protocol used for authentication.</param> public TTLSSocket( string host, int port, X509Certificate certificate = null, RemoteCertificateValidationCallback certValidator = null, LocalCertificateSelectionCallback localCertificateSelectionCallback = null, SslProtocols sslProtocols = SslProtocols.Tls) : this(host, port, 0, certificate, certValidator, localCertificateSelectionCallback, sslProtocols) { } /// <summary> /// Initializes a new instance of the <see cref="TTLSSocket"/> class. 
/// </summary> /// <param name="host">The host, where the socket should connect to.</param> /// <param name="port">The port.</param> /// <param name="timeout">The timeout.</param> /// <param name="certificate">The certificate.</param> /// <param name="certValidator">User defined cert validator.</param> /// <param name="localCertificateSelectionCallback">The callback to select which certificate to use.</param> /// <param name="sslProtocols">The SslProtocols value that represents the protocol used for authentication.</param> public TTLSSocket( string host, int port, int timeout, X509Certificate certificate, RemoteCertificateValidationCallback certValidator = null, LocalCertificateSelectionCallback localCertificateSelectionCallback = null, SslProtocols sslProtocols = SslProtocols.Tls) { this.host = host; this.port = port; this.timeout = timeout; this.certificate = certificate; this.certValidator = certValidator; this.localCertificateSelectionCallback = localCertificateSelectionCallback; this.sslProtocols = sslProtocols; InitSocket(); } /// <summary> /// Creates the TcpClient and sets the timeouts /// </summary> private void InitSocket() { client = TSocketVersionizer.CreateTcpClient(); client.ReceiveTimeout = client.SendTimeout = timeout; client.Client.NoDelay = true; } /// <summary> /// Sets Send / Recv Timeout for IO /// </summary> public int Timeout { set { this.client.ReceiveTimeout = this.client.SendTimeout = this.timeout = value; } } /// <summary> /// Gets the TCP client. /// </summary> public TcpClient TcpClient { get { return client; } } /// <summary> /// Gets the host. /// </summary> public string Host { get { return host; } } /// <summary> /// Gets the port. 
/// </summary> public int Port { get { return port; } } /// <summary> /// Gets a value indicating whether TCP Client is Cpen /// </summary> public override bool IsOpen { get { if (this.client == null) { return false; } return this.client.Connected; } } /// <summary> /// Validates the certificates!<br/> /// </summary> /// <param name="sender">The sender-object.</param> /// <param name="certificate">The used certificate.</param> /// <param name="chain">The certificate chain.</param> /// <param name="sslValidationErrors">An enum, which lists all the errors from the .NET certificate check.</param> /// <returns></returns> private bool DefaultCertificateValidator(object sender, X509Certificate certificate, X509Chain chain, SslPolicyErrors sslValidationErrors) { return (sslValidationErrors == SslPolicyErrors.None); } /// <summary> /// Connects to the host and starts the routine, which sets up the TLS /// </summary> public override void Open() { if (IsOpen) { throw new TTransportException(TTransportException.ExceptionType.AlreadyOpen, "Socket already connected"); } if (string.IsNullOrEmpty(host)) { throw new TTransportException(TTransportException.ExceptionType.NotOpen, "Cannot open null host"); } if (port <= 0) { throw new TTransportException(TTransportException.ExceptionType.NotOpen, "Cannot open without port"); } if (client == null) { InitSocket(); } if (timeout == 0) // no timeout -> infinite { client.Connect(host, port); } else // we have a timeout -> use it { ConnectHelper hlp = new ConnectHelper(client); IAsyncResult asyncres = client.BeginConnect(host, port, new AsyncCallback(ConnectCallback), hlp); bool bConnected = asyncres.AsyncWaitHandle.WaitOne(timeout) && client.Connected; if (!bConnected) { lock (hlp.Mutex) { if (hlp.CallbackDone) { asyncres.AsyncWaitHandle.Close(); client.Close(); } else { hlp.DoCleanup = true; client = null; } } throw new TTransportException(TTransportException.ExceptionType.TimedOut, "Connect timed out"); } } setupTLS(); } /// <summary> 
/// Creates a TLS-stream and lays it over the existing socket /// </summary> public void setupTLS() { RemoteCertificateValidationCallback validator = this.certValidator ?? DefaultCertificateValidator; if (this.localCertificateSelectionCallback != null) { this.secureStream = new SslStream( this.client.GetStream(), false, validator, this.localCertificateSelectionCallback ); } else { this.secureStream = new SslStream( this.client.GetStream(), false, validator ); } try { if (isServer) { // Server authentication this.secureStream.AuthenticateAsServer(this.certificate, this.certValidator != null, sslProtocols, true); } else { // Client authentication X509CertificateCollection certs = certificate != null ? new X509CertificateCollection { certificate } : new X509CertificateCollection(); this.secureStream.AuthenticateAsClient(host, certs, sslProtocols, true); } } catch (Exception) { this.Close(); throw; } inputStream = this.secureStream; outputStream = this.secureStream; } static void ConnectCallback(IAsyncResult asyncres) { ConnectHelper hlp = asyncres.AsyncState as ConnectHelper; lock (hlp.Mutex) { hlp.CallbackDone = true; try { if (hlp.Client.Client != null) hlp.Client.EndConnect(asyncres); } catch (Exception) { // catch that away } if (hlp.DoCleanup) { try { asyncres.AsyncWaitHandle.Close(); } catch (Exception) { } try { if (hlp.Client is IDisposable) ((IDisposable)hlp.Client).Dispose(); } catch (Exception) { } hlp.Client = null; } } } private class ConnectHelper { public object Mutex = new object(); public bool DoCleanup = false; public bool CallbackDone = false; public TcpClient Client; public ConnectHelper(TcpClient client) { Client = client; } } /// <summary> /// Closes the SSL Socket /// </summary> public override void Close() { base.Close(); if (this.client != null) { this.client.Close(); this.client = null; } if (this.secureStream != null) { this.secureStream.Close(); this.secureStream = null; } } } }
{ "content_hash": "36203fa2b420b2dfb660bdd3213a52c2", "timestamp": "", "source": "github", "line_count": 428, "max_line_length": 156, "avg_line_length": 34.850467289719624, "alnum_prop": 0.5234647358541163, "repo_name": "gadLinux/thrift", "id": "06286dc8b502580f6b73962b88f42f6a12ad0a10", "size": "15720", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "lib/csharp/src/Transport/TTLSSocket.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "890" }, { "name": "ActionScript", "bytes": "75794" }, { "name": "Batchfile", "bytes": "53982" }, { "name": "C", "bytes": "909705" }, { "name": "C#", "bytes": "1263488" }, { "name": "C++", "bytes": "4827482" }, { "name": "CMake", "bytes": "130113" }, { "name": "CSS", "bytes": "1070" }, { "name": "Common Lisp", "bytes": "39679" }, { "name": "D", "bytes": "649593" }, { "name": "Dart", "bytes": "181338" }, { "name": "Dockerfile", "bytes": "57264" }, { "name": "Emacs Lisp", "bytes": "5361" }, { "name": "Erlang", "bytes": "322716" }, { "name": "Go", "bytes": "479435" }, { "name": "HTML", "bytes": "39395" }, { "name": "Haskell", "bytes": "141452" }, { "name": "Haxe", "bytes": "311348" }, { "name": "Java", "bytes": "1013199" }, { "name": "JavaScript", "bytes": "440028" }, { "name": "Lex", "bytes": "10881" }, { "name": "Lua", "bytes": "81257" }, { "name": "M4", "bytes": "170643" }, { "name": "Makefile", "bytes": "216894" }, { "name": "OCaml", "bytes": "39269" }, { "name": "PHP", "bytes": "351752" }, { "name": "Pascal", "bytes": "462629" }, { "name": "Perl", "bytes": "132516" }, { "name": "Python", "bytes": "466118" }, { "name": "Ruby", "bytes": "414945" }, { "name": "Rust", "bytes": "328307" }, { "name": "Shell", "bytes": "59140" }, { "name": "Smalltalk", "bytes": "22944" }, { "name": "Swift", "bytes": "143590" }, { "name": "Thrift", "bytes": "394052" }, { "name": "TypeScript", "bytes": "61760" }, { "name": "Vim script", "bytes": "2846" }, { "name": "Yacc", "bytes": "27391" } ], 
"symlink_target": "" }
package d.money.common.utils; import javax.servlet.http.HttpServletRequest; public class WebUtils { /** * * 方法名: getBasePath * 方法描述:获得html标签<base href>的base_path值 * 参数: @return * 返回值: String * @exception * @since 1.0.0 */ public static String getBasePath(HttpServletRequest request) { String scheme=request.getScheme()+"://"; String requestURL=request.getRequestURL().toString(); requestURL=requestURL.substring(scheme.length()); int last=requestURL.indexOf("/"); if(last==-1){ last=requestURL.length(); } requestURL=requestURL.substring(0,last); String basePath=scheme+requestURL+request.getContextPath()+"/"; return basePath; } /** * 取出比较地址中的参数 * @param url * @return */ public static String removeParm(String url){ int flag = url.indexOf("?"); if(flag==-1){ return url; } return url.substring(0,flag); } }
{ "content_hash": "18501f792b6e8762f590c66298586a0a", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 65, "avg_line_length": 21.58139534883721, "alnum_prop": 0.6411637931034483, "repo_name": "yandong3389/money2", "id": "fd4eaf032f301ca818a4494fe11a91824084bfd0", "size": "990", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/d/money/common/utils/WebUtils.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "74957" }, { "name": "HTML", "bytes": "31682" }, { "name": "Java", "bytes": "532105" }, { "name": "JavaScript", "bytes": "152152" } ], "symlink_target": "" }
<?php defined('BASEPATH') OR exit('No direct script access allowed'); ?> <div style="border:1px solid #990000;padding-left:20px;margin:0 0 10px 0;"> <h4>An uncaught Exception was encountered</h4> <p>Type: <?php echo get_class($exception); ?></p> <p>Message: <?php echo $message; ?></p> <p>Filename: <?php echo $exception->getFile(); ?></p> <p>Line Number: <?php echo $exception->getLine(); ?></p> <?php if (defined('SHOW_DEBUG_BACKTRACE') && SHOW_DEBUG_BACKTRACE === true) : ?> <p>Backtrace:</p> <?php foreach ($exception->getTrace() as $error): ?> <?php if (isset($error['file']) && strpos($error['file'], realpath(BASEPATH)) !== 0) : ?> <p style="margin-left:10px"> File: <?php echo $error['file']; ?><br /> Line: <?php echo $error['line']; ?><br /> Function: <?php echo $error['function']; ?> </p> <?php endif ?> <?php endforeach ?> <?php endif ?> </div>
{ "content_hash": "3812efa31c3248651b16f36bca227c99", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 93, "avg_line_length": 29.59375, "alnum_prop": 0.5681098204857444, "repo_name": "yassu/PomoBoard", "id": "9d9e9fa4e309ec9cca20bf1b0c8aab29b3530a69", "size": "947", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "codeigniter/application/views/errors/html/error_exception.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "12222" }, { "name": "CSS", "bytes": "127" }, { "name": "HTML", "bytes": "5633" }, { "name": "PHP", "bytes": "1817871" }, { "name": "Shell", "bytes": "2711" } ], "symlink_target": "" }
<html> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width,initial-scale=1.0" /> <!--[if IE]><meta http-equiv='X-UA-Compatible' content='IE=edge,chrome=1'><![endif]--> <title>Tah Docs</title> <!-- css --> <link type="text/css" rel="stylesheet" href="http://fast.fonts.net/cssapi/a6555b0e-9541-40d2-ab30-147c200fd4db.css"/> <link rel="stylesheet" href="/css/application.min.css"> <link rel="stylesheet" href="/css/menubar.css"> <link rel="stylesheet" href="/css/blogpagecss.css"> <!-- js --> <script src="/js/application.min.js"></script> <!-- Mobile Specific Metas ================================================== --> </head> <body> <header> <div class="topbar"> <div class="topbar-innerdiv"> <div class="col-lg-5 col-md-5 col-sm-7 col-xs-7"> <ul class="topbar-socialicons"> <li><a href="https://www.facebook.com/pages/Tah/633568926721880" target="_blank"><i class="fa fa-facebook"></i></a></li> <li><a href="https://twitter.com/tah_io" target="_blank"><i class="fa fa-twitter"></i></a></li> <li><a href="https://instagram.com/tah-io" target="_blank"><i class="fa fa-instagram"></i></a></li> <li><a href="https://plus.google.com/+TahIoble/posts" target="_blank"><i class="fa fa-google-plus"></i></a></li> <li><a href="https://www.youtube.com/channel/UCY7RNCi0S8jQDE3BZQw5Gwg" target="_blank"><i class="fa fa-youtube"></i></a></li> <li><a href="https://github.com/tah-io" target="_blank"><i class="fa fa-github"></i></a></li> </ul> </div> <!-- <div class="col-sm-3 col-md-3 col-sm-5 col-sm-5 pull-right searchbox"> <form id="frmSearch" class="search2" method="get" /> <input id="keywords" size="24" placeholder="Search..."> </div> --> </div> </div> <div class="logo_div"> <div class="logo_inner_div"> <div class="col-lg-12 col-md-12 col-sm-12 col-xs-12"> <div class="col-xs-6 col-sm-7 col-md-4 col-lg-4 "> <img src="/img/header-tah-logo-for-doc-page.png" class="header-logo"/><span class="header-text">Tah Docs</span> </div> <div class="col-xs-4 col-sm-5 col-md-8 
col-lg-8"> <a href="https://www.crowdsupply.com/revealing-hour/tah-open-ble-arduino-board" target="_blank"> <img src="/img/buy-now.png" class="cta-buy-now"/> </a> </div> <div id="smallNav" class="col-xs-2 col-sm-0 col-md-0 col-lg-0"> <div id="nav-trigger"> <span class="">&#8801;</span> </div> </div> </div> </div> </div> <div class="menu_div_one"> <div class="menu_innerdiv_one"> <ul class="menu_list_one"> <li><a href="/index.html">Home</a></li> <li><a href="http://tah.io/start">Getting Started</a></li> <li><a href="/tutorials">Tutorials</a></li> <li><a href="/library">Library</a></li> <li><a href="/showcase">Showcase</a></li> <li><a href="/contributing">Contributing</a></li> <li><a href="/apps">Apps</a></li> <li><a href="http://blog.tah.io">Blog</a></li> <li><a href="http://discuss.tah.io">Forum</a></li> </ul> </div> </div> <nav id="nav-mobile"> <ul class="" style="display: none;"> <li><a href="/index.html">Home</a></li> <li><a href="http://tah.io/start">Getting Started</a></li> <li><a href="/tutorials">Tutorials</a></li> <li><a href="/library">Library</a></li> <li><a href="/showcase">Showcase</a></li> <li><a href="/contributing">Contributing</a></li> <li><a href="/apps">Apps</a></li> <li><a href="http://blog.tah.io">Blog</a></li> <li><a href="http://discuss.tah.io">Forum</a></li> </ul> </nav> </header> <div class="container containerdocs"> <div class="text-docs"> <h1 class="page_heading">Contributing</h1> <p>The source code for all the Tah smartphone apps is open source and is hosted on GitHub. If you are looking to contribute to our efforts of making Tah an awesome platform for building amazing things using Bluetooth, join us !</p> <h2 class="docshead2">Apps</h2> <p>Each app has been developed for iOS and Android, though some are still in the works. 
We have a git repo on GitHub for each app, with the iOS and Android source codes as git submodules.</p> <blockquote><p>Submodules allow you to keep a Git repository as a subdirectory of another repository</p></blockquote> <p>So for example, you'll find the source code for the Tah-iOS and the Tah-Android apps inside a repository called Tah.</p> <h2 class="docshead2">Sketches</h2> <p>You will notice that the above repository also has a folder called ArdSCL. This consists of the sketch that will be uploaded on the Tah when using the Tah app (iOS or Android).</p> <h2 class="docshead2">Begin Contributing</h2> <p>To contribute begin by forking the repo that you want to contribute to.</p> <ul> <li>Click on the repository that you want to contribute to : <img src="/img/Screen%20Shot%202015-02-05%20at%2014.11.30.png" alt="" /> </li> <li>Fork the repository, I'll be contributing to the Tah-iOS app in this example : <img src="/img/Screen%20Shot%202015-02-05%20at%2014.15.22.png" alt="" /> </li> <li><p>A copy of this repository will be created in your account. You can now make changes to it. <img src="/img/Screen%20Shot%202015-02-05%20at%2014.16.18.png" alt="" /></p> </li> <li><p>Clone this repo and you can make changes to the source code.</p></li> <li>Once you have made the changes, you can send a pull request. Click the marked button on your repo : <img src="/img/Screen%20Shot%202015-02-05%20at%2014.16.18%202.png" alt="" /> </li> </ul> </div> </div> <footer> <div class="bottombar"> <div class="bottombar-innerdiv"> <div class="col-lg-12 col-md-12 col-sm-12 col-xs-12 footnote"> &copy; Revealing Hour Creations. 
All rights reserved.<br/> The text of the Tah reference is licensed under a <a href="http://creativecommons.org/licenses/by-sa/3.0/">Creative Commons Attribution-ShareAlike 3.0 License</a>.<br/>Code samples in the reference are released into the public domain.<br/><a href="mailto:help@tah.io">help@tah.io</a> </div> </div> </div> </footer> <div id="totopscroller"></div> <script> $(function(){ $('#totopscroller').totopscroller({link:'http://www.jqueryscript.net'}); }) </script> <script> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-36872186-2', 'auto'); ga('require', 'displayfeatures'); ga('send', 'pageview'); </script> <!-- search box --> <!-- <script type="text/javascript"> var myHilitor; document.addEventListener("DOMContentLoaded", function() { myHilitor = new Hilitor2("playground"); myHilitor.setMatchType("left"); }, false); document.getElementById("keywords").addEventListener("keyup", function() { myHilitor.apply(this.value); }, false); </script> --> </body> </html>
{ "content_hash": "67d2dc838f36a29ca2a2ac28ad5147ef", "timestamp": "", "source": "github", "line_count": 164, "max_line_length": 293, "avg_line_length": 46.48170731707317, "alnum_prop": 0.592548865276138, "repo_name": "tah-io/tah-docs.github.io", "id": "f42f941e293a01eea9db8ec456411568fed9eaa5", "size": "7623", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "contributing/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "16865" }, { "name": "HTML", "bytes": "389610" }, { "name": "JavaScript", "bytes": "27242" }, { "name": "Ruby", "bytes": "130" } ], "symlink_target": "" }
// Delegate protocol through which a media-list UI reports the user's
// selection back to its presenting controller.
// NOTE(review): the `Media` type is not imported or forward-declared in the
// visible portion of this header — presumably the including file (or a
// prefix header) provides it; confirm.
@protocol MediaSelectionDelegate <NSObject>

// Invoked once when the user selects `media` from the list.
- (void)mediaWasSelected:(Media *)media;

@end
{ "content_hash": "d7b6160c2a0af9f2d67313dc7276ab45", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 43, "avg_line_length": 18.4, "alnum_prop": 0.7717391304347826, "repo_name": "rahulbhirud/cast-ios-demo-player", "id": "8e2e514e8e704276fd572ee9bf87886714420dce", "size": "752", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "DemoCastPlayer/MediaSelectionDelegate.h", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package com.wills.help.assist.adapter; import android.content.Context; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.TextView; import com.wills.help.R; import com.wills.help.db.bean.OrderTypeInfo; import java.util.List; /** * com.wills.help.assist.adapter * Created by lizhaoyong * 2017/3/9. */ public class OrderTypeListAdapter extends BaseAdapter{ private Context context; private ViewHolder viewHolder; private List<OrderTypeInfo> list; public OrderTypeListAdapter(Context context, List<OrderTypeInfo> list) { this.context = context; this.list = list; } @Override public int getCount() { return list.size(); } @Override public Object getItem(int position) { return list.get(position); } @Override public long getItemId(int position) { return position; } @Override public View getView(int position, View convertView, ViewGroup parent) { viewHolder = new ViewHolder(); if (convertView == null) { convertView = LayoutInflater.from(context).inflate(R.layout.activity_select_item, null); } viewHolder.textView = (TextView) convertView.findViewById(R.id.tv_name); viewHolder.textView.setText(list.get(position).getOrdertype()); return convertView; } private class ViewHolder { private TextView textView; } }
{ "content_hash": "e4407c2e097269cd0a3fb23d9f925936", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 100, "avg_line_length": 23.984126984126984, "alnum_prop": 0.6843150231634679, "repo_name": "Lee-Wills/wills", "id": "592e3876a792e3b6d77970ed5eb889d1f0a358cc", "size": "1511", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "bang/app/src/main/java/com/wills/help/assist/adapter/OrderTypeListAdapter.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "1751698" } ], "symlink_target": "" }
package com.amazonaws.services.directconnect.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;

import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.directconnect.model.*;
import com.amazonaws.transform.Marshaller;

import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * AssociateVirtualInterfaceRequest Marshaller
 *
 * Converts an {@link AssociateVirtualInterfaceRequest} into a wire-level HTTP
 * {@link Request} using the AWS JSON protocol. Auto-generated by the AWS SDK
 * code generator; do not edit by hand.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class AssociateVirtualInterfaceRequestProtocolMarshaller implements Marshaller<Request<AssociateVirtualInterfaceRequest>, AssociateVirtualInterfaceRequest> {

    // Static wire-level binding for the operation: AWS-JSON protocol, POST to "/",
    // payload built from member fields (no explicit payload member), with the
    // target operation identifier and service name used by Direct Connect
    // (internally "OvertureService").
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .operationIdentifier("OvertureService.AssociateVirtualInterface").serviceName("AmazonDirectConnect").build();

    // Factory that creates the JSON protocol marshaller used per request.
    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public AssociateVirtualInterfaceRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the given request into an HTTP request object.
     *
     * @param associateVirtualInterfaceRequest the model request to marshall; must not be null
     * @return the populated HTTP request
     * @throws SdkClientException if the argument is null or marshalling fails for any reason
     */
    public Request<AssociateVirtualInterfaceRequest> marshall(AssociateVirtualInterfaceRequest associateVirtualInterfaceRequest) {

        if (associateVirtualInterfaceRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // Create a per-request protocol marshaller bound to this operation,
            // write the member fields into it, then finalize into the HTTP request.
            final ProtocolRequestMarshaller<AssociateVirtualInterfaceRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(
                    SDK_OPERATION_BINDING, associateVirtualInterfaceRequest);

            protocolMarshaller.startMarshalling();
            AssociateVirtualInterfaceRequestMarshaller.getInstance().marshall(associateVirtualInterfaceRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            // Wrap every failure in the SDK's client-side exception type so
            // callers see one consistent exception class for marshalling errors.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
{ "content_hash": "588ffead8e77f50919b7758dd1a9d017", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 140, "avg_line_length": 43.056603773584904, "alnum_prop": 0.7765118317265557, "repo_name": "aws/aws-sdk-java", "id": "27720d9fc1f673f4dab88028e3bea51139eb509c", "size": "2862", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-java-sdk-directconnect/src/main/java/com/amazonaws/services/directconnect/model/transform/AssociateVirtualInterfaceRequestProtocolMarshaller.java", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
module Rickshaw
  # Namespace for builder classes. Constants are registered with
  # Module#autoload so their files are required lazily, on first
  # reference, rather than eagerly at load time.
  module Builders
    # Loads Rickshaw::Builders::Graph from its file on first use.
    autoload :Graph, 'rickshaw/builders/graph'
  end
end
{ "content_hash": "9175c0f9984fcdd25fbe98fd388e117b", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 46, "avg_line_length": 13.428571428571429, "alnum_prop": 0.723404255319149, "repo_name": "minmb/rickshaw", "id": "7902520803f86b9253b173a99a7b00e0cb93909c", "size": "94", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/rickshaw/builders.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "2053" } ], "symlink_target": "" }
#!/bin/sh
# Thin launcher for the coordinator node: validates the command argument
# and delegates to the shared bin/node.sh runner from the project root.

usage="Usage: coordinator.sh (start|stop|status)"

# Require a command argument.
if [ $# -lt 1 ]; then
  echo "$usage" >&2
  exit 1
fi

# Reject anything other than the documented commands before delegating.
case "$1" in
  start|stop|status)
    ;;
  *)
    echo "$usage" >&2
    exit 1
    ;;
esac

# Run from the project root (one level above this script's directory);
# fail fast if the directory change does not succeed.
cd "$(dirname "$0")/.." || exit 1

sh ./bin/node.sh coordinator "$1"
{ "content_hash": "7e81d7531107b80acf2b0c026bafe30b", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 49, "avg_line_length": 17, "alnum_prop": 0.6274509803921569, "repo_name": "solimant/druid", "id": "d7425b1bebe017563cba94103f2284aca95f7d09", "size": "170", "binary": false, "copies": "14", "ref": "refs/heads/master", "path": "examples/bin/coordinator.sh", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "1406" }, { "name": "CSS", "bytes": "11623" }, { "name": "HTML", "bytes": "26739" }, { "name": "Java", "bytes": "16545418" }, { "name": "JavaScript", "bytes": "295150" }, { "name": "Makefile", "bytes": "659" }, { "name": "PostScript", "bytes": "5" }, { "name": "Protocol Buffer", "bytes": "729" }, { "name": "R", "bytes": "17002" }, { "name": "Roff", "bytes": "3617" }, { "name": "Shell", "bytes": "4892" }, { "name": "TeX", "bytes": "399444" }, { "name": "Thrift", "bytes": "199" } ], "symlink_target": "" }
/* -*- mode: c++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */

/*
 Copyright (C) 2012 Peter Caspers

 This file is part of QuantLib, a free-software/open-source library
 for financial quantitative analysts and developers - http://quantlib.org/

 QuantLib is free software: you can redistribute it and/or modify it
 under the terms of the QuantLib license.  You should have received a
 copy of the license along with this program; if not, please email
 <quantlib-dev@lists.sf.net>. The license is also available online at
 <http://quantlib.org/license.shtml>.

 This program is distributed in the hope that it will be useful, but WITHOUT
 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 FOR A PARTICULAR PURPOSE.  See the license for more details.
*/

/*! \file fdmtimedepdirichletboundary.hpp
    \brief time dependent Dirichlet boundary conditions
*/

#ifndef quantlib_fdm_time_dep_dirichlet_boundary_hpp
#define quantlib_fdm_time_dep_dirichlet_boundary_hpp

#include <ql/methods/finitedifferences/boundarycondition.hpp>
#include <ql/methods/finitedifferences/operators/fdmlinearop.hpp>
#include <boost/function.hpp>

namespace QuantLib {

    class FdmMesher;
    class FdmLinearOpLayout;

    //! Dirichlet boundary condition whose boundary value may change with time.
    /*! Pins the solution on one boundary (lower or upper side, along a given
        mesher direction) to a prescribed value. The value is supplied either
        as a scalar function of time (same value for every boundary point) or
        as a function returning a whole array of per-point values.
        NOTE(review): setTime() presumably evaluates the functor and stores
        the result in values_ for the applyAfter* hooks — the .cpp is not
        visible here; confirm against the implementation.
    */
    class FdmTimeDepDirichletBoundary : public BoundaryCondition<FdmLinearOp> {
      public:
        // types and enumerations
        typedef FdmLinearOp operator_type;
        typedef FdmLinearOp::array_type array_type;
        typedef BoundaryCondition<FdmLinearOp>::Side Side;

        //! scalar boundary value: the same value applied on every boundary point
        FdmTimeDepDirichletBoundary(
            const boost::shared_ptr<FdmMesher>& mesher,
            const boost::function<Real (Real)>& valueOnBoundary,
            Size direction, Side side);

        //! vector boundary value: one value per boundary point
        FdmTimeDepDirichletBoundary(
            const boost::shared_ptr<FdmMesher>& mesher,
            const boost::function<Disposable<Array> (Real)>& valueOnBoundary,
            Size direction, Side side);

        //! updates the boundary values for the given time
        void setTime(Time);

        // no-ops: a Dirichlet condition does not modify the operator itself
        void applyBeforeApplying(operator_type&) const {}
        void applyBeforeSolving(operator_type&, array_type&) const {}

        //! enforces the boundary values after the operator has been applied
        void applyAfterApplying(array_type&) const;
        //! enforces the boundary values after the linear system is solved
        void applyAfterSolving(array_type&) const;

      private:
        // flat indices of the mesher points lying on the selected boundary
        const std::vector<Size> indices_;
        // exactly one of the two functors below is set, depending on which
        // constructor was used
        const boost::function<Real (Real)> valueOnBoundary_;
        const boost::function<Disposable<Array>(Real)> valuesOnBoundary_;
        // boundary values for the current time, written by setTime()
        Array values_;
    };
}

#endif
{ "content_hash": "577676f1896b59246f4e7fbb4c2b8a35", "timestamp": "", "source": "github", "line_count": 70, "max_line_length": 79, "avg_line_length": 34.714285714285715, "alnum_prop": 0.7111111111111111, "repo_name": "applehackfoxus/Quantum-Trading", "id": "910018cd61d14f006670cc9b31a55edf4036de45", "size": "2430", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "QuantLib-1.4/ql/methods/finitedifferences/utilities/fdmtimedepdirichletboundary.hpp", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
using System;
using System.Threading;
using System.Threading.Tasks;
using Azure;
using Azure.Core;
using Azure.Core.Pipeline;

namespace Azure.ResourceManager.EventHubs
{
    /// <summary> Deletes an existing namespace. This operation also removes all associated resources under the namespace. </summary>
    public partial class NamespacesDeleteOperation : Operation<Response>, IOperationSource<Response>
    {
        // Shared long-running-operation plumbing; all Operation<T> members
        // below simply delegate to this helper.
        private readonly ArmOperationHelpers<Response> _operation;

        // Builds the operation from the initial service response; polling
        // follows the Location header until a final state is reached
        // (OperationFinalStateVia.Location).
        internal NamespacesDeleteOperation(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, Request request, Response response)
        {
            _operation = new ArmOperationHelpers<Response>(this, clientDiagnostics, pipeline, request, response, OperationFinalStateVia.Location, "NamespacesDeleteOperation");
        }

        /// <inheritdoc />
        public override string Id => _operation.Id;

        /// <inheritdoc />
        public override Response Value => _operation.Value;

        /// <inheritdoc />
        public override bool HasCompleted => _operation.HasCompleted;

        /// <inheritdoc />
        public override bool HasValue => _operation.HasValue;

        /// <inheritdoc />
        public override Response GetRawResponse() => _operation.GetRawResponse();

        /// <inheritdoc />
        public override Response UpdateStatus(CancellationToken cancellationToken = default) => _operation.UpdateStatus(cancellationToken);

        /// <inheritdoc />
        public override ValueTask<Response> UpdateStatusAsync(CancellationToken cancellationToken = default) => _operation.UpdateStatusAsync(cancellationToken);

        /// <inheritdoc />
        public override ValueTask<Response<Response>> WaitForCompletionAsync(CancellationToken cancellationToken = default) => _operation.WaitForCompletionAsync(cancellationToken);

        /// <inheritdoc />
        public override ValueTask<Response<Response>> WaitForCompletionAsync(TimeSpan pollingInterval, CancellationToken cancellationToken = default) => _operation.WaitForCompletionAsync(pollingInterval, cancellationToken);

        // A delete operation produces no body, so the raw response itself is
        // used as the operation's result.
        Response IOperationSource<Response>.CreateResult(Response response, CancellationToken cancellationToken)
        {
            return response;
        }

        // Async counterpart of CreateResult; wraps the already-available
        // response in a completed ValueTask.
        async ValueTask<Response> IOperationSource<Response>.CreateResultAsync(Response response, CancellationToken cancellationToken)
        {
            return await new ValueTask<Response>(response).ConfigureAwait(false);
        }
    }
}
{ "content_hash": "7d769a132c75ac7f33f4525f9b40e9ba", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 223, "avg_line_length": 45.56363636363636, "alnum_prop": 0.7230646448523543, "repo_name": "yugangw-msft/azure-sdk-for-net", "id": "bfef5d448b026ed92e1d75359705a4f69ce24825", "size": "2644", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "sdk/eventhub/Azure.ResourceManager.EventHubs/src/Generated/NamespacesDeleteOperation.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "118" }, { "name": "Batchfile", "bytes": "817" }, { "name": "C#", "bytes": "55680650" }, { "name": "CSS", "bytes": "685" }, { "name": "Cucumber", "bytes": "89597" }, { "name": "JavaScript", "bytes": "1719" }, { "name": "PowerShell", "bytes": "24329" }, { "name": "Shell", "bytes": "45" } ], "symlink_target": "" }
import { NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';

import { PlacesRootComponent } from './components/Places.root.component';
import { PlacesComponent } from './components/Places.component';
import { PlacesCreateComponent } from './components/Places.create.component';
import { PlacesEditComponent } from './components/Places.edit.component';

// Child routes rendered inside PlacesRootComponent's router outlet:
// the list view, the create form, and the id-parameterised edit form.
const placesChildRoutes = [
    { path: '', component: PlacesComponent },
    { path: 'create', component: PlacesCreateComponent },
    { path: 'edit/:id', component: PlacesEditComponent }
];

// Feature routes for the "places" area; everything lives under /places.
const placesRoutes = [
    {
        path: 'places',
        component: PlacesRootComponent,
        children: placesChildRoutes
    }
];

/**
 * Routing module for the Places feature. Registers its routes with
 * RouterModule.forChild so they attach to the application's root
 * router configuration, and re-exports RouterModule for consumers.
 */
@NgModule({
    imports: [RouterModule.forChild(placesRoutes)],
    exports: [RouterModule]
})
export class PlacesRoutesModule { }
{ "content_hash": "f492918cee0a870eff044336e0bd01c8", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 77, "avg_line_length": 35.03703703703704, "alnum_prop": 0.5570824524312896, "repo_name": "ibrahimsaqr/blook2", "id": "6bcac8d37b5b1465b42bb2d6b12f31e38648a8c6", "size": "946", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "public/app/PlacesModule/Places.routes.module.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "9373" }, { "name": "HTML", "bytes": "10298" }, { "name": "JavaScript", "bytes": "201441" }, { "name": "TypeScript", "bytes": "28345" } ], "symlink_target": "" }
SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in P. E. Boissier, Diagn. pl. orient. ser. 1, 9:18. 1849 #### Original name null ### Remarks null
{ "content_hash": "09571607eeaf661eec7fd0d4c97a6287", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 53, "avg_line_length": 14, "alnum_prop": 0.6703296703296703, "repo_name": "mdoering/backbone", "id": "ed6a518fd9fe90d55b1b91ea3a0339a0fc7176d1", "size": "242", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Medicago/Medicago heldreichii/ Syn. Trigonella polycarpa/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
// # Task automation for Ghost // // Run various tasks when developing for and working with Ghost. // // **Usage instructions:** can be found in the [Custom Tasks](#custom%20tasks) section or by running `grunt --help`. // // **Debug tip:** If you have any problems with any Grunt tasks, try running them with the `--verbose` command require('./core/server/overrides'); var config = require('./core/server/config'), urlService = require('./core/server/services/url'), _ = require('lodash'), chalk = require('chalk'), fs = require('fs-extra'), KnexMigrator = require('knex-migrator'), knexMigrator = new KnexMigrator({ knexMigratorFilePath: config.get('paths:appRoot') }), path = require('path'), escapeChar = process.platform.match(/^win/) ? '^' : '\\', cwd = process.cwd().replace(/( |\(|\))/g, escapeChar + '$1'), buildDirectory = path.resolve(cwd, '.build'), distDirectory = path.resolve(cwd, '.dist'), hasBuiltClient = false, logBuildingClient = function (grunt) { if (!hasBuiltClient) { grunt.log.writeln('Building admin client... (can take ~1min)'); setTimeout(logBuildingClient, 5000, grunt); } }, // ## Grunt configuration configureGrunt = function (grunt) { // #### Load all grunt tasks // // Find all of the task which start with `grunt-` and load them, rather than explicitly declaring them all require('matchdep').filterDev(['grunt-*', '!grunt-cli']).forEach(grunt.loadNpmTasks); var cfg = { // #### Common paths used by tasks paths: { build: buildDirectory, releaseBuild: path.join(buildDirectory, 'release'), dist: distDirectory, releaseDist: path.join(distDirectory, 'release') }, // Standard build type, for when we have nightlies again. buildType: 'Build', // Load package.json so that we can create correctly versioned releases. 
pkg: grunt.file.readJSON('package.json'), clientFiles: [ 'server/web/admin/views/default.html', 'built/assets/ghost.js', 'built/assets/ghost.css', 'built/assets/vendor.js', 'built/assets/vendor.css' ], // ### grunt-contrib-watch // Watch files and livereload in the browser during development. // See the [grunt dev](#live%20reload) task for how this is used. watch: grunt.option('no-server-watch') ? {files: []} : { livereload: { files: [ 'content/themes/casper/assets/css/*.css', 'content/themes/casper/assets/js/*.js' ], options: { livereload: true } }, express: { files: ['core/ghost-server.js', 'core/server/**/*.js', 'config.*.json', '!config.testing.json'], tasks: ['express:dev'], options: { spawn: false, livereload: true } } }, // ### grunt-express-server // Start a Ghost express server for use in development and testing express: { options: { script: 'index.js', output: 'Ghost is running' }, dev: { options: {} }, test: { options: { node_env: 'testing' } } }, // ### grunt-eslint // Linting rules, run as part of `grunt validate`. See [grunt validate](#validate) and its subtasks for // more information. eslint: { server: { options: { config: '.eslintrc.json' }, src: [ '*.js', 'core/*.js', 'core/server/*.js', 'core/server/**/*.js', '!core/server/public/**/*.js' ] }, test: { options: { config: './core/test/.eslintrc.json' }, src: [ 'core/test/*.js', 'core/test/**/*.js', '!core/test/coverage/**' ] } }, // ### grunt-mocha-cli // Configuration for the mocha test runner, used to run unit, integration and route tests as part of // `grunt validate`. See [grunt validate](#validate) and its sub tasks for more information. 
mochacli: { options: { ui: 'bdd', reporter: grunt.option('reporter') || 'spec', timeout: '30000', save: grunt.option('reporter-output'), require: ['core/server/overrides'], exit: true }, // #### All Unit tests unit: { src: [ 'core/test/unit/**/*_spec.js' ] }, // #### All Integration tests integration: { src: [ 'core/test/integration/**/*_spec.js' ] }, // #### All Route tests routes: { src: [ 'core/test/functional/routes/**/*_spec.js' ] }, // #### All Module tests module: { src: [ 'core/test/functional/module/**/*_spec.js' ] }, // #### Run single test (src is set dynamically, see grunt task 'test') single: {} }, // ### grunt-mocha-istanbul // Configuration for the mocha test coverage generator // `grunt coverage`. mocha_istanbul: { coverage: { // they can also have coverage generated for them & the order doesn't matter src: [ 'core/test/unit' ], options: { mask: '**/*_spec.js', coverageFolder: 'core/test/coverage/unit', mochaOptions: ['--timeout=15000', '--require', 'core/server/overrides', '--exit'], excludes: ['core/client', 'core/server/built'] } }, coverage_all: { src: [ 'core/test/integration', 'core/test/functional', 'core/test/unit' ], options: { coverageFolder: 'core/test/coverage/all', mask: '**/*_spec.js', mochaOptions: ['--timeout=15000', '--require', 'core/server/overrides', '--exit'], excludes: ['core/client', 'core/server/built'] } } }, bgShell: { client: { cmd: function () { logBuildingClient(grunt); return 'grunt subgrunt:watch'; }, bg: grunt.option('client') ? false : true, stdout: function (chunk) { // hide certain output to prevent confusion when running alongside server var filter = grunt.option('client') ? 
false : [ /> ghost-admin/, /^Livereload/, /^Serving on/ ].some(function (regexp) { return regexp.test(chunk); }); if (!filter) { grunt.log.write(chunk); } if (chunk.indexOf('Build successful') !== -1) { hasBuiltClient = true; } }, stderr: function (chunk) { hasBuiltClient = true; grunt.log.error(chunk); } } }, // ### grunt-shell // Command line tools where it's easier to run a command directly than configure a grunt plugin shell: { master: { command: function () { var upstream = grunt.option('upstream') || process.env.GHOST_UPSTREAM || 'upstream'; grunt.log.writeln('Pulling down the latest master from ' + upstream); return 'git checkout master; git pull ' + upstream + ' master; ' + 'yarn; git submodule foreach "git checkout master && git pull ' + upstream + ' master"'; } }, dbhealth: { command: 'knex-migrator health' } }, // ### grunt-docker // Generate documentation from code docker: { docs: { dest: 'docs', src: ['.'], options: { onlyUpdated: true, exclude: 'node_modules,bower_components,content,core/client,*test,*doc*,' + '*vendor,config.*.json,*buil*,.dist*,.idea,.git*,.travis.yml,.bower*,.editorconfig,.js*,*.md,' + 'MigratorConfig.js' } } }, // ### grunt-contrib-clean // Clean up files as part of other tasks clean: { built: { src: [ 'core/built/**' ] }, release: { src: ['<%= paths.releaseBuild %>/**'] }, test: { src: ['content/data/ghost-test.db'] }, tmp: { src: ['.tmp/**'] }, dependencies: { src: ['node_modules/**', 'core/client/bower_components/**', 'core/client/node_modules/**'] } }, // ### grunt-contrib-compress // Zip up files for builds / releases compress: { release: { options: { archive: '<%= paths.releaseDist %>/Ghost-<%= pkg.version %>.zip' }, expand: true, cwd: '<%= paths.releaseBuild %>/', src: ['**'] } }, // ### grunt-update-submodules // Grunt task to update git submodules update_submodules: { pinned: { options: { params: '--init' } } }, uglify: { prod: { options: { sourceMap: false }, files: { 'core/server/public/ghost-sdk.min.js': 
'core/server/public/ghost-sdk.js' } } }, cssnano: { prod: { options: { sourcemap: false }, files: { 'core/server/public/ghost.min.css': 'core/server/public/ghost.css' } } }, // ### grunt-subgrunt // Run grunt tasks in submodule Gruntfiles subgrunt: { options: { npmInstall: false, npmPath: 'yarn' }, init: { options: { npmInstall: true }, projects: { 'core/client': 'init' } }, dev: { 'core/client': 'shell:ember:dev' }, prod: { 'core/client': 'shell:ember:prod' }, watch: { projects: { 'core/client': ['shell:ember:watch', '--live-reload-base-url="' + urlService.utils.getSubdir() + '/ghost/"'] } } }, // ### grunt-contrib-symlink // Create symlink for git hooks symlink: { githooks: { // Enable overwrite to delete symlinks before recreating them overwrite: false, // Enable force to overwrite symlinks outside the current working directory force: false, // Expand to all files in /hooks expand: true, cwd: '.github/hooks', src: ['*'], dest: '.git/hooks' } } }; // Load the configuration grunt.initConfig(cfg); // # Custom Tasks // Ghost has a number of useful tasks that we use every day in development. Tasks marked as *Utility* are used // by grunt to perform current actions, but isn't useful to developers. // // Skip ahead to the section on: // // * [Building assets](#building%20assets): // `grunt init`, `grunt` & `grunt prod` or live reload with `grunt dev` // * [Testing](#testing): // `grunt validate`, the `grunt test-*` sub-tasks or generate a coverage report with `grunt coverage`. // ### Help // Run `grunt help` on the commandline to get a print out of the available tasks and details of // what each one does along with any available options. 
This is an alias for `grunt --help` grunt.registerTask('help', 'Outputs help information if you type `grunt help` instead of `grunt --help`', function () { grunt.log.writeln('Type `grunt --help` to get the details of available grunt tasks.'); }); // ### Documentation // Run `grunt docs` to generate annotated source code using the documentation described in the code comments. grunt.registerTask('docs', 'Generate Docs', ['docker']); // Run `grunt watch-docs` to setup livereload & watch whilst you're editing the docs grunt.registerTask('watch-docs', function () { grunt.config.merge({ watch: { docs: { files: ['core/server/**/*', 'index.js', 'Gruntfile.js'], tasks: ['docker'], options: { livereload: true } } } }); grunt.task.run('watch:docs'); }); // ## Testing // Ghost has an extensive set of test suites. The following section documents the various types of tests // and how to run them. // // TLDR; run `grunt validate` // #### Set Test Env *(Utility Task)* // Set the NODE_ENV to 'testing' unless the environment is already set to TRAVIS. // This ensures that the tests get run under the correct environment, using the correct database, and // that they work as expected. Trying to run tests with no ENV set will throw an error to do with `client`. grunt.registerTask('setTestEnv', 'Use "testing" Ghost config; unless we are running on travis (then show queries for debugging)', function () { process.env.NODE_ENV = process.env.TRAVIS ? process.env.NODE_ENV : 'testing'; cfg.express.test.options.node_env = process.env.NODE_ENV; }); // ### Test // **Testing utility** // // `grunt test:unit/apps_spec.js` will run just the tests inside the apps_spec.js file // // It works for any path relative to the core/test folder. It will also run all the tests in a single directory // You can also run a test with grunt test:core/test/unit/... 
to get bash autocompletion // // `grunt test:integration/api` - runs the api integration tests // `grunt test:integration` - runs the integration tests in the root folder and excludes all api & model tests grunt.registerTask('test', 'Run a particular spec file from the core/test directory e.g. `grunt test:unit/apps_spec.js`', function (test) { if (!test) { grunt.fail.fatal('No test provided. `grunt test` expects a filename. e.g.: `grunt test:unit/apps_spec.js`. Did you mean `npm test` or `grunt validate`?'); } if (!test.match(/core\/test/) && !test.match(/core\/server/)) { test = 'core/test/' + test; } // CASE: execute folder if (!test.match(/.js/)) { test += '/**'; } else if (!fs.existsSync(test)) { grunt.fail.fatal('This file does not exist!'); } cfg.mochacli.single.src = [test]; grunt.initConfig(cfg); grunt.task.run('test-setup', 'mochacli:single'); }); // #### Stub out ghost files *(Utility Task)* // Creates stub files in the built directory and the views directory // so that the test environments do not need to build out the client files grunt.registerTask('stubClientFiles', function () { _.each(cfg.clientFiles, function (file) { var filePath = path.resolve(cwd + '/core/' + file); fs.ensureFileSync(filePath); }); }); /** * Ensures the target database get's automatically created. */ grunt.registerTask('knex-migrator', function () { return knexMigrator.init({noScripts: true}); }); // ### Validate // **Main testing task** // // `grunt validate` will either run all tests or run linting // `grunt validate` is called by `npm test` and is used by Travis. grunt.registerTask('validate', 'Run tests or lint code', function () { if (process.env.TEST_SUITE === 'lint') { return grunt.task.run(['lint']); } grunt.task.run(['test-all']); }); // ### Test-All // **Main testing task** // // `grunt test-all` will lint and test your pre-built local Ghost codebase. 
// grunt.registerTask('test-all', 'Run all server tests', ['test-routes', 'test-module', 'test-unit', 'test-integration']); // ### Lint // // `grunt lint` will run the linter grunt.registerTask('lint', 'Run the code style checks for server & tests', ['eslint'] ); // ### test-setup *(utility)( // `grunt test-setup` will run all the setup tasks required for running tests grunt.registerTask('test-setup', 'Setup ready to run tests', ['knex-migrator', 'clean:test', 'setTestEnv', 'stubClientFiles'] ); // ### Unit Tests *(sub task)* // `grunt test-unit` will run just the unit tests // // If you need to run an individual unit test file, you can use the `grunt test:<file_path>` task: // // `grunt test:unit/config_spec.js` // // This also works for folders (although it isn't recursive), E.g. // // `grunt test:unit/helpers` // // Unit tests are run with [mocha](http://mochajs.org/) using // [should](https://github.com/visionmedia/should.js) to describe the tests in a highly readable style. // Unit tests do **not** touch the database. // A coverage report can be generated for these tests using the `grunt test-coverage` task. grunt.registerTask('test-unit', 'Run unit tests (mocha)', ['test-setup', 'mochacli:unit'] ); // ### Integration tests *(sub task)* // `grunt test-integration` will run just the integration tests // // Provided you already have a `config.*.json` file, you can run just the model integration tests by running: // // `grunt test:integration/model` // // Or just the api integration tests by running: // // `grunt test:integration/api` // // Integration tests are run with [mocha](http://mochajs.org/) using // [should](https://github.com/visionmedia/should.js) to describe the tests in a highly readable style. // Integration tests are different to the unit tests because they make requests to the database. 
// // If you need to run an individual integration test file you can use the `grunt test:<file_path>` task: // // `grunt test:integration/api/api_tags_spec.js` // // Their purpose is to test that both the api and models behave as expected when the database layer is involved. // These tests are run against sqlite3 and mysql on travis and ensure that differences between the databases // don't cause bugs. // // A coverage report can be generated for these tests using the `grunt test-coverage` task. grunt.registerTask('test-integration', 'Run integration tests (mocha + db access)', ['test-setup', 'mochacli:integration'] ); // ### Route tests *(sub task)* // `grunt test-routes` will run just the route tests // // If you need to run an individual route test file, you can use the `grunt test:<file_path>` task: // // `grunt test:functional/routes/admin_spec.js` // // Route tests are run with [mocha](http://mochajs.org/) using // [should](https://github.com/visionmedia/should.js) and [supertest](https://github.com/visionmedia/supertest) // to describe and create the tests. // // Supertest enables us to describe requests that we want to make, and also describe the response we expect to // receive back. It works directly with express, so we don't have to run a server to run the tests. // // The purpose of the route tests is to ensure that all of the routes (pages, and API requests) in Ghost // are working as expected, including checking the headers and status codes received. It is very easy and // quick to test many permutations of routes / urls in the system. grunt.registerTask('test-routes', 'Run functional route tests (mocha)', ['test-setup', 'mochacli:routes'] ); // ### Module tests *(sub task)* // `grunt test-module` will run just the module tests // // The purpose of the module tests is to ensure that Ghost can be used as an npm module and exposes all // required methods to interact with it. 
grunt.registerTask('test-module', 'Run functional module tests (mocha)', ['test-setup', 'mochacli:module'] ); // ### Coverage // `grunt coverage` will generate a report for the Unit Tests. // // This is not currently done as part of CI or any build, but is a tool we have available to keep an eye on how // well the unit and integration tests are covering the code base. // Ghost does not have a minimum coverage level - we're more interested in ensuring important and useful areas // of the codebase are covered, than that the whole codebase is covered to a particular level. // // Key areas for coverage are: helpers and theme elements, apps / GDK, the api and model layers. grunt.registerTask('coverage', 'Generate unit and integration (mocha) tests coverage report', ['test-setup', 'mocha_istanbul:coverage'] ); grunt.registerTask('coverage-all', 'Generate unit and integration tests coverage report', ['test-setup', 'mocha_istanbul:coverage_all'] ); // #### Master Warning *(Utility Task)* // Warns git users not ot use the `master` branch in production. // `master` is an unstable branch and shouldn't be used in production as you run the risk of ending up with a // database in an unrecoverable state. Instead there is a branch called `stable` which is the equivalent of the // release zip for git users. grunt.registerTask('master-warn', 'Outputs a warning to runners of grunt prod, that master shouldn\'t be used for live blogs', function () { grunt.log.writeln(chalk.red( 'Use the ' + chalk.bold('stable') + ' branch for live blogs. ' + chalk.bold.underline('Never') + ' master!' )); grunt.log.writeln('>', 'Always two there are, no more, no less. A master and a ' + chalk.bold('stable') + '.'); }); // ## Building assets // // Ghost's GitHub repository contains the un-built source code for Ghost. 
If you're looking for the already // built release zips, you can get these from the [release page](https://github.com/TryGhost/Ghost/releases) on // GitHub or from https://ghost.org/download. These zip files are created using the [grunt release](#release) // task. // // If you want to work on Ghost core, or you want to use the source files from GitHub, then you have to build // the Ghost assets in order to make them work. // // There are a number of grunt tasks available to help with this. Firstly after fetching an updated version of // the Ghost codebase, after running `yarn install`, you will need to run [grunt init](#init%20assets). // // For production blogs you will need to run [grunt prod](#production%20assets). // // For updating assets during development, the tasks [grunt](#default%20asset%20build) and // [grunt dev](#live%20reload) are available. // ### Init assets // `grunt init` - will run an initial asset build for you // // Grunt init runs `yarn install && bower install` inside `core/client` as well as the standard asset build // tasks which occur when you run just `grunt`. This fetches the latest client-side dependencies. // // This task is very important, and should always be run when fetching down an updated code base just after // running `yarn install`. // // `bower` does have some quirks, such as not running as root. If you have problems please try running // `grunt init --verbose` to see if there are any errors. grunt.registerTask('init', 'Prepare the project for development', ['update_submodules:pinned', 'subgrunt:init', 'clean:tmp', 'default']); // ### Build assets // `grunt build` - will build client assets (without updating the submodule) // // This task is identical to `grunt init`, except it does not build client dependencies grunt.registerTask('build', 'Build client app', ['subgrunt:init', 'clean:tmp', 'default']); // ### Default asset build // `grunt` - default grunt task // // Build assets and dev version of the admin app. 
grunt.registerTask('default', 'Build JS & templates for development', ['subgrunt:dev']); // ### Production assets // `grunt prod` - will build the minified assets used in production. // // It is otherwise the same as running `grunt`, but is only used when running Ghost in the `production` env. grunt.registerTask('prod', 'Build JS & templates for production', ['subgrunt:prod', 'uglify:prod', 'cssnano:prod', 'master-warn']); // ### Live reload // `grunt dev` - build assets on the fly whilst developing // // If you want Ghost to live reload for you whilst you're developing, you can do this by running `grunt dev`. // This works hand-in-hand with the [livereload](http://livereload.com/) chrome extension. // // `grunt dev` manages starting an express server and restarting the server whenever core files change (which // require a server restart for the changes to take effect) and also manage reloading the browser whenever // frontend code changes. // // Note that the current implementation of watch only works with casper, not other themes. grunt.registerTask('dev', 'Dev Mode; watch files and restart server on changes', function () { if (grunt.option('client')) { grunt.task.run(['clean:built', 'bgShell:client']); } else if (grunt.option('server')) { grunt.task.run(['express:dev', 'watch']); } else { grunt.task.run(['clean:built', 'bgShell:client', 'express:dev', 'watch']); } }); // ### grunt master // This command helps you to bring your working directory back to current master. // It will also update your dependencies to master and shows you if your database is healthy. // It won't build the client! // // `grunt master` [`upstream` is the default upstream to pull from] // `grunt master --upstream=parent` grunt.registerTask('master', 'Update your current working folder to latest master.', ['shell:master', 'subgrunt:init', 'shell:dbhealth'] ); // ### Release // Run `grunt release` to create a Ghost release zip file. 
// Uses the files specified by `.npmignore` to know what should and should not be included. // Runs the asset generation tasks for production and duplicates default-prod.html to default.html // so it can be run in either production or development, and packages all the files up into a zip. grunt.registerTask('release', 'Release task - creates a final built zip\n' + ' - Do our standard build steps \n' + ' - Copy files to release-folder/#/#{version} directory\n' + ' - Clean out unnecessary files (travis, .git*, etc)\n' + ' - Zip files in release-folder to dist-folder/#{version} directory', function () { grunt.config.set('copy.release', { expand: true, // #### Build File Patterns // A list of files and patterns to include when creating a release zip. // This is read from the `.npmignore` file and all patterns are inverted as the `.npmignore` // file defines what to ignore, whereas we want to define what to include. src: fs.readFileSync('.npmignore', 'utf8').split('\n').filter(Boolean).map(function (pattern) { return pattern[0] === '!' ? pattern.substr(1) : '!' + pattern; }), dest: '<%= paths.releaseBuild %>/' }); grunt.config.set('copy.admin_html', { files: [{ cwd: '.', src: 'core/server/web/admin/views/default-prod.html', dest: 'core/server/web/admin/views/default.html' }] }); grunt.task.run(['update_submodules:pinned', 'subgrunt:init', 'clean:built', 'clean:tmp', 'prod', 'clean:release', 'copy:admin_html', 'copy:release', 'compress:release']); } ); }; module.exports = configureGrunt;
{ "content_hash": "ee23cb1ff56cbdd4f03597417f6d116b", "timestamp": "", "source": "github", "line_count": 769, "max_line_length": 186, "avg_line_length": 42.68400520156047, "alnum_prop": 0.4994516207652937, "repo_name": "tannermares/ghost", "id": "32699c07e71c24654986448e429111c7ed05417b", "size": "32824", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "Gruntfile.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "67707" }, { "name": "HTML", "bytes": "195821" }, { "name": "JavaScript", "bytes": "1146806" }, { "name": "XSLT", "bytes": "7177" } ], "symlink_target": "" }
--- layout: global title: Performance Tuning displayTitle: Performance Tuning license: | Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --- * Table of contents {:toc} For some workloads, it is possible to improve performance by either caching data in memory, or by turning on some experimental options. ## Caching Data In Memory Spark SQL can cache tables using an in-memory columnar format by calling `spark.catalog.cacheTable("tableName")` or `dataFrame.cache()`. Then Spark SQL will scan only required columns and will automatically tune compression to minimize memory usage and GC pressure. You can call `spark.catalog.uncacheTable("tableName")` to remove the table from memory. Configuration of in-memory caching can be done using the `setConf` method on `SparkSession` or by running `SET key=value` commands using SQL. <table class="table"> <tr><th>Property Name</th><th>Default</th><th>Meaning</th><th>Since Version</th></tr> <tr> <td><code>spark.sql.inMemoryColumnarStorage.compressed</code></td> <td>true</td> <td> When set to true Spark SQL will automatically select a compression codec for each column based on statistics of the data. 
</td> <td>1.0.1</td> </tr> <tr> <td><code>spark.sql.inMemoryColumnarStorage.batchSize</code></td> <td>10000</td> <td> Controls the size of batches for columnar caching. Larger batch sizes can improve memory utilization and compression, but risk OOMs when caching data. </td> <td>1.1.1</td> </tr> </table> ## Other Configuration Options The following options can also be used to tune the performance of query execution. It is possible that these options will be deprecated in future release as more optimizations are performed automatically. <table class="table"> <tr><th>Property Name</th><th>Default</th><th>Meaning</th><th>Since Version</th></tr> <tr> <td><code>spark.sql.files.maxPartitionBytes</code></td> <td>134217728 (128 MB)</td> <td> The maximum number of bytes to pack into a single partition when reading files. This configuration is effective only when using file-based sources such as Parquet, JSON and ORC. </td> <td>2.0.0</td> </tr> <tr> <td><code>spark.sql.files.openCostInBytes</code></td> <td>4194304 (4 MB)</td> <td> The estimated cost to open a file, measured by the number of bytes could be scanned in the same time. This is used when putting multiple files into a partition. It is better to over-estimated, then the partitions with small files will be faster than partitions with bigger files (which is scheduled first). This configuration is effective only when using file-based sources such as Parquet, JSON and ORC. </td> <td>2.0.0</td> </tr> <tr> <td><code>spark.sql.broadcastTimeout</code></td> <td>300</td> <td> <p> Timeout in seconds for the broadcast wait time in broadcast joins </p> </td> <td>1.3.0</td> </tr> <tr> <td><code>spark.sql.autoBroadcastJoinThreshold</code></td> <td>10485760 (10 MB)</td> <td> Configures the maximum size in bytes for a table that will be broadcast to all worker nodes when performing a join. By setting this value to -1 broadcasting can be disabled. 
Note that currently statistics are only supported for Hive Metastore tables where the command <code>ANALYZE TABLE &lt;tableName&gt; COMPUTE STATISTICS noscan</code> has been run. </td> <td>1.1.0</td> </tr> <tr> <td><code>spark.sql.shuffle.partitions</code></td> <td>200</td> <td> Configures the number of partitions to use when shuffling data for joins or aggregations. </td> <td>1.1.0</td> </tr> </table> ## Join Strategy Hints for SQL Queries The join strategy hints, namely `BROADCAST`, `MERGE`, `SHUFFLE_HASH` and `SHUFFLE_REPLICATE_NL`, instruct Spark to use the hinted strategy on each specified relation when joining them with another relation. For example, when the `BROADCAST` hint is used on table 't1', broadcast join (either broadcast hash join or broadcast nested loop join depending on whether there is any equi-join key) with 't1' as the build side will be prioritized by Spark even if the size of table 't1' suggested by the statistics is above the configuration `spark.sql.autoBroadcastJoinThreshold`. When different join strategy hints are specified on both sides of a join, Spark prioritizes the `BROADCAST` hint over the `MERGE` hint over the `SHUFFLE_HASH` hint over the `SHUFFLE_REPLICATE_NL` hint. When both sides are specified with the `BROADCAST` hint or the `SHUFFLE_HASH` hint, Spark will pick the build side based on the join type and the sizes of the relations. Note that there is no guarantee that Spark will choose the join strategy specified in the hint since a specific strategy may not support all join types. 
<div class="codetabs"> <div data-lang="scala" markdown="1"> {% highlight scala %} spark.table("src").join(spark.table("records").hint("broadcast"), "key").show() {% endhighlight %} </div> <div data-lang="java" markdown="1"> {% highlight java %} spark.table("src").join(spark.table("records").hint("broadcast"), "key").show(); {% endhighlight %} </div> <div data-lang="python" markdown="1"> {% highlight python %} spark.table("src").join(spark.table("records").hint("broadcast"), "key").show() {% endhighlight %} </div> <div data-lang="r" markdown="1"> {% highlight r %} src <- sql("SELECT * FROM src") records <- sql("SELECT * FROM records") head(join(src, hint(records, "broadcast"), src$key == records$key)) {% endhighlight %} </div> <div data-lang="sql" markdown="1"> {% highlight sql %} -- We accept BROADCAST, BROADCASTJOIN and MAPJOIN for broadcast hint SELECT /*+ BROADCAST(r) */ * FROM records r JOIN src s ON r.key = s.key {% endhighlight %} </div> </div> For more details please refer to the documentation of [Join Hints](sql-ref-syntax-qry-select-hints.html). ## Coalesce Hints for SQL Queries Coalesce hints allows the Spark SQL users to control the number of output files just like the `coalesce`, `repartition` and `repartitionByRange` in Dataset API, they can be used for performance tuning and reducing the number of output files. The "COALESCE" hint only has a partition number as a parameter. The "REPARTITION" hint has a partition number, columns, or both of them as parameters. The "REPARTITION_BY_RANGE" hint must have column names and a partition number is optional. 
SELECT /*+ COALESCE(3) */ * FROM t SELECT /*+ REPARTITION(3) */ * FROM t SELECT /*+ REPARTITION(c) */ * FROM t SELECT /*+ REPARTITION(3, c) */ * FROM t SELECT /*+ REPARTITION_BY_RANGE(c) */ * FROM t SELECT /*+ REPARTITION_BY_RANGE(3, c) */ * FROM t ## Adaptive Query Execution Adaptive Query Execution (AQE) is an optimization technique in Spark SQL that makes use of the runtime statistics to choose the most efficient query execution plan. AQE is disabled by default. Spark SQL can use the umbrella configuration of `spark.sql.adaptive.enabled` to control whether turn it on/off. As of Spark 3.0, there are three major features in AQE, including coalescing post-shuffle partitions, converting sort-merge join to broadcast join, and skew join optimization. ### Coalescing Post Shuffle Partitions This feature coalesces the post shuffle partitions based on the map output statistics when both `spark.sql.adaptive.enabled` and `spark.sql.adaptive.coalescePartitions.enabled` configurations are true. This feature simplifies the tuning of shuffle partition number when running queries. You do not need to set a proper shuffle partition number to fit your dataset. Spark can pick the proper shuffle partition number at runtime once you set a large enough initial number of shuffle partitions via `spark.sql.adaptive.coalescePartitions.initialPartitionNum` configuration. <table class="table"> <tr><th>Property Name</th><th>Default</th><th>Meaning</th><th>Since Version</th></tr> <tr> <td><code>spark.sql.adaptive.coalescePartitions.enabled</code></td> <td>true</td> <td> When true and <code>spark.sql.adaptive.enabled</code> is true, Spark will coalesce contiguous shuffle partitions according to the target size (specified by <code>spark.sql.adaptive.advisoryPartitionSizeInBytes</code>), to avoid too many small tasks. 
</td> <td>3.0.0</td> </tr> <tr> <td><code>spark.sql.adaptive.coalescePartitions.minPartitionNum</code></td> <td>Default Parallelism</td> <td> The minimum number of shuffle partitions after coalescing. If not set, the default value is the default parallelism of the Spark cluster. This configuration only has an effect when <code>spark.sql.adaptive.enabled</code> and <code>spark.sql.adaptive.coalescePartitions.enabled</code> are both enabled. </td> <td>3.0.0</td> </tr> <tr> <td><code>spark.sql.adaptive.coalescePartitions.initialPartitionNum</code></td> <td>200</td> <td> The initial number of shuffle partitions before coalescing. By default it equals to <code>spark.sql.shuffle.partitions</code>. This configuration only has an effect when <code>spark.sql.adaptive.enabled</code> and <code>spark.sql.adaptive.coalescePartitions.enabled</code> are both enabled. </td> <td>3.0.0</td> </tr> <tr> <td><code>spark.sql.adaptive.advisoryPartitionSizeInBytes</code></td> <td>64 MB</td> <td> The advisory size in bytes of the shuffle partition during adaptive optimization (when <code>spark.sql.adaptive.enabled</code> is true). It takes effect when Spark coalesces small shuffle partitions or splits skewed shuffle partition. </td> <td>3.0.0</td> </tr> </table> ### Converting sort-merge join to broadcast join AQE converts sort-merge join to broadcast hash join when the runtime statistics of any join side is smaller than the broadcast hash join threshold. This is not as efficient as planning a broadcast hash join in the first place, but it's better than keep doing the sort-merge join, as we can save the sorting of both the join sides, and read shuffle files locally to save network traffic(if `spark.sql.adaptive.localShuffleReader.enabled` is true) ### Optimizing Skew Join Data skew can severely downgrade the performance of join queries. This feature dynamically handles skew in sort-merge join by splitting (and replicating if needed) skewed tasks into roughly evenly sized tasks. 
It takes effect when both `spark.sql.adaptive.enabled` and `spark.sql.adaptive.skewJoin.enabled` configurations are enabled. <table class="table"> <tr><th>Property Name</th><th>Default</th><th>Meaning</th><th>Since Version</th></tr> <tr> <td><code>spark.sql.adaptive.skewJoin.enabled</code></td> <td>true</td> <td> When true and <code>spark.sql.adaptive.enabled</code> is true, Spark dynamically handles skew in sort-merge join by splitting (and replicating if needed) skewed partitions. </td> <td>3.0.0</td> </tr> <tr> <td><code>spark.sql.adaptive.skewJoin.skewedPartitionFactor</code></td> <td>10</td> <td> A partition is considered as skewed if its size is larger than this factor multiplying the median partition size and also larger than <code>spark.sql.adaptive.skewedPartitionThresholdInBytes</code>. </td> <td>3.0.0</td> </tr> <tr> <td><code>spark.sql.adaptive.skewJoin.skewedPartitionThresholdInBytes</code></td> <td>256MB</td> <td> A partition is considered as skewed if its size in bytes is larger than this threshold and also larger than <code>spark.sql.adaptive.skewJoin.skewedPartitionFactor</code> multiplying the median partition size. Ideally this config should be set larger than <code>spark.sql.adaptive.advisoryPartitionSizeInBytes</code>. </td> <td>3.0.0</td> </tr> </table>
{ "content_hash": "6b6004ff5a49d0615defffa9b0357d10", "timestamp": "", "source": "github", "line_count": 271, "max_line_length": 570, "avg_line_length": 46.760147601476014, "alnum_prop": 0.7223011363636364, "repo_name": "skonto/spark", "id": "97f39b508b03cec2eaf104e7f15e22ffc7b878a9", "size": "12672", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/sql-performance-tuning.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "46871" }, { "name": "Batchfile", "bytes": "31352" }, { "name": "C", "bytes": "1493" }, { "name": "CSS", "bytes": "26599" }, { "name": "Dockerfile", "bytes": "8863" }, { "name": "HTML", "bytes": "70407" }, { "name": "HiveQL", "bytes": "1823701" }, { "name": "Java", "bytes": "4054268" }, { "name": "JavaScript", "bytes": "201603" }, { "name": "Makefile", "bytes": "9397" }, { "name": "PLpgSQL", "bytes": "257276" }, { "name": "PowerShell", "bytes": "3867" }, { "name": "Python", "bytes": "2976348" }, { "name": "R", "bytes": "1186688" }, { "name": "Roff", "bytes": "15633" }, { "name": "SQLPL", "bytes": "9325" }, { "name": "Scala", "bytes": "30321574" }, { "name": "Shell", "bytes": "201878" }, { "name": "TSQL", "bytes": "438358" }, { "name": "Thrift", "bytes": "67610" }, { "name": "q", "bytes": "146878" } ], "symlink_target": "" }
class BufferFile { enum {BUFSIZE = 1024 * 16}; HANDLE file_handle_; int length_; char buffer_[BUFSIZE]; BufferFile(const BufferFile&); BufferFile& operator =(const BufferFile&); public: BufferFile(void); ~BufferFile(void); bool Open(const char* filename); void Close(void); int Write(const void* buffer, int length); }; #endif //! __BUFFER_FILE_HEADER_H__
{ "content_hash": "36ca396e4122a414e77debecd4eec470", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 44, "avg_line_length": 20.263157894736842, "alnum_prop": 0.6701298701298701, "repo_name": "ASMlover/study", "id": "53c6d9c7436076804417ad7779b730cecbe48945", "size": "1872", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cplusplus/fileoper/buffer-file-win/buffer_file.h", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Assembly", "bytes": "3055440" }, { "name": "Batchfile", "bytes": "4662" }, { "name": "Brainfuck", "bytes": "571" }, { "name": "C", "bytes": "13569580" }, { "name": "C#", "bytes": "3959" }, { "name": "C++", "bytes": "14741264" }, { "name": "CMake", "bytes": "543917" }, { "name": "CSS", "bytes": "11505" }, { "name": "Common Lisp", "bytes": "114" }, { "name": "Emacs Lisp", "bytes": "6042" }, { "name": "Go", "bytes": "105203" }, { "name": "Groovy", "bytes": "2907" }, { "name": "HTML", "bytes": "911945" }, { "name": "Lex", "bytes": "9370" }, { "name": "Lua", "bytes": "32829" }, { "name": "Makefile", "bytes": "1000611" }, { "name": "NASL", "bytes": "3609" }, { "name": "NewLisp", "bytes": "5805" }, { "name": "Perl", "bytes": "594" }, { "name": "Python", "bytes": "2752752" }, { "name": "SWIG", "bytes": "91" }, { "name": "Shell", "bytes": "9993" }, { "name": "Vim script", "bytes": "92204" }, { "name": "Yacc", "bytes": "6278" } ], "symlink_target": "" }
package com.streamsets.datacollector.restapi.bean; public enum StageTypeJson { SOURCE, PROCESSOR, TARGET, }
{ "content_hash": "cfd00cf881d3739bc61930243687e593", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 50, "avg_line_length": 14.375, "alnum_prop": 0.7652173913043478, "repo_name": "studanshu/datacollector", "id": "031362f35f0ccf8a88adfd3af773ebe61deb457e", "size": "961", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "container/src/main/java/com/streamsets/datacollector/restapi/bean/StageTypeJson.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "101978" }, { "name": "Groovy", "bytes": "15336" }, { "name": "HTML", "bytes": "365293" }, { "name": "Java", "bytes": "10957672" }, { "name": "JavaScript", "bytes": "871104" }, { "name": "Protocol Buffer", "bytes": "3463" }, { "name": "Python", "bytes": "28037" }, { "name": "Shell", "bytes": "60167" } ], "symlink_target": "" }
! ! The Laboratory of Algorithms ! ! The MIT License ! ! Copyright 2011-2015 Andrey Pudov. ! ! Permission is hereby granted, free of charge, to any person obtaining a copy ! of this software and associated documentation files (the 'Software'), to deal ! in the Software without restriction, including without limitation the rights ! to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ! copies of the Software, and to permit persons to whom the Software is ! furnished to do so, subject to the following conditions: ! ! The above copyright notice and this permission notice shall be included in ! all copies or substantial portions of the Software. ! ! THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ! IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ! FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ! AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ! LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ! OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN ! THE SOFTWARE. ! module MInsertionSort use MSort use MBinarySearch implicit none private type, extends(TSort), public :: TInsertionSort contains procedure, nopass :: sort => sortBinary procedure, nopass :: sortOriginal procedure, nopass :: sortBinary end type contains subroutine sortOriginal(array) integer, dimension(:), intent(in out) :: array integer index integer location integer key do index = 2, size(array) key = array(index) location = index - 1 do while ((location > 0) .and. 
(array(location) > key)) array(location + 1) = array(location) location = location - 1 end do array(location + 1) = key end do end subroutine subroutine sortBinary(array) integer, dimension(:), intent(in out) :: array type(TBinarySearch) :: BinarySearch integer index integer jndex integer location integer key do index = 2, size(array) key = array(index) location = BinarySearch%search(array, key, 1, index) if (location < 1) then location = -location end if !array(location + 1:index) = array(location:index - 1) do jndex = index - 1, location, -1 array(jndex + 1) = array(jndex) end do array(location) = key end do end subroutine end module
{ "content_hash": "b72ef6868f30dcbc7d0a6b024842505f", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 79, "avg_line_length": 30.582417582417584, "alnum_prop": 0.6194753862738053, "repo_name": "andreypudov/Algorithms", "id": "68cf09b55746316971b2ccd51c530cbbed619714", "size": "2783", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Sorts/InsertionSort.f", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "14611" }, { "name": "Fortran", "bytes": "384339" }, { "name": "Makefile", "bytes": "2307" } ], "symlink_target": "" }
package com.streamsets.pipeline.stage.destination.coap; import com.streamsets.pipeline.api.ConfigDef; import com.streamsets.pipeline.api.ConfigDefBean; import com.streamsets.pipeline.api.ValueChooserModel; import com.streamsets.pipeline.config.DataFormat; import com.streamsets.pipeline.lib.el.RecordEL; import com.streamsets.pipeline.lib.http.HttpMethod; import com.streamsets.pipeline.stage.destination.http.DataFormatChooserValues; import com.streamsets.pipeline.stage.destination.lib.DataGeneratorFormatConfig; /** * Bean specifying the configuration for an CoapClientTarget instance. */ public class CoapClientTargetConfig { @ConfigDefBean(groups = {"DATA_FORMAT"}) public DataGeneratorFormatConfig dataGeneratorFormatConfig; @ConfigDef( required = true, type = ConfigDef.Type.MODEL, defaultValue = "JSON", label = "Data Format", description = "Data Format of the response. Response will be parsed before being placed in the record.", displayPosition = 1, group = "DATA_FORMAT" ) @ValueChooserModel(DataFormatChooserValues.class) public DataFormat dataFormat = DataFormat.JSON; @ConfigDef( required = true, type = ConfigDef.Type.STRING, label = "Resource URL", defaultValue = "coap://localhost:5683/sdc", description = "The CoAP resource URL", elDefs = RecordEL.class, evaluation = ConfigDef.Evaluation.EXPLICIT, displayPosition = 60, displayMode = ConfigDef.DisplayMode.BASIC, group = "COAP" ) public String resourceUrl = ""; @ConfigDef( required = true, type = ConfigDef.Type.MODEL, label = "CoAP Method", defaultValue = "POST", description = "CoAP method to send", elDefs = RecordEL.class, evaluation = ConfigDef.Evaluation.EXPLICIT, displayPosition = 70, displayMode = ConfigDef.DisplayMode.BASIC, group = "COAP" ) @ValueChooserModel(CoapMethodChooserValues.class) public HttpMethod coapMethod = HttpMethod.POST; @ConfigDef( required = true, type = ConfigDef.Type.MODEL, label = "Request Type", defaultValue = "NONCONFIRMABLE", description = "Specify the type of requests.", displayPosition 
= 80, displayMode = ConfigDef.DisplayMode.BASIC, group = "COAP" ) @ValueChooserModel(RequestTypeChooserValues.class) public RequestType requestType = RequestType.NONCONFIRMABLE; @ConfigDef( required = true, type = ConfigDef.Type.NUMBER, label = "Connect Timeout", defaultValue = "2000", description = "CoAP connection timeout in milliseconds. 0 means no timeout.", displayPosition = 90, displayMode = ConfigDef.DisplayMode.ADVANCED, group = "COAP" ) public int connectTimeoutMillis = 2000; }
{ "content_hash": "18297c009c46b93b66fb5366c633df5d", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 110, "avg_line_length": 32.206896551724135, "alnum_prop": 0.706281227694504, "repo_name": "kunickiaj/datacollector", "id": "1bd3f48f95d5e264d2c117b7a1fb6a434cbb4643", "size": "3400", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "basic-lib/src/main/java/com/streamsets/pipeline/stage/destination/coap/CoapClientTargetConfig.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "101291" }, { "name": "CSS", "bytes": "120603" }, { "name": "Groovy", "bytes": "11876" }, { "name": "HTML", "bytes": "529009" }, { "name": "Java", "bytes": "20332072" }, { "name": "JavaScript", "bytes": "1070828" }, { "name": "Python", "bytes": "7413" }, { "name": "Scala", "bytes": "6347" }, { "name": "Shell", "bytes": "30088" } ], "symlink_target": "" }
<!DOCTYPE html> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"/> <meta http-equiv="x-ua-compatible" content="IE=edge"/> <title>Test results - Class EbaySearchSpec</title> <link href="../css/base-style.css" rel="stylesheet" type="text/css"/> <link href="../css/style.css" rel="stylesheet" type="text/css"/> <script src="../js/report.js" type="text/javascript"></script> </head> <body> <div id="content"> <h1>Class EbaySearchSpec</h1> <div class="breadcrumbs"> <a href="../index.html">all</a> &gt; <a href="../packages/default-package.html">default-package</a> &gt; EbaySearchSpec</div> <div id="summary"> <table> <tr> <td> <div class="summaryGroup"> <table> <tr> <td> <div class="infoBox" id="tests"> <div class="counter">1</div> <p>tests</p> </div> </td> <td> <div class="infoBox" id="failures"> <div class="counter">1</div> <p>failures</p> </div> </td> <td> <div class="infoBox" id="ignored"> <div class="counter">0</div> <p>ignored</p> </div> </td> <td> <div class="infoBox" id="duration"> <div class="counter">21.322s</div> <p>duration</p> </div> </td> </tr> </table> </div> </td> <td> <div class="infoBox failures" id="successRate"> <div class="percent">0%</div> <p>successful</p> </div> </td> </tr> </table> </div> <div id="tabs"> <ul class="tabLinks"> <li> <a href="#tab0">Failed tests</a> </li> <li> <a href="#tab1">Tests</a> </li> <li> <a href="#tab2">Standard error</a> </li> </ul> <div id="tab0" class="tab"> <h2>Failed tests</h2> <div class="test"> <a name="can get to the eBay search results page and display the sort menu"></a> <h3 class="failures">can get to the eBay search results page and display the sort menu</h3> <span class="code"> <pre>org.openqa.selenium.ElementNotVisibleException: element not visible (Session info: chrome=58.0.3029.110) (Driver info: chromedriver=2.24.417412 (ac882d3ce7c0d99292439bf3405780058fcca0a6),platform=Mac OS X 10.12.4 x86_64) (WARNING: The server did not provide any stacktrace information) Command duration or 
timeout: 22 milliseconds Build info: version: '2.53.0', revision: '35ae25b1534ae328c771e0856c93e187490ca824', time: '2016-03-15 10:43:46' System info: host: 'Brian.local', ip: '10.0.1.5', os.name: 'Mac OS X', os.arch: 'x86_64', os.version: '10.12.4', java.version: '1.7.0_80' Session ID: 5631006d38156639609c8ee3636eaaaf Driver info: org.openqa.selenium.chrome.ChromeDriver Capabilities [{platform=MAC, acceptSslCerts=true, javascriptEnabled=true, browserName=chrome, chrome={userDataDir=/var/folders/29/91v8554j79jckpm03g4qj_dh0000gn/T/.org.chromium.Chromium.TnmDfU, chromedriverVersion=2.24.417412 (ac882d3ce7c0d99292439bf3405780058fcca0a6)}, networkConnectionEnabled=false, rotatable=false, locationContextEnabled=true, mobileEmulationEnabled=false, version=58.0.3029.110, pageLoadStrategy=normal, takesHeapSnapshot=true, cssSelectorsEnabled=true, databaseEnabled=false, handlesAlerts=true, browserConnectionEnabled=false, webStorageEnabled=true, nativeEvents=true, hasTouchScreen=false, applicationCacheEnabled=false, takesScreenshot=true}] at org.openqa.selenium.remote.ErrorHandler.createThrowable(ErrorHandler.java:206) at org.openqa.selenium.remote.ErrorHandler.throwIfResponseFailed(ErrorHandler.java:158) at org.openqa.selenium.remote.RemoteWebDriver.execute(RemoteWebDriver.java:678) at org.openqa.selenium.remote.RemoteWebElement.execute(RemoteWebElement.java:327) at org.openqa.selenium.remote.RemoteWebElement.click(RemoteWebElement.java:85) at geb.navigator.NonEmptyNavigator.click(NonEmptyNavigator.groovy:432) at geb.content.TemplateDerivedPageContent.click(TemplateDerivedPageContent.groovy:71) at EbaySearchSpec.can get to the eBay search results page and display the sort menu_closure4(EbaySearchSpec.groovy:31) at EbaySearchSpec.can get to the eBay search results page and display the sort menu_closure4(EbaySearchSpec.groovy) at geb.interaction.DefaultInteractionsSupport.interact(DefaultInteractionsSupport.groovy:34) at geb.Page.interact(Page.groovy:123) at 
geb.Browser.methodMissing(Browser.groovy:209) at geb.spock.GebSpec.methodMissing(GebSpec.groovy:56) at EbaySearchSpec.can get to the eBay search results page and display the sort menu(EbaySearchSpec.groovy:30) </pre> </span> </div> </div> <div id="tab1" class="tab"> <h2>Tests</h2> <table> <thead> <tr> <th>Test</th> <th>Duration</th> <th>Result</th> </tr> </thead> <tr> <td class="failures">can get to the eBay search results page and display the sort menu</td> <td>21.322s</td> <td class="failures">failed</td> </tr> </table> </div> <div id="tab2" class="tab"> <h2>Standard error</h2> <span class="code"> <pre>Starting ChromeDriver 2.24.417412 (ac882d3ce7c0d99292439bf3405780058fcca0a6) on port 46321 Only local connections are allowed. </pre> </span> </div> </div> <div id="footer"> <p> <div> <label class="hidden" id="label-for-line-wrapping-toggle" for="line-wrapping-toggle">Wrap lines <input id="line-wrapping-toggle" type="checkbox" autocomplete="off"/> </label> </div>Generated by <a href="http://www.gradle.org">Gradle 3.3</a> at 14-May-2017 00:12:16</p> </div> </div> </body> </html>
{ "content_hash": "407bc32c6d81d32227d52b6fa10dbf87", "timestamp": "", "source": "github", "line_count": 143, "max_line_length": 670, "avg_line_length": 35.62937062937063, "alnum_prop": 0.7432777232580962, "repo_name": "bwantest/Test1", "id": "3ce92ac78fe14ebf4f4e25bc7a77e4cceaa7ebbf", "size": "5095", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "build/reports/chromeTest/tests/classes/EbaySearchSpec.html", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "7560" }, { "name": "Groovy", "bytes": "8167" }, { "name": "HTML", "bytes": "5688996" }, { "name": "JavaScript", "bytes": "10504" } ], "symlink_target": "" }
using Autodesk.AutoCAD.DatabaseServices; using Autodesk.AutoCAD.EditorInput; using Autodesk.AutoCAD.Runtime; using System.Collections.Generic; using System.Windows.Forms; namespace XCOM.Commands.Dimension { public class Command_RESETDIMVIEWTWIST { [CommandMethod("RESETDIMVIEWTWIST", CommandFlags.UsePickSet)] public void ResetViewTwist() { var doc = Autodesk.AutoCAD.ApplicationServices.Application.DocumentManager.MdiActiveDocument; var db = doc.Database; List<TypedValue> tvs = new List<TypedValue>(); tvs.Add(new TypedValue((int)DxfCode.Start, "DIMENSION")); SelectionFilter filter = new SelectionFilter(tvs.ToArray()); PromptSelectionResult selRes = doc.Editor.GetSelection(filter); if (selRes.Status == PromptStatus.OK) { using (Transaction tr = db.TransactionManager.StartTransaction()) using (ViewTableRecord view = doc.Editor.GetCurrentView()) { try { foreach (ObjectId id in selRes.Value.GetObjectIds()) { var dim = (Autodesk.AutoCAD.DatabaseServices.Dimension)tr.GetObject(id, OpenMode.ForWrite); dim.HorizontalRotation = view.ViewTwist; } } catch (System.Exception ex) { MessageBox.Show("Error: " + ex.ToString(), "XCOM", MessageBoxButtons.OK, MessageBoxIcon.Error); } tr.Commit(); } } } } }
{ "content_hash": "d94071764485d46b2b645c13c41eec91", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 119, "avg_line_length": 38.13333333333333, "alnum_prop": 0.5536130536130536, "repo_name": "oozcitak/XCOM", "id": "9108103701c85235456ea9a334a2b53f65da729b", "size": "1718", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "XCOM/Dimension/Command_RESETDIMVIEWTWIST.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "4466" }, { "name": "C#", "bytes": "616165" }, { "name": "Inno Setup", "bytes": "3092" } ], "symlink_target": "" }
monkey payload(function(boy, dog, cat) { a;sfdjnsdafg; });
{ "content_hash": "91e5486d3a0ffb57a07a4a4cdb3752d2", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 40, "avg_line_length": 19.666666666666668, "alnum_prop": 0.6949152542372882, "repo_name": "mgrahamjo/grunt-payload", "id": "a1d1a90167f50c9cd5624fd861ee40a688a989c6", "size": "59", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mock/folder/one.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "6988" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8" /> <title>Dialog Visual Test : Dialog option buttons OK Cancel</title> <link rel="stylesheet" href="../visual.css" type="text/css" /> <link rel="stylesheet" href="../../../themes/base/jquery.ui.all.css" type="text/css"> <script type="text/javascript" src="../../../jquery-1.7.1.js"></script> <script type="text/javascript" src="../../../external/jquery.bgiframe-2.1.2.js"></script> <script type="text/javascript" src="../../../ui/jquery.ui.core.js"></script> <script type="text/javascript" src="../../../ui/jquery.ui.widget.js"></script> <script type="text/javascript" src="../../../ui/jquery.ui.draggable.js"></script> <script type="text/javascript" src="../../../ui/jquery.ui.position.js"></script> <script type="text/javascript" src="../../../ui/jquery.ui.resizable.js"></script> <script type="text/javascript" src="../../../ui/jquery.ui.dialog.js"></script> <script type="text/javascript"> $(function() { $("#dialog").dialog({ buttons: { "OK": function() { $(this).dialog('close'); }, "Cancel": function() { $(this).dialog('close'); } } }); }); </script> </head> <body> <div id="dialog" title="Dialog Title"> <p> Dialog Content </p> </div> </body> </html>
{ "content_hash": "92ca6b6a2ed5321711ad19d840a22b0a", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 90, "avg_line_length": 35.05555555555556, "alnum_prop": 0.6164817749603804, "repo_name": "coachcly/Dashboard", "id": "a64822b7f8a2972071617e617c9604fb591c054f", "size": "1262", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "server-scripts/javascript/jquery-ui-1.8.18/tests/visual/dialog/dialog_option_buttons_OK_Cancel.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "257832" }, { "name": "JavaScript", "bytes": "1841527" }, { "name": "PHP", "bytes": "854398" } ], "symlink_target": "" }
package org.elasticsearch.xpack.security.rest.action.oauth2; import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.RestApiVersion; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequestFilter; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction; import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest; import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; import org.elasticsearch.xpack.core.security.action.token.RefreshTokenAction; import org.elasticsearch.xpack.security.authc.kerberos.KerberosAuthenticationToken; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Locale; import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.POST; /** * An implementation of a OAuth2-esque API for retrieval of an access token. * This API does not conform to the RFC completely as it uses XContent for the request body * instead for form encoded data. 
This is a relatively common modification of the OAuth2 * specification as this aspect does not make the most sense since the response body is * expected to be JSON */ public final class RestGetTokenAction extends TokenBaseRestHandler implements RestRequestFilter { static final ConstructingObjectParser<CreateTokenRequest, Void> PARSER = new ConstructingObjectParser<>("token_request", a -> new CreateTokenRequest((String) a[0], (String) a[1], (SecureString) a[2], (SecureString) a[3], (String) a[4], (String) a[5])); static { PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("grant_type")); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("username")); PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), parser -> new SecureString( Arrays.copyOfRange(parser.textCharacters(), parser.textOffset(), parser.textOffset() + parser.textLength())), new ParseField("password"), ValueType.STRING); PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), parser -> new SecureString( Arrays.copyOfRange(parser.textCharacters(), parser.textOffset(), parser.textOffset() + parser.textLength())), new ParseField("kerberos_ticket"), ValueType.STRING); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("scope")); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("refresh_token")); } public RestGetTokenAction(Settings settings, XPackLicenseState xPackLicenseState) { super(settings, xPackLicenseState); } @Override public List<Route> routes() { return List.of( Route.builder(POST, "/_security/oauth2/token") .replaces(POST, "/_xpack/security/oauth2/token", RestApiVersion.V_7).build() ); } @Override public String getName() { return "security_get_token_action"; } @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client)throws IOException { try (XContentParser parser = 
request.contentParser()) { final CreateTokenRequest tokenRequest = PARSER.parse(parser, null); final ActionType<CreateTokenResponse> action = "refresh_token".equals(tokenRequest.getGrantType()) ? RefreshTokenAction.INSTANCE : CreateTokenAction.INSTANCE; return channel -> client.execute(action, tokenRequest, // this doesn't use the RestBuilderListener since we need to override the // handling of failures in some cases. new CreateTokenResponseActionListener(channel, request, logger)); } } static class CreateTokenResponseActionListener implements ActionListener<CreateTokenResponse> { private final RestChannel channel; private final RestRequest request; private final Logger logger; CreateTokenResponseActionListener(RestChannel restChannel, RestRequest restRequest, Logger logger) { this.channel = restChannel; this.request = restRequest; this.logger = logger; } @Override public void onResponse(CreateTokenResponse createTokenResponse) { try (XContentBuilder builder = channel.newBuilder()) { channel.sendResponse(new BytesRestResponse(RestStatus.OK, createTokenResponse.toXContent(builder, request))); } catch (IOException e) { onFailure(e); } } @Override public void onFailure(Exception e) { logger.debug("Failed to create token", e); if (e instanceof ActionRequestValidationException) { ActionRequestValidationException validationException = (ActionRequestValidationException) e; final TokenRequestError error; if (validationException.validationErrors().stream().anyMatch(s -> s.contains("grant_type"))) { error = TokenRequestError.UNSUPPORTED_GRANT_TYPE; } else { error = TokenRequestError.INVALID_REQUEST; } sendTokenErrorResponse(error, validationException.getMessage(), e); } else if (e instanceof ElasticsearchSecurityException && "invalid_grant".equals(e.getMessage()) && ((ElasticsearchSecurityException) e).getHeader("error_description").size() == 1) { sendTokenErrorResponse(TokenRequestError.INVALID_GRANT, ((ElasticsearchSecurityException) 
e).getHeader("error_description").get(0), e); } else if (e instanceof ElasticsearchSecurityException && "failed to authenticate user, gss context negotiation not complete".equals(e.getMessage())) { sendTokenErrorResponse(TokenRequestError._UNAUTHORIZED, extractBase64EncodedToken((ElasticsearchSecurityException) e), e); } else { sendFailure(e); } } private String extractBase64EncodedToken(ElasticsearchSecurityException e) { String base64EncodedToken = null; List<String> values = e.getHeader(KerberosAuthenticationToken.WWW_AUTHENTICATE); if (values != null && values.size() == 1) { final String wwwAuthenticateHeaderValue = values.get(0); // it may contain base64 encoded token that needs to be sent to client if Spnego GSS context negotiation failed if (wwwAuthenticateHeaderValue.startsWith(KerberosAuthenticationToken.NEGOTIATE_AUTH_HEADER_PREFIX)) { base64EncodedToken = wwwAuthenticateHeaderValue .substring(KerberosAuthenticationToken.NEGOTIATE_AUTH_HEADER_PREFIX.length()).trim(); } } return base64EncodedToken; } void sendTokenErrorResponse(TokenRequestError error, String description, Exception e) { try (XContentBuilder builder = channel.newErrorBuilder()) { // defined by https://tools.ietf.org/html/rfc6749#section-5.2 builder.startObject() .field("error", error.toString().toLowerCase(Locale.ROOT)) .field("error_description", description) .endObject(); channel.sendResponse(new BytesRestResponse(RestStatus.BAD_REQUEST, builder)); } catch (IOException ioe) { ioe.addSuppressed(e); sendFailure(e); } } void sendFailure(Exception e) { try { channel.sendResponse(new BytesRestResponse(channel, e)); } catch (Exception inner) { inner.addSuppressed(e); logger.error("failed to send failure response", inner); } } } // defined by https://tools.ietf.org/html/rfc6749#section-5.2 enum TokenRequestError { /** * The request is missing a required parameter, includes an unsupported * parameter value (other than grant type), repeats a parameter, * includes multiple credentials, utilizes 
more than one mechanism for * authenticating the client, or is otherwise malformed. */ INVALID_REQUEST, /** * Client authentication failed (e.g., unknown client, no client * authentication included, or unsupported authentication method). The * authorization server MAY return an HTTP 401 (Unauthorized) status * code to indicate which HTTP authentication schemes are supported. If * the client attempted to authenticate via the "Authorization" request * header field, the authorization server MUST respond with an HTTP 401 * (Unauthorized) status code and include the "WWW-Authenticate" * response header field matching the authentication scheme used by the * client. */ INVALID_CLIENT, /** * The provided authorization grant (e.g., authorization code, resource * owner credentials) or refresh token is invalid, expired, revoked, * does not match the redirection URI used in the authorization request, * or was issued to another client. */ INVALID_GRANT, /** * The authenticated client is not authorized to use this authorization * grant type. */ UNAUTHORIZED_CLIENT, /** * The authorization grant type is not supported by the authorization * server. */ UNSUPPORTED_GRANT_TYPE, /** * The requested scope is invalid, unknown, malformed, or exceeds the * scope granted by the resource owner. */ INVALID_SCOPE, // Custom error code /** * When the request for authentication fails using custom grant type for given * credentials. * If the client attempted to authenticate via the "Authorization" request * the authorization server MAY respond with an HTTP 401 * (Unauthorized) status code and include the "WWW-Authenticate" * response header field */ _UNAUTHORIZED, } private static final Set<String> FILTERED_FIELDS = Set.of("password", "kerberos_ticket", "refresh_token"); @Override public Set<String> getFilteredFields() { return FILTERED_FIELDS; } }
{ "content_hash": "685580a02b8798ad852e69f3732e01f2", "timestamp": "", "source": "github", "line_count": 245, "max_line_length": 138, "avg_line_length": 47.453061224489794, "alnum_prop": 0.6715981420953037, "repo_name": "robin13/elasticsearch", "id": "6cf106cd9dfba3193d161b179dd78fabc1b908a0", "size": "11878", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "11082" }, { "name": "Batchfile", "bytes": "14049" }, { "name": "Emacs Lisp", "bytes": "3341" }, { "name": "FreeMarker", "bytes": "45" }, { "name": "Groovy", "bytes": "315863" }, { "name": "HTML", "bytes": "3399" }, { "name": "Java", "bytes": "40107206" }, { "name": "Perl", "bytes": "7271" }, { "name": "Python", "bytes": "54437" }, { "name": "Shell", "bytes": "108937" } ], "symlink_target": "" }
(function () { 'use strict'; angular .module('pasalo92.explore', [ 'ui.router', 'placeholders', 'ui.bootstrap' ]) .config(['$stateProvider', function exploreConfig($stateProvider) { $stateProvider.state('explore', { url: '/explore', views: { "main": { templateUrl: 'explore/explore.tpl.html' } } }); }]) .factory('exploreService', ['$http', '$q', '__env', function exploreService($http, $q, __env) { return { setNewUser: setNewUser }; function setNewUser(user) { var deferred = $q.defer(); $http.post(__env.serverUrl + 'users', {usr: user}).then(function (response) { deferred.resolve({ id_token: response.data.id_token }); }).catch(function (error) { console.log(error.data); alert("There was an error signing up: " + error.data); }); return deferred.promise; } }]) .controller('ExploreController', ['$uibModal', function ExploreController($uibModal) { var vm = this; }]) .directive('pslExplore', function pslExplore() { return { restrict: 'E', templateUrl: 'explore/pslExplore.tpl.html', controller: 'ExploreController', controllerAs: 'exploreCtrl', link: function (scope, element, attrs, exploreCtrl) { } }; }); })();
{ "content_hash": "7693f805f357279cbcec9bc16a871994", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 97, "avg_line_length": 27.333333333333332, "alnum_prop": 0.4093319194061506, "repo_name": "pasalo92/demowork", "id": "8f54e64d98025ca9609770d517c29b24b3dedad0", "size": "1886", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/explore/explore.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "7233" }, { "name": "HTML", "bytes": "11958" }, { "name": "JavaScript", "bytes": "74865" }, { "name": "Smarty", "bytes": "798" } ], "symlink_target": "" }
<div class="main"> <!-- name --> <div class="section name"> <input class="superlong bold" type="text" name="attr_character_name" /> <h6>Hero Name</h6> <br /> <input class="superlong bold" type="text" name="attr_real_name" /> <h6>Real Name</h6> </div> <!-- logo --> <div class="section logo"> <span>Sheet Version 1.2</span> </div> <!-- resistances --> <div class="section resistances border"> <div class="flex-row"> <h2>RESISTANCES</h2> </div> <div class="res-grid"> <div><!-- empty div placeholder --></div> <span class="bold">MAX</span> <span class="bold">Current</span> <div><!-- empty div placeholder --></div> <span class="small">Notes</span> <div><span class="resistance bold">&nbsp;COMPOSURE&nbsp;&nbsp;</span> </div> <div> <input class="ref bold" type="number" name="attr_composure_max"> </div> <div> <input class="ref bold" type="number" name="attr_composure"> </div> <div> <button class="d6" type='roll' name='roll_composure' value='&{template:default} {{name=@{character_name} **Composure** check}} {{result=[[@{composure}d6]]}}'></button> </div> <div> <input class="notes" type="text" name="attr_composurenotes" /> </div> <div> <span class="resistance bold">&nbsp;FORTITUDE</span> </div> <div> <input class="ref bold" type="number" name="attr_fortitude_max"> </div> <div> <input class="ref bold" type="number" name="attr_fortitude"> </div> <div> <button class="d6" type='roll' name='roll_fortitude' value='&{template:default} {{name=@{character_name} **Fortitude** check}} {{result=[[@{fortitude}d6]]}}'></button> </div> <div> <input class="notes" type="text" name="attr_fortitudenotes" /> </div> <div><span class="resistance bold">&nbsp;REACTION</span> </div> <div> <input class="ref bold" type="number" name="attr_reaction_max"> </div> <div> <input class="ref bold" type="number" name="attr_reaction"> </div> <div> <button class="d6" type='roll' name='roll_reaction' value='&{template:default} {{name=@{character_name} **Reaction** check}} {{result=[[@{reaction}d6]]}}'></button> </div> 
<div> <input class="notes" type="text" name="attr_reactionnotes" /> </div> <div><span class="resistance bold">&nbsp;WILL</span> </div> <div> <input class="ref bold" type="number" name="attr_will_max"> </div> <div> <input class="ref bold" type="number" name="attr_will"> </div> <div> <button class="d6" type='roll' name='roll_will' value='&{template:default} {{name=@{character_name} **Will** check}} {{result=[[@{will}d6]]}}'></button> </div> <div> <input class="notes" type="text" name="attr_willnotes" /> </div> </div> <br /> <div class="flex-row"> <h2>INITIATIVE</h2> </div> <br /> <div class="initiative"> <input class="ref bold" type="number" name="attr_initiative"> <button name="roll_init" class="d6" type="roll" value="&{template:default} {{name=@{character_name} **Initiative** check}} {{result=[[@{initiative}d6 &{tracker}]]}}"></button> </div> </div> <!-- aptitudes --> <div class="section aptitudes border"> <h2>APTITUDES</h2> <label class="aptitudes"> <fieldset class="repeating_aptitudes"> <input class="medium bold" type="text" name="attr_aptitudename" /> <input class="ref bold" type="number" name="attr_aptitude"> <button class="d6" type='roll' name='roll_aptitude' value='&{template:default} {{name=@{character_name} **@{aptitudename} Check}} {{result=[[@{aptitude}d6k3]]}}'></button> <input class="longap" type="text" name="attr_aptitudenotes" /> </fieldset> </label> </div> <!-- advantages --> <div class="section advantages border"> <h2>ADVANTAGES</h2> <fieldset class="repeating_advantages"> <input class="mediumadv bold" type="text" name="attr_advantage" /> <input class="bold ref" name="attr_advantage-rank" type="number" value="1"/> <span class="bold">&nbsp;</span> <input class="notes" type="text" name="attr_advantagenotes" /> </fieldset> </div> <!-- disadvantages --> <div class="section disadvantages border"> <h2>DISADVANTAGES</h2> <fieldset class="repeating_disadvantages"> <input class="mediumdis bold" type="text" name="attr_disadvantage" /> <input class="bold ref" 
name="attr_disadvantage-rank" type="number" value="1"/> <span class="bold">&nbsp;</span> <input class="notes" type="text" name="attr_disadvantagenotes" /> </fieldset> </div> <!-- powers --> <div class="section powers border"> <h2>POWERS</h2> <label class="powers"> <fieldset class="repeating_powers"> <input class="mediumpow bold" type="text" name="attr_powernamename" /> <input class="ref bold" type="number" name="attr_power"> <button class="d6" type='roll' name='roll_power' value='&{template:default} {{name=@{character_name} **@{powernamename}** Check}} {{result=[[@{power}d6]]}}'></button> <input class="longpow" type="text" name="attr_powernotes" style="width:688px" /> </fieldset> </label> </div> <!-- competency dice --> <div class="section comp-dice flex-down flex-center border"> <h2>COMPETENCY DICE</h2> <br /> <br /> <div> <span class="bold">Competency Dice:</span> <input class="bold ref" type="number" name="attr_comp" />/<input class="bold ref" type="number" name="attr_comp_max" /> </div> <div> <span class="bold">Temp Comp Dice:</span> <input class="bold ref" type="number" name="attr_temp_comp" />/<input class="bold ref" type="number" name="attr_temp_comp_max" /> </div> </div> <!-- tracker --> <div class="section tracker"> <span>Use Alternative tracker</span> <input type="checkbox" class="sheet-tracker-switch" name="attr_tracker_switch" value="1"> <div class="def-tracker"></div> <div class="alt-tracker"></div> </div> <!-- alt tracker --> <!-- <div class="section alt-tracker"> --> <!-- cost --> <div class="section cost border flex-down flex-center"> <div class="flex-row"> <span class="bold">RESISTANCES</span> <input class="bold" type="number" name="attr_resist-cost" value="0" /> <span class="bold">+ APTITUDES </span> <input class="bold" type="number" name="attr_apt-cost" value="0" /> <span class="bold">+ POWERS</span> <input class="bold" type="number" name="attr_power-cost" value="0" /> <span class="bold">+ ADVANTAGES</span> <input class="bold" type="number" 
name="attr_adv-cost" value="0" /> <span class="bold">+ DISADVANTAGES</span> <input class="bold" type="number" name="attr_disadv-cost" value="0" /> </div> <div> <span class="bold">= TOTAL COST</span> <input class="bold ref" type="number" name="attr_cost" value="@{resist-cost}+@{apt-cost}+@{power-cost}+@{adv-cost}+@{disadv-cost}" disabled="true"/> </div> </div> </div> <!-- ROLL TEMPLATE --> <rolltemplate class="sheet-rolltemplate-black"> <div> <caption>{{name}}</caption> {{#allprops() }} <tr><td>{{key}}</td><td>{{value}}</td> {{/allprops() }} </div> </rolltemplate>
{ "content_hash": "288d173a30f6b7820913758c6e68f0fc", "timestamp": "", "source": "github", "line_count": 250, "max_line_length": 191, "avg_line_length": 34.56, "alnum_prop": 0.5077546296296296, "repo_name": "Polaris-Dust/roll20-character-sheets", "id": "4e78dad17d1722a37ab486c369737adc5570faf9", "size": "8640", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "Supers-Revised-Edition/supers-revised.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "4874491" }, { "name": "HTML", "bytes": "47932600" }, { "name": "JavaScript", "bytes": "667160" }, { "name": "Makefile", "bytes": "50" } ], "symlink_target": "" }
package org.apache.struts2.convention.actions.result; import org.apache.struts2.convention.annotation.Result; import org.apache.struts2.convention.annotation.Results; /** * <p> * This is a test action with multiple results. * </p> */ @Results({ @Result(name="error", location="error.jsp", params={"key", "ann-value", "key1", "ann-value1"}), @Result(name="input", location="foo.action", type="redirectAction"), @Result(name="success", location="/WEB-INF/location/namespace/action-success.jsp"), @Result(name="failure", location="/WEB-INF/location/namespace/action-failure.jsp") }) public class ClassLevelResultsAction { public String execute() { return null; } }
{ "content_hash": "fcaa3a830ad57178a1924b2ca68105af", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 99, "avg_line_length": 31.818181818181817, "alnum_prop": 0.7, "repo_name": "WillJiang/WillJiang", "id": "67ca4ddcadd0107753edbc7fe45462ed231e324f", "size": "1588", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/plugins/convention/src/test/java/org/apache/struts2/convention/actions/result/ClassLevelResultsAction.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ActionScript", "bytes": "22970" }, { "name": "CSS", "bytes": "74192" }, { "name": "Java", "bytes": "9706375" }, { "name": "JavaScript", "bytes": "4013533" }, { "name": "XSLT", "bytes": "8414" } ], "symlink_target": "" }
package org.geometerplus.fbreader.network.opds; import org.geometerplus.zlibrary.core.xml.ZLStringMap; import org.geometerplus.fbreader.network.atom.ATOMFeedHandler; abstract class AbstractOPDSFeedHandler implements ATOMFeedHandler<OPDSFeedMetadata,OPDSEntry>, OPDSConstants { public OPDSFeedMetadata createFeed(ZLStringMap attributes) { return new OPDSFeedMetadata(attributes); } public OPDSEntry createEntry(ZLStringMap attributes) { return new OPDSEntry(attributes); } public OPDSLink createLink(ZLStringMap attributes) { return new OPDSLink(attributes); } }
{ "content_hash": "25be0add75629a25c6b6b462c9172f04", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 110, "avg_line_length": 27.666666666666668, "alnum_prop": 0.8209982788296041, "repo_name": "zhqhzhqh/FbreaderJ", "id": "db13bf6d031ebafdbb1c670e158d83ff33f0bc2a", "size": "1382", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/org/geometerplus/fbreader/network/opds/AbstractOPDSFeedHandler.java", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "553" }, { "name": "C", "bytes": "613666" }, { "name": "C++", "bytes": "1059037" }, { "name": "CMake", "bytes": "13838" }, { "name": "CSS", "bytes": "13797" }, { "name": "HTML", "bytes": "374361" }, { "name": "Inno Setup", "bytes": "4312" }, { "name": "Java", "bytes": "2422129" }, { "name": "M4", "bytes": "4638" }, { "name": "Makefile", "bytes": "78973" }, { "name": "Module Management System", "bytes": "2080" }, { "name": "Objective-C", "bytes": "1741" }, { "name": "Roff", "bytes": "7895" }, { "name": "Shell", "bytes": "448355" } ], "symlink_target": "" }
from neutron.tests.tempest.api import base class BaseRouterTest(base.BaseAdminNetworkTest): # NOTE(salv-orlando): This class inherits from BaseAdminNetworkTest # as some router operations, such as enabling or disabling SNAT # require admin credentials by default def _cleanup_router(self, router): self.delete_router(router) self.routers.remove(router) def _create_router(self, name, admin_state_up=False, external_network_id=None, enable_snat=None): # associate a cleanup with created routers to avoid quota limits router = self.create_router(name, admin_state_up, external_network_id, enable_snat) self.addCleanup(self._cleanup_router, router) return router def _delete_router(self, router_id, network_client=None): client = network_client or self.client client.delete_router(router_id) # Asserting that the router is not found in the list # after deletion list_body = self.client.list_routers() routers_list = list() for router in list_body['routers']: routers_list.append(router['id']) self.assertNotIn(router_id, routers_list)
{ "content_hash": "e79b61497d566e346448b3f5aa64998b", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 72, "avg_line_length": 41.46666666666667, "alnum_prop": 0.655144694533762, "repo_name": "igor-toga/local-snat", "id": "8b0b5a477051b70151f956c5512214212294be72", "size": "1880", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "neutron/tests/tempest/api/base_routers.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Mako", "bytes": "1047" }, { "name": "Python", "bytes": "9636936" }, { "name": "Shell", "bytes": "14072" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <title>Login Page - Photon Admin Panel Theme</title> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=0, minimum-scale=1.0, maximum-scale=1.0"> <link rel="shortcut icon" href="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/favicon.ico" /> <link rel="apple-touch-icon" href="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/iosicon.png" /> <!-- DEVELOPMENT LESS --> <!-- <link rel="stylesheet/less" href="css/photon.less" media="all" /> <link rel="stylesheet/less" href="css/photon-responsive.less" media="all" /> --> <!-- PRODUCTION CSS --> <link rel="stylesheet" href="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/css/css_compiled/photon-min.css?v1.1" media="all" /> <link rel="stylesheet" href="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/css/css_compiled/photon-min-part2.css?v1.1" media="all" /> <link rel="stylesheet" href="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/css/css_compiled/photon-responsive-min.css?v1.1" media="all" /> <!--[if IE]> <link rel="stylesheet" type="text/css" href="css/css_compiled/ie-only-min.css?v1.1" /> <![endif]--> <!--[if lt IE 9]> <link rel="stylesheet" type="text/css" href="css/css_compiled/ie8-only-min.css?v1.1" /> <script type="text/javascript" src="js/plugins/excanvas.js"></script> <script type="text/javascript" src="js/plugins/html5shiv.js"></script> <script type="text/javascript" src="js/plugins/respond.min.js"></script> <script type="text/javascript" src="js/plugins/fixFontIcons.js"></script> <![endif]--> 
<script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.8.3/jquery.min.js"></script> <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jqueryui/1.10.0/jquery-ui.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/bootstrap/bootstrap.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/modernizr.custom.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.pnotify.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/less-1.3.1.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/xbreadcrumbs.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.maskedinput-1.3.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.autotab-1.1b.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/charCount.js"></script> <script type="text/javascript" 
src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.textareaCounter.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/elrte.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/elrte.en.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/select2.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery-picklist.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.validate.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/additional-methods.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.form.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.metadata.js"></script> <script type="text/javascript" 
src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.mockjax.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.uniform.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.tagsinput.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.rating.pack.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/farbtastic.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.timeentry.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.dataTables.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.jstree.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/dataTables.bootstrap.js"></script> <script type="text/javascript" 
src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.mousewheel.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.mCustomScrollbar.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.flot.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.flot.stack.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.flot.pie.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.flot.resize.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/raphael.2.1.0.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/justgage.1.0.1.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.qrcode.min.js"></script> <script type="text/javascript" 
src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.clock.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.countdown.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.jqtweet.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/jquery.cookie.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/bootstrap-fileupload.min.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/prettify/prettify.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/bootstrapSwitch.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/plugins/mfupload.js"></script> <script type="text/javascript" src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/js/common.js"></script> </head> <body class="body-login"> <div class="nav-fixed-topright" style="visibility: hidden"> <ul class="nav nav-user-menu"> <li 
class="user-sub-menu-container"> <a href="javascript:;"> <i class="user-icon"></i><span class="nav-user-selection">Theme Options</span><i class="icon-menu-arrow"></i> </a> <ul class="nav user-sub-menu"> <li class="light"> <a href="javascript:;"> <i class='icon-photon stop'></i>Light Version </a> </li> <li class="dark"> <a href="javascript:;"> <i class='icon-photon stop'></i>Dark Version </a> </li> </ul> </li> <li> <a href="javascript:;"> <i class="icon-photon mail"></i> </a> </li> <li> <a href="javascript:;"> <i class="icon-photon comment_alt2_stroke"></i> <div class="notification-count">12</div> </a> </li> </ul> </div> <script> $(function(){ setTimeout(function(){ $('.nav-fixed-topright').removeAttr('style'); }, 300); $(window).scroll(function(){ if($('.breadcrumb-container').length){ var scrollState = $(window).scrollTop(); if (scrollState > 0) $('.nav-fixed-topright').addClass('nav-released'); else $('.nav-fixed-topright').removeClass('nav-released') } }); $('.user-sub-menu-container').on('click', function(){ $(this).toggleClass('active-user-menu'); }); $('.user-sub-menu .light').on('click', function(){ if ($('body').is('.light-version')) return; $('body').addClass('light-version'); setTimeout(function() { $.cookie('themeColor', 'light', { expires: 7, path: '/' }); }, 500); }); $('.user-sub-menu .dark').on('click', function(){ if ($('body').is('.light-version')) { $('body').removeClass('light-version'); $.cookie('themeColor', 'dark', { expires: 7, path: '/' }); } }); }); </script> <div class="container-login"> <div class="form-centering-wrapper"> <div class="form-window-login"> <div class="form-window-login-logo"> <div class="login-logo"> <img src="http://photonui.orangehilldev.com/css/css_compiled/@%7BphotonImagePath%7Dplugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/images/photon/login-logo@2x.png" alt="Photon UI"/> </div> <h2 class="login-title">Welcome to Photon UI!</h2> <div class="login-member">Not a Member?&nbsp;<a 
href="less-1.3.1.min.js.html#">Sign Up &#187;</a> <a href="less-1.3.1.min.js.html#" class="btn btn-facebook"><i class="icon-fb"></i>Login with Facebook<i class="icon-fb-arrow"></i></a> </div> <div class="login-or">Or</div> <div class="login-input-area"> <form method="POST" action="dashboard.php"> <span class="help-block">Login With Your Photon Account</span> <input type="text" name="email" placeholder="Email"> <input type="password" name="password" placeholder="Password"> <button type="submit" class="btn btn-large btn-success btn-login">Login</button> </form> <a href="less-1.3.1.min.js.html#" class="forgot-pass">Forgot Your Password?</a> </div> </div> </div> </div> </div> <script type="text/javascript"> var _gaq = _gaq || []; _gaq.push(['_setAccount', 'UA-1936460-27']); _gaq.push(['_trackPageview']); (function() { var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true; ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js'; var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s); })(); </script> </body> </html>
{ "content_hash": "827527d378a1e1dffee8fd796c3a474c", "timestamp": "", "source": "github", "line_count": 196, "max_line_length": 234, "avg_line_length": 83.91326530612245, "alnum_prop": 0.6859609655256278, "repo_name": "user-tony/photon-rails", "id": "e3456bc71c301700ed25133e1f48f32100ce71dd", "size": "16447", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/assets/css/css_compiled/@{photonImagePath}plugins/elrte/js/plugins/js/plugins/images/photon/js/plugins/less-1.3.1.min.js.html", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "291750913" }, { "name": "JavaScript", "bytes": "59305" }, { "name": "Ruby", "bytes": "203" }, { "name": "Shell", "bytes": "99" } ], "symlink_target": "" }
namespace Mosa.Compiler.Framework.IR { /// <summary> /// Loads a value from a memory pointer. /// </summary> /// <remarks> /// The load instruction is used to load a value from a memory pointer and an offset. The types must be compatible. /// </remarks> public sealed class LoadZeroExtended : ThreeOperandInstruction { #region Construction /// <summary> /// Initializes a new instance of <see cref="Load"/>. /// </summary> public LoadZeroExtended() { } #endregion Construction } }
{ "content_hash": "545e81007aec1d58826f0bc9bf89745a", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 116, "avg_line_length": 23.09090909090909, "alnum_prop": 0.6791338582677166, "repo_name": "kiootic/MOSA-Project", "id": "f3060f84b5edd159391e3aafdf0ed47c4638a687", "size": "576", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Source/Mosa.Compiler.Framework/IR/LoadZeroExtended.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "3292" }, { "name": "C#", "bytes": "3851964" }, { "name": "Shell", "bytes": "2494" }, { "name": "Smalltalk", "bytes": "1560" } ], "symlink_target": "" }
require 'helper' require 'json' describe RoxClient::Client do include Capture::Helpers include FakeFS::SpecHelpers NO_SERVER_MSG = /no server/i PUBLISHING_DISABLED_MSG = /publishing disabled/i PRINTING_PAYLOAD_MSG = /printing payload/i CANNOT_SAVE_MSG = /cannot save payload/i SENDING_PAYLOAD_MSG = /sending payload/i LOCAL_MODE_MSG = /local mode/i DONE_MSG = /done/i UPLOAD_FAILED_MSG = /upload failed/i DUMPING_RESPONSE_MSG = /response body/i ALL_MESSAGES = [ NO_SERVER_MSG, PUBLISHING_DISABLED_MSG, PRINTING_PAYLOAD_MSG, CANNOT_SAVE_MSG, SENDING_PAYLOAD_MSG, LOCAL_MODE_MSG, DONE_MSG, UPLOAD_FAILED_MSG, DUMPING_RESPONSE_MSG ] API_URL = 'http://example.com/api' WORKSPACE = '/tmp' let(:cache_double){ double } let(:payload_to_h){ { 'foo' => 'bar' } } let(:payload_double){ double to_h: payload_to_h } let(:uid_double){ double load_uid: '42' } let(:run_double){ double :uid= => nil } let(:server_options){ { name: 'A server', api_url: API_URL, project_api_id: '0123456789', payload_options: {}, upload: nil } } let(:server){ double server_options } let(:client_options){ { publish: true, workspace: WORKSPACE, client_name: 'rspec' } } let(:client){ RoxClient::Client.new server, client_options } subject{ client } before :each do RoxClient::Cache.stub new: cache_double RoxClient::TestPayload.stub new: payload_double RoxClient::UID.stub new: uid_double end describe "when created" do subject{ RoxClient::Client } it "should create a cache manager" do expect(RoxClient::Cache).to receive(:new).with(workspace: WORKSPACE, client_name: 'rspec', server_name: server_options[:name], project_api_id: server_options[:project_api_id]) RoxClient::Client.new server, client_options end it "should not raise an error if the server is missing" do expect(RoxClient::Cache).to receive(:new).with(workspace: WORKSPACE, client_name: 'rspec') expect{ RoxClient::Client.new nil, client_options }.not_to raise_error end it "should create an uid manager" do expect(RoxClient::UID).to receive(:new).with(workspace: 
WORKSPACE) RoxClient::Client.new server, client_options end end it "should upload the results payload" do expect(RoxClient::TestPayload).to receive(:new).with(run_double) expect(payload_double).to receive(:to_h).with({}) expect_processed true, SENDING_PAYLOAD_MSG, API_URL, DONE_MSG end it "should set the test run uid" do expect(run_double).to receive(:uid=).with('42') expect_processed true, SENDING_PAYLOAD_MSG, API_URL, DONE_MSG end describe "in local mode" do let(:client_options){ super().merge local_mode: true } it "should not upload results" do expect(server).not_to receive(:upload) expect_processed true, SENDING_PAYLOAD_MSG, API_URL, LOCAL_MODE_MSG, DONE_MSG end end describe "with no server" do let(:server){ nil } it "should warn that there is no server" do expect_processed false, stderr: [ NO_SERVER_MSG ] end end describe "when the payload cannot be serialized" do before(:each){ payload_double.stub(:to_h).and_raise(RoxClient::PayloadError.new('bug')) } it "should output the error to stderr" do expect_processed false, stderr: [ 'bug' ] end end describe "when publishing is disabled" do let(:client_options){ super().merge publish: false } it "should not upload the payload" do expect(server).not_to receive(:upload) expect_processed false, PUBLISHING_DISABLED_MSG end end describe "when publishing fails due to a configuration error" do it "should output the error message to stderr" do server.stub(:upload).and_raise(RoxClient::Server::Error.new("bug")) expect_processed false, SENDING_PAYLOAD_MSG, API_URL, stderr: [ UPLOAD_FAILED_MSG, 'bug' ] end end describe "when publishing fails due to a server error" do it "should output the error message and response body to stderr" do server.stub(:upload).and_raise(RoxClient::Server::Error.new("bug", double(body: 'fubar'))) expect_processed false, SENDING_PAYLOAD_MSG, API_URL, stderr: [ UPLOAD_FAILED_MSG, 'bug', DUMPING_RESPONSE_MSG, 'fubar' ] end end describe "with payload caching enabled" do let(:client_options){ super().merge 
cache_payload: true } before :each do cache_double.stub load: cache_double, save: cache_double end it "should load the cache" do expect(cache_double).to receive(:load) expect(payload_double).to receive(:to_h).with(cache: cache_double) expect_processed true, SENDING_PAYLOAD_MSG, API_URL, DONE_MSG end it "should output any error to stderr and not save the cache" do cache_double.stub(:load).and_raise(RoxClient::Cache::Error.new('bug')) expect(cache_double).not_to receive(:save) expect_processed true, SENDING_PAYLOAD_MSG, API_URL, DONE_MSG, stderr: [ 'bug' ] end it "should not save the cache if publishing fails" do server.stub(:upload).and_raise(RoxClient::Server::Error.new("bug")) expect(cache_double).to receive(:load) expect(cache_double).not_to receive(:save) expect_processed false, SENDING_PAYLOAD_MSG, API_URL, stderr: [ UPLOAD_FAILED_MSG, 'bug' ] end end describe "with payload printing enabled" do let(:client_options){ super().merge print_payload: true } it "should print the payload" do expect_processed true, SENDING_PAYLOAD_MSG, API_URL, DONE_MSG, PRINTING_PAYLOAD_MSG, JSON.pretty_generate(payload_to_h) end it "should use inspect if the payload can't be pretty-printed" do JSON.stub(:pretty_generate).and_raise(StandardError.new('bug')) expect_processed true, SENDING_PAYLOAD_MSG, API_URL, DONE_MSG, PRINTING_PAYLOAD_MSG, payload_to_h.inspect end end describe "with payload saving enabled" do let(:client_options){ super().merge save_payload: true } let(:payload_file){ File.join WORKSPACE, 'rspec', 'servers', server_options[:name], 'payload.json' } it "should save the payload" do FileUtils.mkdir_p File.dirname(payload_file) expect_processed true, SENDING_PAYLOAD_MSG, API_URL, DONE_MSG expect_payload_to_be_saved end it "should create the workspace directory" do expect_processed true, SENDING_PAYLOAD_MSG, API_URL, DONE_MSG expect(File.directory?(File.dirname(payload_file))).to be_true expect_payload_to_be_saved end def expect_payload_to_be_saved 
expect(File.read(payload_file)).to eq(Oj.dump(payload_to_h, mode: :strict)) end end def expect_processed result, *args options = args.last.kind_of?(Hash) ? args.pop : {} messages = args warnings = options[:stderr] || [] capture_process.tap do |c| expect(c.result).to be(result) if messages.empty? expect(c.stdout.strip).to eq('') else messages.each{ |m| expect(c.stdout).to match(m) } end if warnings.empty? expect(c.stderr).to eq('') else warnings.each{ |m| expect(c.stderr).to match(m) } end ensure_no_match c.output, *(ALL_MESSAGES - messages - warnings) yield c if block_given? end end def ensure_no_match string, *matches matches.each{ |m| expect(string).not_to match(m) } end def capture_process capture{ client.process run_double } end end
{ "content_hash": "fa8be11d964cec344420e18ea3391bd2", "timestamp": "", "source": "github", "line_count": 210, "max_line_length": 181, "avg_line_length": 35.357142857142854, "alnum_prop": 0.6797306397306397, "repo_name": "lotaris/rox-client-ruby", "id": "2701b0419a0e5c9a5c4f17edc9cbfaf0d7759123", "size": "7425", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/client_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "78961" } ], "symlink_target": "" }
import decimal import datetime import logging from pyramid.renderers import JSON from pyramid.events import NewRequest logger = logging.getLogger('seth.tenancy') class ValidationError(Exception): pass def _register_resource(config, view, path, *args, **kwargs): route_name = getattr(view, '__qualname__', view.__name__) attr = 'dispatch' renderer = kwargs.pop('renderer', 'json') web = kwargs.pop('web', False) config.add_route(route_name, path) if not web: config.add_view( view, route_name=route_name, attr=attr, *args, renderer=renderer, **kwargs ) else: # if this is a web resource we optionally register json renderer # which renders context as json object if not renderer == 'json': config.add_view( view, route_name=route_name, attr=attr, *args, renderer=renderer, accept="text/html", **kwargs ) config.add_view( view, route_name=route_name, attr=attr, *args, renderer='json', accept="application/json", **kwargs ) def _register_export(config, view, path, *args, **kwargs): route_name = getattr(view, '__qualname__', view.__name__) config.add_route(route_name, path) config.add_view( view, route_name=route_name, attr='get', *args, renderer='json', **kwargs ) for renderer in ['pdf', 'csv']: if path.endswith('/'): path_ = '{0}{1}/'.format(path, renderer) else: path_ = '{0}/{1}/'.format(path, renderer) route_name_ = "{0}_{1}".format(route_name, renderer) config.add_route(route_name_, path_) config.add_view( view, route_name=route_name_, attr=renderer, *args, renderer=renderer, **kwargs ) def get_adapted_json_renderer(): json_renderer = JSON() def datetime_adapter(obj, request): return obj.isoformat() def decimal_adapter(obj, request): return str(obj) json_renderer.add_adapter(datetime.datetime, datetime_adapter) json_renderer.add_adapter(datetime.date, datetime_adapter) json_renderer.add_adapter(datetime.time, datetime_adapter) json_renderer.add_adapter(decimal.Decimal, decimal_adapter) return json_renderer def _register_query_listener(config, engine, threshold=10): from 
seth.ext.sa import setup_query_listener setup_query_listener(engine, threshold) def _register_tenancy(config, TenantModel): from seth import db from seth import tenancy tenancy.Meta.TenantModel = TenantModel session = db.get_session() dialect = session.connection().engine.url.get_dialect() if dialect.name in tenancy.supported_dialects: config.add_subscriber(tenancy.set_search_path, NewRequest) else: msg = 'Cannot register tenancy. Dialect: {0} is not supported'.format( dialect.name ) logger.error(msg) raise RuntimeError(msg)
{ "content_hash": "6b8972f63054956981dee3355f1b4c6d", "timestamp": "", "source": "github", "line_count": 112, "max_line_length": 78, "avg_line_length": 27.348214285714285, "alnum_prop": 0.6173685928827947, "repo_name": "jnosal/seth", "id": "71007895cc58768979df1eea8bf8de304d900ea7", "size": "3063", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "seth/core.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "155107" } ], "symlink_target": "" }
package blue.lapis.pore.ap.event;

import static javax.tools.Diagnostic.Kind.ERROR;
import static javax.tools.Diagnostic.Kind.NOTE;
import static javax.tools.Diagnostic.Kind.WARNING;

import com.google.common.collect.Sets;
import com.google.common.io.CharStreams;

import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.util.Iterator;
import java.util.Set;

import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.QualifiedNameable;
import javax.lang.model.element.TypeElement;
import javax.tools.FileObject;
import javax.tools.StandardLocation;

/**
 * Annotation processor that collects the qualified names of all elements
 * annotated with {@code blue.lapis.pore.event.RegisterEvent} and persists
 * them to a {@code blue/lapis/pore/event/events.txt} class-output resource.
 *
 * <p>Entries already present in a previous {@code events.txt} are re-read in
 * {@link #init} and re-validated against the current compilation in the final
 * round, so names that have disappeared or lost the annotation are dropped.
 * Classes are stored by qualified name ({@code Outer$Inner} for nested
 * classes); methods are stored as {@code owner:methodName}.
 */
@SupportedAnnotationTypes("blue.lapis.pore.event.RegisterEvent")
@SupportedSourceVersion(SourceVersion.RELEASE_8)
public class EventProcessor extends AbstractProcessor {

    /** Accumulated event identifiers (class names and owner:method pairs). */
    private final Set<String> events = Sets.newHashSet();

    /**
     * Seeds {@link #events} from an existing events.txt resource, if one is
     * present in the class output. Missing/unreadable files are silently
     * ignored (first clean build).
     */
    @Override
    public synchronized void init(ProcessingEnvironment processingEnv) {
        super.init(processingEnv);

        try {
            FileObject file = processingEnv.getFiler()
                    .getResource(StandardLocation.CLASS_OUTPUT, "blue.lapis.pore.event", "events.txt");
            try (Reader reader = file.openReader(false)) {
                events.addAll(CharStreams.readLines(reader));
            }

            processingEnv.getMessager().printMessage(NOTE,
                    "Found " + events.size() + " events in " + file.getName());
        } catch (IOException ignored) {
        }
    }

    /**
     * During regular rounds: record every annotated element's qualified name.
     * In the final round ({@code processingOver}): drop entries that no
     * longer validate, then write the surviving set back to events.txt, one
     * entry per line.
     *
     * @return {@code true} while accumulating (annotation claimed),
     *         {@code false} in the final round
     */
    @Override
    public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
        if (roundEnv.processingOver()) {
            try {
                FileObject file = processingEnv.getFiler()
                        .createResource(StandardLocation.CLASS_OUTPUT, "blue.lapis.pore.event", "events.txt");

                // Re-validate every recorded entry; a ':' separates a method
                // entry's owner from its simple name.
                Iterator<String> itr = events.iterator();
                while (itr.hasNext()) {
                    String event = itr.next();
                    int pos = event.lastIndexOf(':');
                    if (pos >= 0) {
                        if (!validateMethod(event.substring(0, pos), event.substring(pos + 1), annotations)) {
                            itr.remove();
                        }
                    } else {
                        if (!validateClass(event, annotations)) {
                            itr.remove();
                        }
                    }
                }

                processingEnv.getMessager().printMessage(NOTE,
                        "Writing " + events.size() + " events to " + file.getName());

                try (Writer w = file.openWriter()) {
                    for (String event : events) {
                        w.write(event);
                        w.write('\n');
                    }
                }
            } catch (IOException e) {
                processingEnv.getMessager().printMessage(ERROR, "Failed to write events to events.txt");
                e.printStackTrace();
            }

            return false;
        }

        for (TypeElement anno : annotations) {
            for (Element element : roundEnv.getElementsAnnotatedWith(anno)) {
                this.events.add(getQualifiedName(element));
            }
        }

        return true;
    }

    /**
     * Returns whether {@code name} still resolves to a known type carrying
     * the given annotations; warns and returns false for unknown classes.
     *
     * <p>NOTE(review): {@code getAnnotationMirrors().containsAll(annotations)}
     * compares {@code AnnotationMirror}s against {@code TypeElement}s, which
     * can only hold when {@code annotations} is empty (as it is in the final
     * round where this is called) -- confirm this is intentional before
     * relying on it elsewhere.
     */
    private boolean validateClass(String name, Set<? extends TypeElement> annotations) {
        name = name.replace('$', '.'); // Mirror API sucks, replace inner class signs
        TypeElement element = processingEnv.getElementUtils().getTypeElement(name);
        if (element != null) {
            return element.getAnnotationMirrors().containsAll(annotations);
        } else {
            processingEnv.getMessager().printMessage(WARNING, "Skipping unknown class: " + name);
            return false;
        }
    }

    /**
     * Returns whether {@code owner} resolves to a known type that encloses a
     * method named {@code name} carrying the given annotations; warns for
     * unknown owners. (See the containsAll caveat on {@link #validateClass}.)
     */
    private boolean validateMethod(String owner, String name, Set<? extends TypeElement> annotations) {
        TypeElement element = processingEnv.getElementUtils().getTypeElement(owner);
        if (element != null) {
            for (Element enclosed : element.getEnclosedElements()) {
                if (enclosed.getKind() == ElementKind.METHOD
                        && enclosed.getSimpleName().toString().equals(name)
                        && enclosed.getAnnotationMirrors().containsAll(annotations)) {
                    return true;
                }
            }
        } else {
            processingEnv.getMessager().printMessage(WARNING, "Skipping unknown method owner: " + owner);
        }
        return false;
    }

    /**
     * Builds the persisted identifier for an element: nested classes use the
     * binary-style {@code Outer$Inner} form, methods use
     * {@code ownerQualifiedName:methodName}. Any other element kind is a
     * programming error.
     */
    private static String getQualifiedName(Element element) {
        switch (element.getKind()) {
            case CLASS:
                // Mirror API sucks, return proper qualified name for inner classes
                if (element.getEnclosingElement().getKind() == ElementKind.CLASS) {
                    return getQualifiedName(element.getEnclosingElement()) + '$' + element.getSimpleName();
                } else {
                    return ((QualifiedNameable) element).getQualifiedName().toString();
                }
            case METHOD:
                return getQualifiedName(element.getEnclosingElement()) + ':' + element.getSimpleName().toString();
            default:
                throw new UnsupportedOperationException(element.getClass().toString());
        }
    }
}
{ "content_hash": "06caced8d07b53d2c3aa5d60df85294d", "timestamp": "", "source": "github", "line_count": 144, "max_line_length": 118, "avg_line_length": 39.1875, "alnum_prop": 0.6005670742512847, "repo_name": "LapisBlue/Pore", "id": "b6bb8cd226a37fe6b99a7472dfdcfefaf457b4ea", "size": "6819", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ap/src/main/java/blue/lapis/pore/ap/event/EventProcessor.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "1396298" }, { "name": "Shell", "bytes": "4381" } ], "symlink_target": "" }
<?php

namespace AvalancheDevelopment\Approach\Builder;

use AvalancheDevelopment\Approach\SchemaObjectFactory;
use PHPUnit_Framework_TestCase;
use Psr\Log\NullLogger;

/**
 * Tests for the AbstractBuilder constructor: it must store the supplied
 * schema object factory and initialise its logger to a NullLogger.
 */
class AbstractBuilderTest extends PHPUnit_Framework_TestCase
{

    public function testConstructSetsFactories()
    {
        $factoryMock = $this->createMock(SchemaObjectFactory::class);

        $builder = $this->makeBuilder($factoryMock);

        $this->assertAttributeSame($factoryMock, 'schemaObjectFactory', $builder);
    }

    public function testConstructSetsNullLogger()
    {
        $factoryMock = $this->createMock(SchemaObjectFactory::class);

        $builder = $this->makeBuilder($factoryMock);

        $this->assertAttributeInstanceOf(NullLogger::class, 'logger', $builder);
    }

    /**
     * Builds a concrete mock of the abstract builder, invoking the real
     * constructor with the given factory.
     *
     * @param SchemaObjectFactory $schemaObjectFactory
     * @return AbstractBuilder
     */
    private function makeBuilder($schemaObjectFactory)
    {
        return $this->getMockBuilder(AbstractBuilder::class)
            ->setConstructorArgs([$schemaObjectFactory])
            ->getMock();
    }
}
{ "content_hash": "0fbce643e5a1776bbe6fa427d84fd583", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 98, "avg_line_length": 30.939393939393938, "alnum_prop": 0.713026444662096, "repo_name": "avalanche-development/approach", "id": "77c3a51cfbd2f3a492616e3a232283f609eb42d7", "size": "1021", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/unit/src/Builder/AbstractBuilderTest.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "298124" }, { "name": "Shell", "bytes": "240" } ], "symlink_target": "" }
#include "client/async_log_client.h"

#include <algorithm>
#include <event2/http.h>
#include <glog/logging.h>
#include <iterator>
#include <memory>

#include "log/cert.h"
#include "proto/serializer.h"
#include "util/json_wrapper.h"

using cert_trans::AsyncLogClient;
using cert_trans::Cert;
using cert_trans::CertChain;
using cert_trans::PreCertChain;
using cert_trans::URL;
using cert_trans::UrlFetcher;
using ct::DigitallySigned;
using ct::MerkleAuditProof;
using ct::SignedCertificateTimestamp;
using ct::SignedTreeHead;
using std::back_inserter;
using std::bind;
using std::move;
using std::placeholders::_1;
using std::string;
using std::to_string;
using std::unique_ptr;
using std::vector;

namespace {

// Percent-encodes "input" via libevent's evhttp_uriencode; the malloc'd
// result is freed through the unique_ptr's custom deleter.
string UriEncode(const string& input) {
  const unique_ptr<char, void (*)(void*)> output(
      evhttp_uriencode(input.data(), input.size(), false), &free);

  return output.get();
}

// Do some common checks, calls the callback with the appropriate
// error if something is wrong.
// Returns true iff the fetch succeeded with HTTP 200; otherwise the
// callback has already been invoked with UNKNOWN_ERROR.
bool SanityCheck(UrlFetcher::Response* resp,
                 const AsyncLogClient::Callback& done, util::Task* task) {
  // TODO(pphaneuf): We should report errors better. The easiest way
  // would be for this to use util::Task as well, so it could simply
  // pass on the status.
  if (!task->status().ok() || resp->status_code != HTTP_OK) {
    done(AsyncLogClient::UNKNOWN_ERROR);
    return false;
  }

  return true;
}

// Completion callback for get-sth: parses the JSON body into "sth".
// On any missing/invalid field the callback receives BAD_RESPONSE and
// "sth" is left untouched. Takes ownership of "resp" and "task".
void DoneGetSTH(UrlFetcher::Response* resp, SignedTreeHead* sth,
                const AsyncLogClient::Callback& done, util::Task* task) {
  unique_ptr<UrlFetcher::Response> resp_deleter(CHECK_NOTNULL(resp));
  unique_ptr<util::Task> task_deleter(CHECK_NOTNULL(task));
  LOG_IF(INFO, !task->status().ok()) << "DoneGetSTH: " << task->status();

  if (!SanityCheck(resp, done, task)) {
    return;
  }

  JsonObject jresponse(resp->body);
  if (!jresponse.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonInt tree_size(jresponse, "tree_size");
  if (!tree_size.Ok() || tree_size.Value() < 0)
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonInt timestamp(jresponse, "timestamp");
  if (!timestamp.Ok() || timestamp.Value() < 0)
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonString root_hash(jresponse, "sha256_root_hash");
  if (!root_hash.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonString jsignature(jresponse, "tree_head_signature");
  if (!jsignature.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);
  DigitallySigned signature;
  if (Deserializer::DeserializeDigitallySigned(jsignature.FromBase64(),
                                               &signature) != Deserializer::OK)
    return done(AsyncLogClient::BAD_RESPONSE);

  // Only overwrite the output once every field has validated.
  sth->Clear();
  sth->set_version(ct::V1);
  sth->set_tree_size(tree_size.Value());
  sth->set_timestamp(timestamp.Value());
  sth->set_sha256_root_hash(root_hash.FromBase64());
  sth->mutable_signature()->CopyFrom(signature);

  return done(AsyncLogClient::OK);
}

// Completion callback for get-roots: decodes each base64 DER certificate
// from the "certificates" array. "roots" is only replaced (via swap) after
// the whole array parsed cleanly. Takes ownership of "resp" and "task".
void DoneGetRoots(UrlFetcher::Response* resp, vector<unique_ptr<Cert>>* roots,
                  const AsyncLogClient::Callback& done, util::Task* task) {
  unique_ptr<UrlFetcher::Response> resp_deleter(CHECK_NOTNULL(resp));
  unique_ptr<util::Task> task_deleter(CHECK_NOTNULL(task));

  if (!SanityCheck(resp, done, task)) {
    return;
  }

  JsonObject jresponse(resp->body);
  if (!jresponse.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonArray jroots(jresponse, "certificates");
  if (!jroots.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  vector<unique_ptr<Cert>> retval;
  for (int i = 0; i < jroots.Length(); ++i) {
    JsonString jcert(jroots, i);
    if (!jcert.Ok())
      return done(AsyncLogClient::BAD_RESPONSE);

    unique_ptr<Cert> cert(new Cert);
    const util::Status status(cert->LoadFromDerString(jcert.FromBase64()));
    if (!status.ok()) {
      return done(AsyncLogClient::BAD_RESPONSE);
    }

    retval.push_back(move(cert));
  }

  roots->swap(retval);

  return done(AsyncLogClient::OK);
}

// Completion callback for get-entries: deserializes each entry's Merkle
// tree leaf, extra_data chain (X.509 or precert), and the optional
// non-standard "sct" field. Successfully parsed entries are appended to
// "entries"; a malformed entry aborts with BAD_RESPONSE without touching
// the output. Takes ownership of "resp" and "task".
void DoneGetEntries(UrlFetcher::Response* resp,
                    vector<AsyncLogClient::Entry>* entries,
                    const AsyncLogClient::Callback& done, util::Task* task) {
  unique_ptr<UrlFetcher::Response> resp_deleter(CHECK_NOTNULL(resp));
  unique_ptr<util::Task> task_deleter(CHECK_NOTNULL(task));

  if (!SanityCheck(resp, done, task)) {
    return;
  }

  JsonObject jresponse(resp->body);
  if (!jresponse.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonArray jentries(jresponse, "entries");
  if (!jentries.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  vector<AsyncLogClient::Entry> new_entries;
  new_entries.reserve(jentries.Length());
  for (int n = 0; n < jentries.Length(); ++n) {
    JsonObject entry(jentries, n);
    if (!entry.Ok()) {
      return done(AsyncLogClient::BAD_RESPONSE);
    }

    JsonString leaf_input(entry, "leaf_input");
    if (!leaf_input.Ok()) {
      return done(AsyncLogClient::BAD_RESPONSE);
    }

    AsyncLogClient::Entry log_entry;
    if (Deserializer::DeserializeMerkleTreeLeaf(leaf_input.FromBase64(),
                                                &log_entry.leaf) !=
        Deserializer::OK) {
      return done(AsyncLogClient::BAD_RESPONSE);
    }

    JsonString extra_data(entry, "extra_data");
    if (!extra_data.Ok()) {
      return done(AsyncLogClient::BAD_RESPONSE);
    }

    // This is an optional non-standard extension, used only by the log
    // internally when running in clustered mode.
    JsonString sct_data(entry, "sct");
    if (sct_data.Ok()) {
      unique_ptr<SignedCertificateTimestamp> sct(
          new SignedCertificateTimestamp);
      if (Deserializer::DeserializeSCT(sct_data.FromBase64(), sct.get()) !=
          Deserializer::OK) {
        return done(AsyncLogClient::BAD_RESPONSE);
      }
      log_entry.sct.reset(sct.release());
    }

    // extra_data's interpretation depends on the leaf's entry type.
    if (log_entry.leaf.timestamped_entry().entry_type() == ct::X509_ENTRY) {
      Deserializer::DeserializeX509Chain(extra_data.FromBase64(),
                                         log_entry.entry.mutable_x509_entry());
    } else if (log_entry.leaf.timestamped_entry().entry_type() ==
               ct::PRECERT_ENTRY) {
      Deserializer::DeserializePrecertChainEntry(
          extra_data.FromBase64(), log_entry.entry.mutable_precert_entry());
    } else {
      LOG(FATAL) << "Don't understand entry type: "
                 << log_entry.leaf.timestamped_entry().entry_type();
    }

    new_entries.emplace_back(move(log_entry));
  }

  entries->reserve(entries->size() + new_entries.size());
  move(new_entries.begin(), new_entries.end(), back_inserter(*entries));

  return done(AsyncLogClient::OK);
}

// Completion callback for get-proof-by-hash: fills "proof" from the JSON
// "leaf_index"/"audit_path" fields plus fields copied from the STH that was
// queried against. Takes ownership of "resp" and "task".
// NOTE(review): a malformed audit_path element hits CHECK(path_node.Ok())
// and aborts the process, unlike the BAD_RESPONSE convention used
// everywhere else in this file -- confirm whether that is intentional.
void DoneQueryInclusionProof(UrlFetcher::Response* resp,
                             const SignedTreeHead& sth,
                             MerkleAuditProof* proof,
                             const AsyncLogClient::Callback& done,
                             util::Task* task) {
  unique_ptr<UrlFetcher::Response> resp_deleter(CHECK_NOTNULL(resp));
  unique_ptr<util::Task> task_deleter(CHECK_NOTNULL(task));

  if (!SanityCheck(resp, done, task)) {
    return;
  }

  JsonObject jresponse(resp->body);
  if (!jresponse.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonInt leaf_index(jresponse, "leaf_index");
  if (!leaf_index.Ok() || leaf_index.Value() < 0)
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonArray audit_path(jresponse, "audit_path");
  if (!audit_path.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  vector<string> path_nodes;
  for (int n = 0; n < audit_path.Length(); ++n) {
    JsonString path_node(audit_path, n);
    CHECK(path_node.Ok());
    path_nodes.push_back(path_node.FromBase64());
  }

  // tree_size/timestamp/signature come from the caller's STH, not the
  // response body.
  proof->Clear();
  proof->set_version(ct::V1);
  proof->set_tree_size(sth.tree_size());
  proof->set_timestamp(sth.timestamp());
  proof->mutable_tree_head_signature()->CopyFrom(sth.signature());
  proof->set_leaf_index(leaf_index.Value());

  for (vector<string>::const_iterator it = path_nodes.begin();
       it != path_nodes.end(); ++it) {
    proof->add_path_node(*it);
  }

  return done(AsyncLogClient::OK);
}

// Completion callback for get-sth-consistency: appends the base64-decoded
// "consistency" proof nodes to "proof" once they have all parsed cleanly.
// Takes ownership of "resp" and "task".
void DoneGetSTHConsistency(UrlFetcher::Response* resp, vector<string>* proof,
                           const AsyncLogClient::Callback& done,
                           util::Task* task) {
  unique_ptr<UrlFetcher::Response> resp_deleter(CHECK_NOTNULL(resp));
  unique_ptr<util::Task> task_deleter(CHECK_NOTNULL(task));

  if (!SanityCheck(resp, done, task)) {
    return;
  }

  JsonObject jresponse(resp->body);
  if (!jresponse.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonArray jproof(jresponse, "consistency");
  if (!jproof.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  vector<string> entries;
  for (int i = 0; i < jproof.Length(); ++i) {
    JsonString entry(jproof, i);
    if (!entry.Ok())
      return done(AsyncLogClient::BAD_RESPONSE);

    entries.push_back(entry.FromBase64());
  }

  proof->reserve(proof->size() + entries.size());
  move(entries.begin(), entries.end(), back_inserter(*proof));

  return done(AsyncLogClient::OK);
}

// Completion callback for add-chain / add-pre-chain: parses the returned
// SCT fields (id, timestamp, extensions, signature) into "sct". The output
// is only written after all fields validate. Takes ownership of "resp" and
// "task".
void DoneInternalAddChain(UrlFetcher::Response* resp,
                          SignedCertificateTimestamp* sct,
                          const AsyncLogClient::Callback& done,
                          util::Task* task) {
  unique_ptr<UrlFetcher::Response> resp_deleter(CHECK_NOTNULL(resp));
  unique_ptr<util::Task> task_deleter(CHECK_NOTNULL(task));

  if (!SanityCheck(resp, done, task)) {
    return;
  }

  JsonObject jresponse(resp->body);
  if (!jresponse.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  if (!jresponse.IsType(json_type_object))
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonString id(jresponse, "id");
  if (!id.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonInt timestamp(jresponse, "timestamp");
  if (!timestamp.Ok() || timestamp.Value() < 0)
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonString extensions(jresponse, "extensions");
  if (!extensions.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);

  JsonString jsignature(jresponse, "signature");
  if (!jsignature.Ok())
    return done(AsyncLogClient::BAD_RESPONSE);
  DigitallySigned signature;
  if (Deserializer::DeserializeDigitallySigned(jsignature.FromBase64(),
                                               &signature) != Deserializer::OK)
    return done(AsyncLogClient::BAD_RESPONSE);

  sct->Clear();
  sct->set_version(ct::V1);
  sct->mutable_id()->set_key_id(id.FromBase64());
  sct->set_timestamp(timestamp.Value());
  sct->set_extensions(extensions.FromBase64());
  sct->mutable_signature()->CopyFrom(signature);

  return done(AsyncLogClient::OK);
}

// Ensures the server URL's path ends with a single '/' and appends the
// "ct/v1/" API prefix, so endpoint names can be concatenated directly.
URL NormalizeURL(const string& server_url) {
  URL retval(server_url);

  string newpath(retval.Path());
  if (newpath.empty() || newpath.back() != '/')
    newpath.append("/");
  newpath.append("ct/v1/");
  retval.SetPath(newpath);

  return retval;
}


}  // namespace

namespace cert_trans {


// Both "executor" and "fetcher" must be non-null and outlive this client;
// "server_url" is normalized to point at the log's ct/v1 API root.
AsyncLogClient::AsyncLogClient(util::Executor* const executor,
                               UrlFetcher* fetcher, const string& server_url)
    : executor_(CHECK_NOTNULL(executor)),
      fetcher_(CHECK_NOTNULL(fetcher)),
      server_url_(NormalizeURL(server_url)) {
}


// Asynchronously fetches the log's current signed tree head into "sth";
// "done" is invoked with the result code. The heap-allocated response and
// task are released by the completion callback.
void AsyncLogClient::GetSTH(SignedTreeHead* sth, const Callback& done) {
  UrlFetcher::Response* const resp(new UrlFetcher::Response);
  fetcher_->Fetch(GetURL("get-sth"), resp,
                  new util::Task(bind(DoneGetSTH, resp, sth, done, _1),
                                 executor_));
}


// Asynchronously fetches the log's accepted root certificates into "roots".
void AsyncLogClient::GetRoots(vector<unique_ptr<Cert>>* roots,
                              const Callback& done) {
  UrlFetcher::Response* const resp(new UrlFetcher::Response);
  fetcher_->Fetch(GetURL("get-roots"), resp,
                  new util::Task(bind(DoneGetRoots, resp, roots, done, _1),
                                 executor_));
}


// Fetches entries [first, last] without requesting per-entry SCTs.
void AsyncLogClient::GetEntries(int first, int last, vector<Entry>* entries,
                                const Callback& done) {
  return InternalGetEntries(first, last, entries, false /* request_scts */,
                            done);
}


// Fetches entries [first, last], also requesting the non-standard
// per-entry "sct" field.
void AsyncLogClient::GetEntriesAndSCTs(int first, int last,
                                       vector<Entry>* entries,
                                       const Callback& done) {
  return InternalGetEntries(first, last, entries, true /* request_scts */,
                            done);
}


// Shared implementation for GetEntries/GetEntriesAndSCTs. Bounds must be
// non-negative; last < first fails fast with INVALID_INPUT.
void AsyncLogClient::InternalGetEntries(int first, int last,
                                        vector<Entry>* entries,
                                        bool request_scts,
                                        const Callback& done) {
  CHECK_GE(first, 0);
  CHECK_GE(last, 0);

  if (last < first) {
    done(INVALID_INPUT);
    return;
  }

  URL url(GetURL("get-entries"));
  url.SetQuery("start=" + to_string(first) + "&end=" + to_string(last) +
               (request_scts ? "&include_scts=true" : ""));

  UrlFetcher::Response* const resp(new UrlFetcher::Response);
  fetcher_->Fetch(url, resp,
                  new util::Task(bind(DoneGetEntries, resp, entries, done, _1),
                                 executor_));
}


// Requests an inclusion proof for "merkle_leaf_hash" against the tree
// described by "sth" (get-proof-by-hash); the hash is sent base64- and
// percent-encoded.
void AsyncLogClient::QueryInclusionProof(const SignedTreeHead& sth,
                                         const std::string& merkle_leaf_hash,
                                         MerkleAuditProof* proof,
                                         const Callback& done) {
  CHECK_GE(sth.tree_size(), 0);
  URL url(GetURL("get-proof-by-hash"));
  url.SetQuery("hash=" + UriEncode(util::ToBase64(merkle_leaf_hash)) +
               "&tree_size=" + to_string(sth.tree_size()));

  UrlFetcher::Response* const resp(new UrlFetcher::Response);
  fetcher_->Fetch(url, resp,
                  new util::Task(bind(DoneQueryInclusionProof, resp, sth,
                                      proof, done, _1),
                                 executor_));
}


// Requests a consistency proof between tree sizes "first" and "second".
void AsyncLogClient::GetSTHConsistency(int64_t first, int64_t second,
                                       vector<string>* proof,
                                       const Callback& done) {
  CHECK_GE(first, 0);
  CHECK_GE(second, 0);

  URL url(GetURL("get-sth-consistency"));
  url.SetQuery("first=" + to_string(first) + "&second=" + to_string(second));

  UrlFetcher::Response* const resp(new UrlFetcher::Response);
  fetcher_->Fetch(url, resp,
                  new util::Task(bind(DoneGetSTHConsistency, resp, proof,
                                      done, _1),
                                 executor_));
}


// Submits an X.509 certificate chain (add-chain); the returned SCT is
// written to "sct".
void AsyncLogClient::AddCertChain(const CertChain& cert_chain,
                                  SignedCertificateTimestamp* sct,
                                  const Callback& done) {
  InternalAddChain(cert_chain, sct, false, done);
}


// Submits a precertificate chain (add-pre-chain); the returned SCT is
// written to "sct".
void AsyncLogClient::AddPreCertChain(const PreCertChain& pre_cert_chain,
                                     SignedCertificateTimestamp* sct,
                                     const Callback& done) {
  InternalAddChain(pre_cert_chain, sct, true, done);
}


// Appends "subpath" to the normalized ct/v1 base URL.
URL AsyncLogClient::GetURL(const std::string& subpath) const {
  URL retval(server_url_);
  CHECK(!retval.Path().empty());
  CHECK_EQ(retval.Path().back(), '/');
  retval.SetPath(retval.Path() + subpath);
  return retval;
}


// Shared implementation for AddCertChain/AddPreCertChain: POSTs the chain
// as a JSON array of base64 DER certificates. An unloaded chain fails fast
// with INVALID_INPUT.
void AsyncLogClient::InternalAddChain(const CertChain& cert_chain,
                                      SignedCertificateTimestamp* sct,
                                      bool pre_cert, const Callback& done) {
  if (!cert_chain.IsLoaded())
    return done(INVALID_INPUT);

  JsonArray jchain;
  for (size_t n = 0; n < cert_chain.Length(); ++n) {
    string cert;
    CHECK_EQ(util::Status::OK, cert_chain.CertAt(n)->DerEncoding(&cert));
    jchain.AddBase64(cert);
  }

  JsonObject jsend;
  jsend.Add("chain", jchain);

  UrlFetcher::Request req(GetURL(pre_cert ? "add-pre-chain" : "add-chain"));
  req.verb = UrlFetcher::Verb::POST;
  req.body = jsend.ToString();

  UrlFetcher::Response* const resp(new UrlFetcher::Response);
  fetcher_->Fetch(req, resp,
                  new util::Task(bind(DoneInternalAddChain, resp, sct, done,
                                      _1),
                                 executor_));
}


}  // namespace cert_trans
{ "content_hash": "45720e72c1ebaf13dc12e4fd57ac1a12", "timestamp": "", "source": "github", "line_count": 519, "max_line_length": 79, "avg_line_length": 31.44508670520231, "alnum_prop": 0.617953431372549, "repo_name": "aeijdenberg/certificate-transparency", "id": "d010bd87f8fd21b5a85076e241f09e6e225dd09f", "size": "16320", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "cpp/client/async_log_client.cc", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2898" }, { "name": "C++", "bytes": "1365968" }, { "name": "Go", "bytes": "463212" }, { "name": "HTML", "bytes": "1195" }, { "name": "Java", "bytes": "106756" }, { "name": "Makefile", "bytes": "3207" }, { "name": "Protocol Buffer", "bytes": "23795" }, { "name": "Python", "bytes": "721746" }, { "name": "Shell", "bytes": "60588" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>com.OpenGI</groupId> <artifactId>AWTomation</artifactId> <version>1.0-SNAPSHOT</version> <packaging>jar</packaging> <build> <resources> <resource> <targetPath>META-INF</targetPath> <directory>src</directory> <includes> <include>jax-ws-catalog.xml</include> <include>wsdl/**</include> </includes> </resource> <resource> <directory>src/main/resources</directory> </resource> </resources> <plugins> <plugin> <groupId>org.jvnet.jax-ws-commons</groupId> <artifactId>jaxws-maven-plugin</artifactId> <version>2.3</version> <executions> <execution> <goals> <goal>wsimport</goal> </goals> <configuration> <wsdlFiles> <wsdlFile>localhost_1970/RemoteService/RemoteControl.wsdl</wsdlFile> </wsdlFiles> <packageName>com.opengi.awtomation.client</packageName> <wsdlLocation>http://localhost:1970/RemoteService/RemoteControl?wsdl</wsdlLocation> <staleFile>${project.build.directory}/jaxws/stale/RemoteControl.stale</staleFile> </configuration> <id>wsimport-generate-RemoteControl</id> <phase>generate-sources</phase> </execution> </executions> <dependencies> <dependency> <groupId>javax.xml</groupId> <artifactId>webservices-api</artifactId> <version>2.0</version> </dependency> </dependencies> <configuration> <sourceDestDir>${project.build.directory}/generated-sources/jaxws-wsimport</sourceDestDir> <xnocompile>true</xnocompile> <verbose>true</verbose> <extension>true</extension> <catalog>${basedir}/src/jax-ws-catalog.xml</catalog> <!-- Needed with JAXP 1.5 --> <vmArgs> <vmArg>-Djavax.xml.accessExternalSchema=all</vmArg> </vmArgs> </configuration> </plugin> </plugins> </build> <dependencies> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>4.12</version> </dependency> <dependency> 
<groupId>org.jmock</groupId> <artifactId>jmock-junit4</artifactId> <version>2.8.1</version> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>1.10.19</version> </dependency> </dependencies> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <maven.compiler.source>1.7</maven.compiler.source> <maven.compiler.target>1.7</maven.compiler.target> </properties> </project>
{ "content_hash": "97ae7782fa81a3d1a144c6b88aa66d6e", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 204, "avg_line_length": 40.91208791208791, "alnum_prop": 0.5020145044319098, "repo_name": "computamike/SwingDriver", "id": "3763a9d25d34ca25e2dc43f8fc256705260ae0ec", "size": "3723", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "AWTomation/pom.xml", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "48019" }, { "name": "Java", "bytes": "102619" }, { "name": "PowerShell", "bytes": "3137" } ], "symlink_target": "" }
<?php
// src/Acme/TestBundle/AcmeTestBundle.php

namespace Acme\TestBundle;

use Symfony\Component\HttpKernel\Bundle\Bundle;

/**
 * Registers the Acme test bundle with the Symfony kernel.
 *
 * The class body is intentionally empty: the base Bundle class provides all
 * of the default behaviour needed for registration.
 */
class AcmeTestBundle extends Bundle
{
}
{ "content_hash": "1487fbe558c98cf413e80ddee37b1098", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 47, "avg_line_length": 16.5, "alnum_prop": 0.806060606060606, "repo_name": "oganinalex/newtask", "id": "7dc014d8d8834925d1ff407bc8f2769de6575aa8", "size": "165", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Acme/TestBundle/AcmeTestBundle.php", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "58516" }, { "name": "PHP", "bytes": "83769" } ], "symlink_target": "" }
package com.percolate.sdk.rxjava.request.translate;

import com.percolate.sdk.api.config.Endpoints;
import com.percolate.sdk.dto.Translation;

import java.util.Map;

import retrofit2.http.GET;
import retrofit2.http.QueryMap;
import rx.Observable;

/**
 * Percolate v5/translate API definition.
 */
interface TranslateServiceRx {

    /**
     * Issues a GET against the v5 {@code /translate/} endpoint.
     *
     * @param params query-string parameters, sent verbatim via Retrofit's
     *               {@code @QueryMap}
     * @return an RxJava {@code Observable} emitting the parsed
     *         {@link Translation} response
     */
    @GET(Endpoints.API_V5_PATH + "/translate/")
    Observable<Translation> get(@QueryMap Map<String, Object> params);

}
{ "content_hash": "15836822ecd8ac9838d2e99544cb6d90", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 70, "avg_line_length": 23.736842105263158, "alnum_prop": 0.7649667405764967, "repo_name": "percolate/percolate-java-sdk", "id": "7ac81399ac3510c489a0a42223e672ccf9715721", "size": "451", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rxjava/src/main/java/com/percolate/sdk/rxjava/request/translate/TranslateServiceRx.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Java", "bytes": "1517508" }, { "name": "Kotlin", "bytes": "73015" } ], "symlink_target": "" }
ACCEPTED

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

#### Remarks
null
{ "content_hash": "5e843189b2165eea442c06ae0f847f26", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.307692307692308, "alnum_prop": 0.6940298507462687, "repo_name": "mdoering/backbone", "id": "004b4b5208f9bfcf1a83888600231953077d6fe0", "size": "210", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Bacteria/Cyanobacteria/Oscillatoriales/Oscillatoriaceae/Lyngbya/Lyngbya brasiliensis/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
Enabling SSL on original client daemon
======================================

By default, Exclusivecoin allows JSON-RPC commands to be sent to
http://localhost:17171 and accepts connections only from the localhost.

JSON-RPC Over SSL Setup
-----------------------

It can be configured to allow HTTPS connections. You must follow the steps below for this to work properly.

1. Setup a certificate and private key. A self signed certificate will work.
 * cd ~/.exclusivecoin
 * openssl genrsa -out server.pem 4096
 * openssl req -new -x509 -nodes -sha1 -days 3650 -key server.pem > server.cert
   (NOTE: you should NOT enter a passphrase)
2. Configure Exclusivecoin to use SSL
 * Stop your current exclusivecoind or exclusivecoin-qt
 * Edit the exclusivecoin.conf and add rpcssl=1
3. Restart Exclusivecoin to make these changes take effect.
4. Optionally you can test SSL functionality using the openssl s_client command
 * openssl s_client -connect localhost:17171
{ "content_hash": "731ad4a0dbd2005a94c06716d7aa7ad4", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 87, "avg_line_length": 39.56, "alnum_prop": 0.7108190091001011, "repo_name": "exclfork/ExclusiveCoin", "id": "7745fdfcbe7cec34a50dfd043901869da4bb2fa6", "size": "989", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/ssl.md", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1325906" }, { "name": "C++", "bytes": "4743406" }, { "name": "CSS", "bytes": "1127" }, { "name": "HTML", "bytes": "50635" }, { "name": "Java", "bytes": "2100" }, { "name": "M4", "bytes": "18274" }, { "name": "Makefile", "bytes": "16767" }, { "name": "NSIS", "bytes": "5922" }, { "name": "Objective-C", "bytes": "858" }, { "name": "Objective-C++", "bytes": "5844" }, { "name": "Python", "bytes": "89065" }, { "name": "QMake", "bytes": "58003" }, { "name": "Shell", "bytes": "348283" } ], "symlink_target": "" }
// Implementation of a framed serial protocol:
// [STARTBYTE][id][size][payload bytes, MSB-first per 32-bit word][checksum]
// where checksum is the 8-bit sum of the payload bytes, and all bytes after
// the start byte are escape-encoded (ESCAPEBYTE + (byte ^ ESCAPEMASK)) when
// they collide with STARTBYTE/ESCAPEBYTE. The constants and the helper
// functions setByte()/getFirstData()/getAvailableSize() are declared in the
// header, which is not visible here.

// Starts with no serial port attached; setSerial() must be called first.
EasySerialProtocol::EasySerialProtocol() {
    _serial = NULL;
}

EasySerialProtocol::~EasySerialProtocol() {
//    stop();
//    _serial->flush(true, true); // in, out
//    _serial->close();
}

// Attaches the underlying Stream used for all subsequent I/O. The stream
// must outlive this object.
void EasySerialProtocol::setSerial(Stream &serial) {
    _serial = &serial;
//    setup(&serial); // threaded serial
//    start();
//    _serial->flush(true, true); // in, out
}

// Frames and sends "sizeofData" bytes of "data" as packet id "dataId".
// The payload is treated as an array of serialUnion32 words, each emitted
// MSB-first; the trailing checksum is the 8-bit sum of all payload bytes.
// NOTE(review): sizeofData is truncated to (uint8_t) in the size field, so
// payloads are presumably limited to 255 bytes -- confirm against the
// header's MAX_BODY_SIZE.
void EasySerialProtocol::sendPacket(void *data, size_t sizeofData, int dataId) {
    serialUnion32* sendBuffer = (serialUnion32*)data;
    uint8_t checkSum = 0x00;
    int packetSize = sizeofData/sizeof(serialUnion32);

    sendByte((uint8_t)STARTBYTE, false); // disable escape sequence
    sendByte((uint8_t)dataId);
    sendByte((uint8_t)sizeofData);
    for (int i=0; i<packetSize; i++) {
        for (int j=sizeof(serialUnion32)-1; j>=0; j--) { // MSB First
            sendByte(sendBuffer[i].b[j]);
            checkSum += sendBuffer[i].b[j];
        }
    }
    sendByte((uint8_t)checkSum);
}

// Drains the incoming byte stream through a state machine
// (start -> id -> size -> data -> checksum) and pushes each packet whose
// checksum matches onto _readBuffer. Parser state is kept in function-level
// statics, so parsing resumes across calls when a packet arrives in pieces;
// a readByte() return of 0xFFFF (incomplete escape sequence) suspends
// parsing until more data is available.
void EasySerialProtocol::readPacket() {
    if (getAvailableSize() <= 0) return;

    static packetStruct buffer = {0, 0, {0}};
    static serialState state = byteStart;
    static uint8_t checkSum = 0;
    static bool bUnderParsing = false;
    static int count = 0;

    while (getAvailableSize()) {
        uint16_t buff = readByte();
        uint8_t data = 0;
        if (buff == 0xFFFF) {
#ifdef DEBUG_PRINT
            Serial.println("wait for next data of escape byte");
#endif
            break;
        } else {
            data = (uint8_t)(buff & 0x00FF);
        }

        // Reset the scratch packet and parser state at the start of each
        // new packet.
        if (!bUnderParsing) {
            for (int i=0; i<MAX_BODY_SIZE; i++) buffer.body[i].l = 0;
            buffer.id = 0;
            buffer.size = 0;
            checkSum = 0;
            count = 0;
            bUnderParsing = true;
            state = byteStart;
        }

        switch(state) {
            case byteStart: {
                // Anything other than STARTBYTE here is noise; drop it and
                // wait for the next packet boundary.
                if (data == (uint8_t)STARTBYTE) {
                    state = bytePacketId;
                } else {
                    state = byteStart;
                    bUnderParsing = false;
#ifdef DEBUG_PRINT
                    Serial.print("invalid start byte!! ");
                    Serial.println(data, HEX);
#endif
                }
                break;
            }
            case bytePacketId: {
                buffer.id = (int)data;
                state = bytePacketSize;
                break;
            }
            case bytePacketSize: {
                // Oversized packets are discarded outright.
                if ((int)data > MAX_BODY_SIZE) {
                    bUnderParsing = false;
                    state = byteStart;
#ifdef DEBUG_PRINT
                    Serial.print("too many data size: ");
                    Serial.println(data, HEX);
#endif
                } else {
                    buffer.size = (int)data;
                    state = bytesData;
                }
                break;
            }
            case bytesData: {
                // Bytes arrive MSB-first within each 32-bit word (mirrors
                // sendPacket), so map the running count back to word/byte.
                int nthData = count/sizeof(serialUnion32);
                int nthByte = (sizeof(serialUnion32)-1)-count%sizeof(serialUnion32);
                buffer.body[nthData].b[nthByte] = data;
                checkSum += data;
                ++count;
                if (count >= buffer.size) {
                    count = 0;
                    state = byteCheckSum;
                }
                break;
            }
            case byteCheckSum: {
                // Only checksum-valid packets are delivered; mismatches are
                // silently dropped (logged under DEBUG_PRINT).
                if (checkSum == data) {
                    _readBuffer.push(buffer);
                } else {
#ifdef DEBUG_PRINT
                    Serial.print("invalid checksum: ");
                    Serial.println(data, HEX);
#endif
                }
                state = byteStart;
                bUnderParsing = false;
                break;
            }
            default: {
                state = byteStart;
                bUnderParsing = false;
                break;
            }
        }
    }
}

// Writes one byte, escape-encoding it when it collides with STARTBYTE or
// ESCAPEBYTE (unless isEscape is false, as for the frame's start byte).
void EasySerialProtocol::sendByte(uint8_t data, bool isEscape) {
    uint8_t byteOut = data;
    if (isEscape && ((data == STARTBYTE) || (data == ESCAPEBYTE))) {
        setByte(ESCAPEBYTE);
        byteOut = (uint8_t)(data ^ ESCAPEMASK);
    }
    setByte(byteOut);
}

// Reads one decoded byte. When an ESCAPEBYTE arrives with no byte behind it
// yet, returns the sentinel 0xFFFF and remembers (via a static flag) that
// the next byte read must be un-escaped.
uint16_t EasySerialProtocol::readByte() {
    uint16_t byteIn = (uint16_t)getFirstData();
    static bool bNeedEscape = false;
    if (byteIn == (uint16_t)ESCAPEBYTE) {
        if (getAvailableSize() > 0) {
            byteIn = (uint16_t)getFirstData();
            byteIn = (uint16_t)(byteIn ^ ESCAPEMASK);
            bNeedEscape = false;
        } else {
#ifdef DEBUG_PRINT
            Serial.print("no data after ESCAPE BYTE : ");
            Serial.println(byteIn, HEX);
#endif
            byteIn = 0xFFFF;
            bNeedEscape = true;
        }
    } else {
        if (bNeedEscape) {
            byteIn = (uint16_t)(byteIn ^ ESCAPEMASK);
            bNeedEscape = false;
        }
    }
    return byteIn;
}

// Number of fully-parsed packets waiting in the read buffer.
int EasySerialProtocol::available() {
    return _readBuffer.size();
}

// Packet id of the oldest buffered packet (undefined when none buffered).
int EasySerialProtocol::getId() {
    return _readBuffer.front().id;
}

// Payload length of the oldest buffered packet, in 32-bit elements.
int EasySerialProtocol::getArraySize() {
    return (_readBuffer.front().size)/sizeof(serialUnion32);
}

// Payload length of the oldest buffered packet, in bytes.
int EasySerialProtocol::getByteSize() {
    return _readBuffer.front().size;
}

// Copies the oldest buffered packet's payload into "array" (which must hold
// at least getByteSize() bytes, interpreted as floats) and pops the packet.
void EasySerialProtocol::readDataTo(float *array) {
    if (_readBuffer.size()) {
        memcpy(array, &(_readBuffer.front().body), _readBuffer.front().size);
        _readBuffer.pop();
    } else {
#ifdef DEBUG_PRINT
        Serial.println("no serial packet exists!!");
#endif
    }
}

// Same as above, interpreting the payload as longs.
void EasySerialProtocol::readDataTo(long *array) {
    if (_readBuffer.size()) {
        memcpy(array, &(_readBuffer.front().body), _readBuffer.front().size);
        _readBuffer.pop();
    } else {
#ifdef DEBUG_PRINT
        Serial.println("no serial packet exists!!");
#endif
    }
}
{ "content_hash": "f39dc472eb8e5d2b32c4f7b97e1c3e03", "timestamp": "", "source": "github", "line_count": 230, "max_line_length": 84, "avg_line_length": 24.8, "alnum_prop": 0.5229663394109397, "repo_name": "hideakitai/ofxEasySerialProtocol", "id": "f27344a23eca0e57cf04dacfddedea5e14e6a1e0", "size": "5761", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Arduino/EasySerialProtocol/EasySerialProtocol.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Arduino", "bytes": "1786" }, { "name": "C++", "bytes": "22022" }, { "name": "Makefile", "bytes": "382" }, { "name": "Max", "bytes": "136224" } ], "symlink_target": "" }
Practice-Repository
===================
A repository for practicing forks and pull requests.
{ "content_hash": "9d80eab7d9b405dfe76fbecd9162d6bb", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 48, "avg_line_length": 22.5, "alnum_prop": 0.6555555555555556, "repo_name": "codyflood/Practice-Repository", "id": "cfcd61fe3d32edfae25dbadb114e88ef92f1d416", "size": "90", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "138" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_24) on Wed Apr 17 10:23:34 UTC 2013 --> <META http-equiv="Content-Type" content="text/html; charset=UTF-8"> <TITLE> org.encog.ml.ea.opp.selection (Encog Core 3.2.0-SNAPSHOT API) </TITLE> <META NAME="date" CONTENT="2013-04-17"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style"> </HEAD> <BODY BGCOLOR="white"> <FONT size="+1" CLASS="FrameTitleFont"> <A HREF="../../../../../../org/encog/ml/ea/opp/selection/package-summary.html" target="classFrame">org.encog.ml.ea.opp.selection</A></FONT> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont"> Interfaces</FONT>&nbsp; <FONT CLASS="FrameItemFont"> <BR> <A HREF="SelectionOperator.html" title="interface in org.encog.ml.ea.opp.selection" target="classFrame"><I>SelectionOperator</I></A></FONT></TD> </TR> </TABLE> <TABLE BORDER="0" WIDTH="100%" SUMMARY=""> <TR> <TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont"> Classes</FONT>&nbsp; <FONT CLASS="FrameItemFont"> <BR> <A HREF="TournamentSelection.html" title="class in org.encog.ml.ea.opp.selection" target="classFrame">TournamentSelection</A> <BR> <A HREF="TruncationSelection.html" title="class in org.encog.ml.ea.opp.selection" target="classFrame">TruncationSelection</A></FONT></TD> </TR> </TABLE> </BODY> </HTML>
{ "content_hash": "e0a2608f6fe170b0004936d525618232", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 168, "avg_line_length": 41.91304347826087, "alnum_prop": 0.5285269709543569, "repo_name": "ladygagapowerbot/bachelor-thesis-implementation", "id": "ad6bf31944bb67b063836c0d032f0d91cfe69de1", "size": "1928", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/Encog/apidocs/org/encog/ml/ea/opp/selection/package-frame.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
module RailsAdmin
  module Extensions
    module Pundit
      # This adapter is for the Pundit[https://github.com/elabs/pundit] authorization library.
      # You can create another adapter for different authorization behavior, just be certain it
      # responds to each of the public methods here.
      class AuthorizationAdapter
        # This method is called first time only and used for setup.
        def self.setup
          # BUG FIX: `Module#ancestors` returns Module objects, never strings,
          # so the original guard `ancestors.include? 'Pundit'` could not match
          # and ::Pundit was re-included on every call. Compare with the module
          # itself so the mixin only happens once.
          RailsAdmin::ApplicationController.class_eval do
            include ::Pundit
          end unless RailsAdmin::ApplicationController.ancestors.include?(::Pundit)
        end

        # See the +authorize_with+ config method for where the initialization happens.
        def initialize(controller)
          @controller = controller
        end

        # This method is called in every controller action and should raise an exception
        # when the authorization fails. The first argument is the name of the controller
        # action as a symbol (:create, :bulk_delete, etc.). The second argument is the
        # AbstractModel instance that applies. The third argument is the actual model
        # instance if it is available.
        def authorize(action, abstract_model = nil, model_object = nil)
          record = model_object || abstract_model && abstract_model.model
          if action && !policy(record).send(action_for_pundit(action))
            raise ::Pundit::NotAuthorizedError.new("not allowed to #{action} this #{record}")
          end
          @controller.instance_variable_set(:@_pundit_policy_authorized, true)
        end

        # This method is called primarily from the view to determine whether the given user
        # has access to perform the action on a given model. It should return true when authorized.
        # This takes the same arguments as +authorize+. The difference is that this will
        # return a boolean whereas +authorize+ will raise an exception when not authorized.
        def authorized?(action, abstract_model = nil, model_object = nil)
          record = model_object || abstract_model && abstract_model.model
          policy(record).send(action_for_pundit(action)) if action
        end

        # This is called when needing to scope a database query. It is called within the list
        # and bulk_delete/destroy actions and should return a scope which limits the records
        # to those which the user can perform the given action on.
        def query(_action, abstract_model)
          @controller.send(:policy_scope, abstract_model.model.all)
        rescue ::Pundit::NotDefinedError
          abstract_model.model.all
        end

        # This is called in the new/create actions to determine the initial attributes for new
        # records. It should return a hash of attributes which match what the user
        # is authorized to create.
        def attributes_for(action, abstract_model)
          record = abstract_model && abstract_model.model
          policy(record).try(:attributes_for, action) || {}
        end

        private

        # Look up the Pundit policy for +record+, falling back to the
        # application-wide policy when no specific one is defined.
        def policy(record)
          @controller.send(:policy, record)
        rescue ::Pundit::NotDefinedError
          ::ApplicationPolicy.new(@controller.send(:pundit_user), record)
        end

        # Map a RailsAdmin action name to the Pundit predicate name by
        # appending a trailing "?" unless it is already present.
        def action_for_pundit(action)
          action[-1, 1] == '?' ? action : "#{action}?"
        end
      end
    end
  end
end
{ "content_hash": "a92ea68d19d07d88ba0356a283ab61f4", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 99, "avg_line_length": 46.3013698630137, "alnum_prop": 0.6618343195266272, "repo_name": "engel/rails_admin", "id": "18758b32645c11b620537fbea162b3f28bd31f67", "size": "3380", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "lib/rails_admin/extensions/pundit/authorization_adapter.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "96834" }, { "name": "CoffeeScript", "bytes": "20559" }, { "name": "HTML", "bytes": "49479" }, { "name": "JavaScript", "bytes": "77608" }, { "name": "Ruby", "bytes": "719491" } ], "symlink_target": "" }
class <%= migration_name %> < ActiveRecord::Migration def self.up create_table "<%= table_name %>", :force => true do |t| t.column :name, :string t.column :contact_name, :string t.column :number, :string t.column :phonable_id, :integer t.column :phonable_type, :string end end def self.down drop_table "<%= table_name %>" end end
{ "content_hash": "7ff9e2a4c3cec7cfac38d1a4afc7265e", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 59, "avg_line_length": 30.333333333333332, "alnum_prop": 0.5054945054945055, "repo_name": "datadecisions/acts_as_contactable", "id": "ea3c6b8af1bc008e869ca2a4ed9b3995ae45a0a9", "size": "455", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "generators/contactable/templates/phone_migration.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "4243" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!-- PHP_Codesniffer config file. @link http://pear.php.net/package/PHP_CodeSniffer --> <ruleset name="Custom Standard"> <!-- Exclude sniff rule. @see /usr/lib/php/pear/PHP/CodeSniffer/Standards/PSR1/Sniffs/Classes/ClassDeclarationSniff.php @see /usr/lib/php/pear/PHP/CodeSniffer/Standards/PSR1/Sniffs/Files/SideEffectsSniff.php --> <rule ref="PSR2"> <exclude name="PSR1.Classes.ClassDeclaration"/> <exclude name="PSR1.Files.SideEffects"/> <exclude name="Generic.Files.LineLength"/> <exclude name="PSR1.Methods.CamelCapsMethodName.NotCamelCaps" /> </rule> <file>app/</file> <extensions>php</extensions> <!-- Exclude file and directory. --> <exclude-pattern>*.js</exclude-pattern> <exclude-pattern>*.css</exclude-pattern> <exclude-pattern>*database.php</exclude-pattern> <exclude-pattern>*i18n.php</exclude-pattern> <exclude-pattern>*core.php</exclude-pattern> <exclude-pattern>*/Config/Migration/*</exclude-pattern> <exclude-pattern>*/Config/Schema/*</exclude-pattern> <exclude-pattern>*/Console/Templates/*</exclude-pattern> <exclude-pattern>*/Lib/*</exclude-pattern> <exclude-pattern>*/Plugin/*</exclude-pattern> <exclude-pattern>*/tmp/*</exclude-pattern> <exclude-pattern>*/Vendor/*</exclude-pattern> <exclude-pattern>*/webroot/*</exclude-pattern> </ruleset>
{ "content_hash": "9de91215b06da5140c66091fd33ddc12", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 102, "avg_line_length": 41.31428571428572, "alnum_prop": 0.6756569847856155, "repo_name": "jaambee/open-slideshare", "id": "1767ba5760ae417bdb28e9916396144bfbdcb76e", "size": "1446", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "phpcs.xml", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "449" }, { "name": "Batchfile", "bytes": "936" }, { "name": "CSS", "bytes": "27936" }, { "name": "JavaScript", "bytes": "82058" }, { "name": "PHP", "bytes": "320149" }, { "name": "Shell", "bytes": "1383" } ], "symlink_target": "" }
""" This is a module that contains the tool for python to interact with our JFrog artifactory API. """ import hashlib import json import os import requests class JFrogArtifactory(object): """ This is a class that holds the interacting method for JFrog Artifactory """ def __init__(self, user_cred, artifactory_loc= "http://afeossand1.cec.lab.emc.com/artifactory"): """ Initialize this artifact interaction class :param artifactory_loc: the url for artifactory :param user_cred: the credential that is enough to execute the work required :return: """ self.__artifactory_base = artifactory_loc self.__credential = user_cred self.__session = requests.session() self.__session.auth = self.__credential def __del__(self): """ Object destroyer, close file IO and HTTP session handler on destroy :return: """ self.__session.close() def __str__(self): """ String representation of this class :return: "Interface to JFrog artifactory with {url}." """ str = "Interface to JFrog artifactory at: {url}".\ format(url=self.__artifactory_base) return str def get_package(self, repo_name, package_type, package_name): """ Return the packages list with specific package name :param repo_name: Artifactory Repo name :param package_type: example, for debian package, it's "deb" :param package_name: example, on-http """ uri = "{uri_base}/api/search/prop?{pkg_type}.name={pkg_name}&repos={repo_name}".format(uri_base=self.__artifactory_base, pkg_type=package_type, pkg_name=package_name,repo_name=repo_name) response = self.__session.get(uri) if response.status_code != 200: print "Did not get a 200 in your request: ", uri return None list = response.json() #print "repo list is:\n{0}".format(list) return list def is_version_exist( self, repo_name, package_type, package_name, version_string ): """ Check if a version for specific package exist, by checking remote file names """ ret_json = self.get_package( repo_name, package_type, package_name ) if ret_json is None: return False pkg_list = ret_json['results'] 
desired_ver = package_name+"_"+version_string # this should align the package file name , instead of the version naming for p in pkg_list: if 'uri' in p.keys() : if desired_ver in str(p['uri']): return True return False def get_repo_list(self): uri = "{uri_base}/api/repositories".format(uri_base=self.__artifactory_base) response = self.__session.get(uri) if response.status_code != 200: print "Did not get a 200 in your request: ", uri return None list = response.json() #print "repo list is:\n{0}".format(list) return list def get_artifactory_url(self): """ Getter for artifactory base url :return: string based artifactory url """ return self.__artifactory_base def repo_exists(self, rname): """ Return the existence status of the named repository :param rname: name of the repo to check :return: True (if rname exists), False otherwise """ repolist = self.get_repo_list(); for repo in repolist: if 'key' in repo and repo['key'] == rname: return True return False def new_local_repo(self, rname, description, repo_type="debian"): """ Creates a local repo at pre-given artifactory :param rname: repository name :param description: description of the artifactory :param repo_type: optional -- the type of artifactory default to debian :return: return response instance raise and return any other errors if encounters """ dict_artifact_config = { "key": rname, "rclass": "local", "packageType": repo_type, "description": description, "enableDebianSupport": True, "snapshotVersionBehavior": "unique", "propertySets":["artifactory"] } uri = "{uri_base}/api/repositories/{repo_name}".format( uri_base=self.__artifactory_base, repo_name=rname) print "Trying to PUT\n{data}\nto\n{uri}.\n". 
\ format(data=json.dumps(dict_artifact_config), uri=uri) try: response = self.__session.put( uri, data=json.dumps(dict_artifact_config), headers={"Content-Type": "application/json"} ) if response.status_code != 200: print "Did not get a 200 in your request: " finally: print "Successfully created new repo at artifactory." return response def upload_one_file(self, file_path, repository, dir_path,distribution, component, architecture ): """ This function uploads one file to target repository in artifactory. :param file_path: The path to the file to be uploaded :param repository: The repository folder name that the file will be uploaded to :param dir_path: The directory path that will have in artifactory repository :return: instance of response """ if os.path.exists(file_path): file_name = os.path.basename(file_path) else: raise ValueError("The file path provided\n\t{path}\n" "is not a file.".format(path=file_path)) url = "{uri_base}/{rname}/{dir_path}/{fname}"\ .format(uri_base=self.__artifactory_base, rname=repository, dir_path=dir_path, fname=file_name) # Only let debians have metadata if file_path.endswith(".deb"): url += ";deb.distribution={dist};deb.component={comp};" \ "deb.architecture={arch};".format(dist=distribution, comp=component, arch=architecture) print "Trying to PUT\n{data}\n\tto\n{uri}".format( data=file_path, uri=url) try: with open(file_path, 'rb') as fp: file_data = fp.read() finally: fp.close() response = self.__session.put(url, file_data) if response.status_code != 201: print "Did not get a 201 (Successfully Created) in upload request: " return response # There is successfully created code returned, verify the hashcodes res_content = response.json() md5 = hashlib.md5(file_data).hexdigest() sha1 = hashlib.sha1(file_data).hexdigest() if res_content['checksums']['md5'] != md5 or \ res_content['checksums']['sha1'] != sha1: raise ValueError( 'Upload failure, the md5 or sha1 code returned' ' does not match the local version.') else: print "{file} is 
uploaded successfully.".format(file=file_name) return response def remove_repository(self, repo_name): """ remove all the contents under repository. :param repo_name: the repository that will be deleted :return: instance of response Raise any exceptions if encountered """ url = "{base}/api/repositories/{repo}"\ .format(base=self.__artifactory_base, repo=repo_name) response = self.__session.delete(url) if response.status_code == 200: print "Repository {repo} deleted successfully."\ .format(repo=repo_name) else: print "Did not delete the repository successfully." return response
{ "content_hash": "c809671eb4fee8419ba03e823e8d1b94", "timestamp": "", "source": "github", "line_count": 233, "max_line_length": 194, "avg_line_length": 34.61802575107296, "alnum_prop": 0.5771138110587651, "repo_name": "sunnyqianzhang/on-build-config", "id": "c20396215f29c822928f207c704ad1307c971faf", "size": "8066", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "build-release-tools/lib/ArtifactoryTools.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "83363" }, { "name": "Python", "bytes": "312494" }, { "name": "Ruby", "bytes": "3801" }, { "name": "Shell", "bytes": "146040" }, { "name": "XSLT", "bytes": "1843" } ], "symlink_target": "" }
import warnings

from sympy.matrices.expressions import Identity

from ..utils import PyDyUserWarning
from .visualization_frame import VisualizationFrame

__all__ = ['PerspectiveCamera', 'OrthoGraphicCamera']

warnings.simplefilter('once', PyDyUserWarning)


class PerspectiveCamera(VisualizationFrame):
    """Creates a Perspective Camera for visualization.

    The camera is inherited from VisualizationFrame, so it can be attached
    to dynamics objects to obtain a moving camera, and all the
    transformation-matrix generation methods are applicable.

    Like VisualizationFrame, it can be initialized using:

    1. a RigidBody
    2. a ReferenceFrame and a Point
    3. a ReferenceFrame and a Particle

    One of these must be supplied during initialization. Unlike
    VisualizationFrame, it does not require a Shape argument.

    Parameters
    ==========
    name : str, optional, default='unnamed'
        A name for the PerspectiveCamera.
    fov : float, optional, default=45.0
        Field of view: the angle between the top and bottom of the viewable
        area (in degrees).
    near : float, optional, default=1.0
        The distance of the near plane. All objects closer than this
        distance are not displayed.
    far : float, optional, default=1000.0
        The distance of the far plane. All objects farther than this
        distance are not displayed.

    """

    def __init__(self, *args, **kwargs):
        """Initializes a PerspectiveCamera object.

        Supply a name (optional), a reference frame and point (or a rigid
        body, or a reference frame and particle), plus the optional ``fov``,
        ``near`` and ``far`` keyword arguments.

        Examples
        ========

        >>> from pydy.viz import PerspectiveCamera
        >>> from sympy.physics.mechanics import (ReferenceFrame, Point,
        ...                                      RigidBody, Particle,
        ...                                      inertia)
        >>> from sympy import symbols
        >>> I = ReferenceFrame('I')
        >>> O = Point('O')
        >>> # initializing with reference frame, point
        >>> camera1 = PerspectiveCamera('frame1', I, O)
        >>> Ixx, Iyy, Izz, mass = symbols('Ixx Iyy Izz mass')
        >>> i = inertia(I, Ixx, Iyy, Izz)
        >>> rbody = RigidBody('rbody', O, I, mass, (i, O))
        >>> # initializing with a rigidbody
        >>> camera2 = PerspectiveCamera('frame2', rbody)
        >>> Pa = Particle('Pa', O, mass)
        >>> # initializing with Particle, reference frame
        >>> camera3 = PerspectiveCamera('frame3', I, Pa)

        """
        msg = ("Rotation of Perspective Camera does not work "
               "properly in the visualiser.")
        warnings.warn(msg, PyDyUserWarning)

        # Optional camera parameters with their documented defaults.
        self._fov = kwargs.get('fov', 45.0)
        self._near = kwargs.get('near', 1.0)
        self._far = kwargs.get('far', 1000.0)

        # Now we use the same approach as in VisualizationFrame for setting
        # reference_frame and origin.
        i = 0
        # If the first arg is not a str, name the visualization frame
        # 'unnamed'.
        if isinstance(args[i], str):
            self._name = args[i]
            i += 1
        else:
            self._name = 'unnamed'

        try:
            # A RigidBody provides both the frame and the origin.
            self._reference_frame = args[i].get_frame()
            self._origin = args[i].get_masscenter()
        except AttributeError:
            # It is not a rigidbody, hence this arg should be a reference
            # frame.
            try:
                # Probe an attribute only ReferenceFrame objects carry;
                # raises AttributeError otherwise.
                args[i]._dcm_dict
                self._reference_frame = args[i]
                i += 1
            except AttributeError:
                raise TypeError('A ReferenceFrame is to be supplied '
                                'before a Particle/Point.')

            # Now the next arg can either be a Particle or a Point.
            try:
                self._origin = args[i].get_point()
            except AttributeError:
                self._origin = args[i]

        # Basic thing required: the transform matrix.
        self._transform = Identity(4).as_mutable()

    def __str__(self):
        return 'PerspectiveCamera: ' + self._name

    def __repr__(self):
        return 'PerspectiveCamera'

    @property
    def fov(self):
        """Field of view of the PerspectiveCamera, in degrees.

        Default value is 45 degrees.
        """
        return self._fov

    @fov.setter
    def fov(self, new_fov):
        # BUG FIX: the original check was isinstance(new_fov, (int, str)),
        # which rejected floats and accepted strings -- the opposite of the
        # error message. Accept numbers, as documented.
        if not isinstance(new_fov, (int, float)):
            raise TypeError('fov should be supplied in int or float')
        else:
            self._fov = new_fov

    @property
    def near(self):
        """Near plane distance of the PerspectiveCamera.

        Default value is 1.0.
        """
        return self._near

    @near.setter
    def near(self, new_near):
        # BUG FIX: see fov.setter -- accept int/float, not int/str.
        if not isinstance(new_near, (int, float)):
            raise TypeError('near should be supplied in int or float')
        else:
            self._near = new_near

    @property
    def far(self):
        """Far plane distance of the PerspectiveCamera.

        The default value is 1000.0.
        """
        return self._far

    @far.setter
    def far(self, new_far):
        # BUG FIX: see fov.setter -- accept int/float, not int/str.
        if not isinstance(new_far, (int, float)):
            raise TypeError('far should be supplied in int or float')
        else:
            self._far = new_far

    def generate_scene_dict(self):
        """This method generates information for a static visualization in
        the initial conditions, in the form of a dictionary. This contains
        camera parameters followed by an init_orientation key.

        Before calling this method, all the transformation matrix generation
        methods should be called, or it will give an error.

        Returns
        =======
        scene_dict : dictionary
            A dict with the following keys:

            1. name: name of the camera
            2. fov: field of view value of the camera
            3. near: near value of the camera
            4. far: far value of the camera
            5. init_orientation: initial orientation of the camera

        """
        scene_dict = {id(self): {}}
        scene_dict[id(self)]['name'] = self.name
        scene_dict[id(self)]['type'] = self.__repr__()
        scene_dict[id(self)]['fov'] = self.fov
        scene_dict[id(self)]['near'] = self.near
        scene_dict[id(self)]['far'] = self.far
        scene_dict[id(self)]["simulation_id"] = id(self)
        scene_dict[id(self)]["init_orientation"] = self._visualization_matrix[0]

        return scene_dict

    def generate_simulation_dict(self):
        """Generates the simulation information for this visualization
        frame. It maps the simulation data information to the scene
        information via a unique id.

        Before calling this method, all the transformation matrix generation
        methods should be called, or it will give an error.

        Returns
        =======
        simulation_dict : dictionary
            A dictionary containing a list of 4x4 matrices mapped to the
            unique id as the key.

        """
        simulation_dict = {}
        try:
            simulation_dict[id(self)] = self._visualization_matrix
        except AttributeError:
            # _visualization_matrix only exists once the numerical
            # transformation methods have been run.
            raise RuntimeError("Please call the numerical "
                               "transformation methods, "
                               "before generating visualization dict.")

        return simulation_dict


class OrthoGraphicCamera(VisualizationFrame):
    """Creates an OrthoGraphic Camera for visualization.

    The camera is inherited from ``VisualizationFrame``, so it can be
    attached to dynamics objects to obtain a moving camera, and all the
    transformation-matrix generation methods are applicable.

    Like VisualizationFrame, it can also be initialized using:

    1. a RigidBody
    2. a ReferenceFrame and a Point
    3. a ReferenceFrame and a Particle

    One of these must be supplied during initialization. Unlike
    VisualizationFrame, it doesn't require a Shape argument.

    Parameters
    ==========
    name : str, optional, default='unnamed'
        A name for the OrthoGraphicCamera.
    near : float, optional, default=1
        The distance of the near plane. All objects closer than this
        distance are not displayed.
    far : float, optional, default=1000
        The distance of the far plane. All objects farther than this
        distance are not displayed.

    """

    def __init__(self, *args, **kwargs):
        """Initialises an OrthoGraphicCamera object.

        Supply a name (optional), a reference frame and point (or a rigid
        body, or a reference frame and particle), plus the optional ``near``
        and ``far`` keyword arguments.

        Examples
        ========

        >>> from pydy.viz import OrthoGraphicCamera
        >>> from sympy.physics.mechanics import (ReferenceFrame, Point,
        ...                                      RigidBody, Particle,
        ...                                      inertia)
        >>> from sympy import symbols
        >>> I = ReferenceFrame('I')
        >>> O = Point('O')
        >>> # Initializing with ReferenceFrame, Point
        >>> camera1 = OrthoGraphicCamera('frame1', I, O)
        >>> Ixx, Iyy, Izz, mass = symbols('Ixx Iyy Izz mass')
        >>> i = inertia(I, Ixx, Iyy, Izz)
        >>> rbody = RigidBody('rbody', O, I, mass, (i, O))
        >>> # Initializing with a Rigidbody
        >>> camera2 = OrthoGraphicCamera('frame2', rbody)
        >>> Pa = Particle('Pa', O, mass)
        >>> # Initializing with Particle, ReferenceFrame
        >>> camera3 = OrthoGraphicCamera('frame3', I, Pa)

        """
        # Optional camera parameters with their documented defaults.
        self._near = kwargs.get('near', 1)
        self._far = kwargs.get('far', 1000)

        # Now we use the same approach as in VisualizationFrame for setting
        # reference_frame and origin.
        i = 0
        # If the first arg is not a str, name the visualization frame
        # 'unnamed'.
        if isinstance(args[i], str):
            self._name = args[i]
            i += 1
        else:
            self._name = 'unnamed'

        try:
            # A RigidBody provides both the frame and the origin.
            self._reference_frame = args[i].get_frame()
            self._origin = args[i].get_masscenter()
        except AttributeError:
            # It is not a rigidbody, hence this arg should be a reference
            # frame.
            self._reference_frame = args[i]
            i += 1

            # Now the next arg can either be a Particle or a Point.
            try:
                self._origin = args[i].get_point()
            except AttributeError:
                self._origin = args[i]

        # Basic thing required: the transform matrix.
        self._transform = Identity(4).as_mutable()

    def __str__(self):
        return 'OrthoGraphicCamera: ' + self._name

    def __repr__(self):
        return 'OrthoGraphicCamera'

    @property
    def near(self):
        """Near plane distance of the OrthoGraphicCamera.

        Default value is 1.
        """
        return self._near

    @near.setter
    def near(self, new_near):
        # BUG FIX: the original check was isinstance(new_near, (int, str)),
        # which rejected floats and accepted strings -- the opposite of the
        # error message. Accept numbers, as documented.
        if not isinstance(new_near, (int, float)):
            raise TypeError('near should be supplied in int or float')
        else:
            self._near = new_near

    @property
    def far(self):
        """Far plane distance of the OrthoGraphicCamera.

        Default value is 1000.
        """
        return self._far

    @far.setter
    def far(self, new_far):
        # BUG FIX: see near.setter -- accept int/float, not int/str.
        if not isinstance(new_far, (int, float)):
            raise TypeError('far should be supplied in int or float')
        else:
            self._far = new_far

    def generate_scene_dict(self):
        """This method generates information for a static visualization in
        the initial conditions, in the form of a dictionary. This contains
        camera parameters followed by an init_orientation key.

        Returns
        =======
        scene_dict : dictionary
            A dict with the following keys:

            1. name: name of the camera
            2. near: near value of the camera
            3. far: far value of the camera
            4. init_orientation: initial orientation of the camera

        """
        scene_dict = {id(self): {}}
        scene_dict[id(self)]['name'] = self.name
        scene_dict[id(self)]['type'] = self.__repr__()
        scene_dict[id(self)]['near'] = self.near
        scene_dict[id(self)]['far'] = self.far
        scene_dict[id(self)]["simulation_id"] = id(self)
        scene_dict[id(self)]["init_orientation"] = self._visualization_matrix[0]

        return scene_dict

    def generate_simulation_dict(self):
        """Generates the simulation information for this visualization
        frame. It maps the simulation data information to the scene
        information via a unique id.

        Returns
        =======
        simulation_dict : dictionary
            A dictionary containing a list of 4x4 matrices mapped to the
            unique id as the key.

        """
        simulation_dict = {}
        try:
            simulation_dict[id(self)] = self._visualization_matrix
        except AttributeError:
            # CONSISTENCY FIX: the original passed comma-separated strings to
            # RuntimeError (producing a tuple message); concatenate them as
            # PerspectiveCamera does.
            raise RuntimeError("Please call the numerical "
                               "transformation methods, "
                               "before generating visualization dict.")

        return simulation_dict
{ "content_hash": "5afef5f7d5b88478e7877f9ea97ee988", "timestamp": "", "source": "github", "line_count": 410, "max_line_length": 80, "avg_line_length": 33.86829268292683, "alnum_prop": 0.5774161025493303, "repo_name": "Shekharrajak/pydy", "id": "8eb86b71515766d800b2893d598cd3b83d5ec6e3", "size": "13886", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "pydy/viz/camera.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "102" }, { "name": "CSS", "bytes": "14810" }, { "name": "HTML", "bytes": "15405" }, { "name": "JavaScript", "bytes": "49934" }, { "name": "Python", "bytes": "279080" }, { "name": "Shell", "bytes": "76" } ], "symlink_target": "" }
package org.basex.query.func.archive;

import static org.basex.query.QueryError.*;
import static org.basex.util.Token.*;

import java.io.*;
import java.util.zip.*;

import org.basex.query.*;
import org.basex.query.iter.*;
import org.basex.query.value.*;
import org.basex.query.value.item.*;
import org.basex.util.hash.*;
import org.basex.util.list.*;

/**
 * Function implementation: returns the contents of selected archive entries
 * as base64 binary items.
 *
 * @author BaseX Team 2005-16, BSD License
 * @author Christian Gruen
 */
public class ArchiveExtractBinary extends ArchiveFn {
  @Override
  public Iter iter(final QueryContext qc) throws QueryException {
    // Materializes the full result value and iterates over it.
    return value(qc).iter();
  }

  @Override
  public Value value(final QueryContext qc) throws QueryException {
    // Wrap each extracted byte array in a base64 item.
    final ValueBuilder vb = new ValueBuilder();
    for(final byte[] b : extract(qc)) vb.add(new B64(b));
    return vb.value();
  }

  /**
   * Extracts entries from the archive.
   * @param qc query context
   * @return contents of the matching, non-directory entries
   * @throws QueryException query exception
   */
  final TokenList extract(final QueryContext qc) throws QueryException {
    // First argument: the archive itself; second (optional): entry names.
    final B64 archive = toB64(exprs[0], qc, false);
    // entries() yields null when no name filter was supplied.
    final TokenSet hs = entries(1, qc);
    final TokenList tl = new TokenList();
    try(final ArchiveIn in = ArchiveIn.get(archive.input(info), info)) {
      while(in.more()) {
        final ZipEntry ze = in.entry();
        // Keep a file entry when there is no filter, or when its name is in
        // the requested set. NOTE(review): hs.delete(...) mutates the set,
        // presumably so each requested name matches at most once -- confirm
        // against TokenSet#delete semantics.
        if(!ze.isDirectory() && (hs == null || hs.delete(token(ze.getName())) != 0)) tl.add(in.read());
      }
    } catch(final IOException ex) {
      throw ARCH_FAIL_X.get(info, ex);
    }
    return tl;
  }
}
{ "content_hash": "1332c863dae01058a38a292d06bf487d", "timestamp": "", "source": "github", "line_count": 57, "max_line_length": 84, "avg_line_length": 27.403508771929825, "alnum_prop": 0.6670934699103713, "repo_name": "JensErat/basex", "id": "1a37b2195483b84eb6928b66d21ce2f5ec1c22fe", "size": "1562", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "basex-core/src/main/java/org/basex/query/func/archive/ArchiveExtractBinary.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ActionScript", "bytes": "9372" }, { "name": "Batchfile", "bytes": "2502" }, { "name": "C", "bytes": "17146" }, { "name": "C#", "bytes": "15568" }, { "name": "C++", "bytes": "7796" }, { "name": "CSS", "bytes": "3386" }, { "name": "Common Lisp", "bytes": "3211" }, { "name": "HTML", "bytes": "1057" }, { "name": "Haskell", "bytes": "4065" }, { "name": "Java", "bytes": "6906951" }, { "name": "JavaScript", "bytes": "7990" }, { "name": "Makefile", "bytes": "1234" }, { "name": "PHP", "bytes": "8690" }, { "name": "Perl", "bytes": "7801" }, { "name": "Python", "bytes": "26123" }, { "name": "QMake", "bytes": "377" }, { "name": "Rebol", "bytes": "4731" }, { "name": "Ruby", "bytes": "7359" }, { "name": "Scala", "bytes": "11692" }, { "name": "Shell", "bytes": "3557" }, { "name": "Visual Basic", "bytes": "11957" }, { "name": "XQuery", "bytes": "217517" }, { "name": "XSLT", "bytes": "172" } ], "symlink_target": "" }
# Copyright (c) 2014 The Dogecoin Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# BUG FIX: an unresolved merge conflict (<<<<<<< / ======= / >>>>>>> markers)
# was left in this header and broke the script when sourced; it has been
# resolved in favour of the Dogecoin (HEAD) side.

# Functions used by more than one test

# Print all arguments to stderr.
function echoerr { echo "$@" 1>&2; }

# Usage: ExtractKey <key> "<json_object_string>"
# Warning: this will only work for the very-well-behaved
# JSON produced by bitcoind, do NOT use it to try to
# parse arbitrary/nested/etc JSON.
function ExtractKey {
    echo $2 | tr -d ' "{}\n' | awk -v RS=',' -F: "\$1 ~ /$1/ { print \$2}"
}

# Usage: CreateDataDir <dir> [extra-config-line...]
# Creates <dir> and writes a regtest dogecoin.conf inside it; any extra
# arguments are appended to the config verbatim.
function CreateDataDir {
  DIR=$1
  mkdir -p $DIR
  CONF=$DIR/dogecoin.conf
  echo "regtest=1" >> $CONF
  echo "keypool=2" >> $CONF
  echo "rpcuser=rt" >> $CONF
  echo "rpcpassword=rt" >> $CONF
  echo "rpcwait=1" >> $CONF
  echo "walletnotify=${SENDANDWAIT} -STOP" >> $CONF
  shift
  while (( "$#" )); do
      echo $1 >> $CONF
      shift
  done
}

# Usage: AssertEqual <a> <b>
# Numeric comparison via bc; on mismatch runs CleanUp (if defined) and exits.
function AssertEqual {
  if (( $( echo "$1 == $2" | bc ) == 0 ))
  then
    echoerr "AssertEqual: $1 != $2"
    declare -f CleanUp > /dev/null 2>&1
    if [[ $? -eq 0 ]] ; then
        CleanUp
    fi
    exit 1
  fi
}

# Usage: CheckBalance <datadir> <amount> [account] [minconf]
# Fails the test run when getbalance does not match <amount>.
function CheckBalance {
  declare -i EXPECT="$2"
  B=$( $CLI $1 getbalance $3 $4 )
  if (( $( echo "$B == $EXPECT" | bc ) == 0 ))
  then
    echoerr "bad balance: $B (expected $2)"
    declare -f CleanUp > /dev/null 2>&1
    if [[ $? -eq 0 ]] ; then
        CleanUp
    fi
    exit 1
  fi
}

# Usage: Address <datadir> [account]
function Address {
  $CLI $1 getnewaddress $2
}

# Usage: Send <from-datadir> <to-datadir> <amount>
function Send {
  from=$1
  to=$2
  amount=$3
  address=$(Address $to)
  txid=$( ${SENDANDWAIT} $CLI $from sendtoaddress $address $amount )
}

# Usage: Unspent <datadir> <n'th-last-unspent> <field>
# Extracts one field from the n'th-last entry of listunspent.
function Unspent {
  local r=$( $CLI $1 listunspent | awk -F'[ |:,"]+' "\$2 ~ /$3/ { print \$3 }" | tail -n $2 | head -n 1)
  echo $r
}

# Usage: CreateTxn1 <datadir> <n'th-last-unspent> <destaddress>
# produces hex from signrawtransaction
function CreateTxn1 {
  TXID=$(Unspent $1 $2 txid)
  AMOUNT=$(Unspent $1 $2 amount)
  VOUT=$(Unspent $1 $2 vout)
  RAWTXN=$( $CLI $1 createrawtransaction "[{\"txid\":\"$TXID\",\"vout\":$VOUT}]" "{\"$3\":$AMOUNT}")
  ExtractKey hex "$( $CLI $1 signrawtransaction $RAWTXN )"
}

# Usage: SendRawTxn <datadir> <hex_txn_data>
function SendRawTxn {
  ${SENDANDWAIT} $CLI $1 sendrawtransaction $2
}

# Usage: GetBlocks <datadir>
# returns number of blocks from getblockcount
function GetBlocks {
  $CLI $1 getblockcount
}
{ "content_hash": "d24f5ad25dfa14a293b3513b0f0e36f2", "timestamp": "", "source": "github", "line_count": 106, "max_line_length": 104, "avg_line_length": 25.09433962264151, "alnum_prop": 0.6225563909774436, "repo_name": "koharjidan/dogecoin", "id": "1fff3c86ff41eb37f95a268de96f761554d5f334", "size": "2729", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "qa/rpc-tests/util.sh", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "511322" }, { "name": "C++", "bytes": "4407070" }, { "name": "CSS", "bytes": "1127" }, { "name": "Groff", "bytes": "21309" }, { "name": "HTML", "bytes": "50621" }, { "name": "Java", "bytes": "2100" }, { "name": "Makefile", "bytes": "66303" }, { "name": "Objective-C", "bytes": "2023" }, { "name": "Objective-C++", "bytes": "7238" }, { "name": "Protocol Buffer", "bytes": "2449" }, { "name": "Python", "bytes": "444071" }, { "name": "QMake", "bytes": "17962" }, { "name": "Shell", "bytes": "57826" } ], "symlink_target": "" }
namespace Microsoft.Azure.Management.Billing.Models { using Microsoft.Rest; using Microsoft.Rest.Serialization; using Newtonsoft.Json; using System.Linq; /// <summary> /// The customer's Policy. /// </summary> [Rest.Serialization.JsonTransformation] public partial class CustomerPolicy : Resource { /// <summary> /// Initializes a new instance of the CustomerPolicy class. /// </summary> public CustomerPolicy() { CustomInit(); } /// <summary> /// Initializes a new instance of the CustomerPolicy class. /// </summary> /// <param name="id">Resource Id.</param> /// <param name="name">Resource name.</param> /// <param name="type">Resource type.</param> /// <param name="viewCharges">The policy that controls whether the /// users in customer's organization can view charges at pay-as-you-go /// prices. Possible values include: 'Allowed', 'NotAllowed'</param> public CustomerPolicy(string id = default(string), string name = default(string), string type = default(string), string viewCharges = default(string)) : base(id, name, type) { ViewCharges = viewCharges; CustomInit(); } /// <summary> /// An initialization method that performs custom operations like setting defaults /// </summary> partial void CustomInit(); /// <summary> /// Gets or sets the policy that controls whether the users in /// customer's organization can view charges at pay-as-you-go prices. /// Possible values include: 'Allowed', 'NotAllowed' /// </summary> [JsonProperty(PropertyName = "properties.viewCharges")] public string ViewCharges { get; set; } } }
{ "content_hash": "1c564ea10a4b869bf73c06cd37fa664d", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 158, "avg_line_length": 35.80769230769231, "alnum_prop": 0.602577873254565, "repo_name": "AsrOneSdk/azure-sdk-for-net", "id": "312bd9f3d5c8c22a13afdf3fcfc1130e0c276d86", "size": "2215", "binary": false, "copies": "8", "ref": "refs/heads/psSdkJson6Current", "path": "sdk/billing/Microsoft.Azure.Management.Billing/src/Generated/Models/CustomerPolicy.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "15473" }, { "name": "Bicep", "bytes": "13438" }, { "name": "C#", "bytes": "72203239" }, { "name": "CSS", "bytes": "6089" }, { "name": "Dockerfile", "bytes": "5652" }, { "name": "HTML", "bytes": "6169271" }, { "name": "JavaScript", "bytes": "16012" }, { "name": "PowerShell", "bytes": "649218" }, { "name": "Shell", "bytes": "31287" }, { "name": "Smarty", "bytes": "11135" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <project name="Brackets Windows installer" basedir="." xmlns:antcontrib="antlib:net.sf.antcontrib" default="build.mul"> <!-- Key locations on disk --> <property name="wix.base" value="C:\Program Files\Windows Installer XML v3.5"/> <property name="brackets.staging" value=".\staging"/> <!-- Product & version labeling --> <!-- See also: product name definitions in Brackets_<locale>.wxl --> <property name="product.shortname" value="StarUML"/> <property name="product.version.number" value="2.5.0"/> <property name="product.version.name" value="v${product.version.number}"/> <property name="product.fullname" value="StarUML-${product.version.name}"/> <property name="product.manufacturer" value="MKLab"/> <property name="product.registry.root" value="${product.shortname}"/> <!-- Installer properties --> <property name="heat.cmd" value="${wix.base}/bin/heat.exe"/> <property name="candle.cmd" value="${wix.base}/bin/candle.exe"/> <property name="light.cmd" value="${wix.base}/bin/light.exe"/> <property name="msitran.cmd" value="${wix.base}/bin/MsiTran.exe"/> <property name="wisubstg.cmd" value="${wix.base}/bin/WiSubStg.vbs"/> <!-- Build WIX installer --> <target name="build.english" > <echo level="info">Building English installer for build: ${product.version.name} </echo> <!-- heat, candle, light--> <echo message="Generating fileset."/> <exec dir="" executable="${heat.cmd}"> <arg line="dir ${brackets.staging} -cg BRACKETSHARVESTMANAGER -gg -scom -sreg -sfrag -srd -dr INSTALLDIR -out bracketsharvestmanager.wxs"/> </exec> <echo message="Prepping installer source."/> <exec dir="" executable="${candle.cmd}"> <arg line="Brackets.wxs WixUI_InstallDir.wxs ExitDialog.wxs UserExit.wxs ProgressDlg.wxs MaintenanceWelcomeDlg.wxs InstallDirDlg.wxs bracketsharvestmanager.wxs VerifyReadyDlg.wxs BrowseDlg.wxs -ext WixUtilExtension -dcodepage=1252 -dProductVersionNumber='${product.version.number}' -dProductVersionName='${product.version.name}' 
-dProductManufacturer='${product.manufacturer}' -dRegistryRoot='${product.registry.root}' -dExeName='${product.shortname}' -dIconFile=appicon.ico -dlicenseRtf=License.rtf"/> </exec> <echo message="Compiling installer package."/> <exec dir="" executable="${light.cmd}"> <arg line="-b ${brackets.staging} -dvar.Version=${product.version.number} -ext WixUIExtension -ext WixUtilExtension -cultures:en-us Brackets.wixobj bracketsharvestmanager.wixobj WixUI_InstallDir.wixobj ExitDialog.wixobj InstallDirDlg.wixobj UserExit.wixobj ProgressDlg.wixobj MaintenanceWelcomeDlg.wixobj BrowseDlg.wixobj VerifyReadyDlg.wixobj -out '${product.fullname}.msi' -loc Brackets_en-us.wxl"/> </exec> </target> <target name="build.french" > <echo level="info">Building French installer for build: ${product.version.name} </echo> <!-- heat, candle, light--> <echo message="Generating fileset."/> <exec dir="" executable="${heat.cmd}"> <arg line="dir ${brackets.staging} -cg BRACKETSHARVESTMANAGER -gg -scom -sreg -sfrag -srd -dr INSTALLDIR -out bracketsharvestmanager.wxs"/> </exec> <echo message="Prepping installer source."/> <exec dir="" executable="${candle.cmd}" > <arg line="Brackets.wxs WixUI_InstallDir.wxs ExitDialog.wxs UserExit.wxs ProgressDlg.wxs MaintenanceWelcomeDlg.wxs BrowseDlg.wxs InstallDirDlg.wxs bracketsharvestmanager.wxs VerifyReadyDlg.wxs -ext WixUtilExtension -dcodepage=1252 -dProductVersionNumber='${product.version.number}' -dProductVersionName='${product.version.name}' -dProductManufacturer='${product.manufacturer}' -dRegistryRoot='${product.registry.root}' -dExeName='${product.shortname}' -dIconFile=appicon.ico -dlicenseRtf=License.rtf"/> </exec> <echo message="Compiling installer package."/> <exec dir="" executable="${light.cmd}"> <arg line="-b ${brackets.staging} -dvar.Version=${product.version.number} -ext WixUIExtension -ext WixUtilExtension -cultures:fr-fr Brackets.wixobj bracketsharvestmanager.wixobj WixUI_InstallDir.wixobj ExitDialog.wixobj InstallDirDlg.wixobj 
UserExit.wixobj ProgressDlg.wixobj MaintenanceWelcomeDlg.wixobj BrowseDlg.wixobj VerifyReadyDlg.wixobj -out Brackets_fr-fr.msi -loc Brackets_fr-fr.wxl"/> </exec> </target> <!-- to build a single msi for english and french --> <target name="build.mul" depends="build.english, build.french" description="Build the final multi-locale MSI"> <exec executable='"${msitran.cmd}"' failonerror='true'> <arg line="-g '${product.fullname}.msi' brackets_fr-fr.msi fr-fr.mst"/> </exec> <exec executable="cscript" failonerror='true'> <arg line='"${wisubstg.cmd}" "${product.fullname}.msi" fr-fr.mst 1036'/> </exec> <exec executable="cscript" failonerror='true'> <arg line='"${wisubstg.cmd}" "${product.fullname}.msi"'/> </exec> </target> <!-- this gets the latest build number by looking at the last modified folder and grabbing the digits at the end of \Builds*--> <target name='get.latest.build.dir.name'> <antcontrib:timestampselector property='latest.build.dir'> <path> <fileset dir='\\sjshare.corp.adobe.com\edge\AdobeEdgeCode\Brackets\Win'/> </path> </antcontrib:timestampselector> <echo>${latest.build.dir}</echo> <antcontrib:propertyregex property='latest.build.number' input='${latest.build.dir}' regexp='(Build)(\d{1,})' select='\2'/> <echo>${latest.build.number}</echo> <antcontrib:math result='new.build.number' operand1='${latest.build.number}' operation='+' operand2='1' datatype='int'/> <echo>${new.build.number}</echo> </target> <target name="post.build" depends=""></target> <!-- codesign msi file --> <target name="codesign.installer.win" description="runs signtool on msi file"> <property name="cert.file" value="C:/Certificates/CS_Certs/Adobe_Systems_CS.cer"/> <property name="cert.name" value="Adobe Systems Incorporated"/> <!-- <property name="cert.file" value="C:/Certificates/CS_Certs/Adobe_testcodesigner.cer"/> <property name="cert.name" value="AdobeTestCodeSigning"/> --> <codesign.file.win file="${product.fullname}.msi" cert.file="${cert.file}" cert.name="${cert.name}"/> </target> 
<!-- Cleanup targets --> <target name="clean" depends="clean.temp" description="Remove all Wix generated files from installer\win"> <delete><fileset dir="." includes="*.msi" /></delete> </target> <target name="clean.temp" description="Remove intermediate Wix files from installer\win, leaving final MSI in place"> <delete file="bracketsharvestmanager.wxs" /> <delete file="fr-fr.mst" /> <delete><fileset dir="." includes="*.wixobj" /></delete> <delete><fileset dir="." includes="*.wixpdb" /></delete> </target> </project>
{ "content_hash": "25452a8b8430e994d1b0ff324cb687c7", "timestamp": "", "source": "github", "line_count": 202, "max_line_length": 145, "avg_line_length": 42.17821782178218, "alnum_prop": 0.5650234741784037, "repo_name": "niklauslee/staruml-shell-v2.5", "id": "0cdb8444bcf7875ed2817111838d29fdcb544f3e", "size": "8520", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "installer/win/brackets-win-install-build.xml", "mode": "33261", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "824" }, { "name": "C", "bytes": "10687" }, { "name": "C++", "bytes": "419074" }, { "name": "JavaScript", "bytes": "131898" }, { "name": "Objective-C", "bytes": "22670" }, { "name": "Objective-C++", "bytes": "135635" }, { "name": "Python", "bytes": "1102377" }, { "name": "Shell", "bytes": "12553" }, { "name": "Visual Basic", "bytes": "11788" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <!-- Written by libSedML v1.1.5123.24255 see http://libsedml.sf.net --> <sedML level="1" version="1" xmlns="http://sed-ml.org/"> <listOfSimulations> <uniformTimeCourse id="simulation_1" initialTime="0" outputStartTime="0" outputEndTime="5" numberOfPoints="50"> <algorithm kisaoID="KISAO:0000019" /> </uniformTimeCourse> </listOfSimulations> <listOfModels> <model id="model_1" language="urn:sedml:language:sbml" source="00677-sbml-l2v3.xml" /> </listOfModels> <listOfTasks> <task id="task_1" modelReference="model_1" simulationReference="simulation_1" /> </listOfTasks> <listOfDataGenerators> <dataGenerator id="time_1" name="time"> <listOfVariables> <variable id="time" taskReference="task_1" symbol="urn:sedml:symbol:time" /> </listOfVariables> <math xmlns="http://www.w3.org/1998/Math/MathML"> <ci> time </ci> </math> </dataGenerator> <dataGenerator id="S1_1" name="S1"> <listOfVariables> <variable id="S1_1_s" taskReference="task_1" target="/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S1']" /> <variable id="S1_1_c" taskReference="task_1" target="/sbml:sbml/sbml:model/sbml:listOfCompartments/sbml:compartment[@id='C']" /> </listOfVariables> <math xmlns="http://www.w3.org/1998/Math/MathML"> <apply> <times /> <ci> S1_1_s </ci> <ci> S1_1_c </ci> </apply> </math> </dataGenerator> <dataGenerator id="S2_1" name="S2"> <listOfVariables> <variable id="S2_1_s" taskReference="task_1" target="/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S2']" /> <variable id="S2_1_c" taskReference="task_1" target="/sbml:sbml/sbml:model/sbml:listOfCompartments/sbml:compartment[@id='C']" /> </listOfVariables> <math xmlns="http://www.w3.org/1998/Math/MathML"> <apply> <times /> <ci> S2_1_s </ci> <ci> S2_1_c </ci> </apply> </math> </dataGenerator> </listOfDataGenerators> <listOfOutputs> <plot2D id="plot_1" name="Timecourse for test 00677"> <listOfCurves> <curve id="c_S1_1" name="S1" logX="false" logY="false" xDataReference="time_1" 
yDataReference="S1_1" /> <curve id="c_S2_1" name="S2" logX="false" logY="false" xDataReference="time_1" yDataReference="S2_1" /> </listOfCurves> </plot2D> <report id="report_1" name="Report for test 00677"> <listOfDataSets> <dataSet id="ds_time" dataReference="time_1" label="Time" /> <dataSet id="ds_S1_1" dataReference="S1_1" label="S1" /> <dataSet id="ds_S2_1" dataReference="S2_1" label="S2" /> </listOfDataSets> </report> </listOfOutputs> </sedML>
{ "content_hash": "dc17f3f4d20592aa6241bb85db5c0979", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 136, "avg_line_length": 42.36363636363637, "alnum_prop": 0.6176680972818311, "repo_name": "stanleygu/sbmltest2archive", "id": "debcddb7eab0cd079a68c11496efbc55bbe5dcf3", "size": "2796", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sbml-test-cases/cases/semantic/00677/00677-sbml-l2v3-sedml.xml", "mode": "33188", "license": "mit", "language": [ { "name": "M", "bytes": "12509" }, { "name": "Mathematica", "bytes": "721776" }, { "name": "Matlab", "bytes": "1729754" }, { "name": "Objective-C", "bytes": "144988" } ], "symlink_target": "" }
<div class="row"> <div class="col-sm-3 sidebar"> <div class="pa-sidebar well well-medium"> <ul class="nav nav-list" style="overflow:hidden; overflow-y:scroll; height:300px;"> <p class="nav-header"><strong>Servers</strong></p> <!-- This <li> will only add the 'active' class if 'servers.detail' or its descendants are active AND if it is the link for the active server (aka serverId) --> <li ng-repeat="server in servers.data" ui-sref-active="active"> <!-- Here's a ui-sref that is also providing necessary parameters --> <a ui-sref="servers.detail.details({serverId:server.ID})">{{server.Name}}</a> </li> </ul> <hr> <!-- Another named view --> <div ui-view="menuTip" class="slide"></div> </div> </div> <!-- Our unnamed main ui-view for this template --> <div ui-view class="col-sm-9 main slide"></div> </div>
{ "content_hash": "094cfb29fb26a5ad8a7c440a010ab538", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 105, "avg_line_length": 40.08695652173913, "alnum_prop": 0.6008676789587852, "repo_name": "CrunchyData/crunchy-postgresql-manager", "id": "27ddf2ce0ab3c27a69553f639b8100fb76fa2247", "size": "922", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "images/cpm/www/v3/app/servers/servers.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Awk", "bytes": "1216" }, { "name": "CSS", "bytes": "53945" }, { "name": "Go", "bytes": "439271" }, { "name": "HTML", "bytes": "2715264" }, { "name": "JavaScript", "bytes": "703548" }, { "name": "Makefile", "bytes": "5821" }, { "name": "Nginx", "bytes": "826" }, { "name": "Perl", "bytes": "613980" }, { "name": "Shell", "bytes": "100612" } ], "symlink_target": "" }
/* vim: set tabstop=4 : */ #ifndef __febird_io_load_save_file_h__ #define __febird_io_load_save_file_h__ #if defined(_MSC_VER) && (_MSC_VER >= 1020) # pragma once #endif #include "mem_map_stream.h" #include "DataIO.h" //#include "../statistic_time.h" namespace febird { //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // for convenient using... #define FEBIRD_LOAD_FUNCTION_NAME native_load_from_file #define FEBIRD_SAVE_FUNCTION_NAME native_save_to_file #define FEBIRD_DATA_INPUT_CLASS LittleEndianDataInput #define FEBIRD_DATA_OUTPUT_CLASS LittleEndianDataOutput #define FEBIRD_DATA_INPUT_LOAD_FROM(input, x) input >> x #define FEBIRD_DATA_OUTPUT_SAVE_TO(output, x) output << x #include "load_save_convenient.h" #define FEBIRD_LOAD_FUNCTION_NAME portable_load_from_file #define FEBIRD_SAVE_FUNCTION_NAME portable_save_to_file #define FEBIRD_DATA_INPUT_CLASS PortableDataInput #define FEBIRD_DATA_OUTPUT_CLASS PortableDataOutput #define FEBIRD_DATA_INPUT_LOAD_FROM(input, x) input >> x #define FEBIRD_DATA_OUTPUT_SAVE_TO(output, x) output << x #include "load_save_convenient.h" #define FEBIRD_LOAD_FUNCTION_NAME dump_load_from_file #define FEBIRD_SAVE_FUNCTION_NAME dump_save_to_file #define FEBIRD_DATA_INPUT_CLASS LittleEndianDataInput #define FEBIRD_DATA_OUTPUT_CLASS LittleEndianDataOutput #define FEBIRD_DATA_INPUT_LOAD_FROM(input, x) DataIO_dump_load_object(input, x) #define FEBIRD_DATA_OUTPUT_SAVE_TO(output, x) DataIO_dump_save_object(output, x) #include "load_save_convenient.h" /** @brief ¸üÐÂÎļþ */ #define FEBIRD_SAVE_FUNCTION_NAME dump_update_file_only #define FEBIRD_DATA_OUTPUT_CLASS LittleEndianDataOutput #define FEBIRD_DATA_OUTPUT_SAVE_TO(output, x) x.dump_save(output) #define FEBIRD_SAVE_FILE_OPEN_MODE "rb+" //!< ¿É¶Á¿Éд #include "load_save_convenient.h" template<class Object> void dump_update_file(const Object& x, const std::string& szFile, bool printLog=true) { try { dump_update_file_only(x, szFile, printLog); } catch (const 
OpenFileException&) { dump_save_to_file(x, szFile, printLog); } } //~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ } // namespace febird #endif // __febird_io_load_save_file_h__
{ "content_hash": "b825c9b232e2bd11a7bde0cc99718fcc", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 88, "avg_line_length": 34.31884057971015, "alnum_prop": 0.6537162162162162, "repo_name": "izenecloud/izenelib", "id": "d045bd31d376fef386f8a8fe3690967434cba6c6", "size": "2368", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "include/3rdparty/febird/io/load_save_file.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "81555" }, { "name": "C++", "bytes": "15025100" }, { "name": "Makefile", "bytes": "21662" }, { "name": "Ruby", "bytes": "699" }, { "name": "Shell", "bytes": "1359" } ], "symlink_target": "" }
package com.intellij.packageDependencies.ui; import com.intellij.analysis.AnalysisScopeBundle; import consulo.disposer.Disposable; import com.intellij.openapi.actionSystem.DataProvider; import com.intellij.openapi.actionSystem.PlatformDataKeys; import consulo.logging.Logger; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.project.Project; import consulo.disposer.Disposer; import consulo.util.dataholder.Key; import com.intellij.psi.PsiElement; import com.intellij.usageView.UsageInfo; import com.intellij.usages.*; import com.intellij.util.Alarm; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.swing.*; import java.awt.*; public abstract class UsagesPanel extends JPanel implements Disposable, DataProvider { protected static final Logger LOG = Logger.getInstance(UsagesPanel.class); private final Project myProject; protected ProgressIndicator myCurrentProgress; private JComponent myCurrentComponent; private UsageView myCurrentUsageView; protected final Alarm myAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD); public UsagesPanel(@Nonnull Project project) { super(new BorderLayout()); myProject = project; } public void setToInitialPosition() { cancelCurrentFindRequest(); setToComponent(createLabel(getInitialPositionText())); } public abstract String getInitialPositionText(); public abstract String getCodeUsagesString(); void cancelCurrentFindRequest() { if (myCurrentProgress != null) { myCurrentProgress.cancel(); } } protected void showUsages(@Nonnull PsiElement[] primaryElements, @Nonnull UsageInfo[] usageInfos) { if (myCurrentUsageView != null) { Disposer.dispose(myCurrentUsageView); } try { Usage[] usages = UsageInfoToUsageConverter.convert(primaryElements, usageInfos); UsageViewPresentation presentation = new UsageViewPresentation(); presentation.setCodeUsagesString(getCodeUsagesString()); myCurrentUsageView = 
UsageViewManager.getInstance(myProject).createUsageView(UsageTarget.EMPTY_ARRAY, usages, presentation, null); setToComponent(myCurrentUsageView.getComponent()); } catch (ProcessCanceledException e) { setToCanceled(); } } private void setToCanceled() { setToComponent(createLabel(AnalysisScopeBundle.message("usage.view.canceled"))); } void setToComponent(final JComponent cmp) { SwingUtilities.invokeLater(() -> { if (myProject.isDisposed()) return; if (myCurrentComponent != null) { if (myCurrentUsageView != null && myCurrentComponent == myCurrentUsageView.getComponent()) { Disposer.dispose(myCurrentUsageView); myCurrentUsageView = null; } remove(myCurrentComponent); } myCurrentComponent = cmp; add(cmp, BorderLayout.CENTER); revalidate(); }); } @Override public void dispose() { if (myCurrentUsageView != null) { Disposer.dispose(myCurrentUsageView); myCurrentUsageView = null; } } private static JComponent createLabel(String text) { JLabel label = new JLabel(text); label.setHorizontalAlignment(SwingConstants.CENTER); return label; } @Override @Nullable public Object getData(@Nonnull Key dataId) { if (PlatformDataKeys.HELP_ID == dataId) { return "ideaInterface.find"; } return null; } }
{ "content_hash": "4be1bef2f60c65ba3d473c985478324f", "timestamp": "", "source": "github", "line_count": 113, "max_line_length": 136, "avg_line_length": 30.805309734513273, "alnum_prop": 0.7408790577420281, "repo_name": "consulo/consulo", "id": "f3a497b16656d28f2c5bba3081704f45b3b02891", "size": "4081", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/base/lang-impl/src/main/java/com/intellij/packageDependencies/ui/UsagesPanel.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "299" }, { "name": "C", "bytes": "52718" }, { "name": "C++", "bytes": "72795" }, { "name": "CMake", "bytes": "854" }, { "name": "CSS", "bytes": "64655" }, { "name": "Groovy", "bytes": "36006" }, { "name": "HTML", "bytes": "173780" }, { "name": "Java", "bytes": "64026758" }, { "name": "Lex", "bytes": "5909" }, { "name": "Objective-C", "bytes": "23787" }, { "name": "Python", "bytes": "3276" }, { "name": "SCSS", "bytes": "9782" }, { "name": "Shell", "bytes": "5689" }, { "name": "Thrift", "bytes": "1216" }, { "name": "XSLT", "bytes": "49230" } ], "symlink_target": "" }
//===- RewriterGen.cpp - MLIR pattern rewriter generator ------------------===// // // Copyright 2019 The MLIR Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ============================================================================= // // RewriterGen uses pattern rewrite definitions to generate rewriter matchers. // //===----------------------------------------------------------------------===// #include "mlir/Support/STLExtras.h" #include "mlir/TableGen/Attribute.h" #include "mlir/TableGen/Format.h" #include "mlir/TableGen/GenInfo.h" #include "mlir/TableGen/Operator.h" #include "mlir/TableGen/Pattern.h" #include "mlir/TableGen/Predicate.h" #include "mlir/TableGen/Type.h" #include "llvm/ADT/StringExtras.h" #include "llvm/ADT/StringSet.h" #include "llvm/Support/CommandLine.h" #include "llvm/Support/Debug.h" #include "llvm/Support/FormatAdapters.h" #include "llvm/Support/PrettyStackTrace.h" #include "llvm/Support/Signals.h" #include "llvm/TableGen/Error.h" #include "llvm/TableGen/Main.h" #include "llvm/TableGen/Record.h" #include "llvm/TableGen/TableGenBackend.h" using namespace mlir; using namespace mlir::tblgen; using llvm::formatv; using llvm::Record; using llvm::RecordKeeper; #define DEBUG_TYPE "mlir-tblgen-rewritergen" namespace llvm { template <> struct format_provider<mlir::tblgen::Pattern::IdentifierLine> { static void format(const mlir::tblgen::Pattern::IdentifierLine &v, raw_ostream &os, StringRef style) { os << v.first << ":" << v.second; } }; 
} // end namespace llvm //===----------------------------------------------------------------------===// // PatternEmitter //===----------------------------------------------------------------------===// namespace { class PatternEmitter { public: PatternEmitter(Record *pat, RecordOperatorMap *mapper, raw_ostream &os); // Emits the mlir::RewritePattern struct named `rewriteName`. void emit(StringRef rewriteName); private: // Emits the code for matching ops. void emitMatchLogic(DagNode tree); // Emits the code for rewriting ops. void emitRewriteLogic(); //===--------------------------------------------------------------------===// // Match utilities //===--------------------------------------------------------------------===// // Emits C++ statements for matching the op constrained by the given DAG // `tree`. void emitOpMatch(DagNode tree, int depth); // Emits C++ statements for matching the `argIndex`-th argument of the given // DAG `tree` as an operand. void emitOperandMatch(DagNode tree, int argIndex, int depth, int indent); // Emits C++ statements for matching the `argIndex`-th argument of the given // DAG `tree` as an attribute. void emitAttributeMatch(DagNode tree, int argIndex, int depth, int indent); //===--------------------------------------------------------------------===// // Rewrite utilities //===--------------------------------------------------------------------===// // The entry point for handling a result pattern rooted at `resultTree`. This // method dispatches to concrete handlers according to `resultTree`'s kind and // returns a symbol representing the whole value pack. Callers are expected to // further resolve the symbol according to the specific use case. // // `depth` is the nesting level of `resultTree`; 0 means top-level result // pattern. 
For top-level result pattern, `resultIndex` indicates which result // of the matched root op this pattern is intended to replace, which can be // used to deduce the result type of the op generated from this result // pattern. std::string handleResultPattern(DagNode resultTree, int resultIndex, int depth); // Emits the C++ statement to replace the matched DAG with a value built via // calling native C++ code. std::string handleReplaceWithNativeCodeCall(DagNode resultTree); // Returns the C++ expression referencing the old value serving as the // replacement. std::string handleReplaceWithValue(DagNode tree); // Emits the C++ statement to build a new op out of the given DAG `tree` and // returns the variable name that this op is assigned to. If the root op in // DAG `tree` has a specified name, the created op will be assigned to a // variable of the given name. Otherwise, a unique name will be used as the // result value name. std::string handleOpCreation(DagNode tree, int resultIndex, int depth); using ChildNodeIndexNameMap = DenseMap<unsigned, std::string>; // Emits a local variable for each value and attribute to be used for creating // an op. void createSeparateLocalVarsForOpArgs(DagNode node, ChildNodeIndexNameMap &childNodeNames); // Emits the concrete arguments used to call a op's builder. void supplyValuesForOpArgs(DagNode node, const ChildNodeIndexNameMap &childNodeNames); // Emits the local variables for holding all values as a whole and all named // attributes as a whole to be used for creating an op. void createAggregateLocalVarsForOpArgs( DagNode node, const ChildNodeIndexNameMap &childNodeNames); // Returns the C++ expression to construct a constant attribute of the given // `value` for the given attribute kind `attr`. std::string handleConstantAttr(Attribute attr, StringRef value); // Returns the C++ expression to build an argument from the given DAG `leaf`. // `patArgName` is used to bound the argument to the source pattern. 
std::string handleOpArgument(DagLeaf leaf, StringRef patArgName); //===--------------------------------------------------------------------===// // General utilities //===--------------------------------------------------------------------===// // Collects all of the operations within the given dag tree. void collectOps(DagNode tree, llvm::SmallPtrSetImpl<const Operator *> &ops); // Returns a unique symbol for a local variable of the given `op`. std::string getUniqueSymbol(const Operator *op); //===--------------------------------------------------------------------===// // Symbol utilities //===--------------------------------------------------------------------===// // Returns how many static values the given DAG `node` correspond to. int getNodeValueCount(DagNode node); private: // Pattern instantiation location followed by the location of multiclass // prototypes used. This is intended to be used as a whole to // PrintFatalError() on errors. ArrayRef<llvm::SMLoc> loc; // Op's TableGen Record to wrapper object. RecordOperatorMap *opMap; // Handy wrapper for pattern being emitted. Pattern pattern; // Map for all bound symbols' info. SymbolInfoMap symbolInfoMap; // The next unused ID for newly created values. unsigned nextValueId; raw_ostream &os; // Format contexts containing placeholder substitutions. FmtContext fmtCtx; // Number of op processed. 
int opCounter = 0; }; } // end anonymous namespace PatternEmitter::PatternEmitter(Record *pat, RecordOperatorMap *mapper, raw_ostream &os) : loc(pat->getLoc()), opMap(mapper), pattern(pat, mapper), symbolInfoMap(pat->getLoc()), nextValueId(0), os(os) { fmtCtx.withBuilder("rewriter"); } std::string PatternEmitter::handleConstantAttr(Attribute attr, StringRef value) { if (!attr.isConstBuildable()) PrintFatalError(loc, "Attribute " + attr.getAttrDefName() + " does not have the 'constBuilderCall' field"); // TODO(jpienaar): Verify the constants here return tgfmt(attr.getConstBuilderTemplate(), &fmtCtx, value); } // Helper function to match patterns. void PatternEmitter::emitOpMatch(DagNode tree, int depth) { Operator &op = tree.getDialectOp(opMap); LLVM_DEBUG(llvm::dbgs() << "start emitting match for op '" << op.getOperationName() << "' at depth " << depth << '\n'); int indent = 4 + 2 * depth; os.indent(indent) << formatv( "auto castedOp{0} = dyn_cast_or_null<{1}>(op{0}); (void)castedOp{0};\n", depth, op.getQualCppClassName()); // Skip the operand matching at depth 0 as the pattern rewriter already does. if (depth != 0) { // Skip if there is no defining operation (e.g., arguments to function). os.indent(indent) << formatv("if (!castedOp{0}) return matchFailure();\n", depth); } if (tree.getNumArgs() != op.getNumArgs()) { PrintFatalError(loc, formatv("op '{0}' argument number mismatch: {1} in " "pattern vs. {2} in definition", op.getOperationName(), tree.getNumArgs(), op.getNumArgs())); } // If the operand's name is set, set to that variable. 
auto name = tree.getSymbol(); if (!name.empty()) os.indent(indent) << formatv("{0} = castedOp{1};\n", name, depth); for (int i = 0, e = tree.getNumArgs(); i != e; ++i) { auto opArg = op.getArg(i); // Handle nested DAG construct first if (DagNode argTree = tree.getArgAsNestedDag(i)) { if (auto *operand = opArg.dyn_cast<NamedTypeConstraint *>()) { if (operand->isVariadic()) { auto error = formatv("use nested DAG construct to match op {0}'s " "variadic operand #{1} unsupported now", op.getOperationName(), i); PrintFatalError(loc, error); } } os.indent(indent) << "{\n"; os.indent(indent + 2) << formatv( "auto *op{0} = " "(*castedOp{1}.getODSOperands({2}).begin())->getDefiningOp();\n", depth + 1, depth, i); emitOpMatch(argTree, depth + 1); os.indent(indent + 2) << formatv("tblgen_ops[{0}] = op{1};\n", ++opCounter, depth + 1); os.indent(indent) << "}\n"; continue; } // Next handle DAG leaf: operand or attribute if (opArg.is<NamedTypeConstraint *>()) { emitOperandMatch(tree, i, depth, indent); } else if (opArg.is<NamedAttribute *>()) { emitAttributeMatch(tree, i, depth, indent); } else { PrintFatalError(loc, "unhandled case when matching op"); } } LLVM_DEBUG(llvm::dbgs() << "done emitting match for op '" << op.getOperationName() << "' at depth " << depth << '\n'); } void PatternEmitter::emitOperandMatch(DagNode tree, int argIndex, int depth, int indent) { Operator &op = tree.getDialectOp(opMap); auto *operand = op.getArg(argIndex).get<NamedTypeConstraint *>(); auto matcher = tree.getArgAsLeaf(argIndex); // If a constraint is specified, we need to generate C++ statements to // check the constraint. if (!matcher.isUnspecified()) { if (!matcher.isOperandMatcher()) { PrintFatalError( loc, formatv("the {1}-th argument of op '{0}' should be an operand", op.getOperationName(), argIndex + 1)); } // Only need to verify if the matcher's type is different from the one // of op definition. 
if (operand->constraint != matcher.getAsConstraint()) { if (operand->isVariadic()) { auto error = formatv( "further constrain op {0}'s variadic operand #{1} unsupported now", op.getOperationName(), argIndex); PrintFatalError(loc, error); } auto self = formatv("(*castedOp{0}.getODSOperands({1}).begin())->getType()", depth, argIndex); os.indent(indent) << "if (!(" << tgfmt(matcher.getConditionTemplate(), &fmtCtx.withSelf(self)) << ")) return matchFailure();\n"; } } // Capture the value auto name = tree.getArgName(argIndex); if (!name.empty()) { // We need to subtract the number of attributes before this operand to get // the index in the operand list. auto numPrevAttrs = std::count_if( op.arg_begin(), op.arg_begin() + argIndex, [](const Argument &arg) { return arg.is<NamedAttribute *>(); }); os.indent(indent) << formatv("{0} = castedOp{1}.getODSOperands({2});\n", name, depth, argIndex - numPrevAttrs); } } void PatternEmitter::emitAttributeMatch(DagNode tree, int argIndex, int depth, int indent) { Operator &op = tree.getDialectOp(opMap); auto *namedAttr = op.getArg(argIndex).get<NamedAttribute *>(); const auto &attr = namedAttr->attr; os.indent(indent) << "{\n"; indent += 2; os.indent(indent) << formatv( "auto tblgen_attr = op{0}->getAttrOfType<{1}>(\"{2}\");\n", depth, attr.getStorageType(), namedAttr->name); // TODO(antiagainst): This should use getter method to avoid duplication. if (attr.hasDefaultValueInitializer()) { os.indent(indent) << "if (!tblgen_attr) tblgen_attr = " << tgfmt(attr.getConstBuilderTemplate(), &fmtCtx, attr.getDefaultValueInitializer()) << ";\n"; } else if (attr.isOptional()) { // For a missing attribute that is optional according to definition, we // should just capture a mlir::Attribute() to signal the missing state. // That is precisely what getAttr() returns on missing attributes. 
} else { os.indent(indent) << "if (!tblgen_attr) return matchFailure();\n"; } auto matcher = tree.getArgAsLeaf(argIndex); if (!matcher.isUnspecified()) { if (!matcher.isAttrMatcher()) { PrintFatalError( loc, formatv("the {1}-th argument of op '{0}' should be an attribute", op.getOperationName(), argIndex + 1)); } // If a constraint is specified, we need to generate C++ statements to // check the constraint. os.indent(indent) << "if (!(" << tgfmt(matcher.getConditionTemplate(), &fmtCtx.withSelf("tblgen_attr")) << ")) return matchFailure();\n"; } // Capture the value auto name = tree.getArgName(argIndex); if (!name.empty()) { os.indent(indent) << formatv("{0} = tblgen_attr;\n", name); } indent -= 2; os.indent(indent) << "}\n"; } void PatternEmitter::emitMatchLogic(DagNode tree) { LLVM_DEBUG(llvm::dbgs() << "--- start emitting match logic ---\n"); emitOpMatch(tree, 0); for (auto &appliedConstraint : pattern.getConstraints()) { auto &constraint = appliedConstraint.constraint; auto &entities = appliedConstraint.entities; auto condition = constraint.getConditionTemplate(); auto cmd = "if (!({0})) return matchFailure();\n"; if (isa<TypeConstraint>(constraint)) { auto self = formatv("({0}->getType())", symbolInfoMap.getValueAndRangeUse(entities.front())); os.indent(4) << formatv(cmd, tgfmt(condition, &fmtCtx.withSelf(self.str()))); } else if (isa<AttrConstraint>(constraint)) { PrintFatalError( loc, "cannot use AttrConstraint in Pattern multi-entity constraints"); } else { // TODO(b/138794486): replace formatv arguments with the exact specified // args. 
if (entities.size() > 4) { PrintFatalError(loc, "only support up to 4-entity constraints now"); } SmallVector<std::string, 4> names; int i = 0; for (int e = entities.size(); i < e; ++i) names.push_back(symbolInfoMap.getValueAndRangeUse(entities[i])); std::string self = appliedConstraint.self; if (!self.empty()) self = symbolInfoMap.getValueAndRangeUse(self); for (; i < 4; ++i) names.push_back("<unused>"); os.indent(4) << formatv(cmd, tgfmt(condition, &fmtCtx.withSelf(self), names[0], names[1], names[2], names[3])); } } LLVM_DEBUG(llvm::dbgs() << "--- done emitting match logic ---\n"); } void PatternEmitter::collectOps(DagNode tree, llvm::SmallPtrSetImpl<const Operator *> &ops) { // Check if this tree is an operation. if (tree.isOperation()) { const Operator &op = tree.getDialectOp(opMap); LLVM_DEBUG(llvm::dbgs() << "found operation " << op.getOperationName() << '\n'); ops.insert(&op); } // Recurse the arguments of the tree. for (unsigned i = 0, e = tree.getNumArgs(); i != e; ++i) if (auto child = tree.getArgAsNestedDag(i)) collectOps(child, ops); } void PatternEmitter::emit(StringRef rewriteName) { // Get the DAG tree for the source pattern. DagNode sourceTree = pattern.getSourcePattern(); const Operator &rootOp = pattern.getSourceRootOp(); auto rootName = rootOp.getOperationName(); // Collect the set of result operations. llvm::SmallPtrSet<const Operator *, 4> resultOps; LLVM_DEBUG(llvm::dbgs() << "start collecting ops used in result patterns\n"); for (unsigned i = 0, e = pattern.getNumResultPatterns(); i != e; ++i) { collectOps(pattern.getResultPattern(i), resultOps); } LLVM_DEBUG(llvm::dbgs() << "done collecting ops used in result patterns\n"); // Emit RewritePattern for Pattern. 
auto locs = pattern.getLocation(); os << formatv("/* Generated from:\n\t{0:$[ instantiating\n\t]}\n*/\n", make_range(locs.rbegin(), locs.rend())); os << formatv(R"(struct {0} : public RewritePattern { {0}(MLIRContext *context) : RewritePattern("{1}", {{)", rewriteName, rootName); // Sort result operators by name. llvm::SmallVector<const Operator *, 4> sortedResultOps(resultOps.begin(), resultOps.end()); llvm::sort(sortedResultOps, [&](const Operator *lhs, const Operator *rhs) { return lhs->getOperationName() < rhs->getOperationName(); }); interleaveComma(sortedResultOps, os, [&](const Operator *op) { os << '"' << op->getOperationName() << '"'; }); os << formatv(R"(}, {0}, context) {{})", pattern.getBenefit()) << "\n"; // Emit matchAndRewrite() function. os << R"( PatternMatchResult matchAndRewrite(Operation *op0, PatternRewriter &rewriter) const override { )"; // Register all symbols bound in the source pattern. pattern.collectSourcePatternBoundSymbols(symbolInfoMap); LLVM_DEBUG( llvm::dbgs() << "start creating local variables for capturing matches\n"); os.indent(4) << "// Variables for capturing values and attributes used for " "creating ops\n"; // Create local variables for storing the arguments and results bound // to symbols. for (const auto &symbolInfoPair : symbolInfoMap) { StringRef symbol = symbolInfoPair.getKey(); auto &info = symbolInfoPair.getValue(); os.indent(4) << info.getVarDecl(symbol); } // TODO(jpienaar): capture ops with consistent numbering so that it can be // reused for fused loc. 
os.indent(4) << formatv("Operation *tblgen_ops[{0}];\n\n", pattern.getSourcePattern().getNumOps()); LLVM_DEBUG( llvm::dbgs() << "done creating local variables for capturing matches\n"); os.indent(4) << "// Match\n"; os.indent(4) << "tblgen_ops[0] = op0;\n"; emitMatchLogic(sourceTree); os << "\n"; os.indent(4) << "// Rewrite\n"; emitRewriteLogic(); os.indent(4) << "return matchSuccess();\n"; os << " };\n"; os << "};\n"; } void PatternEmitter::emitRewriteLogic() { LLVM_DEBUG(llvm::dbgs() << "--- start emitting rewrite logic ---\n"); const Operator &rootOp = pattern.getSourceRootOp(); int numExpectedResults = rootOp.getNumResults(); int numResultPatterns = pattern.getNumResultPatterns(); // First register all symbols bound to ops generated in result patterns. pattern.collectResultPatternBoundSymbols(symbolInfoMap); // Only the last N static values generated are used to replace the matched // root N-result op. We need to calculate the starting index (of the results // of the matched op) each result pattern is to replace. SmallVector<int, 4> offsets(numResultPatterns + 1, numExpectedResults); // If we don't need to replace any value at all, set the replacement starting // index as the number of result patterns so we skip all of them when trying // to replace the matched op's results. int replStartIndex = numExpectedResults == 0 ? 
numResultPatterns : -1; for (int i = numResultPatterns - 1; i >= 0; --i) { auto numValues = getNodeValueCount(pattern.getResultPattern(i)); offsets[i] = offsets[i + 1] - numValues; if (offsets[i] == 0) { if (replStartIndex == -1) replStartIndex = i; } else if (offsets[i] < 0 && offsets[i + 1] > 0) { auto error = formatv( "cannot use the same multi-result op '{0}' to generate both " "auxiliary values and values to be used for replacing the matched op", pattern.getResultPattern(i).getSymbol()); PrintFatalError(loc, error); } } if (offsets.front() > 0) { const char error[] = "no enough values generated to replace the matched op"; PrintFatalError(loc, error); } os.indent(4) << "auto loc = rewriter.getFusedLoc({"; for (int i = 0, e = pattern.getSourcePattern().getNumOps(); i != e; ++i) { os << (i ? ", " : "") << "tblgen_ops[" << i << "]->getLoc()"; } os << "}); (void)loc;\n"; // Process auxiliary result patterns. for (int i = 0; i < replStartIndex; ++i) { DagNode resultTree = pattern.getResultPattern(i); auto val = handleResultPattern(resultTree, offsets[i], 0); // Normal op creation will be streamed to `os` by the above call; but // NativeCodeCall will only be materialized to `os` if it is used. Here // we are handling auxiliary patterns so we want the side effect even if // NativeCodeCall is not replacing matched root op's results. if (resultTree.isNativeCodeCall()) os.indent(4) << val << ";\n"; } if (numExpectedResults == 0) { assert(replStartIndex >= numResultPatterns && "invalid auxiliary vs. replacement pattern division!"); // No result to replace. Just erase the op. os.indent(4) << "rewriter.eraseOp(op0);\n"; } else { // Process replacement result patterns. 
os.indent(4) << "SmallVector<Value *, 4> tblgen_repl_values;\n"; for (int i = replStartIndex; i < numResultPatterns; ++i) { DagNode resultTree = pattern.getResultPattern(i); auto val = handleResultPattern(resultTree, offsets[i], 0); os.indent(4) << "\n"; // Resolve each symbol for all range use so that we can loop over them. os << symbolInfoMap.getAllRangeUse( val, " for (auto *v : {0}) {{ tblgen_repl_values.push_back(v); }", "\n"); } os.indent(4) << "\n"; os.indent(4) << "rewriter.replaceOp(op0, tblgen_repl_values);\n"; } LLVM_DEBUG(llvm::dbgs() << "--- done emitting rewrite logic ---\n"); } std::string PatternEmitter::getUniqueSymbol(const Operator *op) { return formatv("tblgen_{0}_{1}", op->getCppClassName(), nextValueId++); } std::string PatternEmitter::handleResultPattern(DagNode resultTree, int resultIndex, int depth) { LLVM_DEBUG(llvm::dbgs() << "handle result pattern: "); LLVM_DEBUG(resultTree.print(llvm::dbgs())); LLVM_DEBUG(llvm::dbgs() << '\n'); if (resultTree.isNativeCodeCall()) { auto symbol = handleReplaceWithNativeCodeCall(resultTree); symbolInfoMap.bindValue(symbol); return symbol; } if (resultTree.isReplaceWithValue()) { return handleReplaceWithValue(resultTree); } // Normal op creation. auto symbol = handleOpCreation(resultTree, resultIndex, depth); if (resultTree.getSymbol().empty()) { // This is an op not explicitly bound to a symbol in the rewrite rule. // Register the auto-generated symbol for it. 
symbolInfoMap.bindOpResult(symbol, pattern.getDialectOp(resultTree)); } return symbol; } std::string PatternEmitter::handleReplaceWithValue(DagNode tree) { assert(tree.isReplaceWithValue()); if (tree.getNumArgs() != 1) { PrintFatalError( loc, "replaceWithValue directive must take exactly one argument"); } if (!tree.getSymbol().empty()) { PrintFatalError(loc, "cannot bind symbol to replaceWithValue"); } return tree.getArgName(0); } std::string PatternEmitter::handleOpArgument(DagLeaf leaf, StringRef patArgName) { if (leaf.isConstantAttr()) { auto constAttr = leaf.getAsConstantAttr(); return handleConstantAttr(constAttr.getAttribute(), constAttr.getConstantValue()); } if (leaf.isEnumAttrCase()) { auto enumCase = leaf.getAsEnumAttrCase(); if (enumCase.isStrCase()) return handleConstantAttr(enumCase, enumCase.getSymbol()); // This is an enum case backed by an IntegerAttr. We need to get its value // to build the constant. std::string val = std::to_string(enumCase.getValue()); return handleConstantAttr(enumCase, val); } LLVM_DEBUG(llvm::dbgs() << "handle argument '" << patArgName << "'\n"); auto argName = symbolInfoMap.getValueAndRangeUse(patArgName); if (leaf.isUnspecified() || leaf.isOperandMatcher()) { LLVM_DEBUG(llvm::dbgs() << "replace " << patArgName << " with '" << argName << "' (via symbol ref)\n"); return argName; } if (leaf.isNativeCodeCall()) { auto repl = tgfmt(leaf.getNativeCodeTemplate(), &fmtCtx.withSelf(argName)); LLVM_DEBUG(llvm::dbgs() << "replace " << patArgName << " with '" << repl << "' (via NativeCodeCall)\n"); return repl; } PrintFatalError(loc, "unhandled case when rewriting op"); } std::string PatternEmitter::handleReplaceWithNativeCodeCall(DagNode tree) { LLVM_DEBUG(llvm::dbgs() << "handle NativeCodeCall pattern: "); LLVM_DEBUG(tree.print(llvm::dbgs())); LLVM_DEBUG(llvm::dbgs() << '\n'); auto fmt = tree.getNativeCodeTemplate(); // TODO(b/138794486): replace formatv arguments with the exact specified args. 
SmallVector<std::string, 8> attrs(8); if (tree.getNumArgs() > 8) { PrintFatalError(loc, "unsupported NativeCodeCall argument numbers: " + Twine(tree.getNumArgs())); } for (int i = 0, e = tree.getNumArgs(); i != e; ++i) { attrs[i] = handleOpArgument(tree.getArgAsLeaf(i), tree.getArgName(i)); LLVM_DEBUG(llvm::dbgs() << "NativeCodeCall argment #" << i << " replacement: " << attrs[i] << "\n"); } return tgfmt(fmt, &fmtCtx, attrs[0], attrs[1], attrs[2], attrs[3], attrs[4], attrs[5], attrs[6], attrs[7]); } int PatternEmitter::getNodeValueCount(DagNode node) { if (node.isOperation()) { // If the op is bound to a symbol in the rewrite rule, query its result // count from the symbol info map. auto symbol = node.getSymbol(); if (!symbol.empty()) { return symbolInfoMap.getStaticValueCount(symbol); } // Otherwise this is an unbound op; we will use all its results. return pattern.getDialectOp(node).getNumResults(); } // TODO(antiagainst): This considers all NativeCodeCall as returning one // value. Enhance if multi-value ones are needed. return 1; } std::string PatternEmitter::handleOpCreation(DagNode tree, int resultIndex, int depth) { LLVM_DEBUG(llvm::dbgs() << "create op for pattern: "); LLVM_DEBUG(tree.print(llvm::dbgs())); LLVM_DEBUG(llvm::dbgs() << '\n'); Operator &resultOp = tree.getDialectOp(opMap); auto numOpArgs = resultOp.getNumArgs(); if (numOpArgs != tree.getNumArgs()) { PrintFatalError(loc, formatv("resultant op '{0}' argument number mismatch: " "{1} in pattern vs. {2} in definition", resultOp.getOperationName(), tree.getNumArgs(), numOpArgs)); } // A map to collect all nested DAG child nodes' names, with operand index as // the key. This includes both bound and unbound child nodes. ChildNodeIndexNameMap childNodeNames; // First go through all the child nodes who are nested DAG constructs to // create ops for them and remember the symbol names for them, so that we can // use the results in the current node. This happens in a recursive manner. 
for (int i = 0, e = resultOp.getNumOperands(); i != e; ++i) { if (auto child = tree.getArgAsNestedDag(i)) { childNodeNames[i] = handleResultPattern(child, i, depth + 1); } } // The name of the local variable holding this op. std::string valuePackName; // The symbol for holding the result of this pattern. Note that the result of // this pattern is not necessarily the same as the variable created by this // pattern because we can use `__N` suffix to refer only a specific result if // the generated op is a multi-result op. std::string resultValue; if (tree.getSymbol().empty()) { // No symbol is explicitly bound to this op in the pattern. Generate a // unique name. valuePackName = resultValue = getUniqueSymbol(&resultOp); } else { resultValue = tree.getSymbol(); // Strip the index to get the name for the value pack and use it to name the // local variable for the op. valuePackName = SymbolInfoMap::getValuePackName(resultValue); } // Create the local variable for this op. os.indent(4) << formatv("{0} {1};\n", resultOp.getQualCppClassName(), valuePackName); os.indent(4) << "{\n"; // Right now ODS don't have general type inference support. Except a few // special cases listed below, DRR needs to supply types for all results // when building an op. bool isSameOperandsAndResultType = resultOp.getTrait("OpTrait::SameOperandsAndResultType"); bool useFirstAttr = resultOp.getTrait("OpTrait::FirstAttrDerivedResultType"); if (isSameOperandsAndResultType || useFirstAttr) { // We know how to deduce the result type for ops with these traits and we've // generated builders taking aggregrate parameters. Use those builders to // create the ops. // First prepare local variables for op arguments used in builder call. createAggregateLocalVarsForOpArgs(tree, childNodeNames); // Then create the op. 
os.indent(6) << formatv( "{0} = rewriter.create<{1}>(loc, tblgen_values, tblgen_attrs);\n", valuePackName, resultOp.getQualCppClassName()); os.indent(4) << "}\n"; return resultValue; } bool isBroadcastable = resultOp.getTrait("OpTrait::BroadcastableTwoOperandsOneResult"); bool usePartialResults = valuePackName != resultValue; if (isBroadcastable || usePartialResults || depth > 0 || resultIndex < 0) { // For these cases (broadcastable ops, op results used both as auxiliary // values and replacement values, ops in nested patterns, auxiliary ops), we // still need to supply the result types when building the op. But because // we don't generate a builder automatically with ODS for them, it's the // developer's responsiblity to make sure such a builder (with result type // deduction ability) exists. We go through the separate-parameter builder // here given that it's easier for developers to write compared to // aggregate-parameter builders. createSeparateLocalVarsForOpArgs(tree, childNodeNames); os.indent(6) << formatv("{0} = rewriter.create<{1}>(loc", valuePackName, resultOp.getQualCppClassName()); supplyValuesForOpArgs(tree, childNodeNames); os << "\n );\n"; os.indent(4) << "}\n"; return resultValue; } // If depth == 0 and resultIndex >= 0, it means we are replacing the values // generated from the source pattern root op. Then we can use the source // pattern's value types to determine the value type of the generated op // here. // First prepare local variables for op arguments used in builder call. createAggregateLocalVarsForOpArgs(tree, childNodeNames); // Then prepare the result types. We need to specify the types for all // results. 
os.indent(6) << formatv( "SmallVector<Type, 4> tblgen_types; (void)tblgen_types;\n"); int numResults = resultOp.getNumResults(); if (numResults != 0) { for (int i = 0; i < numResults; ++i) os.indent(6) << formatv("for (auto *v : castedOp0.getODSResults({0})) {{" "tblgen_types.push_back(v->getType()); }\n", resultIndex + i); } os.indent(6) << formatv("{0} = rewriter.create<{1}>(loc, tblgen_types, " "tblgen_values, tblgen_attrs);\n", valuePackName, resultOp.getQualCppClassName()); os.indent(4) << "}\n"; return resultValue; } void PatternEmitter::createSeparateLocalVarsForOpArgs( DagNode node, ChildNodeIndexNameMap &childNodeNames) { Operator &resultOp = node.getDialectOp(opMap); // Now prepare operands used for building this op: // * If the operand is non-variadic, we create a `Value*` local variable. // * If the operand is variadic, we create a `SmallVector<Value*>` local // variable. int valueIndex = 0; // An index for uniquing local variable names. for (int argIndex = 0, e = resultOp.getNumArgs(); argIndex < e; ++argIndex) { const auto *operand = resultOp.getArg(argIndex).dyn_cast<NamedTypeConstraint *>(); if (!operand) { // We do not need special handling for attributes. continue; } std::string varName; if (operand->isVariadic()) { varName = formatv("tblgen_values_{0}", valueIndex++); os.indent(6) << formatv("SmallVector<Value *, 4> {0};\n", varName); std::string range; if (node.isNestedDagArg(argIndex)) { range = childNodeNames[argIndex]; } else { range = node.getArgName(argIndex); } // Resolve the symbol for all range use so that we have a uniform way of // capturing the values. 
range = symbolInfoMap.getValueAndRangeUse(range); os.indent(6) << formatv("for (auto *v : {0}) {1}.push_back(v);\n", range, varName); } else { varName = formatv("tblgen_value_{0}", valueIndex++); os.indent(6) << formatv("Value *{0} = ", varName); if (node.isNestedDagArg(argIndex)) { os << symbolInfoMap.getValueAndRangeUse(childNodeNames[argIndex]); } else { DagLeaf leaf = node.getArgAsLeaf(argIndex); auto symbol = symbolInfoMap.getValueAndRangeUse(node.getArgName(argIndex)); if (leaf.isNativeCodeCall()) { os << tgfmt(leaf.getNativeCodeTemplate(), &fmtCtx.withSelf(symbol)); } else { os << symbol; } } os << ";\n"; } // Update to use the newly created local variable for building the op later. childNodeNames[argIndex] = varName; } } void PatternEmitter::supplyValuesForOpArgs( DagNode node, const ChildNodeIndexNameMap &childNodeNames) { Operator &resultOp = node.getDialectOp(opMap); for (int argIndex = 0, numOpArgs = resultOp.getNumArgs(); argIndex != numOpArgs; ++argIndex) { // Start each argment on its own line. (os << ",\n").indent(8); Argument opArg = resultOp.getArg(argIndex); // Handle the case of operand first. if (auto *operand = opArg.dyn_cast<NamedTypeConstraint *>()) { if (!operand->name.empty()) os << "/*" << operand->name << "=*/"; os << childNodeNames.lookup(argIndex); continue; } // The argument in the op definition. auto opArgName = resultOp.getArgName(argIndex); if (auto subTree = node.getArgAsNestedDag(argIndex)) { if (!subTree.isNativeCodeCall()) PrintFatalError(loc, "only NativeCodeCall allowed in nested dag node " "for creating attribute"); os << formatv("/*{0}=*/{1}", opArgName, handleReplaceWithNativeCodeCall(subTree)); } else { auto leaf = node.getArgAsLeaf(argIndex); // The argument in the result DAG pattern. auto patArgName = node.getArgName(argIndex); if (leaf.isConstantAttr() || leaf.isEnumAttrCase()) { // TODO(jpienaar): Refactor out into map to avoid recomputing these. 
if (!opArg.is<NamedAttribute *>()) PrintFatalError(loc, Twine("expected attribute ") + Twine(argIndex)); if (!patArgName.empty()) os << "/*" << patArgName << "=*/"; } else { os << "/*" << opArgName << "=*/"; } os << handleOpArgument(leaf, patArgName); } } } void PatternEmitter::createAggregateLocalVarsForOpArgs( DagNode node, const ChildNodeIndexNameMap &childNodeNames) { Operator &resultOp = node.getDialectOp(opMap); os.indent(6) << formatv( "SmallVector<Value *, 4> tblgen_values; (void)tblgen_values;\n"); os.indent(6) << formatv( "SmallVector<NamedAttribute, 4> tblgen_attrs; (void)tblgen_attrs;\n"); for (int argIndex = 0, e = resultOp.getNumArgs(); argIndex < e; ++argIndex) { if (const auto *attr = resultOp.getArg(argIndex).dyn_cast<NamedAttribute *>()) { const char *addAttrCmd = "if ({1}) {{" " tblgen_attrs.emplace_back(rewriter." "getIdentifier(\"{0}\"), {1}); }\n"; // The argument in the op definition. auto opArgName = resultOp.getArgName(argIndex); if (auto subTree = node.getArgAsNestedDag(argIndex)) { if (!subTree.isNativeCodeCall()) PrintFatalError(loc, "only NativeCodeCall allowed in nested dag node " "for creating attribute"); os.indent(6) << formatv(addAttrCmd, opArgName, handleReplaceWithNativeCodeCall(subTree)); } else { auto leaf = node.getArgAsLeaf(argIndex); // The argument in the result DAG pattern. auto patArgName = node.getArgName(argIndex); os.indent(6) << formatv(addAttrCmd, opArgName, handleOpArgument(leaf, patArgName)); } continue; } const auto *operand = resultOp.getArg(argIndex).get<NamedTypeConstraint *>(); std::string varName; if (operand->isVariadic()) { std::string range; if (node.isNestedDagArg(argIndex)) { range = childNodeNames.lookup(argIndex); } else { range = node.getArgName(argIndex); } // Resolve the symbol for all range use so that we have a uniform way of // capturing the values. 
range = symbolInfoMap.getValueAndRangeUse(range); os.indent(6) << formatv( "for (auto *v : {0}) tblgen_values.push_back(v);\n", range); } else { os.indent(6) << formatv("tblgen_values.push_back(", varName); if (node.isNestedDagArg(argIndex)) { os << symbolInfoMap.getValueAndRangeUse( childNodeNames.lookup(argIndex)); } else { DagLeaf leaf = node.getArgAsLeaf(argIndex); auto symbol = symbolInfoMap.getValueAndRangeUse(node.getArgName(argIndex)); if (leaf.isNativeCodeCall()) { os << tgfmt(leaf.getNativeCodeTemplate(), &fmtCtx.withSelf(symbol)); } else { os << symbol; } } os << ");\n"; } } } static void emitRewriters(const RecordKeeper &recordKeeper, raw_ostream &os) { emitSourceFileHeader("Rewriters", os); const auto &patterns = recordKeeper.getAllDerivedDefinitions("Pattern"); auto numPatterns = patterns.size(); // We put the map here because it can be shared among multiple patterns. RecordOperatorMap recordOpMap; std::vector<std::string> rewriterNames; rewriterNames.reserve(numPatterns); std::string baseRewriterName = "GeneratedConvert"; int rewriterIndex = 0; for (Record *p : patterns) { std::string name; if (p->isAnonymous()) { // If no name is provided, ensure unique rewriter names simply by // appending unique suffix. name = baseRewriterName + llvm::utostr(rewriterIndex++); } else { name = p->getName(); } LLVM_DEBUG(llvm::dbgs() << "=== start generating pattern '" << name << "' ===\n"); PatternEmitter(p, &recordOpMap, os).emit(name); LLVM_DEBUG(llvm::dbgs() << "=== done generating pattern '" << name << "' ===\n"); rewriterNames.push_back(std::move(name)); } // Emit function to add the generated matchers to the pattern list. 
os << "void populateWithGenerated(MLIRContext *context, " << "OwningRewritePatternList *patterns) {\n"; for (const auto &name : rewriterNames) { os << " patterns->insert<" << name << ">(context);\n"; } os << "}\n"; } static mlir::GenRegistration genRewriters("gen-rewriters", "Generate pattern rewriters", [](const RecordKeeper &records, raw_ostream &os) { emitRewriters(records, os); return false; });
{ "content_hash": "6f186518fe2586ba8728ef41c481ff10", "timestamp": "", "source": "github", "line_count": 1043, "max_line_length": 80, "avg_line_length": 39.99520613614573, "alnum_prop": 0.6205201965719765, "repo_name": "adit-chandra/tensorflow", "id": "d2776e058056f74ec1e597a6a2d407d52d373d83", "size": "41715", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "third_party/mlir/tools/mlir-tblgen/RewriterGen.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "5003" }, { "name": "Batchfile", "bytes": "45988" }, { "name": "C", "bytes": "773694" }, { "name": "C#", "bytes": "8562" }, { "name": "C++", "bytes": "76734263" }, { "name": "CMake", "bytes": "6545" }, { "name": "Dockerfile", "bytes": "81136" }, { "name": "Go", "bytes": "1679107" }, { "name": "HTML", "bytes": "4686483" }, { "name": "Java", "bytes": "952944" }, { "name": "Jupyter Notebook", "bytes": "567243" }, { "name": "LLVM", "bytes": "6536" }, { "name": "MLIR", "bytes": "1299322" }, { "name": "Makefile", "bytes": "61397" }, { "name": "Objective-C", "bytes": "104706" }, { "name": "Objective-C++", "bytes": "297753" }, { "name": "PHP", "bytes": "24055" }, { "name": "Pascal", "bytes": "3752" }, { "name": "Pawn", "bytes": "17546" }, { "name": "Perl", "bytes": "7536" }, { "name": "Python", "bytes": "38764318" }, { "name": "RobotFramework", "bytes": "891" }, { "name": "Ruby", "bytes": "7459" }, { "name": "Shell", "bytes": "643787" }, { "name": "Smarty", "bytes": "34727" }, { "name": "Swift", "bytes": "62814" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!-- Copyright 2005-2015 Red Hat, Inc. Red Hat licenses this file to you under the Apache License, version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <parent> <groupId>io.fabric8.quickstarts</groupId> <artifactId>ipaas-quickstarts</artifactId> <version>2.2-SNAPSHOT</version> </parent> <modelVersion>4.0.0</modelVersion> <groupId>io.fabric8.jube.images.fabric8</groupId> <artifactId>apps</artifactId> <packaging>pom</packaging> <name>Fabric8 :: Apps</name> <properties> <activemq.container.port>6194</activemq.container.port> <activemq.service.port>6163</activemq.service.port> <amqbroker.container.port>7162</amqbroker.container.port> <amqbroker.service.port>7163</amqbroker.service.port> <docker.from>fabric8/java-jboss-openjdk8-jdk:1.1.5</docker.from> <docker.image>${fabric8.dockerUser}${project.artifactId}:${project.version}</docker.image> <docker.assemblyDescriptorRef>artifact-with-dependencies</docker.assemblyDescriptorRef> <docker.port.container.jolokia>8778</docker.port.container.jolokia> <commons.beanutils.version>1.9.2</commons.beanutils.version> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>io.fabric8</groupId> <artifactId>fabric8-project</artifactId> <version>${fabric8.version}</version> <type>pom</type> <scope>import</scope> </dependency> </dependencies> 
</dependencyManagement> <modules> <module>amqbroker</module> <module>artifactory</module> <module>api-registry</module> <module>apiman</module> <module>apiman-gateway</module> <module>app-library</module> <module>brackets</module> <module>cadvisor</module> <module>cdelivery</module> <module>chaos-monkey</module> <module>fabric8</module> <module>elasticsearch</module> <module>fabric8-forge</module> <module>fabric8mq</module> <module>fabric8mq-controller</module> <module>fabric8mq-autoscaler</module> <module>fabric8mq-producer</module> <module>fabric8mq-consumer</module> <module>gerrit</module> <module>gogs</module> <module>grafana</module> <module>grafana2</module> <module>heapster</module> <module>hubot-irc</module> <module>hubot-letschat</module> <module>hubot-notifier</module> <module>hubot-slack</module> <module>image-linker</module> <module>infinispan-server</module> <module>influxdb</module> <module>jadvisor</module> <module>jbpm-designer</module> <module>jenkins</module> <module>keycloak</module> <module>kibana</module> <module>kiwi-irc</module> <module>letschat</module> <module>nexus</module> <module>orion</module> <module>prometheus</module> <module>registry</module> <module>sonarqube</module> <module>taiga</module> <module>zookeeper</module> </modules> <build> <plugins> <!-- generate a .zip with all the quickstarts --> <plugin> <groupId>io.fabric8</groupId> <artifactId>fabric8-maven-plugin</artifactId> <version>${fabric8.version}</version> <executions> <execution> <id>json</id> <phase>generate-resources</phase> <goals> <goal>json</goal> </goals> </execution> <execution> <id>attach</id> <phase>package</phase> <goals> <goal>attach</goal> </goals> </execution> </executions> <configuration> <replaceReadmeLinksPrefix>/apps/</replaceReadmeLinksPrefix> </configuration> </plugin> </plugins> </build> <profiles> <profile> <id>docker-build</id> <build> <pluginManagement> <plugins> <plugin> <groupId>io.fabric8</groupId> <artifactId>docker-maven-plugin</artifactId> 
<version>${docker.maven.plugin.version}</version> <executions> <execution> <goals> <goal>build</goal> </goals> <phase>package</phase> </execution> </executions> </plugin> </plugins> </pluginManagement> </build> </profile> </profiles> </project>
{ "content_hash": "00f41d4923bcf304604c60acf1e63c64", "timestamp": "", "source": "github", "line_count": 160, "max_line_length": 204, "avg_line_length": 37.3, "alnum_prop": 0.5423927613941019, "repo_name": "chirino/ipaas-quickstarts", "id": "1bfede4317a495997b93eea7d9c1e12d0f9c1ddf", "size": "5968", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sandbox/apps/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "1481" }, { "name": "HTML", "bytes": "1088" }, { "name": "Java", "bytes": "946458" }, { "name": "Shell", "bytes": "618" } ], "symlink_target": "" }
public class B : A { public override A this[int index] { get { return null; } } } public class A { public virtual B this[int index] { get { return null; } } }
{ "content_hash": "a958db98496f0248b68b02d8fa67f1d9", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 36, "avg_line_length": 11.866666666666667, "alnum_prop": 0.5842696629213483, "repo_name": "monoman/cnatural-language", "id": "c1f5b89a2323a742c8d5d85666c06ca279293f4c", "size": "178", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/resources/ObjectModelErrorTest/sources/IndexerCovariance.stab.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "3031665" }, { "name": "Shell", "bytes": "334" } ], "symlink_target": "" }
BC.js (v0.3.0) =================================== The BC.js is implemented as an [Apache Cordova](http://cordova.apache.org) / [PhoneGap](http://phonegap.com) Plugin for smartphones and tablets. </br>It only supports Bluetooth Low Energy(BLE) API currently. * Use this command to install our plugin in your Cordova/Phonegap project: <br/> <b>cordova plugin add https://github.com/bcsphere/bluetooth.git </b> <br/> * Online document is here: http://www.bcsphere.org/portal.php?mod=topic&topicid=3 <br/> * [BC Explorer](https://github.com/bcsphere/bcexplorer) is a useful tool to show the BLE services structure in device, we hope it will help you to start your development. * [BC IBeacon](https://github.com/bcsphere/ibeacon) implements as a plugin based on BC.js, provides a way to develop your own HW SDK based on BC.js . Features ----------------------------------- #### Multi-platform development and deployment * Your team will not be separated into some smaller teams to develop app for different platforms. * With the [Apache Cordova](http://cordova.apache.org) / [PhoneGap](http://phonegap.com) Plugin technique, you can develop BLE enabled cross-platform mobile apps for e.g. iOS and Android using JavaScript, HTML5 and CSS3. * If you are a senior Web developer and want to develop a BLE app, BC.js is your best choice, you can use uniform UI code in multi-platform. #### Effective Utilization of Apache Cordova/PhoneGap Ecosystem * PhoneGap enables the implementation of many useful plugins such as [Camera](http://docs.phonegap.com/en/edge/cordova_camera_camera.md.html#Camera)/[Accelerometer](http://docs.phonegap.com/en/edge/cordova_accelerometer_accelerometer.md.html#Accelerometer)/[Compass](http://docs.phonegap.com/en/edge/cordova_compass_compass.md.html#Compass)... * You can implements your own [plugins](http://docs.phonegap.com/en/3.3.0/guide_hybrid_plugins_index.md.html#Plugin%20Development%20Guide) based on BC.js for others. 
* There is many plugins implements by third party(you can find them on [PlugReg](http://plugreg.com/)), most of them are open-source. #### Internet of Things * Both Bluetooth Low Energy and JavaScript are asynchronous by their DNA,which makes them a good match. * BLE is good enough for wireless short-range communication. But we won't be limited to BLE API. </br></br> Support platforms ----------------------------------- * Android API 18+ (All devices with Bluetooth Low Energy HW) * Android API 17+ (Sumsung devices with Bluetooth Low Energy HW) * Android API 16+ (HTC devices with Bluetooth Low Energy HW, <b>Please note:</b> HTC use shared library to operate BLE device, you should add the 'com.htc.android.bluetooth.le.jar' in the build path manually to support HTC devices.) * IOS 6.0.1+ (iPhone4S+/iTouch5/iPad3+/iPad-mini/iPadAir) </br>[See Details](http://www.bcsphere.org/document/supportplatforms.html) </br></br> Backward ----------------------------------- If your background is Java/C#/C/C++ or HW programming, you may feel confused when use functional programming language(such as javascript) for the first time. </br>But once you get with it, you will love it, enjoy :) [Version-History-&-Roadmap](https://github.com/bcsphere/bluetooth/wiki/Version-History-&-Roadmap) -----------------------------------
{ "content_hash": "0fd05b0399b450a91ab6a459f3fea129", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 344, "avg_line_length": 70.31914893617021, "alnum_prop": 0.7170953101361573, "repo_name": "bcsphere/bcibeacon", "id": "3cd8814061c31e913453f0f5d76ce1e3bc10502b", "size": "3305", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "plugins/org.bcsphere.bluetooth/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package main import ( "bytes" "context" "crypto/tls" "crypto/x509" "encoding/base64" "encoding/binary" "encoding/json" "flag" "fmt" "io" "io/ioutil" "log" "net/http" "net/url" "os" "strconv" "strings" "syscall" "time" "github.com/golang/protobuf/proto" "github.com/gorilla/websocket" "google.golang.org/grpc" "google.golang.org/grpc/metadata" "google.golang.org/grpc/status" grpc_echo_pb "github.com/datawire/ambassador/pkg/api/kat" ) // Should we output GRPCWeb debugging? var debug_grpc_web bool // We set this value in main() XXX This is a hack // Limit concurrency // Semaphore is a counting semaphore that can be used to limit concurrency. type Semaphore chan bool // NewSemaphore returns a new Semaphore with the specified capacity. func NewSemaphore(n int) Semaphore { sem := make(Semaphore, n) for i := 0; i < n; i++ { sem.Release() } return sem } // Acquire blocks until a slot/token is available. func (s Semaphore) Acquire() { <-s } // Release returns a slot/token to the pool. func (s Semaphore) Release() { s <- true } // rlimit frobnicates the interplexing beacon. Or maybe it reverses the polarity // of the neutron flow. I'm not sure. FIXME. func rlimit() { var rLimit syscall.Rlimit err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit) if err != nil { log.Println("Error getting rlimit:", err) } else { log.Println("Initial rlimit:", rLimit) } rLimit.Max = 999999 rLimit.Cur = 999999 err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit) if err != nil { log.Println("Error setting rlimit:", err) } err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit) if err != nil { log.Println("Error getting rlimit:", err) } else { log.Println("Final rlimit", rLimit) } } // Query and Result manipulation // Query represents one kat query as read from the supplied input. It will be // mutated to include results from that query. type Query map[string]interface{} // CACert returns the "ca_cert" field as a string or returns the empty string. 
func (q Query) CACert() string { val, ok := q["ca_cert"] if ok { return val.(string) } return "" } // ClientCert returns the "client_cert" field as a string or returns the empty string. func (q Query) ClientCert() string { val, ok := q["client_cert"] if ok { return val.(string) } return "" } // ClientKey returns the "client_key" field as a string or returns the empty string. func (q Query) ClientKey() string { val, ok := q["client_key"] if ok { return val.(string) } return "" } // Insecure returns whether the query has a field called "insecure" whose value is true. func (q Query) Insecure() bool { val, ok := q["insecure"] return ok && val.(bool) } // SNI returns whether the query has a field called "sni" whose value is true. func (q Query) SNI() bool { val, ok := q["sni"] return ok && val.(bool) } // IsWebsocket returns whether the query's URL starts with "ws:". func (q Query) IsWebsocket() bool { return strings.HasPrefix(q.URL(), "ws:") } // URL returns the query's URL. func (q Query) URL() string { return q["url"].(string) } // MinTLSVersion returns the minimun TLS protocol version. func (q Query) MinTLSVersion() uint16 { switch q["minTLSv"].(string) { case "v1.0": return tls.VersionTLS10 case "v1.1": return tls.VersionTLS11 case "v1.2": return tls.VersionTLS12 case "v1.3": return tls.VersionTLS13 default: return 0 } } // MaxTLSVersion returns the maximum TLS protocol version. func (q Query) MaxTLSVersion() uint16 { switch q["maxTLSv"].(string) { case "v1.0": return tls.VersionTLS10 case "v1.1": return tls.VersionTLS11 case "v1.2": return tls.VersionTLS12 case "v1.3": return tls.VersionTLS13 default: return 0 } } // CipherSuites returns the list of configured Cipher Suites func (q Query) CipherSuites() []uint16 { val, ok := q["cipherSuites"] if !ok { return []uint16{} } cs := []uint16{} for _, s := range val.([]interface{}) { switch s.(string) { // TLS 1.0 - 1.2 cipher suites. 
case "TLS_RSA_WITH_RC4_128_SHA": cs = append(cs, tls.TLS_RSA_WITH_RC4_128_SHA) case "TLS_RSA_WITH_3DES_EDE_CBC_SHA": cs = append(cs, tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA) case "TLS_RSA_WITH_AES_128_CBC_SHA": cs = append(cs, tls.TLS_RSA_WITH_AES_128_CBC_SHA) case "TLS_RSA_WITH_AES_256_CBC_SHA": cs = append(cs, tls.TLS_RSA_WITH_AES_256_CBC_SHA) case "TLS_RSA_WITH_AES_128_CBC_SHA256": cs = append(cs, tls.TLS_RSA_WITH_AES_128_CBC_SHA256) case "TLS_RSA_WITH_AES_128_GCM_SHA256": cs = append(cs, tls.TLS_RSA_WITH_AES_128_GCM_SHA256) case "TLS_RSA_WITH_AES_256_GCM_SHA384": cs = append(cs, tls.TLS_RSA_WITH_AES_256_GCM_SHA384) case "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA": cs = append(cs, tls.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA) case "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA": cs = append(cs, tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA) case "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA": cs = append(cs, tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA) case "TLS_ECDHE_RSA_WITH_RC4_128_SHA": cs = append(cs, tls.TLS_ECDHE_RSA_WITH_RC4_128_SHA) case "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA": cs = append(cs, tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA) case "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA": cs = append(cs, tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA) case "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA": cs = append(cs, tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA) case "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256": cs = append(cs, tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256) case "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256": cs = append(cs, tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256) case "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256": cs = append(cs, tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256) case "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256": cs = append(cs, tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256) case "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384": cs = append(cs, tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384) case "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384": cs = append(cs, tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384) case "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305": 
cs = append(cs, tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305) case "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305": cs = append(cs, tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305) // TLS 1.3 cipher suites are not tunable // TLS_RSA_WITH_RC4_128_SHA // TLS_ECDHE_RSA_WITH_RC4_128_SHA // TLS_ECDHE_ECDSA_WITH_RC4_128_SHA // TLS_FALLBACK_SCSV isn't a standard cipher suite but an indicator // that the client is doing version fallback. See RFC 7507. case "TLS_FALLBACK_SCSV": cs = append(cs, tls.TLS_FALLBACK_SCSV) default: } } return cs } // ECDHCurves returns the list of configured ECDH CurveIDs func (q Query) ECDHCurves() []tls.CurveID { val, ok := q["ecdhCurves"] if !ok { return []tls.CurveID{} } cs := []tls.CurveID{} for _, s := range val.([]interface{}) { switch s.(string) { // TLS 1.0 - 1.2 cipher suites. case "CurveP256": cs = append(cs, tls.CurveP256) case "CurveP384": cs = append(cs, tls.CurveP384) case "CurveP521": cs = append(cs, tls.CurveP521) case "X25519": cs = append(cs, tls.X25519) default: } } return cs } // Method returns the query's method or "GET" if unspecified. func (q Query) Method() string { val, ok := q["method"] if ok { return val.(string) } return "GET" } // Headers returns the an http.Header object populated with any headers passed // in as part of the query. func (q Query) Headers() (result http.Header) { result = make(http.Header) headers, ok := q["headers"] if ok { for key, val := range headers.(map[string]interface{}) { result.Add(key, val.(string)) } } return result } // Body returns an io.Reader for the base64 encoded body supplied in // the query. func (q Query) Body() io.Reader { body, ok := q["body"] if ok { buf, err := base64.StdEncoding.DecodeString(body.(string)) if err != nil { panic(err) } return bytes.NewReader(buf) } else { return nil } } // GrpcType returns the query's grpc_type field or the empty string. 
func (q Query) GrpcType() string { val, ok := q["grpc_type"] if ok { return val.(string) } return "" } // Cookies returns a slice of http.Cookie objects populated with any cookies // passed in as part of the query. func (q Query) Cookies() (result []http.Cookie) { result = []http.Cookie{} cookies, ok := q["cookies"] if ok { for _, c := range cookies.([]interface{}) { cookie := http.Cookie{ Name: c.(map[string]interface{})["name"].(string), Value: c.(map[string]interface{})["value"].(string), } result = append(result, cookie) } } return result } // Result represents the result of one kat query. Upon first access to a query's // result field, the Result object will be created and added to the query. type Result map[string]interface{} // Result returns the query's result field as a Result object. If the field // doesn't exist, a new Result object is created and placed in that field. If // the field exists and contains something else, panic! func (q Query) Result() Result { val, ok := q["result"] if !ok { val = make(Result) q["result"] = val } return val.(Result) } // CheckErr populates the query result with error information if an error is // passed in (and logs the error). func (q Query) CheckErr(err error) bool { if err != nil { log.Printf("%v: %v", q.URL(), err) q.Result()["error"] = err.Error() return true } return false } // DecodeGrpcWebTextBody treats the body as a series of base64-encode chunks. It // returns the decoded proto and trailers. func DecodeGrpcWebTextBody(body []byte) ([]byte, http.Header, error) { // First, decode all the base64 stuff coming in. An annoyance here // is that while the data coming over the wire are encoded in // multiple chunks, we can't rely on seeing that framing when // decoding: a chunk that's the right length to not need any base-64 // padding will just run into the next chunk. // // So we loop to grab all the chunks, but we just serialize it into // a single raw byte array. 
var raw []byte cycle := 0 for { if debug_grpc_web { log.Printf("%v: base64 body '%v'", cycle, body) } cycle++ if len(body) <= 0 { break } chunk := make([]byte, base64.StdEncoding.DecodedLen(len(body))) n, err := base64.StdEncoding.Decode(chunk, body) if err != nil && n <= 0 { log.Printf("Failed to process body: %v\n", err) return nil, nil, err } raw = append(raw, chunk[:n]...) consumed := base64.StdEncoding.EncodedLen(n) body = body[consumed:] } // Next up, we need to split this into protobuf data and trailers. We // do this using grpc-web framing information for this -- each frame // consists of one byte of type, four bytes of length, then the data // itself. // // For our use case here, a type of 0 is the protobuf frame, and a type // of 0x80 is the trailers. trailers := make(http.Header) // the trailers will get saved here var proto []byte // this is what we hand off to protobuf decode var frame_start, frame_len uint32 var frame_type byte var frame []byte frame_start = 0 if debug_grpc_web { log.Printf("starting frame split, len %v: %v", len(raw), raw) } for (frame_start + 5) < uint32(len(raw)) { frame_type = raw[frame_start] frame_len = binary.BigEndian.Uint32(raw[frame_start+1 : frame_start+5]) frame = raw[frame_start+5 : frame_start+5+frame_len] if (frame_type & 128) > 0 { // Trailers frame if debug_grpc_web { log.Printf(" trailers @%v (len %v, type %v) %v - %v", frame_start, frame_len, frame_type, len(frame), frame) } lines := strings.Split(string(frame), "\n") for _, line := range lines { split := strings.SplitN(strings.TrimSpace(line), ":", 2) if len(split) == 2 { key := strings.TrimSpace(split[0]) value := strings.TrimSpace(split[1]) trailers.Add(key, value) } } } else { // Protobuf frame if debug_grpc_web { log.Printf(" protobuf @%v (len %v, type %v) %v - %v", frame_start, frame_len, frame_type, len(frame), frame) } proto = frame } frame_start += frame_len + 5 } return proto, trailers, nil } // AddResponse populates a query's result with data from the 
query's HTTP // response object. // // This is not called for websockets or real GRPC. It _is_ called for // GRPC-bridge, GRPC-web, and (of course) HTTP(s). func (q Query) AddResponse(resp *http.Response) { result := q.Result() result["status"] = resp.StatusCode result["headers"] = resp.Header headers := result["headers"].(http.Header) if headers != nil { // Copy in the client's start date. cstart := q["client-start-date"] // We'll only have a client-start-date if we're doing plain old HTTP, at // present -- so not for WebSockets or gRPC or the like. Don't try to // save the start and end dates if we have no start date. if cstart != nil { headers.Add("Client-Start-Date", q["client-start-date"].(string)) // Add the client's end date. headers.Add("Client-End-Date", time.Now().Format(time.RFC3339Nano)) } } if resp.TLS != nil { result["tls_version"] = resp.TLS.Version result["tls"] = resp.TLS.PeerCertificates result["cipher_suite"] = resp.TLS.CipherSuite } body, err := ioutil.ReadAll(resp.Body) if !q.CheckErr(err) { log.Printf("%v: %v", q.URL(), resp.Status) result["body"] = body if q.GrpcType() != "" && len(body) > 5 { if q.GrpcType() == "web" { // This is the GRPC-web case. Go forth and decode the base64'd // GRPC-web body madness. decodedBody, trailers, err := DecodeGrpcWebTextBody(body) if q.CheckErr(err) { log.Printf("Failed to decode grpc-web-text body: %v", err) return } body = decodedBody if debug_grpc_web { log.Printf("decodedBody '%v'", body) } for key, values := range trailers { for _, value := range values { headers.Add(key, value) } } } else { // This is the GRPC-bridge case -- throw away the five-byte type/length // framing at the start, and just leave the protobuf. 
body = body[5:] } response := &grpc_echo_pb.EchoResponse{} err := proto.Unmarshal(body, response) if q.CheckErr(err) { log.Printf("Failed to unmarshal proto: %v", err) return } result["text"] = response // q.r.json needs a different format return } var jsonBody interface{} err = json.Unmarshal(body, &jsonBody) if err == nil { result["json"] = jsonBody } else { result["text"] = string(body) } } } // Request processing // ExecuteWebsocketQuery handles Websocket queries func ExecuteWebsocketQuery(query Query) { url := query.URL() c, resp, err := websocket.DefaultDialer.Dial(url, query.Headers()) if query.CheckErr(err) { return } defer c.Close() query.AddResponse(resp) messages := query["messages"].([]interface{}) for _, msg := range messages { err = c.WriteMessage(websocket.TextMessage, []byte(msg.(string))) if query.CheckErr(err) { return } } err = c.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")) if query.CheckErr(err) { return } answers := []string{} result := query.Result() defer func() { result["messages"] = answers }() for { _, message, err := c.ReadMessage() if err != nil { if websocket.IsUnexpectedCloseError(err, websocket.CloseNormalClosure) { query.CheckErr(err) } return } answers = append(answers, string(message)) } } // GetGRPCReqBody returns the body of the HTTP request using the // HTTP/1.1-gRPC bridge format as described in the Envoy docs // https://www.envoyproxy.io/docs/envoy/v1.9.0/configuration/http_filters/grpc_http1_bridge_filter func GetGRPCReqBody() (*bytes.Buffer, error) { // Protocol: // . 1 byte of zero (not compressed). // . network order (big-endian) of proto message length. // . serialized proto message. 
buf := &bytes.Buffer{} if err := binary.Write(buf, binary.BigEndian, uint8(0)); err != nil { log.Printf("error when packing first byte: %v", err) return nil, err } m := &grpc_echo_pb.EchoRequest{} m.Data = "foo" pbuf := &proto.Buffer{} if err := pbuf.Marshal(m); err != nil { log.Printf("error when serializing the gRPC message: %v", err) return nil, err } if err := binary.Write(buf, binary.BigEndian, uint32(len(pbuf.Bytes()))); err != nil { log.Printf("error when packing message length: %v", err) return nil, err } for i := 0; i < len(pbuf.Bytes()); i++ { if err := binary.Write(buf, binary.BigEndian, uint8(pbuf.Bytes()[i])); err != nil { log.Printf("error when packing message: %v", err) return nil, err } } return buf, nil } // CallRealGRPC handles real gRPC queries, i.e. queries that use the normal gRPC // generated code and the normal HTTP/2-based transport. func CallRealGRPC(query Query) { qURL, err := url.Parse(query.URL()) if query.CheckErr(err) { log.Printf("grpc url parse failed: %v", err) return } const requiredPath = "/echo.EchoService/Echo" if qURL.Path != requiredPath { query.Result()["error"] = fmt.Sprintf("GRPC path %s is not %s", qURL.Path, requiredPath) return } dialHost := qURL.Host if !strings.Contains(dialHost, ":") { // There is no port number in the URL, but grpc.Dial wants host:port. if qURL.Scheme == "https" { dialHost = dialHost + ":443" } else { dialHost = dialHost + ":80" } } ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() // Dial runs in the background and thus always appears to succeed. If you // pass grpc.WithBlock() to make it wait for a connection, failures just hit // the deadline rather than returning a useful error like "no such host" or // "connection refused" or whatever. Perhaps they are considered "transient" // and there's some retry logic we need to turn off. Anyhow, we don't pass // grpc.WithBlock(), instead letting the error happen at the request below. 
// This makes useful error messages visible in most cases. var dialOptions []grpc.DialOption if qURL.Scheme != "https" { dialOptions = append(dialOptions, grpc.WithInsecure()) } conn, err := grpc.DialContext(ctx, dialHost, dialOptions...) if query.CheckErr(err) { log.Printf("grpc dial failed: %v", err) return } defer conn.Close() client := grpc_echo_pb.NewEchoServiceClient(conn) request := &grpc_echo_pb.EchoRequest{Data: "real gRPC"} // Prepare outgoing headers, which are passed via Context md := metadata.MD{} headers, ok := query["headers"] if ok { for key, val := range headers.(map[string]interface{}) { md.Set(key, val.(string)) } } ctx = metadata.NewOutgoingContext(ctx, md) response, err := client.Echo(ctx, request) stat, ok := status.FromError(err) if !ok { // err is not nil and not a grpc Status query.CheckErr(err) log.Printf("grpc echo request failed: %v", err) return } // It's hard to tell the difference between a failed connection and a // successful connection that set an error code. We'll use the // heuristic that DNS errors and Connection Refused both appear to // return code 14 (Code.Unavailable). grpcCode := int(stat.Code()) if grpcCode == 14 { query.CheckErr(err) log.Printf("grpc echo request connection failed: %v", err) return } // Now process the response and synthesize the requisite result values. // Note: Don't set result.body to anything that cannot be decoded as base64, // or the kat harness will fail. 
resHeader := make(http.Header) resHeader.Add("Grpc-Status", fmt.Sprint(grpcCode)) resHeader.Add("Grpc-Message", stat.Message()) result := query.Result() result["headers"] = resHeader result["body"] = "" result["status"] = 200 if err == nil { result["text"] = response // q.r.json needs a different format } // Stuff that's not available: // - query.result.status (the HTTP status -- synthesized as 200) // - query.result.headers (the HTTP response headers -- we're just putting // in grpc-status and grpc-message as the former is required by the // tests and the latter can be handy) // - query.result.body (the raw HTTP body) // - query.result.json or query.result.text (the parsed HTTP body -- we're // emitting the full EchoResponse object in the text field) } // ExecuteQuery constructs the appropriate request, executes it, and records the // response and related information in query.result. func ExecuteQuery(query Query) { // Websocket stuff is handled elsewhere if query.IsWebsocket() { ExecuteWebsocketQuery(query) return } // Real gRPC is handled elsewhere if query.GrpcType() == "real" { CallRealGRPC(query) return } // Prepare an http.Transport with customized TLS settings. 
transport := &http.Transport{ MaxIdleConns: 10, IdleConnTimeout: 30 * time.Second, TLSClientConfig: &tls.Config{}, } if query.Insecure() { transport.TLSClientConfig.InsecureSkipVerify = true } if caCert := query.CACert(); len(caCert) > 0 { caCertPool := x509.NewCertPool() caCertPool.AppendCertsFromPEM([]byte(caCert)) transport.TLSClientConfig.RootCAs = caCertPool } if query.ClientCert() != "" || query.ClientKey() != "" { clientCert, err := tls.X509KeyPair([]byte(query.ClientCert()), []byte(query.ClientKey())) if err != nil { log.Fatal(err) } transport.TLSClientConfig.Certificates = []tls.Certificate{clientCert} } if query.MinTLSVersion() != 0 { transport.TLSClientConfig.MinVersion = query.MinTLSVersion() } if query.MaxTLSVersion() != 0 { transport.TLSClientConfig.MaxVersion = query.MaxTLSVersion() } if len(query.CipherSuites()) > 0 { transport.TLSClientConfig.CipherSuites = query.CipherSuites() } if len(query.ECDHCurves()) > 0 { transport.TLSClientConfig.CurvePreferences = query.ECDHCurves() } // Prepare the HTTP request var body io.Reader method := query.Method() if query.GrpcType() != "" { // Perform special handling for gRPC-bridge and gRPC-web buf, err := GetGRPCReqBody() if query.CheckErr(err) { log.Printf("gRPC buffer error: %v", err) return } if query.GrpcType() == "web" { result := make([]byte, base64.StdEncoding.EncodedLen(buf.Len())) base64.StdEncoding.Encode(result, buf.Bytes()) buf = bytes.NewBuffer(result) } body = buf method = "POST" } else { body = query.Body() } req, err := http.NewRequest(method, query.URL(), body) if query.CheckErr(err) { log.Printf("request error: %v", err) return } req.Header = query.Headers() for _, cookie := range query.Cookies() { req.AddCookie(&cookie) } // Save the client's start date. 
query["client-start-date"] = time.Now().Format(time.RFC3339Nano) // Handle host and SNI host := req.Header.Get("Host") if host != "" { if query.SNI() { transport.TLSClientConfig.ServerName = host } req.Host = host } // Perform the request and save the results. client := &http.Client{ Transport: transport, Timeout: time.Duration(10 * time.Second), CheckRedirect: func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse }, } resp, err := client.Do(req) if query.CheckErr(err) { return } query.AddResponse(resp) } type Args struct { input string output string } func parseArgs(rawArgs ...string) Args { var args Args flagset := flag.NewFlagSet("kat-client", flag.ExitOnError) flagset.StringVar(&args.input, "input", "", "input filename") flagset.StringVar(&args.output, "output", "", "output filename") flagset.Parse(rawArgs) return args } func main() { debug_grpc_web = false rlimit() args := parseArgs(os.Args[1:]...) var data []byte var err error // Read input file if args.input == "" { data, err = ioutil.ReadAll(os.Stdin) } else { data, err = ioutil.ReadFile(args.input) } if err != nil { panic(err) } // Parse input file var specs []Query err = json.Unmarshal(data, &specs) if err != nil { panic(err) } // Prep semaphore to limit concurrency limitStr := os.Getenv("KAT_QUERY_LIMIT") limit, err := strconv.Atoi(limitStr) if err != nil { limit = 25 } sem := NewSemaphore(limit) // Launch queries concurrently count := len(specs) queries := make(chan bool) for i := 0; i < count; i++ { go func(idx int) { sem.Acquire() defer func() { queries <- true sem.Release() }() ExecuteQuery(specs[idx]) }(i) } // Wait for all the answers for i := 0; i < count; i++ { <-queries } // Generate the output file bytes, err := json.MarshalIndent(specs, "", " ") if err != nil { log.Print(err) } else if args.output == "" { fmt.Print(string(bytes)) } else { err = ioutil.WriteFile(args.output, bytes, 0644) if err != nil { log.Print(err) } } }
{ "content_hash": "bca2e318fa8fa4031d0391a74ae0bfa2", "timestamp": "", "source": "github", "line_count": 910, "max_line_length": 113, "avg_line_length": 27.145054945054945, "alnum_prop": 0.670876852076755, "repo_name": "datawire/ambassador", "id": "9e2fd5662b37a234de29e4e816531df8206804a8", "size": "24702", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cmd/kat-client/client.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "20990" }, { "name": "Go", "bytes": "564752" }, { "name": "HTML", "bytes": "25150" }, { "name": "JavaScript", "bytes": "32368" }, { "name": "Makefile", "bytes": "113905" }, { "name": "Python", "bytes": "1158187" }, { "name": "Shell", "bytes": "188832" } ], "symlink_target": "" }
package javax.swing; import java.awt.*; import java.awt.event.*; /** * The editor component used for JComboBox components. * * @author Arnaud Weber */ public interface ComboBoxEditor { /** Return the component that should be added to the tree hierarchy for * this editor */ public Component getEditorComponent(); /** Set the item that should be edited. Cancel any editing if necessary **/ public void setItem(Object anObject); /** Return the edited item **/ public Object getItem(); /** Ask the editor to start editing and to select everything **/ public void selectAll(); /** Add an ActionListener. An action event is generated when the edited item changes **/ public void addActionListener(ActionListener l); /** Remove an ActionListener **/ public void removeActionListener(ActionListener l); }
{ "content_hash": "c8d51ab7aaee16ba41c319dee6bf57b3", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 90, "avg_line_length": 25.545454545454547, "alnum_prop": 0.7153024911032029, "repo_name": "haikuowuya/android_system_code", "id": "568d173e27c4cb68ab165c2e1ad0c2181c4acf49", "size": "1058", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "src/javax/swing/ComboBoxEditor.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "182432" }, { "name": "Java", "bytes": "124952631" } ], "symlink_target": "" }
COCOAnnotate ============ CSA kifu annotator
{ "content_hash": "255ccbb10f7e1501d4756aea845e8242", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 18, "avg_line_length": 11.5, "alnum_prop": 0.6086956521739131, "repo_name": "kimrin/COCOAnnotate", "id": "4ddbc2007445947fb0df25da3e2550ff1dbfcfbc", "size": "46", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "3437" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0) on Wed Apr 05 01:58:42 EDT 2017 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>org.owasp.appsensor.accesscontrol (appsensor-parent 2.3.2 API)</title> <meta name="date" content="2017-04-05"> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.owasp.appsensor.accesscontrol (appsensor-parent 2.3.2 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <div class="header"> <h1 title="Package" class="title">Package&nbsp;org.owasp.appsensor.accesscontrol</h1> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="typeSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation"> <caption><span>Class Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Class</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../org/owasp/appsensor/accesscontrol/ReferenceAccessController.html" title="class in org.owasp.appsensor.accesscontrol">ReferenceAccessController</a></td> </tr> </tbody> </table> </li> </ul> </div> <p class="legalCopy"><small>Copyright &#169; 2017 <a href="http://www.owasp.org">The Open Web Application Security Project (OWASP)</a>. All rights reserved.</small></p> </body> </html>
{ "content_hash": "595eb17e71e80167d4ce229c260e4f13", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 193, "avg_line_length": 38.265306122448976, "alnum_prop": 0.6768, "repo_name": "dscrobonia/appsensor", "id": "e90e3fa949b5c9eb9000490af725f416bf7c2380", "size": "1875", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "appsensor-dot-org/site-contents/docs/v2.3.2/javadoc/org/owasp/appsensor/accesscontrol/package-summary.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "583594" }, { "name": "HTML", "bytes": "817683" }, { "name": "Java", "bytes": "1011903" }, { "name": "JavaScript", "bytes": "12314518" }, { "name": "Python", "bytes": "2875" }, { "name": "Ruby", "bytes": "43017" }, { "name": "Shell", "bytes": "11114" }, { "name": "Thrift", "bytes": "1916" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using System.Text; using System.Threading.Tasks; namespace Vita.Tools.Testing { public class TestClassInfo { public Type TestClass; public MethodInfo Init; public MethodInfo Cleanup; public List<MethodInfo> Tests = new List<MethodInfo>(); public TestClassInfo(Type testClass) { TestClass = testClass; Setup(); } public override string ToString() { return Tests.Count + " Tests; Init:" + Init + ", Cleanup:" + Cleanup; } private void Setup() { var methods = TestClass.GetTypeInfo().GetMethods(BindingFlags.Public | BindingFlags.Instance); var voidMethods = methods.Where(m => m.ReturnType == typeof(void) && m.GetParameters().Length == 0).ToList(); foreach (var method in voidMethods) { var tkind = TestUtil.GetMethodKind(method); switch (tkind) { case TestMethodKind.Init: this.Init = method; break; case TestMethodKind.Test: this.Tests.Add(method); break; case TestMethodKind.Cleanup: this.Cleanup = method; break; } }//foreach } }//class }
{ "content_hash": "3dd9951ac33ec2423f4e3a35b734272f", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 115, "avg_line_length": 29.55, "alnum_prop": 0.6632825719120136, "repo_name": "rivantsov/vita", "id": "c1af7de52f607abd8a61811e05d2071c14ca9864", "size": "1184", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/1.Framework/Vita.Tools/Testing/TestClassInfo.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "3163" }, { "name": "C#", "bytes": "2140470" } ], "symlink_target": "" }
/** * Provides HttpMessageConverter implementations for handling Atom and RSS feeds. * Based on the <a href="https://github.com/rometools/rome">ROME tools</a> project. */ package org.springframework.http.converter.feed;
{ "content_hash": "2d578401ebc4f1f400eeb16315904b2d", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 83, "avg_line_length": 44.6, "alnum_prop": 0.7623318385650224, "repo_name": "boggad/jdk9-sample", "id": "440db3d3778ce01b5393f86e9cfe049d1228b63b", "size": "223", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "sample-catalog/spring-jdk9/src/spring.web/org/springframework/http/converter/feed/package-info.java", "mode": "33188", "license": "mit", "language": [ { "name": "AspectJ", "bytes": "32336" }, { "name": "HTML", "bytes": "124863" }, { "name": "Java", "bytes": "30303872" }, { "name": "Makefile", "bytes": "1069" }, { "name": "Standard ML", "bytes": "119878" }, { "name": "XSLT", "bytes": "16226" } ], "symlink_target": "" }
package com.ponkotuy.run import com.ponkotuy.build.BuildInfo import com.ponkotuy.config.ClientConfig import com.ponkotuy.http.MFGHttp import com.ponkotuy.intercept.KCInterceptor import com.ponkotuy.proxy.{KCFiltersSource, LittleProxy} import com.ponkotuy.util.Log import com.ponkotuy.value.KCServer import io.netty.util.ResourceLeakDetector /** * * @author ponkotuy * Date: 14/02/18. */ object Main extends App with Log { ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.ADVANCED) try { message() val proxy = new LittleProxy( ClientConfig.proxyHost, ClientConfig.proxyPort, ClientConfig.upstreamProxyHost, new KCFiltersSource(KCServer.ips, new KCInterceptor()) ) proxy.start() } catch { case e: ExceptionInInitializerError => logger.info("proxy初期化エラー", e) println("application.confが存在しないか設定が無効です。application.conf.sampleをコピーして設定しましょう") } def message(): Unit = { println() println("---------------------------------------------") println(s" Welcome to MyFleetGirls Client Ver ${BuildInfo.version}") println("---------------------------------------------") println() try { val url = s"${ClientConfig.post}/assets/message" MFGHttp.getOrig(url).foreach { str => str.lines.foreach(println) } } catch { case e: Throwable => } println() } }
{ "content_hash": "67cf19649516eb53fb29a484b6fcfefc", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 84, "avg_line_length": 27.176470588235293, "alnum_prop": 0.6507936507936508, "repo_name": "b-wind/MyFleetGirls", "id": "20c9a5b691b08bd7ebf82ead3c326699b6017206", "size": "1454", "binary": false, "copies": "2", "ref": "refs/heads/my-debug", "path": "client/src/main/scala/com/ponkotuy/run/Main.scala", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "758" }, { "name": "CSS", "bytes": "1903" }, { "name": "CoffeeScript", "bytes": "57061" }, { "name": "HTML", "bytes": "137587" }, { "name": "Java", "bytes": "9514" }, { "name": "JavaScript", "bytes": "22570" }, { "name": "PLSQL", "bytes": "1619" }, { "name": "Scala", "bytes": "742029" }, { "name": "Shell", "bytes": "2251" } ], "symlink_target": "" }
const struct CDAttachmentAttributes CDAttachmentAttributes = { .id = @"id", .mimeType = @"mimeType", .url = @"url", }; const struct CDAttachmentRelationships CDAttachmentRelationships = { .message = @"message", }; @implementation CDAttachmentID @end @implementation _CDAttachment + (id)insertInManagedObjectContext:(NSManagedObjectContext*)moc_ { NSParameterAssert(moc_); return [NSEntityDescription insertNewObjectForEntityForName:@"CDAttachment" inManagedObjectContext:moc_]; } + (NSString*)entityName { return @"CDAttachment"; } + (NSEntityDescription*)entityInManagedObjectContext:(NSManagedObjectContext*)moc_ { NSParameterAssert(moc_); return [NSEntityDescription entityForName:@"CDAttachment" inManagedObjectContext:moc_]; } - (CDAttachmentID*)objectID { return (CDAttachmentID*)[super objectID]; } + (NSSet*)keyPathsForValuesAffectingValueForKey:(NSString*)key { NSSet *keyPaths = [super keyPathsForValuesAffectingValueForKey:key]; return keyPaths; } @dynamic id; @dynamic mimeType; @dynamic url; @dynamic message; @end
{ "content_hash": "4b76e0aaa3ed4a54f7e34b8037be5d90", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 106, "avg_line_length": 21.53061224489796, "alnum_prop": 0.7725118483412322, "repo_name": "ingridmmjos/IngridProject", "id": "28a3ea42c1b5b27c67001ea88e21da2f86531efc", "size": "1201", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Pods/QMServicesDevelopment/QMChatCache/QMChatCache/CoreData/EntriesMachine/_CDAttachment.m", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2544" }, { "name": "Objective-C", "bytes": "804507" }, { "name": "Shell", "bytes": "1140" } ], "symlink_target": "" }
include(CMakeParseArguments) # cmake_parse_arguments include(hunter_check_download_error_message) include(hunter_init_not_found_counter) include(hunter_internal_error) include(hunter_sleep_before_download) include(hunter_status_debug) include(hunter_test_string_not_empty) include(hunter_user_error) function(hunter_download_cache_meta_file) hunter_test_string_not_empty("${HUNTER_CACHED_ROOT}") cmake_parse_arguments(x "" "LOCAL;DONE" "" ${ARGV}) # -> x_LOCAL # -> x_DONE # -> x_UNPARSED_ARGUMENTS string(COMPARE NOTEQUAL "${x_UNPARSED_ARGUMENTS}" "" has_unparsed) if(has_unparsed) hunter_internal_error("Unparsed: ${x_UNPARSED_ARGUMENTS}") endif() string(COMPARE EQUAL "${x_LOCAL}" "" is_empty) if(is_empty) hunter_internal_error("LOCAL can't be empty") endif() string(COMPARE EQUAL "${x_DONE}" "" is_empty) if(is_empty) hunter_internal_error("DONE can't be empty") endif() set(cache_directory "${HUNTER_CACHED_ROOT}/_Base/Cache") hunter_lock_directory("${cache_directory}" "") string(REPLACE "${cache_directory}/meta/" "" local_suffix "${x_LOCAL}") string(REPLACE "${cache_directory}/meta/" "" done_suffix "${x_DONE}") set(local_temp "${x_LOCAL}.__HUNTER_TEMP__") set(done_temp "${x_DONE}.__HUNTER_TEMP__") if(EXISTS "${x_DONE}") return() endif() file(REMOVE "${x_LOCAL}") string(COMPARE EQUAL "${HUNTER_USE_CACHE_SERVERS}" "NO" disable_server) if(disable_server) hunter_status_debug("Skip servers") return() endif() list(LENGTH HUNTER_CACHE_SERVERS number_of_servers) hunter_init_not_found_counter( NOT_FOUND_NEEDED not_found_counter "${number_of_servers}" ) set(total_retry 10) foreach(x RANGE ${total_retry}) foreach(server ${HUNTER_CACHE_SERVERS}) string(REGEX MATCH "^https://github.com/" is_github "${server}") if(NOT is_github) set(local_url "${server}/meta/${local_suffix}") set(done_url "${server}/meta/${done_suffix}") else() string( REPLACE "https://github.com/" "https://raw.githubusercontent.com/" url "${server}" ) set(local_url "${url}/master/${local_suffix}") set(done_url 
"${url}/master/${done_suffix}") endif() hunter_status_debug("Downloading file (try #${x} of ${total_retry}):") hunter_status_debug(" ${done_url}") hunter_status_debug(" -> ${x_DONE}") hunter_sleep_before_download("${x}") file(DOWNLOAD "${done_url}" "${done_temp}" STATUS status) list(GET status 0 error_code) list(GET status 1 error_message) hunter_check_download_error_message( ERROR_CODE "${error_code}" ERROR_MESSAGE "${error_message}" REMOVE_ON_ERROR "${done_temp}" NOT_FOUND_COUNTER not_found_counter ) if(NOT error_code EQUAL 0) if(error_code EQUAL 22) hunter_status_debug("File not found") if(NOT_FOUND_NEEDED EQUAL not_found_counter) return() endif() else() hunter_status_debug("Download error (${error_message})") endif() continue() endif() # Done stamp exists, now downloading real file hunter_init_not_found_counter( NOT_FOUND_NEEDED not_found_counter 1 # polling one server ) set(total_retry ${NOT_FOUND_NEEDED}) foreach(x RANGE ${total_retry}) hunter_status_debug("Downloading file (try #${x} of ${total_retry}):") hunter_status_debug(" ${local_url}") hunter_status_debug(" -> ${x_LOCAL}") hunter_sleep_before_download("${x}") file(DOWNLOAD "${local_url}" "${local_temp}" STATUS status) list(GET status 0 error_code) list(GET status 1 error_message) hunter_check_download_error_message( ERROR_CODE "${error_code}" ERROR_MESSAGE "${error_message}" REMOVE_ON_ERROR "${local_temp}" NOT_FOUND_COUNTER not_found_counter ) if(error_code EQUAL 0) # Success. Rename temporary files to the final destination. # RENAME operation is atomic. Note that DONE should be the last to # signal that everything ended as expected. file(RENAME "${local_temp}" "${x_LOCAL}") file(RENAME "${done_temp}" "${x_DONE}") return() elseif(error_code EQUAL 22) hunter_status_debug("File not found") else() hunter_status_debug("Download error (${error_message})") endif() endforeach() file(REMOVE "${done_temp}") hunter_internal_error( "Server error. 
File not exists but DONE stamp found.\n" " file: ${local_url}" " done: ${done_url}" ) endforeach() endforeach() endfunction()
{ "content_hash": "74d0654f6d44b591ea4b1ff1004751e8", "timestamp": "", "source": "github", "line_count": 155, "max_line_length": 78, "avg_line_length": 31.18064516129032, "alnum_prop": 0.6037657769501344, "repo_name": "Knitschi/hunter", "id": "fa5375cc09cfd01896612d9669cf9386ea8e3b4e", "size": "4894", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cmake/modules/hunter_download_cache_meta_file.cmake", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "1868" }, { "name": "C++", "bytes": "56148" }, { "name": "CMake", "bytes": "1089446" }, { "name": "Python", "bytes": "36239" }, { "name": "Shell", "bytes": "25428" } ], "symlink_target": "" }
#ifndef SHARE_VM_OOPS_FIELDINFO_HPP #define SHARE_VM_OOPS_FIELDINFO_HPP #include "oops/typeArrayOop.hpp" #include "classfile/vmSymbols.hpp" // This class represents the field information contained in the fields // array of an instanceKlass. Currently it's laid on top an array of // Java shorts but in the future it could simply be used as a real // array type. FieldInfo generally shouldn't be used directly. // Fields should be queried either through instanceKlass or through // the various FieldStreams. class FieldInfo VALUE_OBJ_CLASS_SPEC { friend class fieldDescriptor; friend class JavaFieldStream; friend class ClassFileParser; public: // fields // Field info extracted from the class file and stored // as an array of 7 shorts enum FieldOffset { access_flags_offset = 0, name_index_offset = 1, signature_index_offset = 2, initval_index_offset = 3, low_offset = 4, high_offset = 5, field_slots = 6 }; private: u2 _shorts[field_slots]; void set_name_index(u2 val) { _shorts[name_index_offset] = val; } void set_signature_index(u2 val) { _shorts[signature_index_offset] = val; } void set_initval_index(u2 val) { _shorts[initval_index_offset] = val; } u2 name_index() const { return _shorts[name_index_offset]; } u2 signature_index() const { return _shorts[signature_index_offset]; } u2 initval_index() const { return _shorts[initval_index_offset]; } public: static FieldInfo* from_field_array(typeArrayOop fields, int index) { return ((FieldInfo*)fields->short_at_addr(index * field_slots)); } static FieldInfo* from_field_array(u2* fields, int index) { return ((FieldInfo*)(fields + index * field_slots)); } void initialize(u2 access_flags, u2 name_index, u2 signature_index, u2 initval_index, u4 offset) { _shorts[access_flags_offset] = access_flags; _shorts[name_index_offset] = name_index; _shorts[signature_index_offset] = signature_index; _shorts[initval_index_offset] = initval_index; set_offset(offset); } u2 access_flags() const { return _shorts[access_flags_offset]; } u4 offset() 
const { return build_int_from_shorts(_shorts[low_offset], _shorts[high_offset]); } Symbol* name(constantPoolHandle cp) const { int index = name_index(); if (is_internal()) { return lookup_symbol(index); } return cp->symbol_at(index); } Symbol* signature(constantPoolHandle cp) const { int index = signature_index(); if (is_internal()) { return lookup_symbol(index); } return cp->symbol_at(index); } void set_access_flags(u2 val) { _shorts[access_flags_offset] = val; } void set_offset(u4 val) { _shorts[low_offset] = extract_low_short_from_int(val); _shorts[high_offset] = extract_high_short_from_int(val); } bool is_internal() const { return (access_flags() & JVM_ACC_FIELD_INTERNAL) != 0; } Symbol* lookup_symbol(int symbol_index) const { assert(is_internal(), "only internal fields"); return vmSymbols::symbol_at((vmSymbols::SID)symbol_index); } }; #endif // SHARE_VM_OOPS_FIELDINFO_HPP
{ "content_hash": "0e8a4746b424923910c9bef55b5e81a1", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 125, "avg_line_length": 34.87, "alnum_prop": 0.61198738170347, "repo_name": "codefollower/Open-Source-Research", "id": "f4cf1a4c40f2bec29c44696edae81620cf55c396", "size": "4540", "binary": false, "copies": "16", "ref": "refs/heads/master", "path": "HotSpot1.7/src/share/vm/oops/fieldInfo.hpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "146130" }, { "name": "C", "bytes": "1175615" }, { "name": "C++", "bytes": "52932465" }, { "name": "CSS", "bytes": "13948" }, { "name": "D", "bytes": "90195" }, { "name": "Java", "bytes": "32377266" }, { "name": "JavaScript", "bytes": "39651" }, { "name": "Objective-C", "bytes": "18238" }, { "name": "Shell", "bytes": "276979" }, { "name": "XSLT", "bytes": "351083" } ], "symlink_target": "" }
!****h* ROBODoc/H5R ! ! NAME ! MODULE H5R ! ! FILE ! fortran/src/H5Rff.f90 ! ! PURPOSE ! This file contains Fortran interfaces for H5R functions. It includes ! all the functions that are independent on whether the Fortran 2003 functions ! are enabled or disabled. ! ! ! COPYRIGHT ! * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ! Copyright by The HDF Group. * ! Copyright by the Board of Trustees of the University of Illinois. * ! All rights reserved. * ! * ! This file is part of HDF5. The full HDF5 copyright notice, including * ! terms governing use, modification, and redistribution, is contained in * ! the files COPYING and Copyright.html. COPYING can be found at the root * ! of the source code distribution tree; Copyright.html can be found at the * ! root level of an installed copy of the electronic HDF5 document set and * ! is linked from the top-level documents page. It can also be found at * ! http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * ! access to either file, you may request a copy from help@hdfgroup.org. * ! * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * ! ! NOTES ! *** IMPORTANT *** ! If you add a new H5R function you must add the function name to the ! Windows dll file 'hdf5_fortrandll.def.in' in the fortran/src directory. ! This is needed for Windows based operating systems. ! !***** MODULE H5R USE H5GLOBAL ! If you change the value of these parameters, do not forget to change corresponding ! values in the H5f90.h file. ! INTEGER, PARAMETER :: REF_OBJ_BUF_LEN = 2 ! INTEGER, PARAMETER :: REF_REG_BUF_LEN = 3 ! ! TYPE hobj_ref_t_f ! INTEGER ref(REF_OBJ_BUF_LEN) ! END TYPE ! ! TYPE hdset_reg_ref_t_f ! INTEGER ref(REF_REG_BUF_LEN) ! END TYPE ! INTERFACE h5rget_object_type_f MODULE PROCEDURE h5rget_object_type_obj_f END INTERFACE CONTAINS !****s* H5R/h5rget_object_type_obj_f ! ! NAME ! h5rget_object_type_obj_f ! ! PURPOSE ! 
Retrieves the type of object that an object reference points to. ! ! INPUTS ! dset_id - identifier of the dataset containing ! reference to the objects ! ref - reference to open ! OUTPUTS ! obj_type - object_type, possible values: ! H5G_UNKNOWN_F (-1) ! H5G_GROUP_F 0 ! H5G_DATASET_F 1 ! H5G_TYPE_F 2 ! H5G_LINK_F 3 ! hdferr - Returns 0 if successful and -1 if fails ! ! AUTHOR ! Elena Pourmal ! August 12, 1999 ! ! HISTORY ! Explicit Fortran interfaces were added for ! called C functions (it is needed for Windows ! port). February 28, 2001 ! ! NOTES ! This is a module procedure for the h5rget_object_type_f ! subroutine. ! SOURCE SUBROUTINE h5rget_object_type_obj_f(dset_id, ref, obj_type, hdferr) IMPLICIT NONE INTEGER(HID_T), INTENT(IN) :: dset_id ! Dataset identifier TYPE(hobj_ref_t_f), INTENT(IN) :: ref ! Object reference INTEGER, INTENT(OUT) :: obj_type ! Object type ! H5G_UNKNOWN_F (-1) ! H5G_GROUP_F 0 ! H5G_DATASET_F 1 ! H5G_TYPE_F 2 ! H5G_LINK_F 3 INTEGER, INTENT(OUT) :: hdferr ! Error code !***** INTEGER(HADDR_T) :: ref_f ! Local buffer to pass reference INTERFACE INTEGER FUNCTION h5rget_object_type_obj_c(dset_id, ref_f, obj_type) USE H5GLOBAL !DEC$IF DEFINED(HDF5F90_WINDOWS) !DEC$ATTRIBUTES C,reference,decorate,alias:'H5RGET_OBJECT_TYPE_OBJ_C':: h5rget_object_type_obj_c !DEC$ENDIF ! INTEGER, PARAMETER :: REF_OBJ_BUF_LEN = 2 INTEGER(HID_T), INTENT(IN) :: dset_id INTEGER(HADDR_T) :: ref_f INTEGER, INTENT(OUT) :: obj_type END FUNCTION h5rget_object_type_obj_c END INTERFACE ref_f = ref%ref hdferr = h5rget_object_type_obj_c(dset_id, ref_f, obj_type ) END SUBROUTINE h5rget_object_type_obj_f END MODULE H5R
{ "content_hash": "216626b89832f8df7d9a549267a54363", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 105, "avg_line_length": 34.93076923076923, "alnum_prop": 0.5463554283197534, "repo_name": "parkin/hdf5.js", "id": "64f136505cf4a9059e0e2f91072d39d5845efd8c", "size": "4541", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "hdf5-1.8.12/fortran/src/H5Rff.f90", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "24970066" }, { "name": "C++", "bytes": "1020496" }, { "name": "CSS", "bytes": "7532" }, { "name": "FORTRAN", "bytes": "2643862" }, { "name": "LiveScript", "bytes": "149612" }, { "name": "Perl", "bytes": "78876" }, { "name": "Scilab", "bytes": "998" }, { "name": "Shell", "bytes": "409155" } ], "symlink_target": "" }
"""MetaGraph and related functions.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import copy import os.path import re from collections import OrderedDict, deque import six from google.protobuf.any_pb2 import Any from google.protobuf import text_format from tensorflow.core.framework import attr_value_pb2 from tensorflow.core.framework import graph_pb2 from tensorflow.core.framework import op_def_pb2 from tensorflow.core.protobuf import meta_graph_pb2 from tensorflow.core.protobuf import saver_pb2 from tensorflow.python.framework import graph_io from tensorflow.python.framework import importer from tensorflow.python.framework import op_def_registry from tensorflow.python.framework import ops from tensorflow.python.framework import versions from tensorflow.python.lib.io import file_io from tensorflow.python.platform import tf_logging as logging from tensorflow.python.util import compat # Prefix to be added to unbound input names so they are easily identifiable. _UNBOUND_INPUT_PREFIX = "$unbound_inputs_" def _node_def(from_node_def, export_scope, unbound_inputs, clear_devices=False): """Create a `NodeDef` proto with export_scope stripped. Args: from_node_def: A `node_def_pb2.NodeDef` protocol buffer. export_scope: A `string` representing the name scope to remove. unbound_inputs: An array of unbound input names if they exist. clear_devices: Boolean which controls whether to clear device information from node_def. Default false. Returns: A `node_def_pb2.NodeDef` protocol buffer. """ node_def = copy.deepcopy(from_node_def) for i, v in enumerate(node_def.input): if (export_scope and not node_def.input[i].lstrip("^").startswith(export_scope)): # Adds "$unbound_inputs_" prefix to the unbound name so they are easily # identifiable. 
node_def.input[i] = re.sub(r"([\^]|^)(.*)", r"\1" + _UNBOUND_INPUT_PREFIX + r"\2", compat.as_str(v)) unbound_inputs.append(node_def.input[i]) else: node_def.input[i] = ops.strip_name_scope(v, export_scope) node_def.name = compat.as_bytes( ops.strip_name_scope(from_node_def.name, export_scope)) for k, v in six.iteritems(from_node_def.attr): if k == "_class": new_s = [compat.as_bytes( ops.strip_name_scope(s, export_scope)) for s in v.list.s if not export_scope or compat.as_str(s).split("@")[1].startswith(export_scope)] node_def.attr[k].CopyFrom(attr_value_pb2.AttrValue( list=attr_value_pb2.AttrValue.ListValue(s=new_s))) else: node_def.attr[k].CopyFrom(v) if clear_devices: node_def.device = "" return node_def def _read_file(filename): """Reads a file containing `GraphDef` and returns the protocol buffer. Args: filename: `graph_def` filename including the path. Returns: A `GraphDef` protocol buffer. Raises: IOError: If the file doesn't exist, or cannot be successfully parsed. """ graph_def = graph_pb2.GraphDef() if not file_io.file_exists(filename): raise IOError("File %s does not exist." % filename) # First try to read it as a binary file. file_content = file_io.FileIO(filename, "rb").read() try: graph_def.ParseFromString(file_content) return graph_def except Exception: # pylint: disable=broad-except pass # Next try to read it as a text file. try: text_format.Merge(file_content, graph_def) except text_format.ParseError as e: raise IOError("Cannot parse file %s: %s." % (filename, str(e))) return graph_def def ops_used_by_graph_def(graph_def): """Collect the list of ops used by a graph. Does not validate that the ops are all registered. Args: graph_def: A `GraphDef` proto, as from `graph.as_graph_def()`. Returns: A list of strings, each naming an op used by the graph. """ # Map function names to definitions name_to_function = {} for fun in graph_def.library.function: name_to_function[fun.signature.name] = fun # Collect the list of op names. 
Since functions can reference functions, we # need a recursive traversal. used_ops = set() # Includes both primitive ops and functions functions_to_process = [] # A subset of used_ops def mark_op_as_used(op): if op not in used_ops and op in name_to_function: functions_to_process.append(name_to_function[op]) used_ops.add(op) for node in graph_def.node: mark_op_as_used(node.op) while functions_to_process: fun = functions_to_process.pop() for node in fun.node_def: mark_op_as_used(node.op) return [op for op in used_ops if op not in name_to_function] def stripped_op_list_for_graph(graph_def): """Collect the stripped OpDefs for ops used by a graph. This function computes the `stripped_op_list` field of `MetaGraphDef` and similar protos. The result can be communicated from the producer to the consumer, which can then use the C++ function `RemoveNewDefaultAttrsFromGraphDef` to improve forwards compatibility. Args: graph_def: A `GraphDef` proto, as from `graph.as_graph_def()`. Returns: An `OpList` of ops used by the graph. Raises: ValueError: If an unregistered op is used. """ # This is the Python equivalent of StrippedOpListForGraph in C++. # Unfortunately, since the Python op registry can differ from that in C++, we # can't remove the duplication using swig (at least naively). # TODO(irving): Support taking graphs directly. used_ops = ops_used_by_graph_def(graph_def) # Verify that all used ops are registered. registered_ops = op_def_registry.get_registered_ops() # These internal ops used by functions are not registered, so we need to # whitelist them. # TODO(irving): Do something better here. 
op_whitelist = ("_Arg", "_Retval", "_ListToArray", "_ArrayToList") for op in used_ops: if op not in registered_ops and op not in op_whitelist: raise ValueError("Op %s is used by the graph, but is not registered" % op) # Build the stripped op list in sorted order return op_def_pb2.OpList(op=[registered_ops[op] for op in sorted(used_ops) if op in registered_ops]) def _get_kind_name(item): """Returns the kind name in CollectionDef. Args: item: A data item. Returns: The string representation of the kind in CollectionDef. """ if isinstance(item, (six.string_types, six.binary_type)): kind = "bytes_list" elif isinstance(item, six.integer_types): kind = "int64_list" elif isinstance(item, float): kind = "float_list" elif isinstance(item, Any): kind = "any_list" else: kind = "node_list" return kind def _should_include_node(node_or_node_name, export_scope): """Returns `True` if a node should be included. Args: node_or_node_name: A node or `string` node name. export_scope: `string`. Name scope under which to extract the subgraph. The scope name will be striped from the node definitions for easy import later into new name scopes. Returns: `True` if the node should be included. """ if not isinstance(node_or_node_name, six.string_types): try: node_name = node_or_node_name.name except AttributeError: # Keep the object that we don't know how to process. return True else: node_name = node_or_node_name return (node_name.startswith(_UNBOUND_INPUT_PREFIX) or (not export_scope or node_name.startswith(export_scope))) def add_collection_def(meta_graph_def, key, graph=None, export_scope=None): """Adds a collection to MetaGraphDef protocol buffer. Args: meta_graph_def: MetaGraphDef protocol buffer. key: One of the GraphKeys or user-defined string. graph: The `Graph` from which to get collections. export_scope: Optional `string`. Name scope to remove. 
""" if graph and not isinstance(graph, ops.Graph): raise TypeError("graph must be of type Graph, not %s", type(graph)) if not isinstance(key, six.string_types) and not isinstance(key, bytes): logging.warning("Only collections with string type keys will be " "serialized. This key has %s", type(key)) return # Sets graph to default graph if it's not passed in. graph = graph or ops.get_default_graph() collection_list = graph.get_collection(key) # Remove nodes that should not be exported from the collection list. collection_list = [x for x in collection_list if _should_include_node(x, export_scope)] if not collection_list: return try: col_def = meta_graph_def.collection_def[key] to_proto = ops.get_to_proto_function(key) proto_type = ops.get_collection_proto_type(key) if to_proto: kind = "bytes_list" for x in collection_list: # Additional type check to make sure the returned proto is indeed # what we expect. proto = to_proto(x, export_scope=export_scope) if proto: assert isinstance(proto, proto_type) getattr(col_def, kind).value.append(proto.SerializeToString()) else: kind = _get_kind_name(collection_list[0]) if kind == "node_list": for x in collection_list: if not export_scope or x.name.startswith(export_scope): getattr(col_def, kind).value.append( ops.strip_name_scope(x.name, export_scope)) elif kind == "bytes_list": # NOTE(opensource): This force conversion is to work around the fact # that Python3 distinguishes between bytes and strings. 
getattr(col_def, kind).value.extend( [compat.as_bytes(x) for x in collection_list]) else: getattr(col_def, kind).value.extend([x for x in collection_list]) except Exception as e: # pylint: disable=broad-except logging.warning("Error encountered when serializing %s.\n" "Type is unsupported, or the types of the items don't " "match field type in CollectionDef.\n%s", key, str(e)) if key in meta_graph_def.collection_def: del meta_graph_def.collection_def[key] return def create_meta_graph_def(meta_info_def=None, graph_def=None, saver_def=None, add_collections=True, collection_list=None, graph=None, export_scope=None): """Construct and returns a `MetaGraphDef` protocol buffer. Args: meta_info_def: `MetaInfoDef` protocol buffer. graph_def: `GraphDef` protocol buffer. saver_def: `SaverDef` protocol buffer. collection_list: List of string keys to collect. graph: The `Graph` to create `MetaGraphDef` out of. export_scope: Optional `string`. Name scope to remove. Returns: MetaGraphDef protocol buffer. Raises: TypeError: If the arguments are not of the correct proto buffer type. """ # Type check. if graph and not isinstance(graph, ops.Graph): raise TypeError("graph must be of type Graph, not %s", type(graph)) if meta_info_def and not isinstance(meta_info_def, meta_graph_pb2.MetaGraphDef.MetaInfoDef): raise TypeError("meta_info_def must be of type MetaInfoDef, not %s", type(meta_info_def)) if graph_def and not isinstance(graph_def, graph_pb2.GraphDef): raise TypeError("graph_def must be of type GraphDef, not %s", type(graph_def)) if saver_def and not isinstance(saver_def, saver_pb2.SaverDef): raise TypeError("saver_def must be of type SaverDef, not %s", type(saver_def)) # Sets graph to default graph if it's not passed in. graph = graph or ops.get_default_graph() # Creates a MetaGraphDef proto. meta_graph_def = meta_graph_pb2.MetaGraphDef() # Adds meta_info_def. 
if not meta_info_def: meta_info_def = meta_graph_pb2.MetaGraphDef.MetaInfoDef() # Set the tf version strings to the current tf build. meta_info_def.tensorflow_version = versions.__version__ meta_info_def.tensorflow_git_version = versions.__git_version__ meta_graph_def.meta_info_def.MergeFrom(meta_info_def) # Adds graph_def or the default. if not graph_def: meta_graph_def.graph_def.MergeFrom(graph.as_graph_def(add_shapes=True)) else: meta_graph_def.graph_def.MergeFrom(graph_def) # Fills in meta_info_def.stripped_op_list using the ops from graph_def. # pylint: disable=g-explicit-length-test if len(meta_graph_def.meta_info_def.stripped_op_list.op) == 0: meta_graph_def.meta_info_def.stripped_op_list.MergeFrom( stripped_op_list_for_graph(meta_graph_def.graph_def)) # pylint: enable=g-explicit-length-test # Adds saver_def. if saver_def: meta_graph_def.saver_def.MergeFrom(saver_def) # Adds collection_list. if add_collections: if collection_list: clist = collection_list else: clist = graph.get_all_collection_keys() for ctype in clist: add_collection_def(meta_graph_def, ctype, graph=graph, export_scope=export_scope) return meta_graph_def def read_meta_graph_file(filename): """Reads a file containing `MetaGraphDef` and returns the protocol buffer. Args: filename: `meta_graph_def` filename including the path. Returns: A `MetaGraphDef` protocol buffer. Raises: IOError: If the file doesn't exist, or cannot be successfully parsed. """ meta_graph_def = meta_graph_pb2.MetaGraphDef() if not file_io.file_exists(filename): raise IOError("File %s does not exist." % filename) # First try to read it as a binary file. file_content = file_io.FileIO(filename, "rb").read() try: meta_graph_def.ParseFromString(file_content) return meta_graph_def except Exception: # pylint: disable=broad-except pass # Next try to read it as a text file. try: text_format.Merge(file_content.decode("utf-8"), meta_graph_def) except text_format.ParseError as e: raise IOError("Cannot parse file %s: %s." 
% (filename, str(e))) return meta_graph_def def import_scoped_meta_graph(meta_graph_or_file, clear_devices=False, graph=None, import_scope=None, input_map=None, unbound_inputs_col_name="unbound_inputs"): """Recreates a`Graph` saved in a `MetaGraphDef` proto. This function takes a `MetaGraphDef` protocol buffer as input. If the argument is a file containing a `MetaGraphDef` protocol buffer , it constructs a protocol buffer from the file content. The function then adds all the nodes from the `graph_def` field to the current graph, recreates all the collections, and returns a saver constructed from the `saver_def` field. In combination with `export_scoped_meta_graph()`, this function can be used to * Serialize a graph along with other Python objects such as `QueueRunner`, `Variable` into a `MetaGraphDef`. * Restart training from a saved graph and checkpoints. * Run inference from a saved graph and checkpoints. Args: meta_graph_or_file: `MetaGraphDef` protocol buffer or filename (including the path) containing a `MetaGraphDef`. clear_devices: Boolean which controls whether to clear device information from graph_def. Default false. graph: The `Graph` to import into. If `None`, use the default graph. import_scope: Optional `string`. Name scope into which to import the subgraph. If `None`, the graph is imported to the root name scope. input_map: A dictionary mapping input names (as strings) in `graph_def` to `Tensor` objects. The values of the named input tensors in the imported graph will be re-mapped to the respective `Tensor` values. unbound_inputs_col_name: Collection name for looking up unbound inputs. Returns: A dictionary of all the `Variables` imported into the name scope. Raises: ValueError: If the graph_def contains unbound inputs. 
""" if isinstance(meta_graph_or_file, meta_graph_pb2.MetaGraphDef): meta_graph_def = meta_graph_or_file else: meta_graph_def = read_meta_graph_file(meta_graph_or_file) if unbound_inputs_col_name: for key, col_def in meta_graph_def.collection_def.items(): if key == unbound_inputs_col_name: kind = col_def.WhichOneof("kind") field = getattr(col_def, kind) if field.value and ( not input_map or sorted(set([compat.as_str(v) for v in field.value])) != sorted(input_map)): raise ValueError("Graph contains unbound inputs: %s. Must " "provide these inputs through input_map." % ",".join([compat.as_str(v) for v in field.value if not input_map or v not in input_map])) break # Sets graph to default graph if it's not passed in. graph = graph or ops.get_default_graph() # Gathers the list of nodes we are interested in. with graph.as_default(): producer_op_list = None if meta_graph_def.meta_info_def.HasField("stripped_op_list"): producer_op_list = meta_graph_def.meta_info_def.stripped_op_list input_graph_def = meta_graph_def.graph_def # Remove all the explicit device specifications for this node. This helps to # make the graph more portable. if clear_devices: for node in input_graph_def.node: node.device = "" importer.import_graph_def( input_graph_def, name=(import_scope or ""), input_map=input_map, producer_op_list=producer_op_list) # Restores all the other collections. for key, col_def in meta_graph_def.collection_def.items(): # Don't add unbound_inputs to the new graph. if key == unbound_inputs_col_name: continue kind = col_def.WhichOneof("kind") if kind is None: logging.error("Cannot identify data type for collection %s. 
Skipping.", key) continue from_proto = ops.get_from_proto_function(key) if from_proto: assert kind == "bytes_list" proto_type = ops.get_collection_proto_type(key) for value in col_def.bytes_list.value: proto = proto_type() proto.ParseFromString(value) graph.add_to_collection( key, from_proto(proto, import_scope=import_scope)) else: field = getattr(col_def, kind) if kind == "node_list": for value in field.value: col_op = graph.as_graph_element( ops.prepend_name_scope(value, import_scope)) graph.add_to_collection(key, col_op) elif kind == "int64_list": # NOTE(opensource): This force conversion is to work around the fact # that Python2 distinguishes between int and long, while Python3 has # only int. for value in field.value: graph.add_to_collection(key, int(value)) else: for value in field.value: graph.add_to_collection( key, ops.prepend_name_scope(value, import_scope)) var_list = {} variables = graph.get_collection(ops.GraphKeys.GLOBAL_VARIABLES, scope=import_scope) for v in variables: var_list[ops.strip_name_scope(v.name, import_scope)] = v return var_list def export_scoped_meta_graph(filename=None, graph_def=None, graph=None, export_scope=None, as_text=False, unbound_inputs_col_name="unbound_inputs", clear_devices=False, **kwargs): """Returns `MetaGraphDef` proto. Optionally writes it to filename. This function exports the graph, saver, and collection objects into `MetaGraphDef` protocol buffer with the intention of it being imported at a later time or location to restart training, run inference, or be a subgraph. Args: filename: Optional filename including the path for writing the generated `MetaGraphDef` protocol buffer. graph_def: `GraphDef` protocol buffer. graph: The `Graph` to import into. If `None`, use the default graph. export_scope: Optional `string`. Name scope under which to extract the subgraph. The scope name will be striped from the node definitions for easy import later into new name scopes. If `None`, the whole graph is exported. 
graph_def and export_scope cannot both be specified. as_text: If `True`, writes the `MetaGraphDef` as an ASCII proto. unbound_inputs_col_name: Optional `string`. If provided, a string collection with the given name will be added to the returned `MetaGraphDef`, containing the names of tensors that must be remapped when importing the `MetaGraphDef`. clear_devices: Boolean which controls whether to clear device information before exporting the graph. **kwargs: Optional keyed arguments, including meta_info_def, saver_def, collection_list. Returns: A `MetaGraphDef` proto and dictionary of `Variables` in the exported name scope. Raises: ValueError: When the `GraphDef` is larger than 2GB. """ graph = graph or ops.get_default_graph() unbound_inputs = [] if export_scope or clear_devices: if graph_def: new_graph_def = graph_pb2.GraphDef() new_graph_def.versions.CopyFrom(graph_def.versions) for node_def in graph_def.node: if _should_include_node(node_def.name, export_scope): new_node_def = _node_def(node_def, export_scope, unbound_inputs, clear_devices=clear_devices) new_graph_def.node.extend([new_node_def]) graph_def = new_graph_def else: # Only do this complicated work if we want to remove a name scope. 
graph_def = graph_pb2.GraphDef() # pylint: disable=protected-access graph_def.versions.CopyFrom(graph.graph_def_versions) bytesize = 0 for key in sorted(graph._nodes_by_id): if _should_include_node(graph._nodes_by_id[key].name, export_scope): value = graph._nodes_by_id[key] # pylint: enable=protected-access node_def = _node_def(value.node_def, export_scope, unbound_inputs, clear_devices=clear_devices) graph_def.node.extend([node_def]) if value.outputs: assert "_output_shapes" not in graph_def.node[-1].attr graph_def.node[-1].attr["_output_shapes"].list.shape.extend([ output.get_shape().as_proto() for output in value.outputs]) bytesize += value.node_def.ByteSize() if bytesize >= (1 << 31) or bytesize < 0: raise ValueError("GraphDef cannot be larger than 2GB.") # It's possible that not all the inputs are in the export_scope. # If we would like such information included in the exported meta_graph, # add them to a special unbound_inputs collection. if unbound_inputs_col_name: # Clears the unbound_inputs collections. graph.clear_collection(unbound_inputs_col_name) for k in unbound_inputs: graph.add_to_collection(unbound_inputs_col_name, k) var_list = {} variables = graph.get_collection(ops.GraphKeys.GLOBAL_VARIABLES, scope=export_scope) for v in variables: if _should_include_node(v, export_scope): var_list[ops.strip_name_scope(v.name, export_scope)] = v scoped_meta_graph_def = create_meta_graph_def( graph_def=graph_def, graph=graph, export_scope=export_scope, **kwargs) if filename: graph_io.write_graph( scoped_meta_graph_def, os.path.dirname(filename), os.path.basename(filename), as_text=as_text) return scoped_meta_graph_def, var_list def copy_scoped_meta_graph(from_scope, to_scope, from_graph=None, to_graph=None): """Copies a sub-meta_graph from one scope to another. Args: from_scope: `String` name scope containing the subgraph to be copied. to_scope: `String` name scope under which the copied subgraph will reside. 
from_graph: Optional `Graph` from which to copy the subgraph. If `None`, the default graph is use. to_graph: Optional `Graph` to which to copy the subgraph. If `None`, the default graph is used. Returns: A dictionary of `Variables` that has been copied into `to_scope`. Raises: ValueError: If `from_scope` and `to_scope` are the same while `from_graph` and `to_graph` are also the same. """ from_graph = from_graph or ops.get_default_graph() to_graph = to_graph or ops.get_default_graph() if from_graph == to_graph and from_scope == to_scope: raise ValueError("'from_scope' and 'to_scope' need to be different " "when performing copy in the same graph.") orig_meta_graph, var_list = export_scoped_meta_graph( export_scope=from_scope, graph=from_graph) var_list = import_scoped_meta_graph(orig_meta_graph, graph=to_graph, import_scope=to_scope) return var_list def _unbound_name(name): return re.sub(r"([\^]|loc:@|^)(.*?)", r"\1" + _UNBOUND_INPUT_PREFIX + r"\2", compat.as_str(name)) def _node_def_unbound(from_node_def, export_scope, unbound_inputs, as_unbound_inputs, clear_devices=False): """Create a `NodeDef` proto with export_scope stripped given input names that are treated as unbound. Args: from_node_def: A `node_def_pb2.NodeDef` protocol buffer. export_scope: A `string` representing the name scope to remove. unbound_inputs: An array of unbound input names if they exist. as_unbound_inputs: A list of `String`s. Input names that are treated as unbound when exporting Operations. clear_devices: Boolean which controls whether to clear device information from node_def. Default false. Returns: A `node_def_pb2.NodeDef` protocol buffer. """ node_def = copy.deepcopy(from_node_def) as_unbound_inputs = set(as_unbound_inputs) for i, v in enumerate(node_def.input): if node_def.input[i] in as_unbound_inputs: # Adds "$unbound_inputs_" prefix to the unbound name so they are easily # identifiable. 
node_def.input[i] = _unbound_name(v) unbound_inputs.append(node_def.input[i]) else: node_def.input[i] = ops.strip_name_scope(v, export_scope) node_def.name = compat.as_bytes( ops.strip_name_scope(from_node_def.name, export_scope)) for k, v in six.iteritems(from_node_def.attr): if k == "_class": new_s = [] for s in v.list.s: if compat.as_str(s) in as_unbound_inputs: new_s.append(compat.as_bytes(_unbound_name(s))) else: new_s.append(compat.as_bytes(ops.strip_name_scope(s, export_scope))) node_def.attr[k].CopyFrom(attr_value_pb2.AttrValue( list=attr_value_pb2.AttrValue.ListValue(s=new_s))) else: node_def.attr[k].CopyFrom(v) if clear_devices: node_def.device = "" return node_def def export_ops_meta_graph(op_list, graph=None, export_scope="", as_unbound_inputs=None, as_text=False, unbound_inputs_col_name="unbound_inputs", clear_devices=False, **kwargs): """This function exports a list of `Operation` objects into `MetaGraphDef` protocol buffer with the intention of it being imported at a later time or location. Args: op_list: A list of `Operation` objects to export. graph: The `Graph` to import into. If `None`, use the default graph. export_scope: Optional `string`. Name scope under which to extract the ops. The scope name will be striped from the node definitions for easy import later into new name scopes. as_unbound_inputs: A list of `String`s. Input names that are treated as unbound when exporting Operations. as_text: If `True`, writes the `MetaGraphDef` as an ASCII proto. unbound_inputs_col_name: Optional `string`. If provided, a string collection with the given name will be added to the returned `MetaGraphDef`, containing the names of tensors that must be remapped when importing the `MetaGraphDef`. clear_devices: Boolean which controls whether to clear device information before exporting the graph. **kwargs: Optional keyed arguments, including meta_info_def, saver_def, collection_list. Returns: A `MetaGraphDef` proto. 
""" op_list = set(op_list) for op in op_list: if not op.name.startswith(export_scope): raise ValueError("The Operation (%s) to export is not under " "'export_scope'." % op.name) graph = graph or ops.get_default_graph() as_unbound_inputs = as_unbound_inputs or [] unbound_inputs = [] graph_def = graph_pb2.GraphDef() # pylint: disable=protected-access graph_def.versions.CopyFrom(graph.graph_def_versions) bytesize = 0 for key in sorted(graph._nodes_by_id): if graph._nodes_by_id[key] in op_list: op = graph._nodes_by_id[key] node_def = _node_def_unbound( op.node_def, export_scope, unbound_inputs, as_unbound_inputs, clear_devices=clear_devices) graph_def.node.extend([node_def]) if op.outputs: assert "_output_shapes" not in graph_def.node[-1].attr graph_def.node[-1].attr["_output_shapes"].list.shape.extend([ output.get_shape().as_proto() for output in op.outputs]) bytesize += op.node_def.ByteSize() if bytesize >= (1 << 31) or bytesize < 0: raise ValueError("GraphDef cannot be larger than 2GB.") # It's possible that not all the inputs are in the export_scope. # If we would like such information included in the exported meta_graph, # add them to a special unbound_inputs collection. if unbound_inputs_col_name: # Clears the unbound_inputs collections. graph.clear_collection(unbound_inputs_col_name) for k in unbound_inputs: graph.add_to_collection(unbound_inputs_col_name, k) scoped_meta_graph_def = create_meta_graph_def( graph_def=graph_def, graph=graph, add_collections=False, **kwargs) return scoped_meta_graph_def def copy_ops_meta_graph(op_list, from_scope, to_scope, replace=None): """Copies a list of `Operation`s from one scope to another, with variables shared between them. Args: op_list: A list of `Operation` objects to be copied. from_scope: `String` name scope containing the ops to be copied. to_scope: `String` name scope under which the copied ops will reside. replace: A dictionary containing the mapping from input Tensors of these ops to their replacements. 
Returns: A dictionary containing the mapping from original ops to their copies and a dictionary of `Variables` that have been copied into `to_scope`. Raises: ValueError: If `from_scope` and `to_scope` are the same. """ if from_scope == to_scope: raise ValueError("'from_scope' and 'to_scope' need to be different " "when performing copy in the same graph.") op_list = set(op_list) op_names = set(op.name for op in op_list) op_outputs = set() for op in op_list: if not op.name.startswith(from_scope): raise ValueError("The Operation (%s) to copy is not under " "'from_scope'." % op.name) op_outputs.update(set(op.outputs)) input_map = {} as_unbound_inputs = [] for op in op_list: for tensor in op.inputs: if not (tensor in op_outputs) or (tensor in replace): name = tensor.name[:-2] if tensor.name[-2:] == ":0" else tensor.name as_unbound_inputs.append(name) if tensor in replace: input_map[_unbound_name(name)] = replace[tensor] else: input_map[_unbound_name(name)] = tensor for dep in op.control_inputs: if dep not in op_list: name = "^" + dep.name as_unbound_inputs.append(name) input_map[_unbound_name(name)] = dep for name in op.colocation_groups(): if name[5:] not in op_names: as_unbound_inputs.append(name) input_map[_unbound_name(name)] = ops.get_default_graph(). \ as_graph_element(name[5:]) orig_meta_graph = export_ops_meta_graph( op_list, export_scope=from_scope, as_unbound_inputs=as_unbound_inputs) _ = import_scoped_meta_graph(orig_meta_graph, import_scope=to_scope, input_map=input_map) copied_ops = {} for op in op_list: new_op_name = ops.prepend_name_scope( ops.strip_name_scope(op.name, from_scope), to_scope) new_op = ops.get_default_graph().as_graph_element(new_op_name, allow_tensor=False) copied_ops[op] = new_op return copied_ops def _get_backward_ops(seed_tensors, as_inputs=None): """Get backward ops from inputs to `seed_tensors` by topological order. Args: seed_tensors: A list of `Tensor`s, for which to get all preceding ops. 
as_inputs: A list of `Tensor`s that are treated as inputs during the search (where to stop searching the backward graph). Returns: A list of `Operation`s in topological order. """ as_inputs = set(as_inputs or []) seed_tensors = [t for t in seed_tensors if t not in as_inputs] seed_ops = list(OrderedDict.fromkeys(t.op for t in seed_tensors)) q = deque(seed_ops) seen = set() done = set() ret = [] while q: op = q[0] if op not in seen: seen.add(op) for tensor in reversed(op.inputs): if tensor not in as_inputs: q.appendleft(tensor.op) q.extendleft(reversed(op.control_inputs)) else: # have seen this op before q.popleft() if op not in done: done.add(op) ret.append(op) return ret def clone(outputs, to_scope, from_scope="", replace=None): """Copy the subgraph that generates `outputs` from one scope to another, with Tensors in `replace` being replaced by their corresponding values. with variables shared between them. Args: outputs: A `Tensor` or a list of `Tensor`s. to_scope: `String` name scope under which the copied subgraph will reside. from_scope: `String` name scope containing the subgraph to be copied. replace: A dictionary containing the mapping from Tensors in the subgraph to their replacements. Returns: A copy or a list of the copies of `outputs` in `to_scope` and a dictionary of `Variables` that have been copied into `to_scope`. """ if from_scope == to_scope: raise ValueError("'from_scope' and 'to_scope' need to be different " "when performing copying in the same graph.") seed_tensors = outputs if not isinstance(outputs, (list, tuple)): seed_tensors = [outputs] seed_tensors_set = set(seed_tensors) replace = replace or {} for k, v in six.iteritems(replace): try: assert isinstance(k, ops.Tensor) v = ops.convert_to_tensor(v) except Exception: raise TypeError( "The 'replace' argument should consist of Tensor pairs. 
" "Error type: (%s, %s)" % (type(k), type(v))) try: k.get_shape().merge_with(v.get_shape()) except ValueError: raise ValueError( "Key-value pairs in 'replace' should have the same " "shape (%s vs %s). Error pair: (%s, %s)" % ( k.get_shape(), v.get_shape(), k, v)) as_inputs = list(replace.keys()) backward_ops = _get_backward_ops(seed_tensors, as_inputs) copied_ops = set() copied_tensors = set() for op in backward_ops: if any((t in replace or t in copied_tensors) for t in op.inputs) or \ any(dep in copied_ops for dep in op.control_inputs) or \ any(t in seed_tensors_set for t in op.outputs): copied_ops.add(op) copied_tensors.update(set(op.outputs)) new_ops = copy_ops_meta_graph(list(copied_ops), from_scope, to_scope, replace=replace) new_tensors = [] for tensor in seed_tensors: if tensor in replace: new_tensors.append(replace[tensor]) elif tensor.op in new_ops: new_tensors.append(new_ops[tensor.op].outputs[tensor.value_index]) else: new_tensors.append(tensor) if len(new_tensors) == 1: new_tensors = new_tensors[0] return new_tensors
{ "content_hash": "665a98244db58d05bc479058fceb8693", "timestamp": "", "source": "github", "line_count": 974, "max_line_length": 80, "avg_line_length": 37.84907597535934, "alnum_prop": 0.6518106605181067, "repo_name": "thjashin/tensorflow", "id": "0c1303edcaef36f90521355aa95dd4b5215aff41", "size": "37555", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tensorflow/python/framework/meta_graph.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "7583" }, { "name": "C", "bytes": "175009" }, { "name": "C++", "bytes": "21512044" }, { "name": "CMake", "bytes": "130133" }, { "name": "CSS", "bytes": "774" }, { "name": "Go", "bytes": "786880" }, { "name": "HTML", "bytes": "557007" }, { "name": "Java", "bytes": "277432" }, { "name": "JavaScript", "bytes": "13406" }, { "name": "Jupyter Notebook", "bytes": "1833840" }, { "name": "LLVM", "bytes": "6536" }, { "name": "Makefile", "bytes": "36990" }, { "name": "Objective-C", "bytes": "7056" }, { "name": "Objective-C++", "bytes": "64764" }, { "name": "Protocol Buffer", "bytes": "197812" }, { "name": "Python", "bytes": "17881706" }, { "name": "Shell", "bytes": "319872" }, { "name": "TypeScript", "bytes": "775401" } ], "symlink_target": "" }
// Public API barrel for the kontra game library: re-exports every module
// from a single entry point. Import order has no runtime significance.

// Core engine: canvas setup, event bus, and the fixed-step game loop.
export { default as Animation, AnimationClass } from './animation.js';
export { imageAssets, audioAssets, dataAssets, setImagePath, setAudioPath, setDataPath, loadImage, loadAudio, loadData, load } from './assets.js';
export { default as Button, ButtonClass } from './button.js';
export { init, getCanvas, getContext } from './core.js';
export { on, off, emit } from './events.js';
export { default as GameLoop } from './gameLoop.js';
export { default as GameObject, GameObjectClass } from './gameObject.js';
// Input handling: gamepad, touch gestures, unified input, keyboard, pointer.
export { gamepadMap, updateGamepad, initGamepad, onGamepad, offGamepad, gamepadPressed, gamepadAxis } from './gamepad.js';
export { gestureMap, initGesture, onGesture, offGesture } from './gesture.js';
export { default as Grid, GridClass } from './grid.js';
// Math, storage, and collision helpers.
export { degToRad, radToDeg, angleToTarget, rotatePoint, movePoint, randInt, seedRand, lerp, inverseLerp, clamp, setStoreItem, getStoreItem, collides, getWorldRect, depthSort } from './helpers.js';
export { initInput, onInput, offInput } from './input.js';
export { keyMap, initKeys, onKey, offKey, keyPressed } from './keyboard.js';
export { registerPlugin, unregisterPlugin, extendObject } from './plugin.js';
export { initPointer, getPointer, track, untrack, pointerOver, onPointer, offPointer, pointerPressed } from './pointer.js';
// Renderable objects and scene management. Each class is exported both as a
// factory default (`Sprite`) and as its extendable class (`SpriteClass`).
export { default as Pool, PoolClass } from './pool.js';
export { default as Quadtree, QuadtreeClass } from './quadtree.js';
export { default as Scene, SceneClass } from './scene.js';
export { default as Sprite, SpriteClass } from './sprite.js';
export { default as SpriteSheet, SpriteSheetClass } from './spriteSheet.js';
export { default as Text, TextClass } from './text.js';
export { default as TileEngine, TileEngineClass } from './tileEngine.js';
export { default as Vector, VectorClass } from './vector.js';
// Default export: the aggregated kontra object.
export { default } from './kontra.defaults.js';
{ "content_hash": "271bce627d4a84449b4642e8a64e7a3b", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 73, "avg_line_length": 22.213483146067414, "alnum_prop": 0.6960040465351542, "repo_name": "straker/kontra", "id": "1eb00e600b27e03bad47810bcb1edc32a2d95e94", "size": "1977", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/kontra.js", "mode": "33188", "license": "mit", "language": [ { "name": "Handlebars", "bytes": "952" }, { "name": "JavaScript", "bytes": "566515" }, { "name": "Shell", "bytes": "1139" }, { "name": "TypeScript", "bytes": "23363" } ], "symlink_target": "" }
<?php

namespace frontend\controllers;

use yii;
use frontend\smile\controllers\SmileFrontendController;

/**
 * Frontend controller for the "advice" section.
 *
 * Both actions are currently empty stubs; routing resolves but no view is
 * rendered yet. NOTE(review): presumably each action should render a view
 * via the parent SmileFrontendController -- confirm intended behavior.
 */
class AdviceController extends SmileFrontendController
{
    /**
     * Advice list page (route: advice/index).
     * TODO(review): action body not yet implemented.
     */
    public function actionIndex(){

    }
    /**
     * Single advice page looked up by its URL slug (route: advice/one).
     *
     * @param string $url slug identifying the advice entry
     * TODO(review): action body not yet implemented.
     */
    public function actionOne($url){

    }
}
{ "content_hash": "07d680c742d26769eecab8fbff083016", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 55, "avg_line_length": 14.823529411764707, "alnum_prop": 0.75, "repo_name": "MaN4thes3t/smile-cms", "id": "c4283135976f02ede831892ba6850b3c1b3a6930", "size": "252", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "frontend/controllers/AdviceController.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "1952" }, { "name": "Batchfile", "bytes": "1026" }, { "name": "CSS", "bytes": "1285898" }, { "name": "JavaScript", "bytes": "3041683" }, { "name": "PHP", "bytes": "430382" } ], "symlink_target": "" }
from __future__ import absolute_import

# Paths consumed by the generic input-file test machinery: the example
# problem description to run and the output file it is expected to produce.
input_name = '../examples/homogenization/linear_elastic_mM.py'
output_name = 'test_linear_elastic_mM.vtk'

from tests_basic import TestInput


class Test(TestInput):
    """Input test for the linear elastic micro-macro homogenization example.

    All behavior is inherited from TestInput, which reads the module-level
    `input_name`/`output_name` above.
    """
    pass
{ "content_hash": "d8cbdac713921d92c541177e63cb3d78", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 62, "avg_line_length": 30.571428571428573, "alnum_prop": 0.7523364485981309, "repo_name": "lokik/sfepy", "id": "5ac80deb276cf8a4d6ec9f4a633b5250cbc3ba48", "size": "214", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/test_input_linear_elastic_mM.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "448969" }, { "name": "C++", "bytes": "37842" }, { "name": "GLSL", "bytes": "6058" }, { "name": "Makefile", "bytes": "184" }, { "name": "PowerShell", "bytes": "3118" }, { "name": "Python", "bytes": "2701733" }, { "name": "Shell", "bytes": "71" } ], "symlink_target": "" }