code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.common.filespecification;
import java.io.IOException;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSString;
import org.apache.pdfbox.pdmodel.common.COSObjectable;
/**
 * This represents a file specification.
 *
 * @author Ben Litchfield
 */
public abstract class PDFileSpecification implements COSObjectable
{
    /**
     * A file specification can either be a COSString or a COSDictionary. This
     * will create the file specification either way.
     *
     * @param base The cos object that describes the fs.
     *
     * @return The file specification for the COSBase object, or null if base is null.
     *
     * @throws IOException If base is neither a COSString nor a COSDictionary.
     */
    public static PDFileSpecification createFS( COSBase base ) throws IOException
    {
        if( base == null )
        {
            // nothing to wrap; callers treat a missing file spec as null
            return null;
        }
        if( base instanceof COSString )
        {
            // a plain string is a simple file specification (just the file name)
            return new PDSimpleFileSpecification( (COSString)base );
        }
        if( base instanceof COSDictionary )
        {
            // a dictionary carries the full file specification entries
            return new PDComplexFileSpecification( (COSDictionary)base );
        }
        throw new IOException( "Error: Unknown file specification " + base );
    }

    /**
     * This will get the file name.
     *
     * @return The file name.
     */
    public abstract String getFile();

    /**
     * This will set the file name.
     *
     * @param file The name of the file.
     */
    public abstract void setFile( String file );
}
| ZhenyaM/veraPDF-pdfbox | pdfbox/src/main/java/org/apache/pdfbox/pdmodel/common/filespecification/PDFileSpecification.java | Java | apache-2.0 | 2,502 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.plugins.relaxNG.compact;
import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.completion.*;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.codeInsight.lookup.TailTypeDecorator;
import com.intellij.patterns.ElementPattern;
import com.intellij.patterns.PsiElementPattern;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ProcessingContext;
import org.intellij.plugins.relaxNG.compact.psi.RncDecl;
import org.intellij.plugins.relaxNG.compact.psi.RncDefine;
import org.intellij.plugins.relaxNG.compact.psi.RncGrammar;
import org.intellij.plugins.relaxNG.compact.psi.util.EscapeUtil;
import org.jetbrains.annotations.NotNull;
import static com.intellij.patterns.PlatformPatterns.psiElement;
import static com.intellij.patterns.StandardPatterns.and;
import static com.intellij.patterns.StandardPatterns.not;
/**
 * Completion contributor for the RELAX NG compact syntax: offers the
 * declaration, grammar-content and pattern keywords that fit the caret
 * position.
 *
 * @author Dennis.Ushakov
 */
public class RncCompletionContributor extends CompletionContributor {
  // Accepts positions that are NOT inside a grammar nested within another
  // grammar, i.e. the top level of the file.
  private static final ElementPattern TOP_LEVEL =
    not(psiElement().inside(psiElement(RncGrammar.class)
                              .inside(true, psiElement(RncGrammar.class))));
  // Accepts positions inside a declaration (e.g. namespace/datatypes/default).
  private static final PsiElementPattern DECL_PATTERN =
    psiElement().inside(psiElement(RncDecl.class));
  // Declaration position immediately after the "default" keyword.
  private static final PsiElementPattern DEFAULT_PATTERN =
    DECL_PATTERN.afterLeaf(psiElement().withText("default"));
  // Right-hand side of a define: directly after "=", skipping whitespace.
  private static final ElementPattern DEFINE_PATTERN =
    and(psiElement().withParent(RncDefine.class), psiElement().afterLeafSkipping(psiElement(PsiWhiteSpace.class), psiElement().withText("=")));
  private static final String[] DECL_KEYWORDS = new String[]{ "default", "namespace", "datatypes" };
  private static final String[] GRAMMAR_CONTENT_KEYWORDS = new String[]{ "include", "div", "start" };
  private static final String[] PATTERN_KEYWORDS = new String[]{ "attribute", "element", "grammar",
    "notAllowed", "text", "empty", "external", "parent", "list", "mixed" };

  public RncCompletionContributor() {
    // Shared provider: looks up the keywords valid at the caret and offers
    // each one in bold, completed with a trailing space.
    CompletionProvider<CompletionParameters> provider = new CompletionProvider<CompletionParameters>() {
      @Override
      protected void addCompletions(@NotNull CompletionParameters parameters,
                                    ProcessingContext context,
                                    @NotNull CompletionResultSet result) {
        String[] keywords = getKeywords(parameters.getPosition());
        for (String keyword : keywords) {
          result.addElement(TailTypeDecorator.withTail(LookupElementBuilder.create(keyword).bold(), TailType.SPACE));
        }
      }
    };
    // Register the provider for positions right after the "default" keyword...
    extend(null, psiElement().afterLeaf(psiElement(RncTokenTypes.KEYWORD_DEFAULT)), provider);
    // ...and for all other positions except inside literals or directly
    // after some other keyword.
    extend(null, psiElement().andNot(psiElement().inside(psiElement(RncTokenTypes.LITERAL))).
      andNot(psiElement().afterLeaf(psiElement().withElementType(RncTokenTypes.KEYWORDS))), provider);
  }

  /**
   * Returns the keywords applicable at the given position.
   * NOTE(review): the checks are order-sensitive — more specific contexts are
   * tested before the general fallbacks; do not reorder.
   */
  private static String[] getKeywords(PsiElement context) {
    final PsiElement next = PsiTreeUtil.skipWhitespacesForward(context);
    if (next != null && EscapeUtil.unescapeText(next).equals("=")) {
      // the caret's element is followed by "=" — presumably a definition name
      // being assigned, where only "start" makes sense (TODO confirm)
      return new String[]{ "start" };
    }
    if (DEFAULT_PATTERN.accepts(context)) {
      // after "default" only "namespace" may follow
      return new String[]{ "namespace" };
    } else if (DECL_PATTERN.accepts(context)) {
      // elsewhere inside a declaration: no keywords apply
      return ArrayUtil.EMPTY_STRING_ARRAY;
    } else if (context.getParent() instanceof RncDefine && context.getParent().getFirstChild() == context) {
      if (DEFINE_PATTERN.accepts(context)) {
        return ArrayUtil.EMPTY_STRING_ARRAY;
      }
      if (TOP_LEVEL.accepts(context)) {
        if (!afterPattern(context)) {
          // top of the file, before any pattern: everything is allowed
          return ArrayUtil.mergeArrays(DECL_KEYWORDS, ArrayUtil.mergeArrays(GRAMMAR_CONTENT_KEYWORDS, PATTERN_KEYWORDS));
        }
      }
      return GRAMMAR_CONTENT_KEYWORDS;
    }
    return PATTERN_KEYWORDS;
  }

  /**
   * Whether a define (taken here as a stand-in for any pattern) already
   * precedes the context's parent.
   */
  private static boolean afterPattern(PsiElement context) {
    // TODO: recognize all patterns
    return PsiTreeUtil.getPrevSiblingOfType(context.getParent(), RncDefine.class) != null;
  }
}
| asedunov/intellij-community | xml/relaxng/src/org/intellij/plugins/relaxNG/compact/RncCompletionContributor.java | Java | apache-2.0 | 4,782 |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.zipfile;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.steps.zipfile.ZipFileMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
/**
 * Step dialog for the "Zip file" transformation step. Lets the user pick the
 * source/target filename fields coming from the previous step, choose the
 * operation performed after zipping, and toggle the step's flags (create
 * parent folder, overwrite target, add to result filenames, keep source
 * folder structure).
 */
public class ZipFileDialog extends BaseStepDialog implements StepDialogInterface {
  private static Class<?> PKG = ZipFileMeta.class; // for i18n purposes, needed by Translator2!!

  // source filename field selector (field names come from the previous step)
  private Label wlSourceFileNameField;
  private CCombo wSourceFileNameField;
  private FormData fdlSourceFileNameField, fdSourceFileNameField;

  // target filename field selector
  private Label wlTargetFileNameField;
  private CCombo wTargetFileNameField;
  private FormData fdlTargetFileNameField, fdTargetFileNameField;

  // "add target filename to result filenames?" checkbox
  private Button wAddResult;
  private FormData fdAddResult, fdlAddResult;
  private Label wlAddResult;

  // "overwrite target file?" checkbox
  private Button wOverwriteZipEntry;
  private FormData fdOverwriteTarget, fdlOverwriteTarget;
  private Label wlOverwriteTarget;

  // "create parent folder?" checkbox
  private Button wCreateParentFolder;
  private FormData fdCreateParentFolder, fdlCreateParentFolder;
  private Label wlCreateParentFolder;

  // "keep source folders?" checkbox; toggles the base-folder widgets
  private Button wKeepFolders;
  private FormData fdKeepFolders, fdlKeepFolders;
  private Label wlKeepFolders;

  private Group wSettingsGroup;
  private FormData fdSettingsGroup;

  // the step metadata being edited
  private ZipFileMeta input;

  // base folder field selector (enabled only when keeping source folders)
  private Label wlBaseFolderField;
  private CCombo wBaseFolderField;
  private FormData fdlBaseFolderField, fdBaseFolderField;

  // operation selector (see ZipFileMeta.operationTypeDesc)
  private Label wlOperation;
  private CCombo wOperation;
  private FormData fdlOperation;
  private FormData fdOperation;

  // "move to folder" field selector (enabled only for the move operation)
  private Label wlMoveToFolderField;
  private CCombo wMoveToFolderField;
  private FormData fdlMoveToFolderField, fdMoveToFolderField;

  // guards get() so the previous step's fields are fetched only once
  private boolean gotPreviousFields = false;

  public ZipFileDialog( Shell parent, Object in, TransMeta transMeta, String sname ) {
    super( parent, (BaseStepMeta) in, transMeta, sname );
    input = (ZipFileMeta) in;
  }

  /**
   * Builds the dialog, populates it from the step metadata and runs the SWT
   * event loop until the dialog is closed.
   *
   * @return the step name on OK, or null when the dialog was cancelled.
   */
  public String open() {
    Shell parent = getParent();
    Display display = parent.getDisplay();

    shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN );
    props.setLook( shell );
    setShellImage( shell, input );

    // any text edit marks the step as changed
    ModifyListener lsMod = new ModifyListener() {
      public void modifyText( ModifyEvent e ) {
        input.setChanged();
      }
    };
    // any checkbox/combo selection marks the step as changed
    SelectionAdapter lsSel = new SelectionAdapter() {
      public void widgetSelected( SelectionEvent arg0 ) {
        input.setChanged();
      }
    };
    changed = input.hasChanged();

    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = Const.FORM_MARGIN;
    formLayout.marginHeight = Const.FORM_MARGIN;

    shell.setLayout( formLayout );
    shell.setText( BaseMessages.getString( PKG, "ZipFileDialog.Shell.Title" ) );

    int middle = props.getMiddlePct();
    int margin = Const.MARGIN;

    // Stepname line
    wlStepname = new Label( shell, SWT.RIGHT );
    wlStepname.setText( BaseMessages.getString( PKG, "ZipFileDialog.Stepname.Label" ) );
    props.setLook( wlStepname );
    fdlStepname = new FormData();
    fdlStepname.left = new FormAttachment( 0, 0 );
    fdlStepname.right = new FormAttachment( middle, -margin );
    fdlStepname.top = new FormAttachment( 0, margin );
    wlStepname.setLayoutData( fdlStepname );
    wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
    wStepname.setText( stepname );
    props.setLook( wStepname );
    wStepname.addModifyListener( lsMod );
    fdStepname = new FormData();
    fdStepname.left = new FormAttachment( middle, 0 );
    fdStepname.top = new FormAttachment( 0, margin );
    fdStepname.right = new FormAttachment( 100, 0 );
    wStepname.setLayoutData( fdStepname );

    // ///////////////////////////////
    // START OF Settings GROUP //
    // ///////////////////////////////

    wSettingsGroup = new Group( shell, SWT.SHADOW_NONE );
    props.setLook( wSettingsGroup );
    wSettingsGroup.setText( BaseMessages.getString( PKG, "ZipFileDialog.wSettingsGroup.Label" ) );

    FormLayout settingGroupLayout = new FormLayout();
    settingGroupLayout.marginWidth = 10;
    settingGroupLayout.marginHeight = 10;
    wSettingsGroup.setLayout( settingGroupLayout );

    // Create target parent folder?
    wlCreateParentFolder = new Label( wSettingsGroup, SWT.RIGHT );
    wlCreateParentFolder.setText( BaseMessages.getString( PKG, "ZipFileDialog.CreateParentFolder.Label" ) );
    props.setLook( wlCreateParentFolder );
    fdlCreateParentFolder = new FormData();
    fdlCreateParentFolder.left = new FormAttachment( 0, 0 );
    fdlCreateParentFolder.top = new FormAttachment( wStepname, margin );
    fdlCreateParentFolder.right = new FormAttachment( middle, -margin );
    wlCreateParentFolder.setLayoutData( fdlCreateParentFolder );
    wCreateParentFolder = new Button( wSettingsGroup, SWT.CHECK );
    props.setLook( wCreateParentFolder );
    wCreateParentFolder.setToolTipText( BaseMessages.getString( PKG, "ZipFileDialog.CreateParentFolder.Tooltip" ) );
    fdCreateParentFolder = new FormData();
    fdCreateParentFolder.left = new FormAttachment( middle, 0 );
    fdCreateParentFolder.top = new FormAttachment( wStepname, margin );
    wCreateParentFolder.setLayoutData( fdCreateParentFolder );
    wCreateParentFolder.addSelectionListener( lsSel );

    // Overwrite target file?
    wlOverwriteTarget = new Label( wSettingsGroup, SWT.RIGHT );
    wlOverwriteTarget.setText( BaseMessages.getString( PKG, "ZipFileDialog.OverwriteTarget.Label" ) );
    props.setLook( wlOverwriteTarget );
    fdlOverwriteTarget = new FormData();
    fdlOverwriteTarget.left = new FormAttachment( 0, 0 );
    fdlOverwriteTarget.top = new FormAttachment( wCreateParentFolder, margin );
    fdlOverwriteTarget.right = new FormAttachment( middle, -margin );
    wlOverwriteTarget.setLayoutData( fdlOverwriteTarget );
    wOverwriteZipEntry = new Button( wSettingsGroup, SWT.CHECK );
    props.setLook( wOverwriteZipEntry );
    wOverwriteZipEntry.setToolTipText( BaseMessages.getString( PKG, "ZipFileDialog.OverwriteTarget.Tooltip" ) );
    fdOverwriteTarget = new FormData();
    fdOverwriteTarget.left = new FormAttachment( middle, 0 );
    fdOverwriteTarget.top = new FormAttachment( wCreateParentFolder, margin );
    wOverwriteZipEntry.setLayoutData( fdOverwriteTarget );
    wOverwriteZipEntry.addSelectionListener( lsSel );

    // Add Target filename to result filenames?
    wlAddResult = new Label( wSettingsGroup, SWT.RIGHT );
    wlAddResult.setText( BaseMessages.getString( PKG, "ZipFileDialog.AddResult.Label" ) );
    props.setLook( wlAddResult );
    fdlAddResult = new FormData();
    fdlAddResult.left = new FormAttachment( 0, 0 );
    fdlAddResult.top = new FormAttachment( wOverwriteZipEntry, margin );
    fdlAddResult.right = new FormAttachment( middle, -margin );
    wlAddResult.setLayoutData( fdlAddResult );
    wAddResult = new Button( wSettingsGroup, SWT.CHECK );
    props.setLook( wAddResult );
    wAddResult.setToolTipText( BaseMessages.getString( PKG, "ZipFileDialog.AddResult.Tooltip" ) );
    fdAddResult = new FormData();
    fdAddResult.left = new FormAttachment( middle, 0 );
    fdAddResult.top = new FormAttachment( wOverwriteZipEntry, margin );
    wAddResult.setLayoutData( fdAddResult );
    wAddResult.addSelectionListener( lsSel );

    fdSettingsGroup = new FormData();
    fdSettingsGroup.left = new FormAttachment( 0, margin );
    fdSettingsGroup.top = new FormAttachment( wStepname, margin );
    fdSettingsGroup.right = new FormAttachment( 100, -margin );
    wSettingsGroup.setLayoutData( fdSettingsGroup );

    // ///////////////////////////////
    // END OF Settings Fields GROUP //
    // ///////////////////////////////

    // SourceFileNameField field
    wlSourceFileNameField = new Label( shell, SWT.RIGHT );
    wlSourceFileNameField.setText( BaseMessages.getString( PKG, "ZipFileDialog.SourceFileNameField.Label" ) );
    props.setLook( wlSourceFileNameField );
    fdlSourceFileNameField = new FormData();
    fdlSourceFileNameField.left = new FormAttachment( 0, 0 );
    fdlSourceFileNameField.right = new FormAttachment( middle, -margin );
    fdlSourceFileNameField.top = new FormAttachment( wSettingsGroup, 2 * margin );
    wlSourceFileNameField.setLayoutData( fdlSourceFileNameField );

    wSourceFileNameField = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY );
    props.setLook( wSourceFileNameField );
    wSourceFileNameField.setEditable( true );
    wSourceFileNameField.addModifyListener( lsMod );
    fdSourceFileNameField = new FormData();
    fdSourceFileNameField.left = new FormAttachment( middle, 0 );
    fdSourceFileNameField.top = new FormAttachment( wSettingsGroup, 2 * margin );
    fdSourceFileNameField.right = new FormAttachment( 100, -margin );
    wSourceFileNameField.setLayoutData( fdSourceFileNameField );
    // lazily load the field list the first time the combo gains focus
    wSourceFileNameField.addFocusListener( new FocusListener() {
      public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
      }

      public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
        get();
      }
    } );

    // TargetFileNameField field
    wlTargetFileNameField = new Label( shell, SWT.RIGHT );
    wlTargetFileNameField.setText( BaseMessages.getString( PKG, "ZipFileDialog.TargetFileNameField.Label" ) );
    props.setLook( wlTargetFileNameField );
    fdlTargetFileNameField = new FormData();
    fdlTargetFileNameField.left = new FormAttachment( 0, 0 );
    fdlTargetFileNameField.right = new FormAttachment( middle, -margin );
    fdlTargetFileNameField.top = new FormAttachment( wSourceFileNameField, margin );
    wlTargetFileNameField.setLayoutData( fdlTargetFileNameField );

    wTargetFileNameField = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY );
    wTargetFileNameField.setEditable( true );
    props.setLook( wTargetFileNameField );
    wTargetFileNameField.addModifyListener( lsMod );
    fdTargetFileNameField = new FormData();
    fdTargetFileNameField.left = new FormAttachment( middle, 0 );
    fdTargetFileNameField.top = new FormAttachment( wSourceFileNameField, margin );
    fdTargetFileNameField.right = new FormAttachment( 100, -margin );
    wTargetFileNameField.setLayoutData( fdTargetFileNameField );
    // lazily load the field list the first time the combo gains focus
    wTargetFileNameField.addFocusListener( new FocusListener() {
      public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
      }

      public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
        get();
      }
    } );

    // Keep source folders?
    wlKeepFolders = new Label( shell, SWT.RIGHT );
    wlKeepFolders.setText( BaseMessages.getString( PKG, "ZipFileDialog.KeepFolders.Label" ) );
    props.setLook( wlKeepFolders );
    fdlKeepFolders = new FormData();
    fdlKeepFolders.left = new FormAttachment( 0, 0 );
    fdlKeepFolders.top = new FormAttachment( wTargetFileNameField, margin );
    fdlKeepFolders.right = new FormAttachment( middle, -margin );
    wlKeepFolders.setLayoutData( fdlKeepFolders );
    wKeepFolders = new Button( shell, SWT.CHECK );
    props.setLook( wKeepFolders );
    wKeepFolders.setToolTipText( BaseMessages.getString( PKG, "ZipFileDialog.KeepFolders.Tooltip" ) );
    fdKeepFolders = new FormData();
    fdKeepFolders.left = new FormAttachment( middle, 0 );
    fdKeepFolders.top = new FormAttachment( wTargetFileNameField, margin );
    wKeepFolders.setLayoutData( fdKeepFolders );
    wKeepFolders.addSelectionListener( lsSel );
    // toggling "keep folders" enables/disables the base-folder widgets
    wKeepFolders.addSelectionListener( new SelectionAdapter() {
      @Override
      public void widgetSelected( SelectionEvent arg0 ) {
        keepFolder();
      }
    } );

    // BaseFolderField field
    wlBaseFolderField = new Label( shell, SWT.RIGHT );
    wlBaseFolderField.setText( BaseMessages.getString( PKG, "ZipFileDialog.BaseFolderField.Label" ) );
    props.setLook( wlBaseFolderField );
    fdlBaseFolderField = new FormData();
    fdlBaseFolderField.left = new FormAttachment( 0, 0 );
    fdlBaseFolderField.right = new FormAttachment( middle, -margin );
    fdlBaseFolderField.top = new FormAttachment( wKeepFolders, margin );
    wlBaseFolderField.setLayoutData( fdlBaseFolderField );

    wBaseFolderField = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY );
    wBaseFolderField.setEditable( true );
    props.setLook( wBaseFolderField );
    wBaseFolderField.addModifyListener( lsMod );
    fdBaseFolderField = new FormData();
    fdBaseFolderField.left = new FormAttachment( middle, 0 );
    fdBaseFolderField.top = new FormAttachment( wKeepFolders, margin );
    fdBaseFolderField.right = new FormAttachment( 100, -margin );
    wBaseFolderField.setLayoutData( fdBaseFolderField );
    // lazily load the field list the first time the combo gains focus
    wBaseFolderField.addFocusListener( new FocusListener() {
      public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
      }

      public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
        get();
      }
    } );

    // Operation
    wlOperation = new Label( shell, SWT.RIGHT );
    wlOperation.setText( BaseMessages.getString( PKG, "ZipFileDialog.Operation.Label" ) );
    props.setLook( wlOperation );
    fdlOperation = new FormData();
    fdlOperation.left = new FormAttachment( 0, 0 );
    fdlOperation.right = new FormAttachment( middle, -margin );
    fdlOperation.top = new FormAttachment( wBaseFolderField, margin );
    wlOperation.setLayoutData( fdlOperation );

    wOperation = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY );
    props.setLook( wOperation );
    wOperation.addModifyListener( lsMod );
    fdOperation = new FormData();
    fdOperation.left = new FormAttachment( middle, 0 );
    fdOperation.top = new FormAttachment( wBaseFolderField, margin );
    fdOperation.right = new FormAttachment( 100, -margin );
    wOperation.setLayoutData( fdOperation );
    wOperation.setItems( ZipFileMeta.operationTypeDesc );
    // changing the operation enables/disables the move-to-folder widgets
    wOperation.addSelectionListener( new SelectionAdapter() {
      public void widgetSelected( SelectionEvent e ) {
        updateOperation();
      }
    } );

    // MoveToFolderField field
    wlMoveToFolderField = new Label( shell, SWT.RIGHT );
    wlMoveToFolderField.setText( BaseMessages.getString( PKG, "ZipFileDialog.MoveToFolderField.Label" ) );
    props.setLook( wlMoveToFolderField );
    fdlMoveToFolderField = new FormData();
    fdlMoveToFolderField.left = new FormAttachment( 0, 0 );
    fdlMoveToFolderField.right = new FormAttachment( middle, -margin );
    fdlMoveToFolderField.top = new FormAttachment( wOperation, margin );
    wlMoveToFolderField.setLayoutData( fdlMoveToFolderField );

    wMoveToFolderField = new CCombo( shell, SWT.BORDER | SWT.READ_ONLY );
    wMoveToFolderField.setEditable( true );
    props.setLook( wMoveToFolderField );
    wMoveToFolderField.addModifyListener( lsMod );
    fdMoveToFolderField = new FormData();
    fdMoveToFolderField.left = new FormAttachment( middle, 0 );
    fdMoveToFolderField.top = new FormAttachment( wOperation, margin );
    fdMoveToFolderField.right = new FormAttachment( 100, -margin );
    wMoveToFolderField.setLayoutData( fdMoveToFolderField );
    // lazily load the field list the first time the combo gains focus
    wMoveToFolderField.addFocusListener( new FocusListener() {
      public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
      }

      public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
        get();
      }
    } );

    // THE BUTTONS
    wOK = new Button( shell, SWT.PUSH );
    wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
    wCancel = new Button( shell, SWT.PUSH );
    wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );

    setButtonPositions( new Button[] { wOK, wCancel }, margin, wMoveToFolderField );

    // Add listeners
    lsOK = new Listener() {
      public void handleEvent( Event e ) {
        ok();
      }
    };

    lsCancel = new Listener() {
      public void handleEvent( Event e ) {
        cancel();
      }
    };

    wOK.addListener( SWT.Selection, lsOK );
    wCancel.addListener( SWT.Selection, lsCancel );

    lsDef = new SelectionAdapter() {
      public void widgetDefaultSelected( SelectionEvent e ) {
        ok();
      }
    };

    wStepname.addSelectionListener( lsDef );

    // Detect X or ALT-F4 or something that kills this window...
    shell.addShellListener( new ShellAdapter() {
      public void shellClosed( ShellEvent e ) {
        cancel();
      }
    } );

    // Set the shell size, based upon previous time...
    setSize();
    getData();
    keepFolder();
    updateOperation();
    input.setChanged( changed );

    shell.open();
    while ( !shell.isDisposed() ) {
      if ( !display.readAndDispatch() ) {
        display.sleep();
      }
    }
    return stepname;
  }

  /**
   * Copy information from the meta-data input to the dialog fields.
   */
  public void getData() {
    if ( log.isDebug() ) {
      log.logDebug( toString(), BaseMessages.getString( PKG, "ZipFileDialog.Log.GettingKeyInfo" ) );
    }

    if ( input.getBaseFolderField() != null ) {
      wBaseFolderField.setText( input.getBaseFolderField() );
    }
    if ( input.getDynamicSourceFileNameField() != null ) {
      wSourceFileNameField.setText( input.getDynamicSourceFileNameField() );
    }
    if ( input.getDynamicTargetFileNameField() != null ) {
      wTargetFileNameField.setText( input.getDynamicTargetFileNameField() );
    }
    wOperation.setText( ZipFileMeta.getOperationTypeDesc( input.getOperationType() ) );
    if ( input.getMoveToFolderField() != null ) {
      wMoveToFolderField.setText( input.getMoveToFolderField() );
    }
    wAddResult.setSelection( input.isaddTargetFileNametoResult() );
    wOverwriteZipEntry.setSelection( input.isOverwriteZipEntry() );
    wCreateParentFolder.setSelection( input.isCreateParentFolder() );
    wKeepFolders.setSelection( input.isKeepSouceFolder() );

    wStepname.selectAll();
    wStepname.setFocus();
  }

  /**
   * Discards all edits: restores the original changed flag and closes the
   * dialog with a null step name.
   */
  private void cancel() {
    stepname = null;
    input.setChanged( changed );
    dispose();
  }

  /**
   * Validates the step name, copies the dialog values back into the step
   * metadata and closes the dialog.
   */
  private void ok() {
    if ( Utils.isEmpty( wStepname.getText() ) ) {
      // a step must have a name; tell the user and keep the dialog open
      MessageBox mb = new MessageBox( shell, SWT.OK | SWT.ICON_ERROR );
      mb.setMessage( BaseMessages.getString( PKG, "System.Error.StepNameMissing.Message" ) );
      mb.setText( BaseMessages.getString( PKG, "System.Error.StepNameMissing.Title" ) );
      mb.open();
      return;
    }
    input.setBaseFolderField( wBaseFolderField.getText() );
    input.setDynamicSourceFileNameField( wSourceFileNameField.getText() );
    input.setDynamicTargetFileNameField( wTargetFileNameField.getText() );
    input.setaddTargetFileNametoResult( wAddResult.getSelection() );
    input.setOverwriteZipEntry( wOverwriteZipEntry.getSelection() );
    input.setCreateParentFolder( wCreateParentFolder.getSelection() );
    input.setKeepSouceFolder( wKeepFolders.getSelection() );
    input.setOperationType( ZipFileMeta.getOperationTypeByDesc( wOperation.getText() ) );
    input.setMoveToFolderField( wMoveToFolderField.getText() );
    stepname = wStepname.getText(); // return value

    dispose();
  }

  /**
   * Enables the base-folder widgets only while "keep source folders" is
   * checked.
   */
  private void keepFolder() {
    wlBaseFolderField.setEnabled( wKeepFolders.getSelection() );
    wBaseFolderField.setEnabled( wKeepFolders.getSelection() );
  }

  /**
   * Fills the source/target/base-folder combos with the field names of the
   * previous step. Runs only once (guarded by gotPreviousFields); the
   * currently entered texts are preserved across the refresh.
   */
  private void get() {
    if ( !gotPreviousFields ) {
      gotPreviousFields = true;
      String source = wSourceFileNameField.getText();
      String target = wTargetFileNameField.getText();
      String base = wBaseFolderField.getText();
      try {
        wSourceFileNameField.removeAll();
        wTargetFileNameField.removeAll();
        wBaseFolderField.removeAll();
        RowMetaInterface r = transMeta.getPrevStepFields( stepname );
        if ( r != null ) {
          String[] fields = r.getFieldNames();
          wSourceFileNameField.setItems( fields );
          wTargetFileNameField.setItems( fields );
          wBaseFolderField.setItems( fields );
        }
      } catch ( KettleException ke ) {
        new ErrorDialog(
          shell, BaseMessages.getString( PKG, "ZipFileDialog.FailedToGetFields.DialogTitle" ), BaseMessages
            .getString( PKG, "ZipFileDialog.FailedToGetFields.DialogMessage" ), ke );
      } finally {
        // restore whatever the user had typed before the refresh
        if ( source != null ) {
          wSourceFileNameField.setText( source );
        }
        if ( target != null ) {
          wTargetFileNameField.setText( target );
        }
        if ( base != null ) {
          wBaseFolderField.setText( base );
        }
      }
    }
  }

  /**
   * Enables the move-to-folder widgets only while the selected operation is
   * the MOVE operation.
   */
  private void updateOperation() {
    wlMoveToFolderField
      .setEnabled( ZipFileMeta.getOperationTypeByDesc( wOperation.getText() ) == ZipFileMeta.OPERATION_TYPE_MOVE );
    wMoveToFolderField
      .setEnabled( ZipFileMeta.getOperationTypeByDesc( wOperation.getText() ) == ZipFileMeta.OPERATION_TYPE_MOVE );
  }
}
| nicoben/pentaho-kettle | ui/src/org/pentaho/di/ui/trans/steps/zipfile/ZipFileDialog.java | Java | apache-2.0 | 22,737 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model;
/**
 * Various constants.
 *
 * @version
 */
public final class Constants {

    // Colon-separated list of the model packages; presumably used to build the
    // JAXB context for (un)marshalling the Camel model — TODO confirm at the
    // usage sites. Kept as a compile-time constant concatenation on purpose.
    public static final String JAXB_CONTEXT_PACKAGES = ""
        + "org.apache.camel:"
        + "org.apache.camel.model:"
        + "org.apache.camel.model.config:"
        + "org.apache.camel.model.dataformat:"
        + "org.apache.camel.model.language:"
        + "org.apache.camel.model.loadbalancer:"
        + "org.apache.camel.model.remote:"
        + "org.apache.camel.model.rest";

    // Namespace URI for the property placeholder in the XML DSL.
    public static final String PLACEHOLDER_QNAME = "http://camel.apache.org/schema/placeholder";

    private Constants() {
        // utility class: no instances
    }
}
| jmandawg/camel | camel-core/src/main/java/org/apache/camel/model/Constants.java | Java | apache-2.0 | 1,441 |
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/ads/googleads/v1/services/account_budget_service.proto
package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services"
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources"
import _ "google.golang.org/genproto/googleapis/api/annotations"
import (
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// Request message for
// [AccountBudgetService.GetAccountBudget][google.ads.googleads.v1.services.AccountBudgetService.GetAccountBudget].
type GetAccountBudgetRequest struct {
	// The resource name of the account-level budget to fetch.
	ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"`
	// Internal bookkeeping fields used by the proto runtime; do not touch.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
// Reset clears the message to its zero value (part of the proto.Message contract).
func (m *GetAccountBudgetRequest) Reset() { *m = GetAccountBudgetRequest{} }
// String renders the message in the compact proto text format.
func (m *GetAccountBudgetRequest) String() string { return proto.CompactTextString(m) }
// ProtoMessage tags the type as a protobuf message (proto.Message contract).
func (*GetAccountBudgetRequest) ProtoMessage() {}
// Descriptor returns the compressed file descriptor bytes and the message's
// index path within that descriptor.
func (*GetAccountBudgetRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_account_budget_service_94295ad5ca373008, []int{0}
}
// XXX_Unmarshal decodes b into m via the generated message-info table.
func (m *GetAccountBudgetRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GetAccountBudgetRequest.Unmarshal(m, b)
}
// XXX_Marshal appends the wire-format encoding of m to b.
func (m *GetAccountBudgetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GetAccountBudgetRequest.Marshal(b, m, deterministic)
}
// XXX_Merge merges src into dst.
func (dst *GetAccountBudgetRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetAccountBudgetRequest.Merge(dst, src)
}
// XXX_Size reports the encoded size of m in bytes.
func (m *GetAccountBudgetRequest) XXX_Size() int {
	return xxx_messageInfo_GetAccountBudgetRequest.Size(m)
}
// XXX_DiscardUnknown drops any unrecognized fields retained on m.
func (m *GetAccountBudgetRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetAccountBudgetRequest.DiscardUnknown(m)
}
var xxx_messageInfo_GetAccountBudgetRequest proto.InternalMessageInfo
// GetResourceName returns the resource name of the account-level budget to
// fetch, or "" when the receiver is nil.
func (m *GetAccountBudgetRequest) GetResourceName() string {
	if m != nil {
		return m.ResourceName
	}
	return ""
}
// Registers the message type with the proto runtime under its fully-qualified name.
func init() {
	proto.RegisterType((*GetAccountBudgetRequest)(nil), "google.ads.googleads.v1.services.GetAccountBudgetRequest")
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4

// AccountBudgetServiceClient is the client API for AccountBudgetService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type AccountBudgetServiceClient interface {
	// Returns an account-level budget in full detail.
	GetAccountBudget(ctx context.Context, in *GetAccountBudgetRequest, opts ...grpc.CallOption) (*resources.AccountBudget, error)
}

// accountBudgetServiceClient is the concrete (unexported) client backed by a gRPC connection.
type accountBudgetServiceClient struct {
	cc *grpc.ClientConn
}

// NewAccountBudgetServiceClient wraps an existing gRPC connection in the
// AccountBudgetService client API. The caller retains ownership of cc.
func NewAccountBudgetServiceClient(cc *grpc.ClientConn) AccountBudgetServiceClient {
	return &accountBudgetServiceClient{cc}
}
// GetAccountBudget performs the unary RPC against the full method path and
// decodes the response into a resources.AccountBudget.
func (c *accountBudgetServiceClient) GetAccountBudget(ctx context.Context, in *GetAccountBudgetRequest, opts ...grpc.CallOption) (*resources.AccountBudget, error) {
	resp := new(resources.AccountBudget)
	if err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AccountBudgetService/GetAccountBudget", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}
// AccountBudgetServiceServer is the server API for AccountBudgetService service.
type AccountBudgetServiceServer interface {
	// Returns an account-level budget in full detail.
	GetAccountBudget(context.Context, *GetAccountBudgetRequest) (*resources.AccountBudget, error)
}

// RegisterAccountBudgetServiceServer wires srv into the gRPC server using the
// generated service descriptor below.
func RegisterAccountBudgetServiceServer(s *grpc.Server, srv AccountBudgetServiceServer) {
	s.RegisterService(&_AccountBudgetService_serviceDesc, srv)
}
// _AccountBudgetService_GetAccountBudget_Handler is the server-side dispatch
// shim referenced by the service descriptor. It decodes the inbound request,
// then either calls the service implementation directly or routes the call
// through the configured unary interceptor.
func _AccountBudgetService_GetAccountBudget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetAccountBudgetRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		// No interceptor configured: invoke the implementation directly.
		return srv.(AccountBudgetServiceServer).GetAccountBudget(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server: srv,
		FullMethod: "/google.ads.googleads.v1.services.AccountBudgetService/GetAccountBudget",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(AccountBudgetServiceServer).GetAccountBudget(ctx, req.(*GetAccountBudgetRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// _AccountBudgetService_serviceDesc maps the service's wire-level method names
// to their handler shims; consumed by RegisterAccountBudgetServiceServer.
var _AccountBudgetService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "google.ads.googleads.v1.services.AccountBudgetService",
	HandlerType: (*AccountBudgetServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "GetAccountBudget",
			Handler: _AccountBudgetService_GetAccountBudget_Handler,
		},
	},
	Streams: []grpc.StreamDesc{}, // unary-only service
	Metadata: "google/ads/googleads/v1/services/account_budget_service.proto",
}
// Registers the raw (gzipped) file descriptor with the proto runtime.
func init() {
	proto.RegisterFile("google/ads/googleads/v1/services/account_budget_service.proto", fileDescriptor_account_budget_service_94295ad5ca373008)
}

// Opaque generated data: the gzipped FileDescriptorProto for this .proto file.
// Do not edit by hand.
var fileDescriptor_account_budget_service_94295ad5ca373008 = []byte{
	// 364 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x4f, 0x4a, 0xf3, 0x40,
	0x18, 0xc6, 0x49, 0x3e, 0xf8, 0xc0, 0xa0, 0x20, 0x41, 0x50, 0x8b, 0x8b, 0x52, 0xbb, 0x90, 0x2e,
	0x66, 0x9a, 0x0a, 0xa2, 0x23, 0x15, 0xd2, 0x4d, 0x5d, 0x49, 0xa9, 0xd0, 0x85, 0x04, 0xca, 0x34,
	0x19, 0x86, 0x40, 0x33, 0x53, 0xe7, 0x9d, 0x74, 0x23, 0x82, 0x78, 0x05, 0x6f, 0xe0, 0xd2, 0x1b,
	0x78, 0x05, 0x97, 0x7a, 0x05, 0x57, 0x9e, 0x42, 0xd2, 0xe9, 0x04, 0xaa, 0x86, 0xee, 0x1e, 0xde,
	0x3c, 0xbf, 0xf7, 0xcf, 0x93, 0xf1, 0xba, 0x5c, 0x4a, 0x3e, 0x65, 0x98, 0x26, 0x80, 0x8d, 0x2c,
	0xd4, 0x3c, 0xc0, 0xc0, 0xd4, 0x3c, 0x8d, 0x19, 0x60, 0x1a, 0xc7, 0x32, 0x17, 0x7a, 0x3c, 0xc9,
	0x13, 0xce, 0xf4, 0x78, 0x59, 0x47, 0x33, 0x25, 0xb5, 0xf4, 0xeb, 0x86, 0x41, 0x34, 0x01, 0x54,
	0xe2, 0x68, 0x1e, 0x20, 0x8b, 0xd7, 0x4e, 0xaa, 0x06, 0x28, 0x06, 0x32, 0x57, 0xbf, 0x27, 0x98,
	0xce, 0xb5, 0x03, 0xcb, 0xcd, 0x52, 0x4c, 0x85, 0x90, 0x9a, 0xea, 0x54, 0x0a, 0x30, 0x5f, 0x1b,
	0x17, 0xde, 0x6e, 0x9f, 0xe9, 0xd0, 0x80, 0xbd, 0x05, 0x37, 0x64, 0xb7, 0x39, 0x03, 0xed, 0x1f,
	0x7a, 0x5b, 0xb6, 0xf5, 0x58, 0xd0, 0x8c, 0xed, 0x39, 0x75, 0xe7, 0x68, 0x63, 0xb8, 0x69, 0x8b,
	0x57, 0x34, 0x63, 0x9d, 0x77, 0xc7, 0xdb, 0x59, 0xa1, 0xaf, 0xcd, 0xbe, 0xfe, 0xab, 0xe3, 0x6d,
	0xff, 0xec, 0xec, 0x9f, 0xa1, 0x75, 0x67, 0xa2, 0x8a, 0x6d, 0x6a, 0xed, 0x4a, 0xb4, 0xbc, 0x1f,
	0xad, 0x80, 0x8d, 0xd3, 0xc7, 0x8f, 0xcf, 0x27, 0xb7, 0xe3, 0xb7, 0x8b, 0x90, 0xee, 0x56, 0x4e,
	0xe9, 0xc6, 0x39, 0x68, 0x99, 0x31, 0x05, 0xb8, 0x65, 0x53, 0x33, 0x14, 0xe0, 0xd6, 0x7d, 0xef,
	0xc1, 0xf5, 0x9a, 0xb1, 0xcc, 0xd6, 0x2e, 0xdb, 0xdb, 0xff, 0xeb, 0xf4, 0x41, 0x11, 0xec, 0xc0,
	0xb9, 0xb9, 0x5c, 0xe2, 0x5c, 0x4e, 0xa9, 0xe0, 0x48, 0x2a, 0x8e, 0x39, 0x13, 0x8b, 0xd8, 0xed,
	0x0f, 0x9c, 0xa5, 0x50, 0xfd, 0x60, 0xce, 0xad, 0x78, 0x76, 0xff, 0xf5, 0xc3, 0xf0, 0xc5, 0xad,
	0xf7, 0x4d, 0xc3, 0x30, 0x01, 0x64, 0x64, 0xa1, 0x46, 0x01, 0x5a, 0x0e, 0x86, 0x37, 0x6b, 0x89,
	0xc2, 0x04, 0xa2, 0xd2, 0x12, 0x8d, 0x82, 0xc8, 0x5a, 0xbe, 0xdc, 0xa6, 0xa9, 0x13, 0x12, 0x26,
	0x40, 0x48, 0x69, 0x22, 0x64, 0x14, 0x10, 0x62, 0x6d, 0x93, 0xff, 0x8b, 0x3d, 0x8f, 0xbf, 0x03,
	0x00, 0x00, 0xff, 0xff, 0x29, 0x16, 0xed, 0x5b, 0xd7, 0x02, 0x00, 0x00,
}
| pweil-/origin | vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_service.pb.go | GO | apache-2.0 | 8,224 |
#include <Eigen/Sparse>
#include <vector>
#include <iostream>
typedef Eigen::SparseMatrix<double> SpMat; // declares a column-major sparse matrix type of double
typedef Eigen::Triplet<double> T; // (row, col, value) entry used to assemble SpMat

// Fills `coefficients` and `b` with the discretized problem for an n x n image (defined in another TU).
void buildProblem(std::vector<T>& coefficients, Eigen::VectorXd& b, int n);
// Writes the solution vector as an n x n bitmap to `filename` (defined in another TU).
void saveAsBitmap(const Eigen::VectorXd& x, int n, const char* filename);
int main(int argc, char** argv)
{
if(argc!=2) {
std::cerr << "Error: expected one and only one argument.\n";
return -1;
}
int n = 300; // size of the image
int m = n*n; // number of unknows (=number of pixels)
// Assembly:
std::vector<T> coefficients; // list of non-zeros coefficients
Eigen::VectorXd b(m); // the right hand side-vector resulting from the constraints
buildProblem(coefficients, b, n);
SpMat A(m,m);
A.setFromTriplets(coefficients.begin(), coefficients.end());
// Solving:
Eigen::SimplicialCholesky<SpMat> chol(A); // performs a Cholesky factorization of A
Eigen::VectorXd x = chol.solve(b); // use the factorization to solve for the given right hand side
// Export the result to a file:
saveAsBitmap(x, n, argv[1]);
return 0;
}
| OSVR/OSVR-Core | vendor/eigen/doc/special_examples/Tutorial_sparse_example.cpp | C++ | apache-2.0 | 1,183 |
#include "drape/pointers.hpp"
#include "base/logging.hpp"
// Returns the process-wide tracker singleton. The function-local static is
// constructed on first use (thread-safe under C++11 "magic statics").
DpPointerTracker & DpPointerTracker::Instance()
{
  static DpPointerTracker instance;
  return instance;
}
// At destruction every tracked pointer must already have been destroyed;
// a non-empty map here indicates leaked drape pointers.
DpPointerTracker::~DpPointerTracker()
{
  ASSERT(m_alivePointers.empty(), ());
}
// Records one more reference to `refPtr` under `name`. The first reference
// inserts a new entry with count 1; later calls just bump the count.
// A null pointer is ignored. Thread-safe via m_mutex.
void DpPointerTracker::RefPtrNamed(void * refPtr, std::string const & name)
{
  std::lock_guard<std::mutex> lock(m_mutex);
  if (refPtr == nullptr)
    return;

  // Probe-insert: if the key already exists, insert() leaves the map
  // untouched and we increment the existing reference count instead.
  auto const result = m_alivePointers.insert(make_pair(refPtr, make_pair(1, name)));
  if (!result.second)
    result.first->second.first++;
}
// Removes `p` from the alive-pointer map when its object is destroyed.
// Warns if the pointer still had outstanding references at destruction time.
void DpPointerTracker::DestroyPtr(void * p)
{
  std::lock_guard<std::mutex> lock(m_mutex);
  ASSERT(p != nullptr, ());

  auto const it = m_alivePointers.find(p);
  if (it == m_alivePointers.end())
    return;

  if (it->second.first != 0)
  {
    LOG(LWARNING, ("Drape pointer [", it->second.second, p,
                   "] was destroyed, but had references, ref count = ",
                   it->second.first));
  }
  m_alivePointers.erase(it);
}
// Drops one reference from `p`'s tracked count. Null and untracked pointers
// are ignored; the count is asserted positive before decrementing.
void DpPointerTracker::DerefPtr(void * p)
{
  std::lock_guard<std::mutex> lock(m_mutex);
  if (p == nullptr)
    return;

  auto const it = m_alivePointers.find(p);
  if (it == m_alivePointers.end())
    return;

  ASSERT(it->second.first > 0, ());
  it->second.first--;
}
// Read-only view of the currently tracked pointers.
// NOTE(review): returns a reference to state guarded elsewhere by m_mutex;
// callers presumably read it only when no mutation can race — confirm.
DpPointerTracker::TAlivePointers const & DpPointerTracker::GetAlivePointers() const
{
  return m_alivePointers;
}
| alexzatsepin/omim | drape/pointers.cpp | C++ | apache-2.0 | 1,448 |
package snowballstem
// to regenerate these commands, run
// go run gengen.go /path/to/snowball/algorithms/directory
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/arabic/stem_Unicode.sbl -go -o arabic/arabic_stemmer -gop arabic -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w arabic/arabic_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/danish/stem_ISO_8859_1.sbl -go -o danish/danish_stemmer -gop danish -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w danish/danish_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/dutch/stem_ISO_8859_1.sbl -go -o dutch/dutch_stemmer -gop dutch -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w dutch/dutch_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/english/stem_ISO_8859_1.sbl -go -o english/english_stemmer -gop english -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w english/english_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/finnish/stem_ISO_8859_1.sbl -go -o finnish/finnish_stemmer -gop finnish -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w finnish/finnish_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/french/stem_ISO_8859_1.sbl -go -o french/french_stemmer -gop french -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w french/french_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/german/stem_ISO_8859_1.sbl -go -o german/german_stemmer -gop german -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w german/german_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/hungarian/stem_Unicode.sbl -go -o hungarian/hungarian_stemmer -gop hungarian -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w hungarian/hungarian_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/irish/stem_ISO_8859_1.sbl -go -o irish/irish_stemmer -gop irish -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w irish/irish_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/italian/stem_ISO_8859_1.sbl -go -o italian/italian_stemmer -gop italian -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w italian/italian_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/norwegian/stem_ISO_8859_1.sbl -go -o norwegian/norwegian_stemmer -gop norwegian -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w norwegian/norwegian_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/porter/stem_ISO_8859_1.sbl -go -o porter/porter_stemmer -gop porter -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w porter/porter_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/portuguese/stem_ISO_8859_1.sbl -go -o portuguese/portuguese_stemmer -gop portuguese -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w portuguese/portuguese_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/romanian/stem_Unicode.sbl -go -o romanian/romanian_stemmer -gop romanian -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w romanian/romanian_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/russian/stem_Unicode.sbl -go -o russian/russian_stemmer -gop russian -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w russian/russian_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/spanish/stem_ISO_8859_1.sbl -go -o spanish/spanish_stemmer -gop spanish -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w spanish/spanish_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/swedish/stem_ISO_8859_1.sbl -go -o swedish/swedish_stemmer -gop swedish -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w swedish/swedish_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/tamil/stem_Unicode.sbl -go -o tamil/tamil_stemmer -gop tamil -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w tamil/tamil_stemmer.go
//go:generate $SNOWBALL/snowball $SNOWBALL/algorithms/turkish/stem_Unicode.sbl -go -o turkish/turkish_stemmer -gop turkish -gor github.com/blevesearch/snowballstem
//go:generate gofmt -s -w turkish/turkish_stemmer.go
| heroiclabs/nakama | vendor/github.com/blevesearch/snowballstem/gen.go | GO | apache-2.0 | 4,281 |
/*
* Copyright 2012 MyBatis.org.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.reflection;
import org.apache.ibatis.reflection.factory.DefaultObjectFactory;
import org.apache.ibatis.reflection.factory.ObjectFactory;
import org.apache.ibatis.reflection.wrapper.DefaultObjectWrapperFactory;
import org.apache.ibatis.reflection.wrapper.ObjectWrapperFactory;
/**
 * Holder for system-level {@link MetaObject} defaults: the default object factory,
 * the default object wrapper factory, and a shared "null" meta object sentinel.
 *
 * @author Clinton Begin
 */
public final class SystemMetaObject {

  public static final ObjectFactory DEFAULT_OBJECT_FACTORY = new DefaultObjectFactory();
  public static final ObjectWrapperFactory DEFAULT_OBJECT_WRAPPER_FACTORY = new DefaultObjectWrapperFactory();
  // Shared sentinel representing "no object"; built around the NullObject marker type.
  public static final MetaObject NULL_META_OBJECT = MetaObject.forObject(NullObject.class, DEFAULT_OBJECT_FACTORY, DEFAULT_OBJECT_WRAPPER_FACTORY);

  private SystemMetaObject() {
    // Prevent Instantiation of Static Class
  }

  // Empty marker type backing NULL_META_OBJECT.
  private static class NullObject {
  }

  /**
   * Wraps the given object in a {@link MetaObject} using the system default factories.
   */
  public static MetaObject forObject(Object object) {
    return MetaObject.forObject(object, DEFAULT_OBJECT_FACTORY, DEFAULT_OBJECT_WRAPPER_FACTORY);
  }
}
| shurun19851206/mybaties | src/main/java/org/apache/ibatis/reflection/SystemMetaObject.java | Java | apache-2.0 | 1,662 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.geo;
import com.spatial4j.core.exception.InvalidShapeException;
import com.spatial4j.core.shape.Circle;
import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.Shape;
import com.spatial4j.core.shape.ShapeCollection;
import com.spatial4j.core.shape.jts.JtsGeometry;
import com.spatial4j.core.shape.jts.JtsPoint;
import com.vividsolutions.jts.geom.*;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT;
/**
* Tests for {@link GeoJSONShapeParser}
*/
public class GeoJSONShapeParserTests extends ElasticsearchTestCase {
    // Shared JTS factory taken from the spatial context used by ShapeBuilder.
    private final static GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory();
    /** Parses a GeoJSON Point and checks it equals the expected JTS point. */
    public void testParse_simplePoint() throws IOException {
        String pointGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Point")
                .startArray("coordinates").value(100.0).value(0.0).endArray()
                .endObject().string();

        Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0));
        assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson);
    }
    /** Parses a two-point GeoJSON LineString into the equivalent JTS geometry. */
    public void testParse_lineString() throws IOException {
        String lineGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "LineString")
                .startArray("coordinates")
                .startArray().value(100.0).value(0.0).endArray()
                .startArray().value(101.0).value(1.0).endArray()
                .endArray()
                .endObject().string();

        List<Coordinate> lineCoordinates = new ArrayList<>();
        lineCoordinates.add(new Coordinate(100, 0));
        lineCoordinates.add(new Coordinate(101, 1));
        LineString expected = GEOMETRY_FACTORY.createLineString(
                lineCoordinates.toArray(new Coordinate[lineCoordinates.size()]));
        assertGeometryEquals(jtsGeom(expected), lineGeoJson);
    }
    /** Parses a GeoJSON MultiLineString (two segments) into the equivalent JTS geometry. */
    public void testParse_multiLineString() throws IOException {
        String multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiLineString")
                .startArray("coordinates")
                .startArray()
                .startArray().value(100.0).value(0.0).endArray()
                .startArray().value(101.0).value(1.0).endArray()
                .endArray()
                .startArray()
                .startArray().value(102.0).value(2.0).endArray()
                .startArray().value(103.0).value(3.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        MultiLineString expected = GEOMETRY_FACTORY.createMultiLineString(new LineString[]{
                GEOMETRY_FACTORY.createLineString(new Coordinate[]{
                        new Coordinate(100, 0),
                        new Coordinate(101, 1),
                }),
                GEOMETRY_FACTORY.createLineString(new Coordinate[]{
                        new Coordinate(102, 2),
                        new Coordinate(103, 3),
                }),
        });
        assertGeometryEquals(jtsGeom(expected), multilinesGeoJson);
    }
public void testParse_circle() throws IOException {
String multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "circle")
.startArray("coordinates").value(100.0).value(0.0).endArray()
.field("radius", "100m")
.endObject().string();
Circle expected = SPATIAL_CONTEXT.makeCircle(100.0, 0.0, 360 * 100 / GeoUtils.EARTH_EQUATOR);
assertGeometryEquals(expected, multilinesGeoJson);
}
    /**
     * Envelope parsing: corner pairs are accepted in either order; wrong
     * coordinate counts and empty coordinate arrays must raise a parse error.
     */
    public void testParse_envelope() throws IOException {
        // test #1: envelope with expected coordinate order (TopLeft, BottomRight)
        String multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
                .startArray("coordinates")
                .startArray().value(-50).value(30).endArray()
                .startArray().value(50).value(-30).endArray()
                .endArray()
                .endObject().string();
        Rectangle expected = SPATIAL_CONTEXT.makeRectangle(-50, 50, -30, 30);
        assertGeometryEquals(expected, multilinesGeoJson);

        // test #2: envelope with agnostic coordinate order (TopRight, BottomLeft)
        multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
                .startArray("coordinates")
                .startArray().value(50).value(30).endArray()
                .startArray().value(-50).value(-30).endArray()
                .endArray()
                .endObject().string();
        expected = SPATIAL_CONTEXT.makeRectangle(-50, 50, -30, 30);
        assertGeometryEquals(expected, multilinesGeoJson);

        // test #3: "envelope" (actually a triangle) with invalid number of coordinates (TopRight, BottomLeft, BottomRight)
        multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
                .startArray("coordinates")
                .startArray().value(50).value(30).endArray()
                .startArray().value(-50).value(-30).endArray()
                .startArray().value(50).value(-39).endArray()
                .endArray()
                .endObject().string();
        XContentParser parser = JsonXContent.jsonXContent.createParser(multilinesGeoJson);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

        // test #4: "envelope" with empty coordinates
        multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope")
                .startArray("coordinates")
                .endArray()
                .endObject().string();
        parser = JsonXContent.jsonXContent.createParser(multilinesGeoJson);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
    }
    /** Parses a simple closed polygon (no holes) into the equivalent JTS polygon. */
    public void testParse_polygonNoHoles() throws IOException {
        String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(100.0).value(1.0).endArray()
                .startArray().value(101.0).value(1.0).endArray()
                .startArray().value(101.0).value(0.0).endArray()
                .startArray().value(100.0).value(0.0).endArray()
                .startArray().value(100.0).value(1.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        // Expected shell listed in the opposite winding; equality is geometric, not order-sensitive.
        List<Coordinate> shellCoordinates = new ArrayList<>();
        shellCoordinates.add(new Coordinate(100, 0));
        shellCoordinates.add(new Coordinate(101, 0));
        shellCoordinates.add(new Coordinate(101, 1));
        shellCoordinates.add(new Coordinate(100, 1));
        shellCoordinates.add(new Coordinate(100, 0));

        LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
        Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null);
        assertGeometryEquals(jtsGeom(expected), polygonGeoJson);
    }
    /** Malformed Point geometries (multipoint-shaped or empty coordinates) must raise a parse error. */
    public void testParse_invalidPoint() throws IOException {
        // test case 1: create an invalid point object with multipoint data format
        String invalidPoint1 = XContentFactory.jsonBuilder().startObject().field("type", "point")
                .startArray("coordinates")
                .startArray().value(-74.011).value(40.753).endArray()
                .endArray()
                .endObject().string();
        XContentParser parser = JsonXContent.jsonXContent.createParser(invalidPoint1);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

        // test case 2: create an invalid point object with an empty number of coordinates
        String invalidPoint2 = XContentFactory.jsonBuilder().startObject().field("type", "point")
                .startArray("coordinates")
                .endArray()
                .endObject().string();
        parser = JsonXContent.jsonXContent.createParser(invalidPoint2);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
    }
    /** Malformed MultiPoint geometries (single pair, empty, or zero-length inner array) must raise a parse error. */
    public void testParse_invalidMultipoint() throws IOException {
        // test case 1: create an invalid multipoint object with single coordinate
        String invalidMultipoint1 = XContentFactory.jsonBuilder().startObject().field("type", "multipoint")
                .startArray("coordinates").value(-74.011).value(40.753).endArray()
                .endObject().string();
        XContentParser parser = JsonXContent.jsonXContent.createParser(invalidMultipoint1);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

        // test case 2: create an invalid multipoint object with null coordinate
        String invalidMultipoint2 = XContentFactory.jsonBuilder().startObject().field("type", "multipoint")
                .startArray("coordinates")
                .endArray()
                .endObject().string();
        parser = JsonXContent.jsonXContent.createParser(invalidMultipoint2);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

        // test case 3: create a valid formatted multipoint object with invalid number (0) of coordinates
        String invalidMultipoint3 = XContentFactory.jsonBuilder().startObject().field("type", "multipoint")
                .startArray("coordinates")
                .startArray().endArray()
                .endArray()
                .endObject().string();
        parser = JsonXContent.jsonXContent.createParser(invalidMultipoint3);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
    }
    /** A MultiPolygon whose "holes" lie outside the outer ring must raise a parse error. */
    public void testParse_invalidMultiPolygon() throws IOException {
        // test invalid multipolygon (an "accidental" polygon with inner rings outside outer ring)
        String multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon")
                .startArray("coordinates")
                .startArray()//one poly (with two holes)
                .startArray()
                .startArray().value(102.0).value(2.0).endArray()
                .startArray().value(103.0).value(2.0).endArray()
                .startArray().value(103.0).value(3.0).endArray()
                .startArray().value(102.0).value(3.0).endArray()
                .startArray().value(102.0).value(2.0).endArray()
                .endArray()
                .startArray()// first hole
                .startArray().value(100.0).value(0.0).endArray()
                .startArray().value(101.0).value(0.0).endArray()
                .startArray().value(101.0).value(1.0).endArray()
                .startArray().value(100.0).value(1.0).endArray()
                .startArray().value(100.0).value(0.0).endArray()
                .endArray()
                .startArray()//second hole
                .startArray().value(100.2).value(0.8).endArray()
                .startArray().value(100.2).value(0.2).endArray()
                .startArray().value(100.8).value(0.2).endArray()
                .startArray().value(100.8).value(0.8).endArray()
                .startArray().value(100.2).value(0.8).endArray()
                .endArray()
                .endArray()
                .endArray()
                .endObject().string();
        XContentParser parser = JsonXContent.jsonXContent.createParser(multiPolygonGeoJson);
        parser.nextToken();
        ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
    }
    /**
     * OGC winding-order handling for hole-free polygons: polygons that cross the
     * dateline must be split into a MultiPolygon; non-crossing polygons stay single,
     * regardless of clockwise/counter-clockwise orientation.
     */
    public void testParse_OGCPolygonWithoutHoles() throws IOException {
        // test 1: ccw poly not crossing dateline
        String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .startArray().value(-177.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
        parser.nextToken();
        Shape shape = ShapeBuilder.parse(parser).build();

        ElasticsearchGeoAssertions.assertPolygon(shape);

        // test 2: ccw poly crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(-177.0).value(-10.0).endArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();

        ElasticsearchGeoAssertions.assertMultiPolygon(shape);

        // test 3: cw poly not crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(180.0).value(10.0).endArray()
                .startArray().value(180.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();

        ElasticsearchGeoAssertions.assertPolygon(shape);

        // test 4: cw poly crossing dateline (longitudes > 180 are normalized by the parser)
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(184.0).value(15.0).endArray()
                .startArray().value(184.0).value(0.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(174.0).value(-10.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();

        ElasticsearchGeoAssertions.assertMultiPolygon(shape);
    }
    /**
     * Same winding-order matrix as {@code testParse_OGCPolygonWithoutHoles}, but
     * each polygon carries one hole; dateline-crossing shells must still split
     * into a MultiPolygon.
     */
    public void testParse_OGCPolygonWithHoles() throws IOException {
        // test 1: ccw poly not crossing dateline
        String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .startArray().value(-177.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .startArray()
                .startArray().value(-172.0).value(8.0).endArray()
                .startArray().value(174.0).value(10.0).endArray()
                .startArray().value(-172.0).value(-8.0).endArray()
                .startArray().value(-172.0).value(8.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
        parser.nextToken();
        Shape shape = ShapeBuilder.parse(parser).build();

        ElasticsearchGeoAssertions.assertPolygon(shape);

        // test 2: ccw poly crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(-177.0).value(-10.0).endArray()
                .startArray().value(-177.0).value(10.0).endArray()
                .endArray()
                .startArray()
                .startArray().value(178.0).value(8.0).endArray()
                .startArray().value(-178.0).value(8.0).endArray()
                .startArray().value(-180.0).value(-8.0).endArray()
                .startArray().value(178.0).value(8.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();

        ElasticsearchGeoAssertions.assertMultiPolygon(shape);

        // test 3: cw poly not crossing dateline
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(180.0).value(10.0).endArray()
                .startArray().value(179.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .endArray()
                .startArray()
                .startArray().value(177.0).value(8.0).endArray()
                .startArray().value(179.0).value(10.0).endArray()
                .startArray().value(179.0).value(-8.0).endArray()
                .startArray().value(177.0).value(8.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();

        ElasticsearchGeoAssertions.assertPolygon(shape);

        // test 4: cw poly crossing dateline (longitudes > 180 are normalized by the parser)
        polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
                .startArray("coordinates")
                .startArray()
                .startArray().value(183.0).value(10.0).endArray()
                .startArray().value(183.0).value(-10.0).endArray()
                .startArray().value(176.0).value(-15.0).endArray()
                .startArray().value(172.0).value(0.0).endArray()
                .startArray().value(176.0).value(15.0).endArray()
                .startArray().value(183.0).value(10.0).endArray()
                .endArray()
                .startArray()
                .startArray().value(178.0).value(8.0).endArray()
                .startArray().value(182.0).value(8.0).endArray()
                .startArray().value(180.0).value(-8.0).endArray()
                .startArray().value(178.0).value(8.0).endArray()
                .endArray()
                .endArray()
                .endObject().string();

        parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
        parser.nextToken();
        shape = ShapeBuilder.parse(parser).build();

        ElasticsearchGeoAssertions.assertMultiPolygon(shape);
    }
/**
 * Verifies that malformed Polygon GeoJSON documents are rejected with the
 * expected exception type: structurally invalid coordinate arrays raise an
 * {@code ElasticsearchParseException}, while null coordinate values raise an
 * {@code ElasticsearchIllegalArgumentException}.
 */
public void testParse_invalidPolygon() throws IOException {
    /**
     * The following 3 test cases ensure proper error handling of invalid polygons
     * per the GeoJSON specification
     */
    // test case 1: create an invalid polygon with only 2 points
    String invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(-74.011).value(40.753).endArray()
            .startArray().value(-75.022).value(41.783).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    XContentParser parser = JsonXContent.jsonXContent.createParser(invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

    // test case 2: create an invalid polygon with only 1 point
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(-74.011).value(40.753).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

    // test case 3: create an invalid polygon with 0 points
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

    // test case 4: create an invalid polygon with null value points
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().nullValue().nullValue().endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchIllegalArgumentException.class);

    // test case 5: create an invalid polygon with 1 invalid LinearRing
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .nullValue().nullValue()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchIllegalArgumentException.class);

    // test case 6: create an invalid polygon with 0 LinearRings
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates").endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);

    // test case 7: create an invalid polygon with 0 LinearRings
    // (coordinates hold a bare point instead of an array of rings)
    invalidPoly = XContentFactory.jsonBuilder().startObject().field("type", "polygon")
            .startArray("coordinates")
            .startArray().value(-74.011).value(40.753).endArray()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(invalidPoly);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class);
}
/**
 * Parses a Polygon GeoJSON consisting of an outer shell and one inner hole
 * and asserts it is equal to the corresponding hand-built JTS polygon.
 */
public void testParse_polygonWithHole() throws IOException {
    String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(100.0).value(1.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .startArray().value(101.0).value(0.0).endArray()
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(100.0).value(1.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(100.2).value(0.8).endArray()
            .startArray().value(100.2).value(0.2).endArray()
            .startArray().value(100.8).value(0.2).endArray()
            .startArray().value(100.8).value(0.8).endArray()
            .startArray().value(100.2).value(0.8).endArray()
            .endArray()
            .endArray()
            .endObject().string();

    // Expected shell/hole use the same vertices as the JSON above (listed
    // starting from a different vertex); equality is checked through
    // assertGeometryEquals -> ElasticsearchGeoAssertions.assertEquals.
    List<Coordinate> shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 0));
    shellCoordinates.add(new Coordinate(101, 0));
    shellCoordinates.add(new Coordinate(101, 1));
    shellCoordinates.add(new Coordinate(100, 1));
    shellCoordinates.add(new Coordinate(100, 0));

    List<Coordinate> holeCoordinates = new ArrayList<>();
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.2));

    LinearRing shell = GEOMETRY_FACTORY.createLinearRing(
            shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
    LinearRing[] holes = new LinearRing[1];
    holes[0] = GEOMETRY_FACTORY.createLinearRing(
            holeCoordinates.toArray(new Coordinate[holeCoordinates.size()]));
    Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, holes);
    assertGeometryEquals(jtsGeom(expected), polygonGeoJson);
}
/**
 * A self-intersecting polygon ring must be rejected: building the parsed
 * shape is expected to fail with an {@code InvalidShapeException}.
 */
public void testParse_selfCrossingPolygon() throws IOException {
    // test self crossing ccw poly not crossing dateline
    String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(-177.0).value(15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
    parser.nextToken();
    ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class);
}
/**
 * Parses a GeoJSON MultiPoint with two points and checks it against the
 * equivalent two-point {@code ShapeCollection}.
 */
public void testParse_multiPoint() throws IOException {
    // Build the expected collection first: the two points (100, 0) and (101, 1).
    ShapeCollection expectedCollection = shapeCollection(
            SPATIAL_CONTEXT.makePoint(100, 0),
            SPATIAL_CONTEXT.makePoint(101, 1.0));

    String json = XContentFactory.jsonBuilder().startObject().field("type", "MultiPoint")
            .startArray("coordinates")
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .endArray()
            .endObject().string();

    assertGeometryEquals(expectedCollection, json);
}
/**
 * Parses GeoJSON MultiPolygon documents and compares them with hand-built
 * JTS geometries. Test #1 covers two polygons (one plain, one with a hole);
 * test #2 covers a single-polygon MultiPolygon, which is compared against a
 * simple polygon rather than a collection.
 */
public void testParse_multiPolygon() throws IOException {
    // test #1: two polygons; one without hole, one with hole
    String multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon")
            .startArray("coordinates")
            .startArray()//first poly (without holes)
            .startArray()
            .startArray().value(102.0).value(2.0).endArray()
            .startArray().value(103.0).value(2.0).endArray()
            .startArray().value(103.0).value(3.0).endArray()
            .startArray().value(102.0).value(3.0).endArray()
            .startArray().value(102.0).value(2.0).endArray()
            .endArray()
            .endArray()
            .startArray()//second poly (with hole)
            .startArray()
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(101.0).value(0.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .startArray().value(100.0).value(1.0).endArray()
            .startArray().value(100.0).value(0.0).endArray()
            .endArray()
            .startArray()//hole
            .startArray().value(100.2).value(0.8).endArray()
            .startArray().value(100.2).value(0.2).endArray()
            .startArray().value(100.8).value(0.2).endArray()
            .startArray().value(100.8).value(0.8).endArray()
            .startArray().value(100.2).value(0.8).endArray()
            .endArray()
            .endArray()
            .endArray()
            .endObject().string();

    // Expected: second polygon (shell + hole) ...
    List<Coordinate> shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 0));
    shellCoordinates.add(new Coordinate(101, 0));
    shellCoordinates.add(new Coordinate(101, 1));
    shellCoordinates.add(new Coordinate(100, 1));
    shellCoordinates.add(new Coordinate(100, 0));

    List<Coordinate> holeCoordinates = new ArrayList<>();
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.2));

    LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
    LinearRing[] holes = new LinearRing[1];
    holes[0] = GEOMETRY_FACTORY.createLinearRing(holeCoordinates.toArray(new Coordinate[holeCoordinates.size()]));
    Polygon withHoles = GEOMETRY_FACTORY.createPolygon(shell, holes);

    // ... and first polygon (no holes).
    shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(102, 3));
    shellCoordinates.add(new Coordinate(103, 3));
    shellCoordinates.add(new Coordinate(103, 2));
    shellCoordinates.add(new Coordinate(102, 2));
    shellCoordinates.add(new Coordinate(102, 3));

    shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
    Polygon withoutHoles = GEOMETRY_FACTORY.createPolygon(shell, null);

    Shape expected = shapeCollection(withoutHoles, withHoles);
    assertGeometryEquals(expected, multiPolygonGeoJson);

    // test #2: multipolygon; one polygon with one hole
    // this test converting the multipolygon from a ShapeCollection type
    // to a simple polygon (jtsGeom)
    multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon")
            .startArray("coordinates")
            .startArray()
            .startArray()
            .startArray().value(100.0).value(1.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .startArray().value(101.0).value(0.0).endArray()
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(100.0).value(1.0).endArray()
            .endArray()
            .startArray()// hole
            .startArray().value(100.2).value(0.8).endArray()
            .startArray().value(100.2).value(0.2).endArray()
            .startArray().value(100.8).value(0.2).endArray()
            .startArray().value(100.8).value(0.8).endArray()
            .startArray().value(100.2).value(0.8).endArray()
            .endArray()
            .endArray()
            .endArray()
            .endObject().string();

    shellCoordinates = new ArrayList<>();
    shellCoordinates.add(new Coordinate(100, 1));
    shellCoordinates.add(new Coordinate(101, 1));
    shellCoordinates.add(new Coordinate(101, 0));
    shellCoordinates.add(new Coordinate(100, 0));
    shellCoordinates.add(new Coordinate(100, 1));

    holeCoordinates = new ArrayList<>();
    holeCoordinates.add(new Coordinate(100.2, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.2));
    holeCoordinates.add(new Coordinate(100.8, 0.8));
    holeCoordinates.add(new Coordinate(100.2, 0.8));

    shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
    holes = new LinearRing[1];
    holes[0] = GEOMETRY_FACTORY.createLinearRing(holeCoordinates.toArray(new Coordinate[holeCoordinates.size()]));
    withHoles = GEOMETRY_FACTORY.createPolygon(shell, holes);
    assertGeometryEquals(jtsGeom(withHoles), multiPolygonGeoJson);
}
/**
 * Parses a GeoJSON GeometryCollection containing a LineString and a Point
 * and compares it with the equivalent {@code ShapeCollection}.
 */
public void testParse_geometryCollection() throws IOException {
    String geometryCollectionGeoJson = XContentFactory.jsonBuilder().startObject()
            .field("type", "GeometryCollection")
            .startArray("geometries")
            .startObject()
            .field("type", "LineString")
            .startArray("coordinates")
            .startArray().value(100.0).value(0.0).endArray()
            .startArray().value(101.0).value(1.0).endArray()
            .endArray()
            .endObject()
            .startObject()
            .field("type", "Point")
            .startArray("coordinates").value(102.0).value(2.0).endArray()
            .endObject()
            .endArray()
            .endObject()
            .string();

    Shape[] expected = new Shape[2];
    LineString expectedLineString = GEOMETRY_FACTORY.createLineString(new Coordinate[]{
            new Coordinate(100, 0),
            new Coordinate(101, 1),
    });
    expected[0] = jtsGeom(expectedLineString);
    Point expectedPoint = GEOMETRY_FACTORY.createPoint(new Coordinate(102.0, 2.0));
    expected[1] = new JtsPoint(expectedPoint, SPATIAL_CONTEXT);

    //equals returns true only if geometries are in the same order
    assertGeometryEquals(shapeCollection(expected), geometryCollectionGeoJson);
}
/**
 * The parser must pick up the top-level "type" and "coordinates" fields and
 * ignore unrelated content: a "crs" object, a "bbox" field, unknown fields,
 * and nested objects that also contain "coordinates"/"type" entries.
 */
public void testThatParserExtractsCorrectTypeAndCoordinatesFromArbitraryJson() throws IOException {
    String pointGeoJson = XContentFactory.jsonBuilder().startObject()
            .startObject("crs")
            .field("type", "name")
            .startObject("properties")
            .field("name", "urn:ogc:def:crs:OGC:1.3:CRS84")
            .endObject()
            .endObject()
            .field("bbox", "foobar")
            .field("type", "point")
            .field("bubu", "foobar")
            .startArray("coordinates").value(100.0).value(0.0).endArray()
            .startObject("nested").startArray("coordinates").value(200.0).value(0.0).endArray().endObject()
            .startObject("lala").field("type", "NotAPoint").endObject()
            .endObject().string();

    // Only the outer point (100, 0) should be extracted.
    Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0));
    assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson);
}
/**
 * Exercises the "orientation" field of a Polygon: all accepted spellings of
 * counter-clockwise ("right", "ccw", "counterclockwise") keep the shape a
 * single polygon, while the clockwise spellings ("left", "cw", "clockwise")
 * make the same ring cross the dateline and yield a multi-polygon.
 */
public void testParse_orientationOption() throws IOException {
    // test 1: valid ccw (right handed system) poly not crossing dateline (with 'right' field)
    String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .field("orientation", "right")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .startArray().value(174.0).value(10.0).endArray()
            .startArray().value(-172.0).value(-8.0).endArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    XContentParser parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
    parser.nextToken();
    Shape shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertPolygon(shape);

    // test 2: valid ccw (right handed system) poly not crossing dateline (with 'ccw' field)
    polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .field("orientation", "ccw")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .startArray().value(174.0).value(10.0).endArray()
            .startArray().value(-172.0).value(-8.0).endArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertPolygon(shape);

    // test 3: valid ccw (right handed system) poly not crossing dateline (with 'counterclockwise' field)
    polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .field("orientation", "counterclockwise")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .startArray().value(174.0).value(10.0).endArray()
            .startArray().value(-172.0).value(-8.0).endArray()
            .startArray().value(-172.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertPolygon(shape);

    // test 4: valid cw (left handed system) poly crossing dateline (with 'left' field)
    polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .field("orientation", "left")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .startArray().value(178.0).value(8.0).endArray()
            .startArray().value(180.0).value(-8.0).endArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertMultiPolygon(shape);

    // test 5: valid cw multipoly (left handed system) poly crossing dateline (with 'cw' field)
    polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .field("orientation", "cw")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .startArray().value(178.0).value(8.0).endArray()
            .startArray().value(180.0).value(-8.0).endArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertMultiPolygon(shape);

    // test 6: valid cw multipoly (left handed system) poly crossing dateline (with 'clockwise' field)
    polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
            .field("orientation", "clockwise")
            .startArray("coordinates")
            .startArray()
            .startArray().value(176.0).value(15.0).endArray()
            .startArray().value(-177.0).value(10.0).endArray()
            .startArray().value(-177.0).value(-10.0).endArray()
            .startArray().value(176.0).value(-15.0).endArray()
            .startArray().value(172.0).value(0.0).endArray()
            .startArray().value(176.0).value(15.0).endArray()
            .endArray()
            .startArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .startArray().value(178.0).value(8.0).endArray()
            .startArray().value(180.0).value(-8.0).endArray()
            .startArray().value(-178.0).value(8.0).endArray()
            .endArray()
            .endArray()
            .endObject().string();
    parser = JsonXContent.jsonXContent.createParser(polygonGeoJson);
    parser.nextToken();
    shape = ShapeBuilder.parse(parser).build();
    ElasticsearchGeoAssertions.assertMultiPolygon(shape);
}
/**
 * Parses {@code geoJson} with the {@link ShapeBuilder}, builds the shape and
 * asserts it equals {@code expected}.
 *
 * @param expected the shape the GeoJSON must parse to
 * @param geoJson  the GeoJSON document to parse
 * @throws IOException if the JSON cannot be read
 */
private void assertGeometryEquals(Shape expected, String geoJson) throws IOException {
    XContentParser jsonParser = JsonXContent.jsonXContent.createParser(geoJson);
    jsonParser.nextToken();
    Shape actual = ShapeBuilder.parse(jsonParser).build();
    ElasticsearchGeoAssertions.assertEquals(expected, actual);
}
/** Wraps the given shapes into a {@code ShapeCollection} using the test's spatial context. */
private ShapeCollection<Shape> shapeCollection(Shape... shapes) {
    List<Shape> asList = Arrays.asList(shapes);
    return new ShapeCollection<>(asList, SPATIAL_CONTEXT);
}
/**
 * Converts each JTS geometry to a {@code JtsGeometry} shape (via
 * {@link #jtsGeom}) and wraps the results into a {@code ShapeCollection}.
 */
private ShapeCollection<Shape> shapeCollection(Geometry... geoms) {
    List<Shape> converted = new ArrayList<>(geoms.length);
    for (int i = 0; i < geoms.length; i++) {
        converted.add(jtsGeom(geoms[i]));
    }
    return new ShapeCollection<>(converted, SPATIAL_CONTEXT);
}
/**
 * Wraps a raw JTS {@code Geometry} into a {@code JtsGeometry} shape with
 * dateline handling and index building both disabled.
 */
private JtsGeometry jtsGeom(Geometry geom) {
    return new JtsGeometry(geom, SPATIAL_CONTEXT, false, false);
}
}
| Asimov4/elasticsearch | src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java | Java | apache-2.0 | 47,639 |
# Homebrew formula for clFFT, the OpenCL FFT library from clMathLibraries.
class Clfft < Formula
  desc "FFT functions written in OpenCL"
  homepage "https://github.com/clMathLibraries/clFFT"
  url "https://github.com/clMathLibraries/clFFT/archive/v2.12.2.tar.gz"
  sha256 "e7348c146ad48c6a3e6997b7702202ad3ee3b5df99edf7ef00bbacc21e897b12"

  bottle do
    cellar :any
    sha256 "1e3aca16a694b761c0267c0dfdd9933d43cddd3ed0ea9d20fd4016222a7748f9" => :high_sierra
    sha256 "009c0a8a81d783393abc8ca6307631c50e50ba72dc09e3f2cda5f2e2d8aa617c" => :sierra
    sha256 "369c0df6b06b7ea116120e177a44a54760cc4d7132a1fb59a83ef52a99a6b5f4" => :el_capitan
    sha256 "3c91564548f9b7844de09de3d54b77b43e7855c17def6d3efac5866e357635f0" => :yosemite
  end

  depends_on "boost" => :build
  depends_on "cmake" => :build

  def install
    # CMakeLists.txt lives under src/; examples and tests are not installed.
    mkdir "build" do
      system "cmake", "../src", "-DBUILD_EXAMPLES:BOOL=OFF", "-DBUILD_TEST:BOOL=OFF", *std_cmake_args
      system "make", "install"
    end
    pkgshare.install "src/examples"
  end

  test do
    # Build and run the bundled 1-D FFT example against the installed library.
    system ENV.cxx, pkgshare/"examples/fft1d.c", "-I#{include}", "-L#{lib}",
           "-lclFFT", "-framework", "OpenCL", "-o", "fft1d"
    assert_match "one dimensional array of size N = 16", shell_output("./fft1d")
  end
end
| robohack/homebrew-core | Formula/clfft.rb | Ruby | bsd-2-clause | 1,197 |
/**
* @license
* Copyright The Closure Library Authors.
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @fileoverview Contains utility methods to extract text content from HTML.
* @supported IE 10+, Chrome 26+, Firefox 22+, Safari 7.1+, Opera 15+
*/
goog.provide('goog.html.textExtractor');
goog.require('goog.dom.TagName');
goog.require('goog.html.sanitizer.HtmlSanitizer');
goog.require('goog.object');
goog.require('goog.userAgent');
/**
 * Safely extracts the text that an untrusted HTML string would display to the
 * user. The markup is first run through the HtmlSanitizer (whose default
 * policy strips the bodies of tags such as SCRIPT and STYLE), after which the
 * text is collected while inserting newlines around block-level content —
 * smarter than goog.html.utils.stripHtmlTags.
 * @param {string} html The untrusted HTML string.
 * @return {string}
 */
// TODO(pelizzi): consider an optional bool parameter to also extract the text
// content of alt attributes and such.
goog.html.textExtractor.extractTextContent = function(html) {
  'use strict';
  if (!goog.html.textExtractor.isSupported()) {
    return '';
  }
  // Allow only the style attribute so sanitized attributes cannot be used
  // for DOM clobbering; styles are kept for display:block detection.
  var builder = new goog.html.sanitizer.HtmlSanitizer.Builder()
                    .onlyAllowAttributes(['style'])
                    .allowCssStyles();
  var sanitizer = builder.build();
  var root = sanitizer.sanitizeToDomNode(html);
  // textContent/innerText do not space block elements properly, so walk the
  // sanitized tree ourselves and account for block-level spacing.
  var text = goog.html.textExtractor.extractTextContentFromNode_(root);
  return text.trim();
};
/**
 * Walks a sanitized DOM tree and concatenates its displayable text: text
 * nodes are whitespace-collapsed, <br> becomes '\n', and block-level
 * elements are surrounded by newlines. Non-element, non-text nodes yield
 * the empty string.
 * @param {!Node} node
 * @return {string}
 * @private
 */
goog.html.textExtractor.extractTextContentFromNode_ = function(node) {
  'use strict';
  if (node.nodeType == Node.TEXT_NODE) {
    return node.nodeValue.replace(/\s+/g, ' ').trim();
  }
  if (node.nodeType != Node.ELEMENT_NODE) {
    return '';
  }
  var element = /** @type {!Element} */ (node);
  if (element.tagName == goog.dom.TagName.BR) {
    return '\n';
  }
  var pieces = [];
  for (var i = 0; i < node.childNodes.length; i++) {
    pieces.push(
        goog.html.textExtractor.extractTextContentFromNode_(node.childNodes[i]));
  }
  var text = pieces.join('');
  if (goog.html.textExtractor.isBlockElement_(element)) {
    text = '\n' + text + '\n';
  }
  return text;
};
/**
 * The set of tag names treated as block-level when inserting newlines
 * between extracted text chunks.
 * @private @const {!Object<!goog.dom.TagName, boolean>}
 */
goog.html.textExtractor.BLOCK_ELEMENTS_ = goog.object.createSet(
    goog.dom.TagName.ADDRESS, goog.dom.TagName.BLOCKQUOTE,
    goog.dom.TagName.CENTER, goog.dom.TagName.DIV, goog.dom.TagName.DL,
    goog.dom.TagName.FIELDSET, goog.dom.TagName.FORM, goog.dom.TagName.H1,
    goog.dom.TagName.H2, goog.dom.TagName.H3, goog.dom.TagName.H4,
    goog.dom.TagName.H5, goog.dom.TagName.H6, goog.dom.TagName.HR,
    goog.dom.TagName.OL, goog.dom.TagName.P, goog.dom.TagName.PRE,
    goog.dom.TagName.TABLE, goog.dom.TagName.UL);
/**
 * Returns whether the element is rendered as a block, i.e. the browser would
 * visually separate its text from that of the preceding node. An element
 * counts as block if its inline style says display:block or its tag name is
 * in BLOCK_ELEMENTS_.
 * @param {!Element} element
 * @return {boolean}
 * @private
 */
goog.html.textExtractor.isBlockElement_ = function(element) {
  'use strict';
  if (element.style.display == 'block') {
    return true;
  }
  return goog.html.textExtractor.BLOCK_ELEMENTS_.hasOwnProperty(
      element.tagName);
};
/**
 * Whether the text extractor can run in the current browser. It relies on
 * the HTML Sanitizer, which supports IE only from version 10 onwards.
 * Visible for testing.
 * @return {boolean}
 */
goog.html.textExtractor.isSupported = function() {
  'use strict';
  var onUnsupportedIe =
      goog.userAgent.IE && !goog.userAgent.isVersionOrHigher(10);
  return !onUnsupportedIe;
};
| scheib/chromium | third_party/google-closure-library/closure/goog/html/textextractor.js | JavaScript | bsd-3-clause | 4,176 |
// AddTask configuration macro for AliAnalysisTaskGammaDeltaPID.
// Creates the task, selects the trigger class, applies the event and track
// cuts passed as arguments, attaches the optional MC-efficiency, NUA and
// V0/ZDC weight TLists read from the given ROOT files, and finally wires the
// task into the analysis manager's input/output containers.
void AddTaskGammaDeltaPID(Int_t gFilterBit = 768,Double_t fPtMin=0.2,Double_t fPtMax=2.0,Double_t fEtaMin=-0.8, Double_t fEtaMax=0.8,Double_t fChi2max=4.0,Int_t gNclustTPC=70, Int_t fparticle=0,Double_t nSigTPC = 3.0, Double_t nSigTOF = 3.0, Bool_t bSkipPileUp=kFALSE, TString sCentEstimator="V0M", Float_t fVzMin = -10.0, Float_t fVzMax = 10.0, TString sTrigger="kINT7", Int_t vnHarmonic=2, TString sDetForEP="TPC", TString sMCfilePath="alien:///alice/cern.ch/user/m/mhaque/nuanue18/HijingMC_LHC18q_FB768_DeftCut.root", TString sNUAFilePath = "alien:///alice/cern.ch/user/m/mhaque/nuanue18/wgtCharge_NUAFB768NoPUcutRun296244.root", TString sDetWgtsFile = "alien:///alice/cern.ch/user/m/mhaque/nuanue18/wgtCharge_NUAFB768NoPUcutRun296244.root", Bool_t bSkipAnalysis=kFALSE, const char *suffix = "")
{
  printf("===================================================================================\n");
  printf(" Initialising Task: AddTaskGammaDeltaPID \n");
  printf("===================================================================================\n");

  AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager();
  TString outfileName = AliAnalysisManager::GetCommonFileName();
  AliAnalysisDataContainer *cinput = mgr->GetCommonInputContainer(); // AOD event

  TString list1OutName = outfileName; // common outfile filename
  list1OutName += ":Results"; // This directory contains result histograms

  TString TaskName;
  TaskName.Form("gTaskGammaDeltaPID%d_%d_%s", gFilterBit, gNclustTPC, suffix);

  AliAnalysisTaskGammaDeltaPID *taskGammaPID = new AliAnalysisTaskGammaDeltaPID(TaskName);

  ///-------> Analysis Object Created, now pass the arguments
  // Trigger selection (string argument -> AliVEvent trigger mask):
  if(sTrigger=="kMB" || sTrigger=="kmb" || sTrigger=="MB"){ // if We want MB Trigger
    taskGammaPID->SelectCollisionCandidates(AliVEvent::kMB);
    printf("\n =========> AddTaskCMW::Info() Trigger = kMB \n");
  }
  else if(sTrigger=="kSemiCentral" || sTrigger=="SemiCentral" || sTrigger=="semicentral"){
    taskGammaPID->SelectCollisionCandidates(AliVEvent::kSemiCentral);
    printf("\n =========> AddTaskCMW::Info() Trigger = kSemiCentral \n");
  }
  else if(sTrigger=="kCentral" || sTrigger=="Central" || sTrigger=="central"){
    taskGammaPID->SelectCollisionCandidates(AliVEvent::kCentral);
    printf("\n =========> AddTaskCMW::Info() Trigger = kCentral \n");
  }
  else if(sTrigger=="kAny" || sTrigger=="kAll"){
    taskGammaPID->SelectCollisionCandidates(AliVEvent::kINT7 | AliVEvent::kSemiCentral | AliVEvent::kCentral);
  }
  else{//if trigger==kINT7 or no trigger provided:
    taskGammaPID->SelectCollisionCandidates(AliVEvent::kINT7); // default is kINT7
    printf("\n =========> AddTaskCMW::Info() Trigger = kINT7 \n");
  }

  ///Set Event cuts:
  taskGammaPID->SetVzRangeMin(fVzMin);
  taskGammaPID->SetVzRangeMax(fVzMax);
  taskGammaPID->SetFlagSkipPileUpCuts(bSkipPileUp);
  taskGammaPID->SetFlagSkipAnalysis(bSkipAnalysis);

  cout<<"=========> AddTaskCMW::Info() setting Event Plane Det: "<<sDetForEP<<endl;
  taskGammaPID->SetDetectorforEventPlane(sDetForEP);

  if(sCentEstimator=="V0" || sCentEstimator=="V0M"){
    taskGammaPID->SetCentralityEstimator("V0M");
  }
  else{
    taskGammaPID->SetCentralityEstimator(sCentEstimator); // use the Estimator provided in AddTask.
  }

  //Set Track cuts:
  taskGammaPID->SetPtRangeMin(fPtMin);
  taskGammaPID->SetPtRangeMax(fPtMax);
  taskGammaPID->SetEtaRangeMin(fEtaMin);
  taskGammaPID->SetEtaRangeMax(fEtaMax);
  taskGammaPID->SetTrackCutChi2Min(0.1);   // hard-coded lower chi2 cut
  taskGammaPID->SetTrackCutdEdxMin(10.0);  // hard-coded minimum TPC dE/dx
  taskGammaPID->SetFilterBit(gFilterBit);
  taskGammaPID->SetNSigmaCutTPC(nSigTPC); /// For PID only.Does not apply to Inclusive Charged Tracks
  taskGammaPID->SetNSigmaCutTOF(nSigTOF);
  taskGammaPID->SetParticlePID(fparticle);
  taskGammaPID->SetTrackCutChi2Max(fChi2max);
  taskGammaPID->SetFlagUseKinkTracks(kFALSE);
  taskGammaPID->SetCumulantHarmonic(vnHarmonic);
  taskGammaPID->SetTrackCutNclusterMin(gNclustTPC);

  // Lambda analysis is disabled here; a separate AddTask handles Lambda-X.
  Bool_t bFillLambda=kFALSE;
  taskGammaPID->SetFlagAnalyseLambda(bFillLambda);
  /// -----> Separate AddTask Added For Lambda-X correlation
  /// AddTaskGammaDeltaPID.C

  //========================= Setup Correction Files ======================>
  // MC tracking-efficiency correction list (optional):
  TFile *fMCFile = TFile::Open(sMCfilePath,"READ");
  TList *fListMC=NULL;

  if(fMCFile) {
    fListMC = dynamic_cast <TList*> (fMCFile->FindObjectAny("fMcEffiHij"));

    if(fListMC) {
      taskGammaPID->SetListForTrkCorr(fListMC);
    }
    else{
      printf("\n\n *** AddTask::WARNING \n => MC file Exist, But TList Not Found!!! \n AddTask::Info() ===> NO MC Correction!! \n\n");
    }
  }
  else{
    printf("\n\n *** AddTask::WARNING \n => no MC file!!! \n AddTask::Info() ===> NO MC Correction!! \n\n");
  }

  //--------------------------------------------------------------------------
  // NUA (non-uniform acceptance) correction list (optional):
  std::cout<<" NUA file Path "<<sNUAFilePath.Data()<<std::endl;
  TFile* fNUAFile = TFile::Open(sNUAFilePath,"READ");
  TList* fListNUA=NULL;

  //if(fNUAFile->IsOpen()) {
  if(fNUAFile){
    fListNUA = dynamic_cast <TList*> (fNUAFile->FindObjectAny("fNUA_ChPosChNeg"));
    std::cout<<" \n ==============> TList found for NUA, here is all the histograms : "<<std::endl;
    //fListNUA->ls();

    if(fListNUA) {
      taskGammaPID->SetListForNUACorr(fListNUA);
    }
    else{
      printf("\n\n *** AddTask::WARNING => NUA file Exist,But TList Not Found!!\n AddTask::Info() ===> NO NUA Correction!! \n\n");
    }
  }
  else{
    printf("\n\n *** AddTask::WARNING => NUA file not Found or Wrong path Set in AddTask Macro!! \n\n");
  }

  //-----------------------------------------------------------------------------
  // V0/ZDC gain-equalisation weights (optional):
  TFile* fV0ZDCWgtsFile = TFile::Open(sDetWgtsFile,"READ");
  TList* fListDetWgts=NULL;

  if(fV0ZDCWgtsFile) {
    fListDetWgts = dynamic_cast <TList*> (fV0ZDCWgtsFile->FindObjectAny("fWgtsV0ZDC"));
    std::cout<<" \n ==============> TList found for V0/ZDC wgts.. GOOD! ";
    // fListDetWgts->ls();

    if(fListDetWgts) {
      taskGammaPID->SetListForV0MCorr(fListDetWgts);
    }
    else{
      printf("\n\n *** AddTask::WARNING => V0/ZDC Weights file Exist, But TList Not Found!!");
      printf("\n May be wrong TList name? No Correction for V0/ZDC !! \n\n");
    }
  }
  else{
    printf("\n\n *** AddTask::WARNING => NO File Found for V0/ZDC Wgts!!\n AddTask::Info() ===> No V0/ZDC Correction!! \n\n");
  }

  //=================================================================================
  ///---> Now Pass data and containers to Analysis Object ----
  mgr->AddTask(taskGammaPID); // connect the task to the analysis manager
  mgr->ConnectInput(taskGammaPID, 0, cinput); // give AOD event to my Task..!!

  AliAnalysisDataContainer *cOutPut1;
  TString sMyOutName;
  sMyOutName.Form("SimpleTask_%s",suffix);
  cOutPut1 = (AliAnalysisDataContainer *) mgr->CreateContainer(sMyOutName,TList::Class(),AliAnalysisManager::kOutputContainer,list1OutName.Data());
  mgr->ConnectOutput(taskGammaPID, 1, cOutPut1);

  printf("\n\n ================> AddTask was Configured properly... <==================\n\n");

  //return taskGammaPID;
}//Task Ends
| nschmidtALICE/AliPhysics | PWGCF/FLOW/macros/AddTaskGammaDeltaPID.C | C++ | bsd-3-clause | 7,275 |
/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreedto in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package mysql
import (
"strings"
"testing"
)
func TestParseGTID(t *testing.T) {
flavor := "fake flavor"
gtidParsers[flavor] = func(s string) (GTID, error) {
return fakeGTID{value: s}, nil
}
input := "12345"
want := fakeGTID{value: "12345"}
got, err := ParseGTID(flavor, input)
if err != nil {
t.Errorf("unexpected error: %v", err)
}
if got != want {
t.Errorf("ParseGTID(%#v, %#v) = %#v, want %#v", flavor, input, got, want)
}
}
func TestMustParseGTID(t *testing.T) {
flavor := "fake flavor"
gtidParsers[flavor] = func(s string) (GTID, error) {
return fakeGTID{value: s}, nil
}
input := "12345"
want := fakeGTID{value: "12345"}
got := MustParseGTID(flavor, input)
if got != want {
t.Errorf("MustParseGTID(%#v, %#v) = %#v, want %#v", flavor, input, got, want)
}
}
func TestMustParseGTIDError(t *testing.T) {
defer func() {
want := `parse error: unknown GTID flavor "unknown flavor !@$!@"`
err := recover()
if err == nil {
t.Errorf("wrong error, got %#v, want %#v", err, want)
}
got, ok := err.(error)
if !ok || !strings.HasPrefix(got.Error(), want) {
t.Errorf("wrong error, got %#v, want %#v", got, want)
}
}()
MustParseGTID("unknown flavor !@$!@", "yowzah")
}
// TestParseUnknownFlavor verifies the error returned for a flavor that has
// no registered parser.
func TestParseUnknownFlavor(t *testing.T) {
	const wantPrefix = `parse error: unknown GTID flavor "foobar8675309"`
	_, err := ParseGTID("foobar8675309", "foo")
	if !strings.HasPrefix(err.Error(), wantPrefix) {
		t.Errorf("wrong error, got '%v', want '%v'", err, wantPrefix)
	}
}
// TestEncodeGTID checks that EncodeGTID renders a GTID as "<flavor>/<value>".
func TestEncodeGTID(t *testing.T) {
	gtid := fakeGTID{
		flavor: "myflav",
		value:  "1:2:3-4-5-6",
	}
	const expected = "myflav/1:2:3-4-5-6"
	encoded := EncodeGTID(gtid)
	if encoded != expected {
		t.Errorf("EncodeGTID(%#v) = %#v, want %#v", gtid, encoded, expected)
	}
}
func TestDecodeGTID(t *testing.T) {
gtidParsers["flavorflav"] = func(s string) (GTID, error) {
return fakeGTID{value: s}, nil
}
input := "flavorflav/123-456:789"
want := fakeGTID{value: "123-456:789"}
got, err := DecodeGTID(input)
if err != nil {
t.Errorf("unexpected error: %v", err)
}
if got != want {
t.Errorf("DecodeGTID(%#v) = %#v, want %#v", input, got, want)
}
}
// TestMustDecodeGTID checks the non-panicking path of MustDecodeGTID when a
// parser is registered for the encoded flavor.
func TestMustDecodeGTID(t *testing.T) {
	gtidParsers["flavorflav"] = func(s string) (GTID, error) {
		return fakeGTID{value: s}, nil
	}
	input := "flavorflav/123-456:789"
	want := fakeGTID{value: "123-456:789"}

	got := MustDecodeGTID(input)
	if got != want {
		// Report the function actually under test (was mislabeled "DecodeGTID").
		t.Errorf("MustDecodeGTID(%#v) = %#v, want %#v", input, got, want)
	}
}
func TestMustDecodeGTIDError(t *testing.T) {
defer func() {
want := `parse error: unknown GTID flavor "unknown flavor !@$!@"`
err := recover()
if err == nil {
t.Errorf("wrong error, got %#v, want %#v", err, want)
}
got, ok := err.(error)
if !ok || !strings.HasPrefix(got.Error(), want) {
t.Errorf("wrong error, got %#v, want %#v", got, want)
}
}()
MustDecodeGTID("unknown flavor !@$!@/yowzah")
}
// TestEncodeNilGTID ensures that a nil GTID encodes to the empty string.
func TestEncodeNilGTID(t *testing.T) {
	var gtid GTID // nil interface value
	encoded := EncodeGTID(gtid)
	if encoded != "" {
		t.Errorf("EncodeGTID(%#v) = %#v, want %#v", gtid, encoded, "")
	}
}
func TestDecodeNilGTID(t *testing.T) {
input := ""
want := GTID(nil)
got, err := DecodeGTID(input)
if err != nil {
t.Errorf("unexpected error: %v", err)
}
if got != want {
t.Errorf("DecodeGTID(%#v) = %#v, want %#v", input, got, want)
}
}
func TestDecodeNoFlavor(t *testing.T) {
gtidParsers[""] = func(s string) (GTID, error) {
return fakeGTID{value: s}, nil
}
input := "12345"
want := fakeGTID{value: "12345"}
got, err := DecodeGTID(input)
if err != nil {
t.Errorf("unexpected error: %v", err)
}
if got != want {
t.Errorf("DecodeGTID(%#v) = %#v, want %#v", input, got, want)
}
}
func TestDecodeGTIDWithSeparator(t *testing.T) {
gtidParsers["moobar"] = func(s string) (GTID, error) {
return fakeGTID{value: s}, nil
}
input := "moobar/GTID containing / a slash"
want := fakeGTID{value: "GTID containing / a slash"}
got, err := DecodeGTID(input)
if err != nil {
t.Errorf("unexpected error: %v", err)
}
if got != want {
t.Errorf("DecodeGTID(%#v) = %#v, want %#v", input, got, want)
}
}
// fakeGTID is a minimal GTID/GTIDSet stand-in used by the tests in this
// file; it just records a flavor and an opaque value string.
type fakeGTID struct {
	flavor, value string
}

func (f fakeGTID) String() string { return f.value }
func (f fakeGTID) Flavor() string { return f.flavor }

// The accessors below return fixed dummy values; the tests never inspect them.
func (fakeGTID) SourceServer() interface{} { return int(1) }
func (fakeGTID) SequenceNumber() interface{} { return int(1) }
func (fakeGTID) SequenceDomain() interface{} { return int(1) }
func (f fakeGTID) GTIDSet() GTIDSet { return nil }
func (fakeGTID) ContainsGTID(GTID) bool { return false }
func (fakeGTID) Contains(GTIDSet) bool { return false }

// Equal compares field-by-field against another fakeGTID; any other
// GTIDSet implementation compares unequal.
func (f fakeGTID) Equal(other GTIDSet) bool {
	otherFake, ok := other.(fakeGTID)
	if !ok {
		return false
	}
	return f == otherFake
}
func (fakeGTID) AddGTID(GTID) GTIDSet { return nil }
| NazarethCollege/heweb2017-devops-presentation | sites/tweetheat/src/backend/vendor/src/github.com/youtube/vitess/go/mysql/gtid_test.go | GO | mit | 5,366 |
define({
  // Default (root) strings for the Demo widget, served when no
  // locale-specific bundle is flagged as available below.
  // NOTE(review): the root strings are German — confirm whether English
  // defaults were intended for the root bundle.
  root: ({
    _widgetLabel: "Demo",       // label shown in the widget selector
    label1: "Ich bin ein Demo-Widget.",
    label2: "Dies ist konfigurierbar."
  }),
  // Locale availability flags: 1 = a translated bundle exists for the
  // locale, 0 = fall back to the root strings. Only zh-cn is enabled here.
  "ar": 0,
  "cs": 0,
  "da": 0,
  "de": 0,
  "el": 0,
  "es": 0,
  "et": 0,
  "fi": 0,
  "fr": 0,
  "he": 0,
  "it": 0,
  "ja": 0,
  "ko": 0,
  "lt": 0,
  "lv": 0,
  "nb": 0,
  "nl": 0,
  "pl": 0,
  "pt-br": 0,
  "pt-pt": 0,
  "ro": 0,
  "ru": 0,
  "sv": 0,
  "th": 0,
  "tr": 0,
  "vi": 0,
  "zh-cn": 1
});
| cmccullough2/cmv-wab-widgets | wab/2.3/widgets/samplewidgets/Demo/nls/de/strings.js | JavaScript | mit | 443 |
//
// Device.cs
//
// Author:
// Alex Launi <alex.launi@gmail.com>
//
// Copyright (c) 2010 Alex Launi
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#if ENABLE_GIO_HARDWARE
using System;
using System.Collections.Generic;
using System.Linq;
using GLib;
using Banshee.Hardware;
namespace Banshee.Hardware.Gio
{
/// <summary>
/// A Device is a wrapper around the two metadata source, udev and gio. Banshee needs information
/// from both sources, so this Device class is meant to provide a level of abstraction.
/// </summary>
abstract class RawDevice : IEquatable<RawDevice>, IComparable<RawDevice>, IRawDevice
{
const string UdevDevicePath = "DEVNAME";
RawDevice IRawDevice.Device {
get { return this; }
}
public string DeviceNode {
get { return UdevMetadata.GetPropertyString (UdevDevicePath); }
}
internal GioMetadataSource GioMetadata {
get; private set;
}
internal UdevMetadataSource UdevMetadata {
get; private set;
}
public abstract string Identifier {
get;
}
public abstract string IdMediaPlayer {
get;
}
public abstract bool IsRemovable {
get;
}
public IDeviceMediaCapabilities MediaCapabilities {
get; private set;
}
public abstract string Name {
get;
}
public Manager Manager {
get; private set;
}
public abstract string Model {
get;
}
public abstract string Product {
get;
}
public abstract string Serial {
get;
}
public abstract string Subsystem {
get;
}
public abstract string Uuid {
get;
}
public abstract string Vendor {
get;
}
protected RawDevice (Manager manager, GioMetadataSource gioMetadata, UdevMetadataSource udevMetadata)
{
Manager = manager;
GioMetadata = gioMetadata;
UdevMetadata = udevMetadata;
if (!string.IsNullOrEmpty (IdMediaPlayer))
MediaCapabilities = new DeviceMediaCapabilities (IdMediaPlayer);
}
public bool Equals (RawDevice other)
{
return Identifier == other.Identifier;
}
public int CompareTo (RawDevice other)
{
return string.Compare (Identifier, other.Identifier);
}
public override int GetHashCode ()
{
return Identifier.GetHashCode ();
}
public abstract string GetPropertyString (string key);
public abstract double GetPropertyDouble (string key);
public abstract bool GetPropertyBoolean (string key);
public abstract int GetPropertyInteger (string key);
public abstract ulong GetPropertyUInt64 (string key);
public abstract string[] GetPropertyStringList (string key);
public abstract bool PropertyExists (string key);
}
}
#endif
| mono-soc-2011/banshee | src/Backends/Banshee.Gio/Banshee.Hardware.Gio/LowLevel/RawDevice.cs | C# | mit | 4,171 |
#if NETFX_CORE && !UNITY_EDITOR
//using Thread = MarkerMetro.Unity.WinLegacy.Threading.Thread;
//using ParameterizedThreadStart = MarkerMetro.Unity.WinLegacy.Threading.ParameterizedThreadStart;
#endif
using UnityEngine;
using System.Collections;
using System.Threading;
// NOTE(review): this namespace is intentionally left empty — presumably kept
// so existing 'using Pathfinding.Threading;' directives still compile.
// Confirm before removing the file.
namespace Pathfinding.Threading {
}
package org.knowm.xchange.lakebtc.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
/** User: cristian.lucaci Date: 10/3/2014 Time: 5:31 PM */
public class LakeBTCResponse<V> {

  private final String id;
  private final V result;

  /**
   * Builds an immutable JSON-RPC response wrapper.
   *
   * @param id the request/response correlation id
   * @param result the typed payload returned by the server
   */
  public LakeBTCResponse(@JsonProperty("id") String id, @JsonProperty("result") V result) {
    this.id = id;
    this.result = result;
  }

  /** @return the typed payload returned by the server */
  public V getResult() {
    return result;
  }

  /** @return the request/response correlation id */
  public String getId() {
    return id;
  }

  @Override
  public String toString() {
    return "LakeBTCResponse{id=" + id + ", result=" + result + "}";
  }
}
| stachon/XChange | xchange-lakebtc/src/main/java/org/knowm/xchange/lakebtc/dto/LakeBTCResponse.java | Java | mit | 669 |
package org.knowm.xchange.bitcoinaverage.dto.marketdata;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
/** Data object representing a list of tickers from BitcoinAverage. */
public final class BitcoinAverageTickers {

  // Per-currency tickers, populated by Jackson via setTickers().
  private Map<String, BitcoinAverageTicker> tickers = new HashMap<>();
  private Date timestamp;

  /**
   * Constructor.
   *
   * @param timestamp RFC-1123 style timestamp, e.g. "Mon, 01 Jan 2018 00:00:00 +0000"
   */
  public BitcoinAverageTickers(@JsonProperty("timestamp") String timestamp) {
    try {
      // The feed uses English day/month abbreviations, so parse with a fixed
      // English locale; Locale.getDefault() silently failed on non-English JVMs.
      this.timestamp =
          new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss Z", Locale.US).parse(timestamp);
    } catch (Exception e) {
      // Keep the original lenient behaviour: an unparsable (or null) timestamp
      // yields a null Date instead of failing deserialization.
      this.timestamp = null;
    }
  }

  /** Called by Jackson for every unrecognized field, i.e. each per-currency ticker. */
  @JsonAnySetter
  public void setTickers(String name, BitcoinAverageTicker ticker) {
    this.tickers.put(name, ticker);
  }

  /** @return map of currency code to ticker */
  public Map<String, BitcoinAverageTicker> getTickers() {
    return tickers;
  }

  /** @return parsed server timestamp, or null if it could not be parsed */
  public Date getTimestamp() {
    return timestamp;
  }

  @Override
  public String toString() {
    // Class name fixed (was "BitcoinAverageTicker", the element type).
    return "BitcoinAverageTickers [tickers=" + tickers + ", timestamp=" + timestamp + "]";
  }
}
| stachon/XChange | xchange-bitcoinaverage/src/main/java/org/knowm/xchange/bitcoinaverage/dto/marketdata/BitcoinAverageTickers.java | Java | mit | 1,315 |
import { SQLValue } from "./prepSQLParams";
/** Pre-rendered fragments for building a SQL INSERT statement. */
export interface SQLInsertParams {
    /** Rendered column-list fragment of the INSERT. */
    insertColumns: string;
    /** Rendered values/placeholder-list fragment of the INSERT. */
    insertValues: string;
    /** Values to bind, keyed by field name. */
    insertFields: Record<string, SQLValue>;
}

/**
 * Builds the column list, value placeholders and bound-field map for an
 * INSERT from `params`, restricted to the given `columns`.
 *
 * NOTE(review): exact fragment formats are determined by the implementation
 * in lesgo, which is not visible here — confirm before relying on them.
 */
declare function prepSQLInsertParams<T extends Record<string, SQLValue>>(
    params: T,
    columns: Array<{ key: keyof T }>,
): SQLInsertParams;

export default prepSQLInsertParams;
package parser
import (
"regexp"
"strings"
)
var selfClosingTags = [...]string{
"meta",
"img",
"link",
"input",
"source",
"area",
"base",
"col",
"br",
"hr",
}
var doctypes = map[string]string{
"5": `<!DOCTYPE html>`,
"default": `<!DOCTYPE html>`,
"xml": `<?xml version="1.0" encoding="utf-8" ?>`,
"transitional": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">`,
"strict": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">`,
"frameset": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">`,
"1.1": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">`,
"basic": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML Basic 1.1//EN" "http://www.w3.org/TR/xhtml-basic/xhtml-basic11.dtd">`,
"mobile": `<!DOCTYPE html PUBLIC "-//WAPFORUM//DTD XHTML Mobile 1.2//EN" "http://www.openmobilealliance.org/tech/DTD/xhtml-mobile12.dtd">`,
}
type Node interface {
Pos() SourcePosition
}
type SourcePosition struct {
LineNum int
ColNum int
TokenLength int
Filename string
}
func (s *SourcePosition) Pos() SourcePosition {
return *s
}
type Doctype struct {
SourcePosition
Value string
}
func newDoctype(value string) *Doctype {
dt := new(Doctype)
dt.Value = value
return dt
}
// String renders the doctype declaration, expanding known shorthand names
// (e.g. "5", "strict") to their canonical form and falling back to a
// generic <!DOCTYPE ...> for everything else.
func (d *Doctype) String() string {
	known, ok := doctypes[d.Value]
	if ok && known != "" {
		return known
	}
	return `<!DOCTYPE ` + d.Value + `>`
}
type Comment struct {
SourcePosition
Value string
Block *Block
Silent bool
}
func newComment(value string) *Comment {
dt := new(Comment)
dt.Value = value
dt.Block = nil
dt.Silent = false
return dt
}
type Text struct {
SourcePosition
Value string
Raw bool
}
func newText(value string, raw bool) *Text {
dt := new(Text)
dt.Value = value
dt.Raw = raw
return dt
}
type Block struct {
SourcePosition
Children []Node
}
func newBlock() *Block {
block := new(Block)
block.Children = make([]Node, 0)
return block
}
func (b *Block) push(node Node) {
b.Children = append(b.Children, node)
}
func (b *Block) pushFront(node Node) {
b.Children = append([]Node{node}, b.Children...)
}
// CanInline reports whether the block may be rendered on a single line:
// true when every child is plain (non-raw) text. An empty block is
// trivially inlinable.
func (b *Block) CanInline() bool {
	for _, node := range b.Children {
		txt, isText := node.(*Text)
		if !isText || txt.Raw {
			return false
		}
	}
	return true
}
const (
NamedBlockDefault = iota
NamedBlockAppend
NamedBlockPrepend
)
type NamedBlock struct {
Block
Name string
Modifier int
}
func newNamedBlock(name string) *NamedBlock {
bb := new(NamedBlock)
bb.Name = name
bb.Block.Children = make([]Node, 0)
bb.Modifier = NamedBlockDefault
return bb
}
type Attribute struct {
SourcePosition
Name string
Value string
IsRaw bool
Condition string
}
type Tag struct {
SourcePosition
Block *Block
Name string
IsInterpolated bool
Attributes []Attribute
}
func newTag(name string) *Tag {
tag := new(Tag)
tag.Block = nil
tag.Name = name
tag.Attributes = make([]Attribute, 0)
tag.IsInterpolated = false
return tag
}
// IsSelfClosing reports whether the tag is one of the HTML void elements
// (br, img, input, ...) that must be emitted without a closing tag.
func (t *Tag) IsSelfClosing() bool {
	for i := 0; i < len(selfClosingTags); i++ {
		if selfClosingTags[i] == t.Name {
			return true
		}
	}
	return false
}
func (t *Tag) IsRawText() bool {
return t.Name == "style" || t.Name == "script"
}
type Condition struct {
SourcePosition
Positive *Block
Negative *Block
Expression string
}
func newCondition(exp string) *Condition {
cond := new(Condition)
cond.Expression = exp
return cond
}
type Each struct {
SourcePosition
X string
Y string
Expression string
Block *Block
}
func newEach(exp string) *Each {
each := new(Each)
each.Expression = exp
return each
}
type Assignment struct {
SourcePosition
X string
Expression string
}
func newAssignment(x, expression string) *Assignment {
assgn := new(Assignment)
assgn.X = x
assgn.Expression = expression
return assgn
}
type Mixin struct {
SourcePosition
Block *Block
Name string
Args []string
}
// newMixin creates a mixin definition node. The raw argument string is
// split on ", ", each piece is whitespace-trimmed, and empty entries are
// discarded.
func newMixin(name, args string) *Mixin {
	m := new(Mixin)
	m.Name = name

	splitter := regexp.MustCompile(`,\s`)
	kept := make([]string, 0)
	for _, raw := range splitter.Split(args, -1) {
		trimmed := strings.TrimSpace(raw)
		if trimmed != "" {
			kept = append(kept, trimmed)
		}
	}
	m.Args = kept
	return m
}
// MixinCall is an invocation of a previously defined mixin, carrying the
// caller-supplied argument expressions.
type MixinCall struct {
	SourcePosition
	Name string
	Args []string
}

// newMixinCall parses the raw argument string of a mixin invocation.
// Double-quoted arguments may themselves contain the ", " delimiter, so
// every quoted substring is first replaced by a "%s" placeholder, the
// string is split on ", ", and the placeholders are then swapped back for
// the original quoted text, in order.
func newMixinCall(name, args string) *MixinCall {
	mixinCall := new(MixinCall)
	mixinCall.Name = name
	if args != "" {
		const t = "%s"
		quoteExp := regexp.MustCompile(`"(.*?)"`)
		delExp := regexp.MustCompile(`,\s`)

		quotes := quoteExp.FindAllString(args, -1)
		replaced := quoteExp.ReplaceAllString(args, t)
		mixinCall.Args = delExp.Split(replaced, -1)

		// Restore quoted arguments; only args that are exactly the
		// placeholder are swapped back.
		qi := 0
		for i, arg := range mixinCall.Args {
			if arg == t {
				mixinCall.Args[i] = quotes[qi]
				qi++
			}
		}
	}
	return mixinCall
}
| grvcoelho/webhulk | vendor/gopkg.in/kataras/iris.v8/vendor/github.com/eknkc/amber/parser/nodes.go | GO | mit | 5,199 |
// { dg-do assemble }
// Copyright (C) 1999, 2000, 2002, 2003 Free Software Foundation, Inc.
// Contributed by Nathan Sidwell 22 Apr 1999 <nathan@acm.org>
// derived from a bug report by <rch@larissa.sd.bi.ruhr-uni-bochum.de>
// http://gcc.gnu.org/ml/gcc-bugs/1999-04n/msg00631.html
// the code is wrong, but we fell over badly
struct A {
int A::fn(); // { dg-error "7:extra qualification" }
int A::m; // { dg-error "7:extra qualification" }
struct e;
struct A::e {int i;}; // { dg-error "10:extra qualification" "qual" }
struct A::expand { // { dg-error "qualified name" }
int m;
};
struct Z;
expand me; // { dg-error "'expand' does not name a type" }
void foo(struct A::e);
void foo(struct A::z); // { dg-error "incomplete" }
};
struct Q;
struct B {
struct A::fink { // { dg-error "does not name a class before" }
int m;
};
struct A::Z { // { dg-error "does not enclose" } A::Z not a member of B
int m;
};
int m;
int n;
struct ::Q { // { dg-error "global qual" } ::Q not a member of B
int m;
};
int A::fn() { // { dg-error "7:cannot define member" } A::fn not a member of B
return 0;
}
void fn(struct ::Q &);
void foo(struct A::y); // { dg-error "does not name a type" } no such member
};
struct ::C { // { dg-error "invalid before" } extra qualification
int i;
};
namespace N {
int fn();
struct F;
}
namespace NMS
{
void NMS::fn(); // { dg-error "should have been" }
int NMS::i; // { dg-error "should have been" }
struct NMS::D { // { dg-error "does not name a class" }
int i;
};
struct N::E { // { dg-error "does not name a class" } no such type
int i;
};
struct ::F { // { dg-error "global qual" } no such type
int i;
};
int N::fn() { // { dg-error "namespace" } N::fn not a member of NMS
return 0;
}
struct N::F { // { dg-error "namespace" } N::F not a member of NMS
int i;
};
}
NMS::D thing; // { dg-error "'D' in namespace 'NMS' does not name a type" }
void NMS::fn()
{
i = 3;
}
// From PR c++/15766 - bad parse error recovery (2 bugs)
void confusion1(const UndefinedType& a) // { dg-error "does not name a type" }
{
}
| Gurgel100/gcc | gcc/testsuite/g++.old-deja/g++.other/decl5.C | C++ | gpl-2.0 | 2,264 |
/**
*
*/
package org.eevolution.model;
import java.sql.ResultSet;
import java.util.Properties;
import org.compiere.util.CCache;
/**
 * HR payroll period model. Wraps the generated X_HR_Period persistence
 * object and adds a small id-keyed cache for repeated lookups.
 *
 * @author Teo Sarca, www.arhipac.ro
 */
public class MHRPeriod extends X_HR_Period
{
	/** Serialization id for the persistence-object hierarchy. */
	private static final long serialVersionUID = -7787966459848200539L;

	/** Cache of periods keyed by HR_Period_ID (capacity 20). */
	private static CCache<Integer, MHRPeriod> s_cache = new CCache<Integer, MHRPeriod>(Table_Name, 20);

	/**
	 * Get an HR period by id, serving repeated requests from the cache.
	 *
	 * @param ctx context
	 * @param HR_Period_ID period id; values &lt;= 0 yield null
	 * @return the period, or null if no such record exists
	 */
	public static MHRPeriod get(Properties ctx, int HR_Period_ID)
	{
		if (HR_Period_ID <= 0)
		{
			return null;
		}
		//
		MHRPeriod period = s_cache.get(HR_Period_ID);
		if (period != null)
		{
			return period;
		}
		// Try Load
		// Loads outside any transaction (trxName == null); a missing record
		// leaves get_ID() != HR_Period_ID, which is treated as "not found".
		period = new MHRPeriod(ctx, HR_Period_ID, null);
		if (period.get_ID() == HR_Period_ID)
		{
			s_cache.put(HR_Period_ID, period);
		}
		else
		{
			period = null;
		}
		return period;
	}

	/**
	 * Standard constructor.
	 * @param ctx context
	 * @param HR_Period_ID id, or 0 to create a new record
	 * @param trxName transaction name, may be null
	 */
	public MHRPeriod(Properties ctx, int HR_Period_ID, String trxName)
	{
		super(ctx, HR_Period_ID, trxName);
	}

	/**
	 * Load constructor.
	 * @param ctx context
	 * @param rs current row of a result set
	 * @param trxName transaction name, may be null
	 */
	public MHRPeriod(Properties ctx, ResultSet rs, String trxName)
	{
		super(ctx, rs, trxName);
	}
}
| erpcya/adempierePOS | org.eevolution.hr_and_payroll/src/main/java/base/org/eevolution/model/MHRPeriod.java | Java | gpl-2.0 | 1,057 |
<?php
namespace GuzzleHttp;
use GuzzleHttp\Event\CompleteEvent;
use GuzzleHttp\Event\ErrorEvent;
use GuzzleHttp\Event\RequestEvents;
use GuzzleHttp\Message\ResponseInterface;
use GuzzleHttp\UriTemplate;
/**
* Send a custom request
*
* @param string $method HTTP request method
* @param string $url URL of the request
* @param array $options Options to use with the request.
*
* @return ResponseInterface
*/
function request($method, $url, array $options = [])
{
    // Lazily create one shared client reused by all helper functions in this
    // file; it lives for the remainder of the process.
    static $client;
    if (!$client) {
        $client = new Client();
    }

    return $client->send($client->createRequest($method, $url, $options));
}
/**
* Send a GET request
*
* @param string $url URL of the request
* @param array $options Array of request options
*
* @return ResponseInterface
*/
function get($url, array $options = [])
{
return request('GET', $url, $options);
}
/**
* Send a HEAD request
*
* @param string $url URL of the request
* @param array $options Array of request options
*
* @return ResponseInterface
*/
function head($url, array $options = [])
{
return request('HEAD', $url, $options);
}
/**
* Send a DELETE request
*
* @param string $url URL of the request
* @param array $options Array of request options
*
* @return ResponseInterface
*/
function delete($url, array $options = [])
{
return request('DELETE', $url, $options);
}
/**
* Send a POST request
*
* @param string $url URL of the request
* @param array $options Array of request options
*
* @return ResponseInterface
*/
function post($url, array $options = [])
{
return request('POST', $url, $options);
}
/**
* Send a PUT request
*
* @param string $url URL of the request
* @param array $options Array of request options
*
* @return ResponseInterface
*/
function put($url, array $options = [])
{
return request('PUT', $url, $options);
}
/**
* Send a PATCH request
*
* @param string $url URL of the request
* @param array $options Array of request options
*
* @return ResponseInterface
*/
function patch($url, array $options = [])
{
return request('PATCH', $url, $options);
}
/**
* Send an OPTIONS request
*
* @param string $url URL of the request
* @param array $options Array of request options
*
* @return ResponseInterface
*/
function options($url, array $options = [])
{
return request('OPTIONS', $url, $options);
}
/**
* Convenience method for sending multiple requests in parallel and retrieving
* a hash map of requests to response objects or RequestException objects.
*
* Note: This method keeps every request and response in memory, and as such is
* NOT recommended when sending a large number or an indeterminable number of
* requests in parallel.
*
* @param ClientInterface $client Client used to send the requests
* @param array|\Iterator $requests Requests to send in parallel
* @param array $options Passes through the options available in
* {@see GuzzleHttp\ClientInterface::sendAll()}
* @return \SplObjectStorage Requests are the key and each value is a
* {@see GuzzleHttp\Message\ResponseInterface} if the request succeeded or
* a {@see GuzzleHttp\Exception\RequestException} if it failed.
* @throws \InvalidArgumentException if the event format is incorrect.
*/
function batch(ClientInterface $client, $requests, array $options = [])
{
    // Track every request object; it doubles as the key of the result map.
    $hash = new \SplObjectStorage();
    foreach ($requests as $request) {
        $hash->attach($request);
    }

    // Listener that stores the terminal event (complete or error) under the
    // request that produced it. EARLY priority and 'once' ensure it records
    // each request at most one time without displacing user listeners.
    $handler = [
        'priority' => RequestEvents::EARLY,
        'once' => true,
        'fn' => function ($e) use ($hash) { $hash[$e->getRequest()] = $e; }
    ];

    // Merge the necessary complete and error events to the event listeners so
    // that as each request succeeds or fails, it is added to the result hash.
    foreach (['complete', 'error'] as $name) {
        if (!isset($options[$name])) {
            $options[$name] = $handler;
        } elseif (is_callable($options[$name])) {
            // A bare callable becomes a list: user listener first, then ours.
            $options[$name] = [['fn' => $options[$name]], $handler];
        } elseif (is_array($options[$name])) {
            $options[$name][] = $handler;
        } else {
            throw new \InvalidArgumentException('Invalid event format');
        }
    }

    // Send the requests in parallel and aggregate the results.
    $client->sendAll($requests, $options);

    // Update the received value for any of the intercepted requests:
    // unwrap each stored event into its response or exception.
    foreach ($hash as $request) {
        if ($hash[$request] instanceof CompleteEvent) {
            $hash[$request] = $hash[$request]->getResponse();
        } elseif ($hash[$request] instanceof ErrorEvent) {
            $hash[$request] = $hash[$request]->getException();
        }
    }

    return $hash;
}
/**
* Gets a value from an array using a path syntax to retrieve nested data.
*
* This method does not allow for keys that contain "/". You must traverse
* the array manually or using something more advanced like JMESPath to
* work with keys that contain "/".
*
* // Get the bar key of a set of nested arrays.
* // This is equivalent to $collection['foo']['baz']['bar'] but won't
* // throw warnings for missing keys.
* GuzzleHttp\get_path($data, 'foo/baz/bar');
*
* @param array $data Data to retrieve values from
* @param string $path Path to traverse and retrieve a value from
*
* @return mixed|null
*/
function get_path($data, $path)
{
    // Descend one path segment at a time; bail out with null as soon as the
    // current level is not an array or lacks the requested key.
    $segments = explode('/', $path);
    foreach ($segments as $segment) {
        if (!is_array($data) || !isset($data[$segment])) {
            return null;
        }
        $data = $data[$segment];
    }

    return $data;
}
/**
* Set a value in a nested array key. Keys will be created as needed to set the
* value.
*
* This function does not support keys that contain "/" or "[]" characters
* because these are special tokens used when traversing the data structure.
* A value may be prepended to an existing array by using "[]" as the final
* key of a path.
*
* GuzzleHttp\get_path($data, 'foo/baz'); // null
* GuzzleHttp\set_path($data, 'foo/baz/[]', 'a');
* GuzzleHttp\set_path($data, 'foo/baz/[]', 'b');
* GuzzleHttp\get_path($data, 'foo/baz');
* // Returns ['a', 'b']
*
* @param array $data Data to modify by reference
* @param string $path Path to set
* @param mixed $value Value to set at the key
* @throws \RuntimeException when trying to setPath using a nested path that
* travels through a scalar value.
*/
function set_path(&$data, $path, $value)
{
    // Walk (and create) nested arrays by reference until the final segment.
    $current =& $data;
    $queue = explode('/', $path);
    while (null !== ($key = array_shift($queue))) {
        if (!is_array($current)) {
            // A scalar blocks the path: cannot descend further.
            throw new \RuntimeException("Trying to setPath {$path}, but "
                . "{$key} is set and is not an array");
        } elseif (!$queue) {
            // Last segment: "[]" appends, anything else assigns the key.
            if ($key == '[]') {
                $current[] = $value;
            } else {
                $current[$key] = $value;
            }
        } elseif (isset($current[$key])) {
            // Descend into the existing branch by reference.
            $current =& $current[$key];
        } else {
            // Create the missing intermediate array, then descend into it.
            $current[$key] = [];
            $current =& $current[$key];
        }
    }
}
/**
* Expands a URI template
*
* @param string $template URI template
* @param array $variables Template variables
*
* @return string
*/
function uri_template($template, array $variables)
{
if (function_exists('\\uri_template')) {
return \uri_template($template, $variables);
}
static $uriTemplate;
if (!$uriTemplate) {
$uriTemplate = new UriTemplate();
}
return $uriTemplate->expand($template, $variables);
}
/**
* @internal
*/
function deprecation_proxy($object, $name, $arguments, $map)
{
if (!isset($map[$name])) {
throw new \BadMethodCallException('Unknown method, ' . $name);
}
$message = sprintf('%s is deprecated and will be removed in a future '
. 'version. Update your code to use the equivalent %s method '
. 'instead to avoid breaking changes when this shim is removed.',
get_class($object) . '::' . $name . '()',
get_class($object) . '::' . $map[$name] . '()'
);
trigger_error($message, E_USER_DEPRECATED);
return call_user_func_array([$object, $map[$name]], $arguments);
}
| epoch365/rito-api-challenge | vendor/guzzlehttp/guzzle/src/functions.php | PHP | gpl-2.0 | 8,331 |
#
# Copyright 2013 Tim O'Shea
#
# This file is part of PyBOMBS
#
# PyBOMBS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# PyBOMBS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyBOMBS; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from globals import *;
import recipe_loader;
from recipe import recipe
from pybombs_ops import *
| scalable-networks/ext | pybombs/mod_pybombs/__init__.py | Python | gpl-2.0 | 852 |
<?php
/* vim: set expandtab tabstop=4 shiftwidth=4: */
// +----------------------------------------------------------------------+
// | PHP version 4.0 |
// +----------------------------------------------------------------------+
// | Copyright (c) 1997, 1998, 1999, 2000, 2001 The PHP Group |
// +----------------------------------------------------------------------+
// | This source file is subject to version 2.0 of the PHP license, |
// | that is bundled with this package in the file LICENSE, and is |
// | available at through the world-wide-web at |
// | http://www.php.net/license/2_02.txt. |
// | If you did not receive a copy of the PHP license and are unable to |
// | obtain it through the world-wide-web, please send a note to |
// | license@php.net so we can mail you a copy immediately. |
// +----------------------------------------------------------------------+
// | Authors: Adam Daniel <adaniel1@eesus.jnj.com> |
// | Bertrand Mansion <bmansion@mamasam.com> |
// +----------------------------------------------------------------------+
//
// $Id: image.php,v 1.2 2010/12/14 17:35:23 moodlerobot Exp $
require_once("HTML/QuickForm/input.php");
/**
* HTML class for a image type element
*
* @author Adam Daniel <adaniel1@eesus.jnj.com>
* @author Bertrand Mansion <bmansion@mamasam.com>
* @version 1.0
* @since PHP4.04pl1
* @access public
*/
class HTML_QuickForm_image extends HTML_QuickForm_input
{
// {{{ constructor
/**
* Class constructor
*
* @param string $elementName (optional)Element name attribute
* @param string $src (optional)Image source
* @param mixed $attributes (optional)Either a typical HTML attribute string
* or an associative array
* @since 1.0
* @access public
* @return void
*/
function HTML_QuickForm_image($elementName=null, $src='', $attributes=null)
{
HTML_QuickForm_input::HTML_QuickForm_input($elementName, null, $attributes);
$this->setType('image');
$this->setSource($src);
} // end class constructor
// }}}
// {{{ setSource()
/**
* Sets source for image element
*
* @param string $src source for image element
* @since 1.0
* @access public
* @return void
*/
function setSource($src)
{
$this->updateAttributes(array('src' => $src));
} // end func setSource
// }}}
// {{{ setBorder()
/**
* Sets border size for image element
*
* @param string $border border for image element
* @since 1.0
* @access public
* @return void
*/
function setBorder($border)
{
$this->updateAttributes(array('border' => $border));
} // end func setBorder
// }}}
// {{{ setAlign()
/**
* Sets alignment for image element
*
* @param string $align alignment for image element
* @since 1.0
* @access public
* @return void
*/
function setAlign($align)
{
$this->updateAttributes(array('align' => $align));
} // end func setAlign
// }}}
// {{{ freeze()
/**
* Freeze the element so that only its value is returned
*
* @access public
* @return void
*/
function freeze()
{
return false;
} //end func freeze
// }}}
} // end class HTML_QuickForm_image
?>
| jenarroyo/moodle-repo | lib/pear/HTML/QuickForm/image.php | PHP | gpl-3.0 | 3,740 |
#ifndef ___SUT_TRACE_H___
#define ___SUT_TRACE_H___
/******************************************************************************/
/* */
/* X r d S u t T r a c e . h h */
/* */
/* (C) 2005 by the Board of Trustees of the Leland Stanford, Jr., University */
/* Produced by Gerri Ganis for CERN */
/* */
/* This file is part of the XRootD software suite. */
/* */
/* XRootD is free software: you can redistribute it and/or modify it under */
/* the terms of the GNU Lesser General Public License as published by the */
/* Free Software Foundation, either version 3 of the License, or (at your */
/* option) any later version. */
/* */
/* XRootD is distributed in the hope that it will be useful, but WITHOUT */
/* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or */
/* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public */
/* License for more details. */
/* */
/* You should have received a copy of the GNU Lesser General Public License */
/* along with XRootD in a file called COPYING.LESSER (LGPL license) and file */
/* COPYING (GPL license). If not, see <http://www.gnu.org/licenses/>. */
/* */
/* The copyright holder's institutional names and contributor's names may not */
/* be used to endorse or promote products derived from this software without */
/* specific prior written permission of the institution or contributor. */
/******************************************************************************/
#ifndef ___OUC_TRACE_H___
#include "XrdOuc/XrdOucTrace.hh"
#endif
#ifndef ___SUT_AUX_H___
#include "XrdSut/XrdSutAux.hh"
#endif
#ifndef NODEBUG
#include "XrdSys/XrdSysHeaders.hh"
// QTRACE(act): true when tracing is active and the sutTRACE_<act> bit is set
// in the trace mask.
#define QTRACE(act) (sutTrace && (sutTrace->What & sutTRACE_ ## act))
// PRINT(y): stream 'y' to cerr between Beg/End trace markers; requires an
// 'epname' in scope, normally declared via EPNAME().
#define PRINT(y) {if (sutTrace) {sutTrace->Beg(epname); \
cerr <<y; sutTrace->End();}}
// TRACE(act,x): print 'x' only when the 'act' trace class is enabled.
#define TRACE(act,x) if (QTRACE(act)) PRINT(x)
// DEBUG(y): shorthand for TRACE(Debug,y).
#define DEBUG(y) TRACE(Debug,y)
// EPNAME(x): declare the entry-point name used by PRINT as the trace prefix.
#define EPNAME(x) static const char *epname = x;
#else
// NODEBUG build: all tracing macros expand to nothing.
#define QTRACE(x)
#define PRINT(x)
#define TRACE(x,y)
#define DEBUG(x)
#define EPNAME(x)
#endif
//
// For error logging and tracing
extern XrdOucTrace *sutTrace;
#endif
| bbockelm/xrootd_old_git | src/XrdSut/XrdSutTrace.hh | C++ | gpl-3.0 | 2,936 |
#include <AP_HAL/AP_HAL.h>
#include <AP_Common/AP_Common.h>
#include <AP_Math/AP_Math.h>
#include <AP_Notify/AP_Notify.h>
#include "AP_BattMonitor.h"
#include "AP_BattMonitor_SMBus_SUI.h"
extern const AP_HAL::HAL& hal;
// Vendor-specific register addresses on the SUI smart battery.
#define REG_CELL_VOLTAGE 0x28
#define REG_CURRENT 0x2a

// maximum number of cells that we can read data for
#define SUI_MAX_CELL_READ 4

// Constructor
AP_BattMonitor_SMBus_SUI::AP_BattMonitor_SMBus_SUI(AP_BattMonitor &mon,
                                                   AP_BattMonitor::BattMonitor_State &mon_state,
                                                   AP_BattMonitor_Params &params,
                                                   AP_HAL::OwnPtr<AP_HAL::I2CDevice> dev,
                                                   uint8_t _cell_count)
    : AP_BattMonitor_SMBus(mon, mon_state, params, std::move(dev)),
    cell_count(_cell_count)
{
    // disable the base class's automatic PEC handling; read_block() below
    // validates the PEC byte itself for the vendor block reads it performs
    _pec_supported = false;
    // limit I2C transfer retries to 2
    _dev->set_retries(2);
}
// Perform SMBus initialisation, then speed up the periodic callback so the
// two-phase read cycle (voltages / everything else) keeps the overall rate.
void AP_BattMonitor_SMBus_SUI::init(void)
{
    AP_BattMonitor_SMBus::init();

    // nothing to adjust unless both the device and its callback exist
    if (!_dev || !timer_handle) {
        return;
    }

    // run twice as fast for two phases
    _dev->adjust_periodic_callback(timer_handle, 50000);
}
// Periodic callback: alternates between a voltage phase and a
// current/housekeeping phase on successive invocations.
void AP_BattMonitor_SMBus_SUI::timer()
{
    uint32_t tnow = AP_HAL::micros();

    // we read in two phases as the device can stall if you read
    // current too rapidly after voltages
    phase_voltages = !phase_voltages;

    if (phase_voltages) {
        read_cell_voltages();
        update_health();
        return;
    }

    // read current
    int32_t current_ma;
    if (read_block_bare(REG_CURRENT, (uint8_t *)&current_ma, sizeof(current_ma))) {
        // convert mA to A; the sign is negated here — presumably the register
        // reports charge-positive while ArduPilot expects discharge-positive
        // (TODO confirm against the pack's register documentation)
        _state.current_amps = current_ma * -0.001;
        _state.last_time_micros = tnow;
    }

    read_full_charge_capacity();
    read_temp();
    read_serial_number();
    read_remaining_capacity();
    update_health();
}
// read_block - returns true if successful
// Reads an SMBus block transfer: the device returns [length][data...][PEC].
// Data is copied to 'data' only if the reported length is sane and the PEC
// checksum matches.
bool AP_BattMonitor_SMBus_SUI::read_block(uint8_t reg, uint8_t* data, uint8_t len) const
{
    // buffer to hold results (2 extra byte returned holding length and PEC)
    uint8_t buff[len+2];

    // read bytes
    if (!_dev->read_registers(reg, buff, sizeof(buff))) {
        return false;
    }

    // get length
    uint8_t bufflen = buff[0];

    // sanity check length returned by smbus
    if (bufflen == 0 || bufflen > len) {
        return false;
    }

    // check PEC (computed over address, register and length+data bytes)
    uint8_t pec = get_PEC(AP_BATTMONITOR_SMBUS_I2C_ADDR, reg, true, buff, bufflen+1);
    if (pec != buff[bufflen+1]) {
        return false;
    }

    // copy data (excluding PEC)
    memcpy(data, &buff[1], bufflen);

    // return success
    return true;
}
// read_block_bare - plain register read with no SMBus length byte and no
// PEC validation; returns true if the bus transfer succeeded
bool AP_BattMonitor_SMBus_SUI::read_block_bare(uint8_t reg, uint8_t* data, uint8_t len) const
{
    return _dev->read_registers(reg, data, len);
}
// Read per-cell voltages (millivolts) and derive the pack voltage.
// Only SUI_MAX_CELL_READ cells can be read directly; for packs with more
// cells the remainder is estimated from the overall pack voltage (or, if
// that read fails, from the average of the cells read so far).
void AP_BattMonitor_SMBus_SUI::read_cell_voltages()
{
    // read cell voltages
    uint16_t voltbuff[SUI_MAX_CELL_READ];
    if (!read_block(REG_CELL_VOLTAGE, (uint8_t *)voltbuff, sizeof(voltbuff))) {
        return;
    }
    float pack_voltage_mv = 0.0f;

    // accumulate the cells we can read directly
    for (uint8_t i = 0; i < MIN(SUI_MAX_CELL_READ, cell_count); i++) {
        const uint16_t cell = voltbuff[i];
        _state.cell_voltages.cells[i] = cell;
        pack_voltage_mv += (float)cell;
    }

    // BUGFIX: was 'cell_count >= SUI_MAX_CELL_READ', which divided by
    // (cell_count - SUI_MAX_CELL_READ) == 0 when the counts were equal.
    // When cell_count == SUI_MAX_CELL_READ all cells were already read
    // above, so there is nothing to estimate.
    if (cell_count > SUI_MAX_CELL_READ) {
        // we can't read voltage of all cells. get overall pack voltage to work out
        // an average for remaining cells
        uint16_t total_mv;
        if (read_block(BATTMONITOR_SMBUS_VOLTAGE, (uint8_t *)&total_mv, sizeof(total_mv))) {
            // if total voltage is below pack_voltage_mv then we will
            // read zero volts for the extra cells.
            total_mv = MAX(total_mv, pack_voltage_mv);
            const uint16_t cell_mv = (total_mv - pack_voltage_mv) / (cell_count - SUI_MAX_CELL_READ);
            for (uint8_t i = SUI_MAX_CELL_READ; i < cell_count; i++) {
                _state.cell_voltages.cells[i] = cell_mv;
            }
            pack_voltage_mv = total_mv;
        } else {
            // we can't get total pack voltage. Use average of cells we have so far
            const uint16_t cell_mv = pack_voltage_mv / SUI_MAX_CELL_READ;
            for (uint8_t i = SUI_MAX_CELL_READ; i < cell_count; i++) {
                _state.cell_voltages.cells[i] = cell_mv;
            }
            pack_voltage_mv += cell_mv * (cell_count - SUI_MAX_CELL_READ);
        }
    }
    _has_cell_voltages = true;

    // accumulate the pack voltage out of the total of the cells
    _state.voltage = pack_voltage_mv * 0.001;
    last_volt_read_us = AP_HAL::micros();
}
/*
update healthy flag
*/
void AP_BattMonitor_SMBus_SUI::update_health()
{
uint32_t now = AP_HAL::micros();
_state.healthy = (now - last_volt_read_us < AP_BATTMONITOR_SMBUS_TIMEOUT_MICROS) &&
(now - _state.last_time_micros < AP_BATTMONITOR_SMBUS_TIMEOUT_MICROS);
}
| squilter/ardupilot | libraries/AP_BattMonitor/AP_BattMonitor_SMBus_SUI.cpp | C++ | gpl-3.0 | 4,883 |
/**********************************************************************\
RageLib - Models
Copyright (C) 2009 Arushan/Aru <oneforaru at gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
\**********************************************************************/
using RageLib.Common.ResourceTypes;
using RageLib.Models.Resource.Shaders;
namespace RageLib.Models.Data
{
    /// <summary>
    /// A material parameter carrying a four-component vector value,
    /// extracted from a shader's Vector4 parameter.
    /// </summary>
    public class MaterialParamVector4 : MaterialParam
    {
        /// <summary>The vector value read from the shader parameter.</summary>
        public Vector4 Value { get; private set; }

        internal MaterialParamVector4(uint hash, ShaderParamVector4 vector)
            : base(hash)
        {
            Value = vector.Data;
        }
    }
}
# Copyright (c) 2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents a connection to the EC2 service.
"""
from boto.ec2.connection import EC2Connection
from boto.vpc.vpc import VPC
from boto.vpc.customergateway import CustomerGateway
from boto.vpc.vpngateway import VpnGateway, Attachment
from boto.vpc.dhcpoptions import DhcpOptions
from boto.vpc.subnet import Subnet
from boto.vpc.vpnconnection import VpnConnection
class VPCConnection(EC2Connection):

    def _build_filter_params(self, params, filters):
        """
        Encode a list of request filters into query parameters.

        Each filter is a (key, value) tuple, encoded as 'Filter.N.Key' and
        'Filter.N.Value.1' request parameters (N is 1-based).

        This fixes a bug shared by several methods of this class: the value
        parameter name was built without interpolating the index
        ('Filter.%d.Value.1' was used literally), so with more than one
        filter all values collided on a single key and only the last one
        survived.

        :type params: dict
        :param params: The request parameter dict, updated in place.

        :type filters: list of tuples
        :param filters: A list of (filter key, filter value) tuples.
        """
        for i, (key, value) in enumerate(filters, 1):
            params['Filter.%d.Key' % i] = key
            params['Filter.%d.Value.1' % i] = value

    # VPC methods

    def get_all_vpcs(self, vpc_ids=None, filters=None):
        """
        Retrieve information about your VPCs. You can filter results to
        return information only about those VPCs that match your search
        parameters. Otherwise, all VPCs associated with your account
        are returned.

        :type vpc_ids: list
        :param vpc_ids: A list of strings with the desired VPC ID's

        :type filters: list of tuples
        :param filters: A list of tuples containing filters.  Each tuple
                        consists of a filter key and a filter value.
                        Possible filter keys are:

                        - *state*, the state of the VPC (pending or available)
                        - *cidrBlock*, CIDR block of the VPC
                        - *dhcpOptionsId*, the ID of a set of DHCP options

        :rtype: list
        :return: A list of :class:`boto.vpc.vpc.VPC`
        """
        params = {}
        if vpc_ids:
            self.build_list_params(params, vpc_ids, 'VpcId')
        if filters:
            self._build_filter_params(params, filters)
        return self.get_list('DescribeVpcs', params, [('item', VPC)])

    def create_vpc(self, cidr_block):
        """
        Create a new Virtual Private Cloud.

        :type cidr_block: str
        :param cidr_block: A valid CIDR block

        :rtype: The newly created VPC
        :return: A :class:`boto.vpc.vpc.VPC` object
        """
        params = {'CidrBlock' : cidr_block}
        return self.get_object('CreateVpc', params, VPC)

    def delete_vpc(self, vpc_id):
        """
        Delete a Virtual Private Cloud.

        :type vpc_id: str
        :param vpc_id: The ID of the vpc to be deleted.

        :rtype: bool
        :return: True if successful
        """
        params = {'VpcId': vpc_id}
        return self.get_status('DeleteVpc', params)

    # Customer Gateways

    def get_all_customer_gateways(self, customer_gateway_ids=None, filters=None):
        """
        Retrieve information about your CustomerGateways. You can filter
        results to return information only about those CustomerGateways that
        match your search parameters. Otherwise, all CustomerGateways
        associated with your account are returned.

        :type customer_gateway_ids: list
        :param customer_gateway_ids: A list of strings with the desired
                                     CustomerGateway ID's

        :type filters: list of tuples
        :param filters: A list of tuples containing filters.  Each tuple
                        consists of a filter key and a filter value.
                        Possible filter keys are:

                        - *state*, the state of the CustomerGateway
                          (pending,available,deleting,deleted)
                        - *type*, the type of customer gateway (ipsec.1)
                        - *ipAddress* the IP address of the customer gateway's
                          internet-routable external interface

        :rtype: list
        :return: A list of :class:`boto.vpc.customergateway.CustomerGateway`
        """
        params = {}
        if customer_gateway_ids:
            self.build_list_params(params, customer_gateway_ids, 'CustomerGatewayId')
        if filters:
            self._build_filter_params(params, filters)
        return self.get_list('DescribeCustomerGateways', params, [('item', CustomerGateway)])

    def create_customer_gateway(self, type, ip_address, bgp_asn):
        """
        Create a new Customer Gateway

        :type type: str
        :param type: Type of VPN Connection. Only valid value currently is 'ipsec.1'

        :type ip_address: str
        :param ip_address: Internet-routable IP address for customer's gateway.
                           Must be a static address.

        :type bgp_asn: str
        :param bgp_asn: Customer gateway's Border Gateway Protocol (BGP)
                        Autonomous System Number (ASN)

        :rtype: The newly created CustomerGateway
        :return: A :class:`boto.vpc.customergateway.CustomerGateway` object
        """
        params = {'Type' : type,
                  'IpAddress' : ip_address,
                  'BgpAsn' : bgp_asn}
        return self.get_object('CreateCustomerGateway', params, CustomerGateway)

    def delete_customer_gateway(self, customer_gateway_id):
        """
        Delete a Customer Gateway.

        :type customer_gateway_id: str
        :param customer_gateway_id: The ID of the customer_gateway to be deleted.

        :rtype: bool
        :return: True if successful
        """
        params = {'CustomerGatewayId': customer_gateway_id}
        return self.get_status('DeleteCustomerGateway', params)

    # VPN Gateways

    def get_all_vpn_gateways(self, vpn_gateway_ids=None, filters=None):
        """
        Retrieve information about your VpnGateways. You can filter results
        to return information only about those VpnGateways that match your
        search parameters. Otherwise, all VpnGateways associated with your
        account are returned.

        :type vpn_gateway_ids: list
        :param vpn_gateway_ids: A list of strings with the desired VpnGateway ID's

        :type filters: list of tuples
        :param filters: A list of tuples containing filters.  Each tuple
                        consists of a filter key and a filter value.
                        Possible filter keys are:

                        - *state*, the state of the VpnGateway
                          (pending,available,deleting,deleted)
                        - *type*, the type of customer gateway (ipsec.1)
                        - *availabilityZone*, the Availability zone the
                          VPN gateway is in.

        :rtype: list
        :return: A list of :class:`boto.vpc.customergateway.VpnGateway`
        """
        params = {}
        if vpn_gateway_ids:
            self.build_list_params(params, vpn_gateway_ids, 'VpnGatewayId')
        if filters:
            self._build_filter_params(params, filters)
        return self.get_list('DescribeVpnGateways', params, [('item', VpnGateway)])

    def create_vpn_gateway(self, type, availability_zone=None):
        """
        Create a new Vpn Gateway

        :type type: str
        :param type: Type of VPN Connection. Only valid value currently is 'ipsec.1'

        :type availability_zone: str
        :param availability_zone: The Availability Zone where you want the VPN gateway.

        :rtype: The newly created VpnGateway
        :return: A :class:`boto.vpc.vpngateway.VpnGateway` object
        """
        params = {'Type' : type}
        if availability_zone:
            params['AvailabilityZone'] = availability_zone
        return self.get_object('CreateVpnGateway', params, VpnGateway)

    def delete_vpn_gateway(self, vpn_gateway_id):
        """
        Delete a Vpn Gateway.

        :type vpn_gateway_id: str
        :param vpn_gateway_id: The ID of the vpn_gateway to be deleted.

        :rtype: bool
        :return: True if successful
        """
        params = {'VpnGatewayId': vpn_gateway_id}
        return self.get_status('DeleteVpnGateway', params)

    def attach_vpn_gateway(self, vpn_gateway_id, vpc_id):
        """
        Attaches a VPN gateway to a VPC.

        :type vpn_gateway_id: str
        :param vpn_gateway_id: The ID of the vpn_gateway to attach

        :type vpc_id: str
        :param vpc_id: The ID of the VPC you want to attach the gateway to.

        :rtype: An attachment
        :return: a :class:`boto.vpc.vpngateway.Attachment`
        """
        params = {'VpnGatewayId': vpn_gateway_id,
                  'VpcId' : vpc_id}
        return self.get_object('AttachVpnGateway', params, Attachment)

    # Subnets

    def get_all_subnets(self, subnet_ids=None, filters=None):
        """
        Retrieve information about your Subnets. You can filter results to
        return information only about those Subnets that match your search
        parameters. Otherwise, all Subnets associated with your account
        are returned.

        :type subnet_ids: list
        :param subnet_ids: A list of strings with the desired Subnet ID's

        :type filters: list of tuples
        :param filters: A list of tuples containing filters.  Each tuple
                        consists of a filter key and a filter value.
                        Possible filter keys are:

                        - *state*, the state of the Subnet
                          (pending,available)
                        - *vpcId*, the ID of the VPC the subnet is in.
                        - *cidrBlock*, CIDR block of the subnet
                        - *availabilityZone*, the Availability Zone
                          the subnet is in.

        :rtype: list
        :return: A list of :class:`boto.vpc.subnet.Subnet`
        """
        params = {}
        if subnet_ids:
            self.build_list_params(params, subnet_ids, 'SubnetId')
        if filters:
            self._build_filter_params(params, filters)
        return self.get_list('DescribeSubnets', params, [('item', Subnet)])

    def create_subnet(self, vpc_id, cidr_block, availability_zone=None):
        """
        Create a new Subnet

        :type vpc_id: str
        :param vpc_id: The ID of the VPC where you want to create the subnet.

        :type cidr_block: str
        :param cidr_block: The CIDR block you want the subnet to cover.

        :type availability_zone: str
        :param availability_zone: The AZ you want the subnet in

        :rtype: The newly created Subnet
        :return: A :class:`boto.vpc.customergateway.Subnet` object
        """
        params = {'VpcId' : vpc_id,
                  'CidrBlock' : cidr_block}
        if availability_zone:
            params['AvailabilityZone'] = availability_zone
        return self.get_object('CreateSubnet', params, Subnet)

    def delete_subnet(self, subnet_id):
        """
        Delete a subnet.

        :type subnet_id: str
        :param subnet_id: The ID of the subnet to be deleted.

        :rtype: bool
        :return: True if successful
        """
        params = {'SubnetId': subnet_id}
        return self.get_status('DeleteSubnet', params)

    # DHCP Options

    def get_all_dhcp_options(self, dhcp_options_ids=None):
        """
        Retrieve information about your DhcpOptions.

        :type dhcp_options_ids: list
        :param dhcp_options_ids: A list of strings with the desired DhcpOption ID's

        :rtype: list
        :return: A list of :class:`boto.vpc.dhcpoptions.DhcpOptions`
        """
        params = {}
        if dhcp_options_ids:
            self.build_list_params(params, dhcp_options_ids, 'DhcpOptionsId')
        return self.get_list('DescribeDhcpOptions', params, [('item', DhcpOptions)])

    def create_dhcp_options(self, vpc_id, cidr_block, availability_zone=None):
        """
        Create a new DhcpOption

        :type vpc_id: str
        :param vpc_id: The ID of the VPC where you want to create the subnet.

        :type cidr_block: str
        :param cidr_block: The CIDR block you want the subnet to cover.

        :type availability_zone: str
        :param availability_zone: The AZ you want the subnet in

        :rtype: The newly created DhcpOption
        :return: A :class:`boto.vpc.customergateway.DhcpOption` object
        """
        # NOTE(review): these parameters mirror create_subnet rather than the
        # EC2 CreateDhcpOptions API (which expects DhcpConfiguration entries,
        # not VpcId/CidrBlock), and the action name is the singular
        # 'CreateDhcpOption'.  Left unchanged to preserve the existing
        # interface -- verify against the EC2 API reference before relying on
        # this method.
        params = {'VpcId' : vpc_id,
                  'CidrBlock' : cidr_block}
        if availability_zone:
            params['AvailabilityZone'] = availability_zone
        return self.get_object('CreateDhcpOption', params, DhcpOptions)

    def delete_dhcp_options(self, dhcp_options_id):
        """
        Delete a DHCP Options

        :type dhcp_options_id: str
        :param dhcp_options_id: The ID of the DHCP Options to be deleted.

        :rtype: bool
        :return: True if successful
        """
        params = {'DhcpOptionsId': dhcp_options_id}
        return self.get_status('DeleteDhcpOptions', params)

    def associate_dhcp_options(self, dhcp_options_id, vpc_id):
        """
        Associate a set of Dhcp Options with a VPC.

        :type dhcp_options_id: str
        :param dhcp_options_id: The ID of the Dhcp Options

        :type vpc_id: str
        :param vpc_id: The ID of the VPC.

        :rtype: bool
        :return: True if successful
        """
        params = {'DhcpOptionsId': dhcp_options_id,
                  'VpcId' : vpc_id}
        return self.get_status('AssociateDhcpOptions', params)

    # VPN Connection

    def get_all_vpn_connections(self, vpn_connection_ids=None, filters=None):
        """
        Retrieve information about your VPN Connections. You can filter
        results to return information only about those VPN Connections that
        match your search parameters. Otherwise, all VPN Connections
        associated with your account are returned.

        :type vpn_connection_ids: list
        :param vpn_connection_ids: A list of strings with the desired VPN
                                   Connection ID's

        :type filters: list of tuples
        :param filters: A list of tuples containing filters.  Each tuple
                        consists of a filter key and a filter value.
                        Possible filter keys are:

                        - *state*, the state of the VPN Connection
                          (pending,available,deleting,deleted)
                        - *type*, the type of connection, currently 'ipsec.1'
                        - *customerGatewayId*, the ID of the customer gateway
                          associated with the VPN
                        - *vpnGatewayId*, the ID of the VPN gateway associated
                          with the VPN connection

        :rtype: list
        :return: A list of :class:`boto.vpn_connection.vpnconnection.VpnConnection`
        """
        params = {}
        if vpn_connection_ids:
            # 'VpnConnectionId' is the parameter name the EC2 API expects;
            # the previous 'Vpn_ConnectionId' was never recognized.
            self.build_list_params(params, vpn_connection_ids, 'VpnConnectionId')
        if filters:
            self._build_filter_params(params, filters)
        return self.get_list('DescribeVpnConnections', params, [('item', VpnConnection)])

    def create_vpn_connection(self, type, customer_gateway_id, vpn_gateway_id):
        """
        Create a new VPN Connection.

        :type type: str
        :param type: The type of VPN Connection. Currently only 'ipsec.1'
                     is supported

        :type customer_gateway_id: str
        :param customer_gateway_id: The ID of the customer gateway.

        :type vpn_gateway_id: str
        :param vpn_gateway_id: The ID of the VPN gateway.

        :rtype: The newly created VpnConnection
        :return: A :class:`boto.vpc.vpnconnection.VpnConnection` object
        """
        params = {'Type' : type,
                  'CustomerGatewayId' : customer_gateway_id,
                  'VpnGatewayId' : vpn_gateway_id}
        return self.get_object('CreateVpnConnection', params, VpnConnection)

    def delete_vpn_connection(self, vpn_connection_id):
        """
        Delete a VPN Connection.

        :type vpn_connection_id: str
        :param vpn_connection_id: The ID of the vpn_connection to be deleted.

        :rtype: bool
        :return: True if successful
        """
        params = {'VpnConnectionId': vpn_connection_id}
        return self.get_status('DeleteVpnConnection', params)
| apavlo/h-store | third_party/python/boto/vpc/__init__.py | Python | gpl-3.0 | 17,897 |
/*!
* Piwik - free/libre analytics platform
*
* @link http://piwik.org
* @license http://www.gnu.org/licenses/gpl-3.0.html GPL v3 or later
*/
(function () {
    angular.module('piwikApp.filter').filter('htmldecode', htmldecode);

    htmldecode.$inject = ['piwik'];

    /**
     * Decodes HTML entities in the given text.
     *
     * Be aware that this filter can cause XSS so only use it when you're
     * sure it is safe. Eg it should be safe when it is afterwards escaped
     * by angular sanitize again.
     */
    function htmldecode(piwik) {
        return function (text) {
            var hasContent = text && text.length;
            return hasContent ? piwik.helper.htmlDecode(text) : text;
        };
    }
})();
| befair/soulShape | wp/soulshape.earth/piwik/plugins/CoreHome/angularjs/common/filters/htmldecode.js | JavaScript | agpl-3.0 | 686 |
"""
Management command to resend all lti scores for the requested course.
"""
import textwrap
from django.core.management import BaseCommand
from opaque_keys.edx.keys import CourseKey
from lti_provider.models import GradedAssignment
from lti_provider import tasks
class Command(BaseCommand):
    """
    Send all lti scores for the requested courses to the registered consumers.
    If no arguments are provided, send all scores for all courses.
    Examples:
        ./manage.py lms resend_lti_scores
        ./manage.py lms resend_lti_scores course-v1:edX+DemoX+Demo_Course course-v1:UBCx+course+2016-01
    """

    help = textwrap.dedent(__doc__)

    def add_arguments(self, parser):
        # Zero or more course keys, parsed straight into CourseKey objects.
        parser.add_argument(u'course_keys', type=CourseKey.from_string, nargs='*')

    def handle(self, *args, **options):
        # Build a single iterable of the assignments to process, then push
        # each score to its consumer.
        course_keys = options[u'course_keys']
        if course_keys:
            assignments = (
                assignment
                for course_key in course_keys
                for assignment in self._iter_course_assignments(course_key)
            )
        else:
            assignments = self._iter_all_assignments()
        for assignment in assignments:
            self._send_score(assignment)

    def _send_score(self, assignment):
        """
        Send the score to the LTI consumer for a single assignment.
        """
        tasks.send_composite_outcome.delay(
            assignment.user_id,
            unicode(assignment.course_key),
            assignment.id,
            assignment.version_number,
        )

    def _iter_all_assignments(self):
        """
        Get all the graded assignments in the system.
        """
        return GradedAssignment.objects.all()

    def _iter_course_assignments(self, course_key):
        """
        Get all the graded assignments for the given course.
        """
        return GradedAssignment.objects.filter(course_key=course_key)
| caesar2164/edx-platform | lms/djangoapps/lti_provider/management/commands/resend_lti_scores.py | Python | agpl-3.0 | 1,857 |
#!/usr/bin/env python
#***************************************************************************
#* *
#* Copyright (c) 2009 Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
__title__="wiki2qhelp"
__author__ = "Yorik van Havre <yorik@uncreated.net>"
__url__ = "http://www.freecadweb.org"
"""
This script builds qhrlp files from a local copy of the wiki
"""
import sys, os, re, tempfile, getopt, shutil
from urllib2 import urlopen, HTTPError
# CONFIGURATION #################################################

FOLDER = "./localwiki"  # folder holding the local copy of the wiki pages
INDEX = "Online_Help_Toc" # the start page from where to crawl the wiki
VERBOSE = True # to display what's going on. Otherwise, runs totally silent.
QHELPCOMPILER = 'qhelpgenerator'  # Qt tool that compiles a .qhp project into a .qch file
QCOLLECTIOMGENERATOR = 'qcollectiongenerator'  # Qt tool that builds the .qhc collection
RELEASE = '0.16'  # FreeCAD release string shown in the About dialog

# END CONFIGURATION ##############################################

# NOTE(review): fcount/dcount appear unused in this script
fcount = dcount = 0
def crawl():
    "builds and compiles Qt help files (.qch/.qhc) from the local wiki copy; returns 0 on success, 1 on error"
    # tests ###############################################
    # verify the Qt help tools are available before doing any work
    if os.system(QHELPCOMPILER +' -v'):
        print "Error: QAssistant not fully installed, exiting."
        return 1
    if os.system(QCOLLECTIOMGENERATOR +' -v'):
        print "Error: QAssistant not fully installed, exiting."
        return 1
    # run ########################################################
    # build the .qhp project and .qhcp collection project, then compile both
    qhp = buildtoc()
    qhcp = createCollProjectFile()
    if generate(qhcp) or compile(qhp):
        print "Error at compiling"
        return 1
    if VERBOSE: print "All done!"
    # optionally install the compiled files into the source tree
    i=raw_input("Copy the files to their correct location in the source tree? y/n (default=no) ")
    if i.upper() in ["Y","YES"]:
        shutil.copy("localwiki/freecad.qch","../../Doc/freecad.qch")
        shutil.copy("localwiki/freecad.qhc","../../Doc/freecad.qhc")
    else:
        print 'Files are in localwiki. Test with "assistant -collectionFile localwiki/freecad.qhc"'
    return 0
def compile(qhpfile):
"compiles the whole html doc with qassistant"
qchfile = FOLDER + os.sep + "freecad.qch"
if not os.system(QHELPCOMPILER + ' '+qhpfile+' -o '+qchfile):
if VERBOSE: print "Successfully created",qchfile
return 0
def generate(qhcpfile):
"generates qassistant-specific settings like icon, title, ..."
txt="""
<center>FreeCAD """+RELEASE+""" help files<br/>
<a href="http://www.freecadweb.org">http://www.freecadweb.org</a></center>
"""
about=open(FOLDER + os.sep + "about.txt","w")
about.write(txt)
about.close()
qhcfile = FOLDER + os.sep + "freecad.qhc"
if not os.system(QCOLLECTIOMGENERATOR+' '+qhcpfile+' -o '+qhcfile):
if VERBOSE: print "Successfully created ",qhcfile
return 0
def createCollProjectFile():
    """writes the .qhcp collection project file for Qt Assistant and returns its path"""
    # the project file content is a fixed template; it references freecad.qhp
    # (written by buildtoc) and about.txt (written by generate)
    qprojectfile = '''<?xml version="1.0" encoding="UTF-8"?>
<QHelpCollectionProject version="1.0">
    <assistant>
        <title>FreeCAD User Manual</title>
        <applicationIcon>64px-FreeCAD05.svg.png</applicationIcon>
        <cacheDirectory>freecad/freecad</cacheDirectory>
        <startPage>qthelp://org.freecad.usermanual/doc/Online_Help_Startpage.html</startPage>
        <aboutMenuText>
            <text>About FreeCAD</text>
        </aboutMenuText>
        <aboutDialog>
            <file>about.txt</file>
            <!--
            <icon>images/icon.png</icon>
            -->
            <icon>64px-FreeCAD05.svg.png</icon>
        </aboutDialog>
        <enableDocumentationManager>true</enableDocumentationManager>
        <enableAddressBar>true</enableAddressBar>
        <enableFilterFunctionality>true</enableFilterFunctionality>
    </assistant>
    <docFiles>
        <generate>
            <file>
                <input>freecad.qhp</input>
                <output>freecad.qch</output>
            </file>
        </generate>
        <register>
            <file>freecad.qch</file>
        </register>
    </docFiles>
</QHelpCollectionProject>
'''
    if VERBOSE: print "Building project file..."
    qfilename = FOLDER + os.sep + "freecad.qhcp"
    f = open(qfilename,'w')
    f.write(qprojectfile)
    f.close()
    if VERBOSE: print "Done writing qhcp file",qfilename
    return qfilename
def buildtoc():
    '''
    gets the table of contents page and parses its
    contents into a clean lists structure, then writes the
    freecad.qhp help project file and returns its path
    '''
    # template for the .qhp file; the <insert...> placeholders are
    # substituted below with the generated toc, keywords and file list
    qhelpfile = '''<?xml version="1.0" encoding="UTF-8"?>
<QtHelpProject version="1.0">
    <namespace>org.freecad.usermanual</namespace>
    <virtualFolder>doc</virtualFolder>
    <!--
    <customFilter name="FreeCAD '''+RELEASE+'''">
        <filterAttribute>FreeCAD</filterAttribute>
        <filterAttribute>'''+RELEASE+'''</filterAttribute>
    </customFilter>
    -->
    <filterSection>
        <!--
        <filterAttribute>FreeCAD</filterAttribute>
        <filterAttribute>'''+RELEASE+'''</filterAttribute>
        -->
        <toc>
            <inserttoc>
        </toc>
        <keywords>
            <insertkeywords>
        </keywords>
        <insertfiles>
    </filterSection>
</QtHelpProject>
'''

    def getname(line):
        # extract (title, link) from a toc <li> entry; falls back to the
        # bare text as title and 'default.html' when no <a> tag is present
        line = re.compile('<li>').sub('',line)
        line = re.compile('</li>').sub('',line)
        title = line.strip()
        link = ''
        if "<a" in line:
            title = re.findall('<a[^>]*>(.*?)</a>',line)[0].strip()
            link = re.findall('href="(.*?)"',line)[0].strip()
        if not link: link = 'default.html'
        return title,link

    if VERBOSE: print "Building table of contents..."
    # read the whole index page into one string
    f = open(FOLDER+os.sep+INDEX+'.html')
    html = ''
    for line in f: html += line
    f.close()
    # flatten to a single line so the regexes below can span entries
    html = html.replace("\n"," ")
    html = html.replace("> <","><")
    # keep only the first <ul>...</ul> list (the toc) and split into items
    html = re.findall("<ul.*/ul>",html)[0]
    items = re.findall('<li[^>]*>.*?</li>|</ul></li>',html)
    inserttoc = '<section title="FreeCAD Documentation" ref="Online_Help_Toc.html">\n'
    insertkeywords = ''
    # walk the flat item list, opening a nested <section> for each sub-<ul>
    # and closing it when the matching </ul></li> marker is reached
    for item in items:
        if not ("<ul>" in item):
            if ("</ul>" in item):
                inserttoc += '</section>\n'
            else:
                link = ''
                title,link=getname(item)
                if link:
                    link='" ref="'+link
                insertkeywords += ('<keyword name="'+title+link+'"/>\n')
                inserttoc += ('<section title="'+title+link+'"></section>\n')
        else:
            subitems = item.split("<ul>")
            for i in range(len(subitems)):
                link = ''
                title,link=getname(subitems[i])
                if link:
                    link='" ref="'+link
                    insertkeywords += ('<keyword name="'+title+link+'"/>\n')
                trail = ''
                if i == len(subitems)-1: trail = '</section>'
                inserttoc += ('<section title="'+title+link+'">'+trail+'\n')
    inserttoc += '</section>\n'

    # every file in the local wiki folder gets registered in the help project
    insertfiles = "<files>\n"
    for fil in os.listdir(FOLDER):
        insertfiles += ("<file>"+fil+"</file>\n")
    insertfiles += "</files>\n"

    # splice the generated fragments into the template and write it out
    qhelpfile = re.compile('<insertkeywords>').sub(insertkeywords,qhelpfile)
    qhelpfile = re.compile('<inserttoc>').sub(inserttoc,qhelpfile)
    qhelpfile = re.compile('<insertfiles>').sub(insertfiles,qhelpfile)
    qfilename = FOLDER + os.sep + "freecad.qhp"
    f = open(qfilename,'wb')
    f.write(qhelpfile)
    f.close()
    if VERBOSE: print "Done writing qhp file",qfilename
    return qfilename
if __name__ == "__main__":
    # propagate crawl()'s status code (0 = success, 1 = error) to the shell
    # so calling scripts can detect failures; previously it was discarded
    sys.exit(crawl())
| kkoksvik/FreeCAD | src/Tools/offlinedoc/buildqhelp.py | Python | lgpl-2.1 | 8,832 |
/*
* Copyright (C) 2007, 2008 Apple Inc. All rights reserved.
* Copyright (C) 2008 Matt Lilek <webkit@mattlilek.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
 * Constructs the Elements panel: a DOM tree outline, a breadcrumb bar, and a
 * right-hand sidebar with Styles/Metrics/Properties panes.
 * Inherits from WebInspector.Panel.
 */
WebInspector.ElementsPanel = function()
{
    WebInspector.Panel.call(this);

    this.element.addStyleClass("elements");

    // Main content area hosting the DOM tree outline.
    this.contentElement = document.createElement("div");
    this.contentElement.id = "elements-content";
    this.contentElement.className = "outline-disclosure";

    this.treeOutline = new WebInspector.ElementsTreeOutline();
    this.treeOutline.panel = this;
    this.treeOutline.includeRootDOMNode = false;
    this.treeOutline.selectEnabled = true;

    // Invoked by the tree outline whenever the focused DOM node changes.
    // `this` here is the tree outline, hence the this.panel indirection.
    this.treeOutline.focusedNodeChanged = function(forceUpdate)
    {
        // Move keyboard focus to the panel area, unless the search field has it.
        if (this.panel.visible && WebInspector.currentFocusElement !== document.getElementById("search"))
            WebInspector.currentFocusElement = document.getElementById("main-panels");

        this.panel.updateBreadcrumb(forceUpdate);

        // Mark every sidebar pane stale; the update* calls below only refresh
        // panes that are both expanded and flagged as needing an update.
        for (var pane in this.panel.sidebarPanes)
            this.panel.sidebarPanes[pane].needsUpdate = true;

        this.panel.updateStyles(true);
        this.panel.updateMetrics();
        this.panel.updateProperties();

        // Selecting a node ends "click an element to inspect it" mode.
        if (InspectorController.searchingForNode()) {
            InspectorController.toggleNodeSearch();
            this.panel.nodeSearchButton.removeStyleClass("toggled-on");
        }
    };

    this.contentElement.appendChild(this.treeOutline.element);

    // Breadcrumb bar showing the focused node's ancestor chain.
    this.crumbsElement = document.createElement("div");
    this.crumbsElement.className = "crumbs";
    this.crumbsElement.addEventListener("mousemove", this._mouseMovedInCrumbs.bind(this), false);
    this.crumbsElement.addEventListener("mouseout", this._mouseMovedOutOfCrumbs.bind(this), false);

    this.sidebarPanes = {};
    this.sidebarPanes.styles = new WebInspector.StylesSidebarPane();
    this.sidebarPanes.metrics = new WebInspector.MetricsSidebarPane();
    this.sidebarPanes.properties = new WebInspector.PropertiesSidebarPane();

    // Lazily refresh a pane the moment the user expands it.
    this.sidebarPanes.styles.onexpand = this.updateStyles.bind(this);
    this.sidebarPanes.metrics.onexpand = this.updateMetrics.bind(this);
    this.sidebarPanes.properties.onexpand = this.updateProperties.bind(this);

    this.sidebarPanes.styles.expanded = true;

    // Edits in one pane can invalidate the other (style <-> box metrics).
    this.sidebarPanes.styles.addEventListener("style edited", this._stylesPaneEdited, this);
    this.sidebarPanes.styles.addEventListener("style property toggled", this._stylesPaneEdited, this);
    this.sidebarPanes.metrics.addEventListener("metrics edited", this._metricsPaneEdited, this);

    this.sidebarElement = document.createElement("div");
    this.sidebarElement.id = "elements-sidebar";

    this.sidebarElement.appendChild(this.sidebarPanes.styles.element);
    this.sidebarElement.appendChild(this.sidebarPanes.metrics.element);
    this.sidebarElement.appendChild(this.sidebarPanes.properties.element);

    // Draggable divider between content and sidebar.
    this.sidebarResizeElement = document.createElement("div");
    this.sidebarResizeElement.className = "sidebar-resizer-vertical";
    this.sidebarResizeElement.addEventListener("mousedown", this.rightSidebarResizerDragStart.bind(this), false);

    // Status-bar button that toggles "click an element to inspect it" mode.
    this.nodeSearchButton = document.createElement("button");
    this.nodeSearchButton.title = WebInspector.UIString("Select an element in the page to inspect it.");
    this.nodeSearchButton.id = "node-search-status-bar-item";
    this.nodeSearchButton.className = "status-bar-item";
    this.nodeSearchButton.addEventListener("click", this._nodeSearchButtonClicked.bind(this), false);

    this.searchingForNode = false;

    this.element.appendChild(this.contentElement);
    this.element.appendChild(this.sidebarElement);
    this.element.appendChild(this.sidebarResizeElement);

    this._mutationMonitoredWindows = [];
    // wrapCallback presumably guards callbacks against a torn-down inspector —
    // defined by InspectorController elsewhere.
    this._nodeInsertedEventListener = InspectorController.wrapCallback(this._nodeInserted.bind(this));
    this._nodeRemovedEventListener = InspectorController.wrapCallback(this._nodeRemoved.bind(this));
    this._contentLoadedEventListener = InspectorController.wrapCallback(this._contentLoaded.bind(this));

    this.reset();
}
WebInspector.ElementsPanel.prototype = {
    // CSS class used for this panel's toolbar button.
    toolbarItemClass: "elements",

    // Localized label shown on the toolbar button.
    get toolbarItemLabel()
    {
        return WebInspector.UIString("Elements");
    },

    // Items this panel contributes to the shared status bar.
    get statusBarItems()
    {
        return [this.nodeSearchButton, this.crumbsElement];
    },

    // Called when the status bar is (re)laid out; crumb widths depend on it.
    updateStatusBarItems: function()
    {
        this.updateBreadcrumbSizes();
    },

    // Shows the panel and refreshes everything that is only valid while visible.
    show: function()
    {
        WebInspector.Panel.prototype.show.call(this);
        this.sidebarResizeElement.style.right = (this.sidebarElement.offsetWidth - 3) + "px";
        this.updateBreadcrumb();
        this.treeOutline.updateSelection();
        // Apply DOM mutations that were queued while the panel was hidden.
        if (this.recentlyModifiedNodes.length)
            this._updateModifiedNodes();
    },

    // Hides the panel, clearing hover state and node-search mode.
    hide: function()
    {
        WebInspector.Panel.prototype.hide.call(this);

        WebInspector.hoveredDOMNode = null;

        if (InspectorController.searchingForNode()) {
            InspectorController.toggleNodeSearch();
            this.nodeSearchButton.removeStyleClass("toggled-on");
        }
    },

    // Re-layout selection highlight and crumbs after a window resize.
    resize: function()
    {
        this.treeOutline.updateSelection();
        this.updateBreadcrumbSizes();
    },
reset: function()
{
this.rootDOMNode = null;
this.focusedDOMNode = null;
WebInspector.hoveredDOMNode = null;
if (InspectorController.searchingForNode()) {
InspectorController.toggleNodeSearch();
this.nodeSearchButton.removeStyleClass("toggled-on");
}
this.recentlyModifiedNodes = [];
this.unregisterAllMutationEventListeners();
delete this.currentQuery;
this.searchCanceled();
var inspectedWindow = InspectorController.inspectedWindow();
if (!inspectedWindow || !inspectedWindow.document)
return;
if (!inspectedWindow.document.firstChild) {
function contentLoaded()
{
inspectedWindow.document.removeEventListener("DOMContentLoaded", contentLoadedCallback, false);
this.reset();
}
var contentLoadedCallback = InspectorController.wrapCallback(contentLoaded.bind(this));
inspectedWindow.document.addEventListener("DOMContentLoaded", contentLoadedCallback, false);
return;
}
// If the window isn't visible, return early so the DOM tree isn't built
// and mutation event listeners are not added.
if (!InspectorController.isWindowVisible())
return;
this.registerMutationEventListeners(inspectedWindow);
var inspectedRootDocument = inspectedWindow.document;
this.rootDOMNode = inspectedRootDocument;
var canidateFocusNode = inspectedRootDocument.body || inspectedRootDocument.documentElement;
if (canidateFocusNode) {
this.treeOutline.suppressSelectHighlight = true;
this.focusedDOMNode = canidateFocusNode;
this.treeOutline.suppressSelectHighlight = false;
if (this.treeOutline.selectedTreeElement)
this.treeOutline.selectedTreeElement.expand();
}
},
    // Expando property name stamped on DOM nodes already counted in a search,
    // so chunked search passes don't add the same node twice.
    includedInSearchResultsPropertyName: "__includedInInspectorSearchResults",

    // Tears down any in-progress or finished search: clears per-node markers,
    // tree-element highlights, the match counter, and the chunk interval.
    searchCanceled: function()
    {
        if (this._searchResults) {
            const searchResultsProperty = this.includedInSearchResultsPropertyName;
            for (var i = 0; i < this._searchResults.length; ++i) {
                var node = this._searchResults[i];

                // Remove the searchResultsProperty since there might be an unfinished search.
                delete node[searchResultsProperty];

                var treeElement = this.treeOutline.findTreeElement(node);
                if (treeElement)
                    treeElement.highlighted = false;
            }
        }

        WebInspector.updateSearchMatchesCount(0, this);

        // Stop the chunked-search timer if a search is still running.
        if (this._currentSearchChunkIntervalIdentifier) {
            clearInterval(this._currentSearchChunkIntervalIdentifier);
            delete this._currentSearchChunkIntervalIdentifier;
        }

        this._currentSearchResultIndex = 0;
        this._searchResults = [];
    },
performSearch: function(query)
{
// Call searchCanceled since it will reset everything we need before doing a new search.
this.searchCanceled();
const whitespaceTrimmedQuery = query.trimWhitespace();
if (!whitespaceTrimmedQuery.length)
return;
var tagNameQuery = whitespaceTrimmedQuery;
var attributeNameQuery = whitespaceTrimmedQuery;
var startTagFound = (tagNameQuery.indexOf("<") === 0);
var endTagFound = (tagNameQuery.lastIndexOf(">") === (tagNameQuery.length - 1));
if (startTagFound || endTagFound) {
var tagNameQueryLength = tagNameQuery.length;
tagNameQuery = tagNameQuery.substring((startTagFound ? 1 : 0), (endTagFound ? (tagNameQueryLength - 1) : tagNameQueryLength));
}
// Check the tagNameQuery is it is a possibly valid tag name.
if (!/^[a-zA-Z0-9\-_:]+$/.test(tagNameQuery))
tagNameQuery = null;
// Check the attributeNameQuery is it is a possibly valid tag name.
if (!/^[a-zA-Z0-9\-_:]+$/.test(attributeNameQuery))
attributeNameQuery = null;
const escapedQuery = query.escapeCharacters("'");
const escapedTagNameQuery = (tagNameQuery ? tagNameQuery.escapeCharacters("'") : null);
const escapedWhitespaceTrimmedQuery = whitespaceTrimmedQuery.escapeCharacters("'");
const searchResultsProperty = this.includedInSearchResultsPropertyName;
var updatedMatchCountOnce = false;
var matchesCountUpdateTimeout = null;
function updateMatchesCount()
{
WebInspector.updateSearchMatchesCount(this._searchResults.length, this);
matchesCountUpdateTimeout = null;
updatedMatchCountOnce = true;
}
function updateMatchesCountSoon()
{
if (!updatedMatchCountOnce)
return updateMatchesCount.call(this);
if (matchesCountUpdateTimeout)
return;
// Update the matches count every half-second so it doesn't feel twitchy.
matchesCountUpdateTimeout = setTimeout(updateMatchesCount.bind(this), 500);
}
function addNodesToResults(nodes, length, getItem)
{
if (!length)
return;
for (var i = 0; i < length; ++i) {
var node = getItem.call(nodes, i);
// Skip this node if it already has the property.
if (searchResultsProperty in node)
continue;
if (!this._searchResults.length) {
this._currentSearchResultIndex = 0;
this.focusedDOMNode = node;
}
node[searchResultsProperty] = true;
this._searchResults.push(node);
// Highlight the tree element to show it matched the search.
// FIXME: highlight the substrings in text nodes and attributes.
var treeElement = this.treeOutline.findTreeElement(node);
if (treeElement)
treeElement.highlighted = true;
}
updateMatchesCountSoon.call(this);
}
function matchExactItems(doc)
{
matchExactId.call(this, doc);
matchExactClassNames.call(this, doc);
matchExactTagNames.call(this, doc);
matchExactAttributeNames.call(this, doc);
}
function matchExactId(doc)
{
const result = doc.__proto__.getElementById.call(doc, whitespaceTrimmedQuery);
addNodesToResults.call(this, result, (result ? 1 : 0), function() { return this });
}
function matchExactClassNames(doc)
{
const result = doc.__proto__.getElementsByClassName.call(doc, whitespaceTrimmedQuery);
addNodesToResults.call(this, result, result.length, result.item);
}
function matchExactTagNames(doc)
{
if (!tagNameQuery)
return;
const result = doc.__proto__.getElementsByTagName.call(doc, tagNameQuery);
addNodesToResults.call(this, result, result.length, result.item);
}
function matchExactAttributeNames(doc)
{
if (!attributeNameQuery)
return;
const result = doc.__proto__.querySelectorAll.call(doc, "[" + attributeNameQuery + "]");
addNodesToResults.call(this, result, result.length, result.item);
}
function matchPartialTagNames(doc)
{
if (!tagNameQuery)
return;
const result = doc.__proto__.evaluate.call(doc, "//*[contains(name(), '" + escapedTagNameQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE);
addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem);
}
function matchStartOfTagNames(doc)
{
if (!tagNameQuery)
return;
const result = doc.__proto__.evaluate.call(doc, "//*[starts-with(name(), '" + escapedTagNameQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE);
addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem);
}
function matchPartialTagNamesAndAttributeValues(doc)
{
if (!tagNameQuery) {
matchPartialAttributeValues.call(this, doc);
return;
}
const result = doc.__proto__.evaluate.call(doc, "//*[contains(name(), '" + escapedTagNameQuery + "') or contains(@*, '" + escapedQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE);
addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem);
}
function matchPartialAttributeValues(doc)
{
const result = doc.__proto__.evaluate.call(doc, "//*[contains(@*, '" + escapedQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE);
addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem);
}
function matchStyleSelector(doc)
{
const result = doc.__proto__.querySelectorAll.call(doc, whitespaceTrimmedQuery);
addNodesToResults.call(this, result, result.length, result.item);
}
function matchPlainText(doc)
{
const result = doc.__proto__.evaluate.call(doc, "//text()[contains(., '" + escapedQuery + "')] | //comment()[contains(., '" + escapedQuery + "')]", doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE);
addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem);
}
function matchXPathQuery(doc)
{
const result = doc.__proto__.evaluate.call(doc, whitespaceTrimmedQuery, doc, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE);
addNodesToResults.call(this, result, result.snapshotLength, result.snapshotItem);
}
function finishedSearching()
{
// Remove the searchResultsProperty now that the search is finished.
for (var i = 0; i < this._searchResults.length; ++i)
delete this._searchResults[i][searchResultsProperty];
}
const mainFrameDocument = InspectorController.inspectedWindow().document;
const searchDocuments = [mainFrameDocument];
if (tagNameQuery && startTagFound && endTagFound)
const searchFunctions = [matchExactTagNames, matchPlainText];
else if (tagNameQuery && startTagFound)
const searchFunctions = [matchStartOfTagNames, matchPlainText];
else if (tagNameQuery && endTagFound) {
// FIXME: we should have a matchEndOfTagNames search function if endTagFound is true but not startTagFound.
// This requires ends-with() support in XPath, WebKit only supports starts-with() and contains().
const searchFunctions = [matchPartialTagNames, matchPlainText];
} else if (whitespaceTrimmedQuery === "//*" || whitespaceTrimmedQuery === "*") {
// These queries will match every node. Matching everything isn't useful and can be slow for large pages,
// so limit the search functions list to plain text and attribute matching.
const searchFunctions = [matchPartialAttributeValues, matchPlainText];
} else
const searchFunctions = [matchExactItems, matchStyleSelector, matchPartialTagNamesAndAttributeValues, matchPlainText, matchXPathQuery];
// Find all frames, iframes and object elements to search their documents.
const querySelectorAllFunction = InspectorController.inspectedWindow().Document.prototype.querySelectorAll;
const subdocumentResult = querySelectorAllFunction.call(mainFrameDocument, "iframe, frame, object");
for (var i = 0; i < subdocumentResult.length; ++i) {
var element = subdocumentResult.item(i);
if (element.contentDocument)
searchDocuments.push(element.contentDocument);
}
const panel = this;
var documentIndex = 0;
var searchFunctionIndex = 0;
var chunkIntervalIdentifier = null;
// Split up the work into chunks so we don't block the UI thread while processing.
function processChunk()
{
var searchDocument = searchDocuments[documentIndex];
var searchFunction = searchFunctions[searchFunctionIndex];
if (++searchFunctionIndex > searchFunctions.length) {
searchFunction = searchFunctions[0];
searchFunctionIndex = 0;
if (++documentIndex > searchDocuments.length) {
if (panel._currentSearchChunkIntervalIdentifier === chunkIntervalIdentifier)
delete panel._currentSearchChunkIntervalIdentifier;
clearInterval(chunkIntervalIdentifier);
finishedSearching.call(panel);
return;
}
searchDocument = searchDocuments[documentIndex];
}
if (!searchDocument || !searchFunction)
return;
try {
searchFunction.call(panel, searchDocument);
} catch(err) {
// ignore any exceptions. the query might be malformed, but we allow that.
}
}
processChunk();
chunkIntervalIdentifier = setInterval(processChunk, 25);
this._currentSearchChunkIntervalIdentifier = chunkIntervalIdentifier;
},
jumpToNextSearchResult: function()
{
if (!this._searchResults || !this._searchResults.length)
return;
if (++this._currentSearchResultIndex >= this._searchResults.length)
this._currentSearchResultIndex = 0;
this.focusedDOMNode = this._searchResults[this._currentSearchResultIndex];
},
jumpToPreviousSearchResult: function()
{
if (!this._searchResults || !this._searchResults.length)
return;
if (--this._currentSearchResultIndex < 0)
this._currentSearchResultIndex = (this._searchResults.length - 1);
this.focusedDOMNode = this._searchResults[this._currentSearchResultIndex];
},
    // Called when an inspected window's content is cleared (e.g. navigation);
    // re-attaches mutation listeners to the fresh document if we are visible.
    inspectedWindowCleared: function(window)
    {
        if (InspectorController.isWindowVisible())
            this.updateMutationEventListeners(window);
    },

    // Attaches DOM mutation listeners (and, for subframes, a DOMContentLoaded
    // listener) to the given window's document. Capture phase is used so
    // mutations anywhere in the document are seen.
    _addMutationEventListeners: function(monitoredWindow)
    {
        monitoredWindow.document.addEventListener("DOMNodeInserted", this._nodeInsertedEventListener, true);
        monitoredWindow.document.addEventListener("DOMNodeRemoved", this._nodeRemovedEventListener, true);
        // frameElement is only set for subframes; the main window doesn't need
        // the content-loaded listener here.
        if (monitoredWindow.frameElement)
            monitoredWindow.addEventListener("DOMContentLoaded", this._contentLoadedEventListener, true);
    },

    // Mirror of _addMutationEventListeners; tolerates a window whose document
    // is already gone.
    _removeMutationEventListeners: function(monitoredWindow)
    {
        if (monitoredWindow.frameElement)
            monitoredWindow.removeEventListener("DOMContentLoaded", this._contentLoadedEventListener, true);
        if (!monitoredWindow.document)
            return;
        monitoredWindow.document.removeEventListener("DOMNodeInserted", this._nodeInsertedEventListener, true);
        monitoredWindow.document.removeEventListener("DOMNodeRemoved", this._nodeRemovedEventListener, true);
    },

    // Re-adds listeners to an already-registered window (used after its
    // document is replaced). Note it only adds; removal of stale listeners is
    // implicit because the old document is discarded.
    updateMutationEventListeners: function(monitoredWindow)
    {
        this._addMutationEventListeners(monitoredWindow);
    },

    // Starts monitoring a window, once; listeners are only attached while the
    // inspector window is visible.
    registerMutationEventListeners: function(monitoredWindow)
    {
        if (!monitoredWindow || this._mutationMonitoredWindows.indexOf(monitoredWindow) !== -1)
            return;
        this._mutationMonitoredWindows.push(monitoredWindow);
        if (InspectorController.isWindowVisible())
            this._addMutationEventListeners(monitoredWindow);
    },

    // Stops monitoring a single window. Array.prototype.remove is a WebKit
    // inspector utility extension defined elsewhere in the front-end.
    unregisterMutationEventListeners: function(monitoredWindow)
    {
        if (!monitoredWindow || this._mutationMonitoredWindows.indexOf(monitoredWindow) === -1)
            return;
        this._mutationMonitoredWindows.remove(monitoredWindow);
        this._removeMutationEventListeners(monitoredWindow);
    },

    // Stops monitoring every registered window (used by reset()).
    unregisterAllMutationEventListeners: function()
    {
        for (var i = 0; i < this._mutationMonitoredWindows.length; ++i)
            this._removeMutationEventListeners(this._mutationMonitoredWindows[i]);
        this._mutationMonitoredWindows = [];
    },
    // The root and focused nodes are simply delegated to the tree outline.
    get rootDOMNode()
    {
        return this.treeOutline.rootDOMNode;
    },

    set rootDOMNode(x)
    {
        this.treeOutline.rootDOMNode = x;
    },

    get focusedDOMNode()
    {
        return this.treeOutline.focusedDOMNode;
    },

    set focusedDOMNode(x)
    {
        this.treeOutline.focusedDOMNode = x;
    },

    // A subframe finished parsing: treat its document as a replacement child
    // of the owning frame element.
    _contentLoaded: function(event)
    {
        this.recentlyModifiedNodes.push({node: event.target, parent: event.target.defaultView.frameElement, replaced: true});
        if (this.visible)
            this._updateModifiedNodesSoon();
    },

    // DOMNodeInserted: event.relatedNode is the parent the node was added to.
    _nodeInserted: function(event)
    {
        this.recentlyModifiedNodes.push({node: event.target, parent: event.relatedNode, inserted: true});
        if (this.visible)
            this._updateModifiedNodesSoon();
    },

    // DOMNodeRemoved: event.relatedNode is the parent the node left.
    _nodeRemoved: function(event)
    {
        this.recentlyModifiedNodes.push({node: event.target, parent: event.relatedNode, removed: true});
        if (this.visible)
            this._updateModifiedNodesSoon();
    },

    // Coalesces bursts of mutation events into a single deferred update.
    _updateModifiedNodesSoon: function()
    {
        if ("_updateModifiedNodesTimeout" in this)
            return;
        this._updateModifiedNodesTimeout = setTimeout(this._updateModifiedNodes.bind(this), 0);
    },
    // Applies all queued DOM mutations to the tree outline: updates each
    // affected parent's children exactly once and refreshes the breadcrumb if
    // the focused node's ancestry was touched. objectsAreSame and
    // isAncestorIncludingParentFrames are front-end helpers defined elsewhere.
    _updateModifiedNodes: function()
    {
        // Cancel the pending deferred update if we were called directly.
        if ("_updateModifiedNodesTimeout" in this) {
            clearTimeout(this._updateModifiedNodesTimeout);
            delete this._updateModifiedNodesTimeout;
        }

        var updatedParentTreeElements = [];
        var updateBreadcrumbs = false;

        for (var i = 0; i < this.recentlyModifiedNodes.length; ++i) {
            var replaced = this.recentlyModifiedNodes[i].replaced;
            var parent = this.recentlyModifiedNodes[i].parent;
            if (!parent)
                continue;

            // alreadyUpdatedChildren guards against updating the same parent
            // once per queued mutation.
            var parentNodeItem = this.treeOutline.findTreeElement(parent, null, null, objectsAreSame);
            if (parentNodeItem && !parentNodeItem.alreadyUpdatedChildren) {
                parentNodeItem.updateChildren(replaced);
                parentNodeItem.alreadyUpdatedChildren = true;
                updatedParentTreeElements.push(parentNodeItem);
            }

            if (!updateBreadcrumbs && (objectsAreSame(this.focusedDOMNode, parent) || isAncestorIncludingParentFrames(this.focusedDOMNode, parent)))
                updateBreadcrumbs = true;
        }

        // Clear the one-shot guard flags for the next batch.
        for (var i = 0; i < updatedParentTreeElements.length; ++i)
            delete updatedParentTreeElements[i].alreadyUpdatedChildren;

        this.recentlyModifiedNodes = [];

        if (updateBreadcrumbs)
            this.updateBreadcrumb(true);
    },
    // A style edit can change the box model, so refresh the Metrics pane.
    _stylesPaneEdited: function()
    {
        this.sidebarPanes.metrics.needsUpdate = true;
        this.updateMetrics();
    },

    // A metrics edit changes inline style, so force-refresh the Styles pane.
    _metricsPaneEdited: function()
    {
        this.sidebarPanes.styles.needsUpdate = true;
        this.updateStyles(true);
    },

    // Hovering a crumb highlights its DOM node on the page.
    _mouseMovedInCrumbs: function(event)
    {
        var nodeUnderMouse = document.elementFromPoint(event.pageX, event.pageY);
        var crumbElement = nodeUnderMouse.enclosingNodeOrSelfWithClass("crumb");

        WebInspector.hoveredDOMNode = (crumbElement ? crumbElement.representedObject : null);

        // Back over the crumbs: cancel the pending size reset.
        if ("_mouseOutOfCrumbsTimeout" in this) {
            clearTimeout(this._mouseOutOfCrumbsTimeout);
            delete this._mouseOutOfCrumbsTimeout;
        }
    },

    // Leaving the crumb bar clears the hover highlight and, after a grace
    // period, recomputes crumb sizes (collapsing any user-expanded crumbs).
    _mouseMovedOutOfCrumbs: function(event)
    {
        var nodeUnderMouse = document.elementFromPoint(event.pageX, event.pageY);
        if (nodeUnderMouse.isDescendant(this.crumbsElement))
            return;

        WebInspector.hoveredDOMNode = null;

        this._mouseOutOfCrumbsTimeout = setTimeout(this.updateBreadcrumbSizes.bind(this), 1000);
    },
    // Rebuilds (or merely restyles) the breadcrumb bar for the current focused
    // node. If the focused node already has a crumb and forceUpdate is falsy,
    // only the selected/dimmed classes and sizes are refreshed; otherwise the
    // whole bar is rebuilt from the focused node's ancestor chain.
    updateBreadcrumb: function(forceUpdate)
    {
        if (!this.visible)
            return;

        var crumbs = this.crumbsElement;

        var handled = false;
        var foundRoot = false;
        var crumb = crumbs.firstChild;
        while (crumb) {
            // Crumbs are stored child-first; everything at or above the root
            // node is dimmed.
            if (objectsAreSame(crumb.representedObject, this.rootDOMNode))
                foundRoot = true;

            if (foundRoot)
                crumb.addStyleClass("dimmed");
            else
                crumb.removeStyleClass("dimmed");

            if (objectsAreSame(crumb.representedObject, this.focusedDOMNode)) {
                crumb.addStyleClass("selected");
                handled = true;
            } else {
                crumb.removeStyleClass("selected");
            }

            crumb = crumb.nextSibling;
        }

        if (handled && !forceUpdate) {
            // We don't need to rebuild the crumbs, but we need to adjust sizes
            // to reflect the new focused or root node.
            this.updateBreadcrumbSizes();
            return;
        }

        crumbs.removeChildren();

        var panel = this;

        // Mousedown handler shared by all crumbs.
        function selectCrumbFunction(event)
        {
            var crumb = event.currentTarget;
            if (crumb.hasStyleClass("collapsed")) {
                // Clicking a collapsed crumb will expose the hidden crumbs.
                if (crumb === panel.crumbsElement.firstChild) {
                    // If the focused crumb is the first child, pick the farthest crumb
                    // that is still hidden. This allows the user to expose every crumb.
                    var currentCrumb = crumb;
                    while (currentCrumb) {
                        var hidden = currentCrumb.hasStyleClass("hidden");
                        var collapsed = currentCrumb.hasStyleClass("collapsed");
                        if (!hidden && !collapsed)
                            break;
                        crumb = currentCrumb;
                        currentCrumb = currentCrumb.nextSibling;
                    }
                }

                panel.updateBreadcrumbSizes(crumb);
            } else {
                // Clicking a dimmed crumb or double clicking (event.detail >= 2)
                // will change the root node in addition to the focused node.
                if (event.detail >= 2 || crumb.hasStyleClass("dimmed"))
                    panel.rootDOMNode = crumb.representedObject.parentNode;
                panel.focusedDOMNode = crumb.representedObject;
            }

            event.preventDefault();
        }

        // Walk up from the focused node (crossing frame boundaries via
        // parentNodeOrFrameElement, a helper defined elsewhere), creating one
        // crumb per ancestor. Document nodes themselves get no crumb.
        foundRoot = false;
        for (var current = this.focusedDOMNode; current; current = parentNodeOrFrameElement(current)) {
            if (current.nodeType === Node.DOCUMENT_NODE)
                continue;

            if (objectsAreSame(current, this.rootDOMNode))
                foundRoot = true;

            var crumb = document.createElement("span");
            crumb.className = "crumb";
            crumb.representedObject = current;
            crumb.addEventListener("mousedown", selectCrumbFunction, false);

            var crumbTitle;
            switch (current.nodeType) {
                case Node.ELEMENT_NODE:
                    // Element crumbs show tag name plus #id and .classes; the
                    // tag name is marked "extra" when an id exists, since the
                    // id is the more important identifier.
                    crumbTitle = current.nodeName.toLowerCase();

                    var nameElement = document.createElement("span");
                    nameElement.textContent = crumbTitle;
                    crumb.appendChild(nameElement);

                    var idAttribute = current.getAttribute("id");
                    if (idAttribute) {
                        var idElement = document.createElement("span");
                        crumb.appendChild(idElement);

                        var part = "#" + idAttribute;
                        crumbTitle += part;
                        idElement.appendChild(document.createTextNode(part));

                        // Mark the name as extra, since the ID is more important.
                        nameElement.className = "extra";
                    }

                    var classAttribute = current.getAttribute("class");
                    if (classAttribute) {
                        var classes = classAttribute.split(/\s+/);
                        var foundClasses = {};

                        if (classes.length) {
                            var classesElement = document.createElement("span");
                            classesElement.className = "extra";
                            crumb.appendChild(classesElement);

                            // De-duplicate class names via foundClasses.
                            for (var i = 0; i < classes.length; ++i) {
                                var className = classes[i];
                                if (className && !(className in foundClasses)) {
                                    var part = "." + className;
                                    crumbTitle += part;
                                    classesElement.appendChild(document.createTextNode(part));
                                    foundClasses[className] = true;
                                }
                            }
                        }
                    }

                    break;

                case Node.TEXT_NODE:
                    if (isNodeWhitespace.call(current))
                        crumbTitle = WebInspector.UIString("(whitespace)");
                    else
                        crumbTitle = WebInspector.UIString("(text)");
                    break

                case Node.COMMENT_NODE:
                    crumbTitle = "<!-->";
                    break;

                case Node.DOCUMENT_TYPE_NODE:
                    crumbTitle = "<!DOCTYPE>";
                    break;

                default:
                    crumbTitle = current.nodeName.toLowerCase();
            }

            // Non-element crumbs get a plain name span.
            if (!crumb.childNodes.length) {
                var nameElement = document.createElement("span");
                nameElement.textContent = crumbTitle;
                crumb.appendChild(nameElement);
            }

            crumb.title = crumbTitle;

            if (foundRoot)
                crumb.addStyleClass("dimmed");
            if (objectsAreSame(current, this.focusedDOMNode))
                crumb.addStyleClass("selected");
            // The first appended crumb is the visual right-most ("end").
            if (!crumbs.childNodes.length)
                crumb.addStyleClass("end");

            crumbs.appendChild(crumb);
        }

        if (crumbs.hasChildNodes())
            crumbs.lastChild.addStyleClass("start");

        this.updateBreadcrumbSizes();
    },
    // Shrinks crumbs until the breadcrumb bar fits its container. Crumbs are
    // first "compact"ed (shortened), then "collapsed" (icon-only), preferring
    // to shrink the crumbs farthest from the significant crumb (the focused
    // crumb, or else the selected one). focusedCrumb, when given, is a crumb
    // the user clicked to expand, and biases the shrinking away from it.
    updateBreadcrumbSizes: function(focusedCrumb)
    {
        if (!this.visible)
            return;

        if (document.body.offsetWidth <= 0) {
            // The stylesheet hasn't loaded yet or the window is closed,
            // so we can't calculate what is need. Return early.
            return;
        }

        var crumbs = this.crumbsElement;
        if (!crumbs.childNodes.length || crumbs.offsetWidth <= 0)
            return; // No crumbs, do nothing.

        // A Zero index is the right most child crumb in the breadcrumb.
        var selectedIndex = 0;
        var focusedIndex = 0;
        var selectedCrumb;

        var i = 0;
        var crumb = crumbs.firstChild;
        while (crumb) {
            // Find the selected crumb and index.
            if (!selectedCrumb && crumb.hasStyleClass("selected")) {
                selectedCrumb = crumb;
                selectedIndex = i;
            }

            // Find the focused crumb index.
            if (crumb === focusedCrumb)
                focusedIndex = i;

            // Remove any styles that affect size before
            // deciding to shorten any crumbs.
            if (crumb !== crumbs.lastChild)
                crumb.removeStyleClass("start");
            if (crumb !== crumbs.firstChild)
                crumb.removeStyleClass("end");

            crumb.removeStyleClass("compact");
            crumb.removeStyleClass("collapsed");
            crumb.removeStyleClass("hidden");

            crumb = crumb.nextSibling;
            ++i;
        }

        // Restore the start and end crumb classes in case they got removed in coalesceCollapsedCrumbs().
        // The order of the crumbs in the document is opposite of the visual order.
        crumbs.firstChild.addStyleClass("end");
        crumbs.lastChild.addStyleClass("start");

        // True when the whole bar fits, leaving room for the error/warning
        // counter when the console is closed.
        function crumbsAreSmallerThanContainer()
        {
            var rightPadding = 20;
            var errorWarningElement = document.getElementById("error-warning-count");
            if (!WebInspector.console.visible && errorWarningElement)
                rightPadding += errorWarningElement.offsetWidth;
            return ((crumbs.totalOffsetLeft + crumbs.offsetWidth + rightPadding) < window.innerWidth);
        }

        if (crumbsAreSmallerThanContainer())
            return; // No need to compact the crumbs, they all fit at full size.

        // Direction constants for makeCrumbsSmaller.
        var BothSides = 0;
        var AncestorSide = -1;
        var ChildSide = 1;

        // Applies shrinkingFunction to crumbs (on the given side of the
        // significant crumb, or alternating sides when direction is 0) until
        // the bar fits. Returns true if it now fits.
        function makeCrumbsSmaller(shrinkingFunction, direction, significantCrumb)
        {
            if (!significantCrumb)
                significantCrumb = (focusedCrumb || selectedCrumb);

            if (significantCrumb === selectedCrumb)
                var significantIndex = selectedIndex;
            else if (significantCrumb === focusedCrumb)
                var significantIndex = focusedIndex;
            else {
                var significantIndex = 0;
                for (var i = 0; i < crumbs.childNodes.length; ++i) {
                    if (crumbs.childNodes[i] === significantCrumb) {
                        significantIndex = i;
                        break;
                    }
                }
            }

            function shrinkCrumbAtIndex(index)
            {
                var shrinkCrumb = crumbs.childNodes[index];
                if (shrinkCrumb && shrinkCrumb !== significantCrumb)
                    shrinkingFunction(shrinkCrumb);
                if (crumbsAreSmallerThanContainer())
                    return true; // No need to compact the crumbs more.
                return false;
            }

            // Shrink crumbs one at a time by applying the shrinkingFunction until the crumbs
            // fit in the container or we run out of crumbs to shrink.
            if (direction) {
                // Crumbs are shrunk on only one side (based on direction) of the signifcant crumb.
                var index = (direction > 0 ? 0 : crumbs.childNodes.length - 1);
                while (index !== significantIndex) {
                    if (shrinkCrumbAtIndex(index))
                        return true;
                    index += (direction > 0 ? 1 : -1);
                }
            } else {
                // Crumbs are shrunk in order of descending distance from the signifcant crumb,
                // with a tie going to child crumbs.
                var startIndex = 0;
                var endIndex = crumbs.childNodes.length - 1;
                while (startIndex != significantIndex || endIndex != significantIndex) {
                    var startDistance = significantIndex - startIndex;
                    var endDistance = endIndex - significantIndex;
                    if (startDistance >= endDistance)
                        var index = startIndex++;
                    else
                        var index = endIndex--;
                    if (shrinkCrumbAtIndex(index))
                        return true;
                }
            }

            // We are not small enough yet, return false so the caller knows.
            return false;
        }

        // Merges runs of adjacent collapsed crumbs into a single visible
        // collapsed crumb by hiding the rest, fixing up start/end classes.
        function coalesceCollapsedCrumbs()
        {
            var crumb = crumbs.firstChild;
            var collapsedRun = false;
            var newStartNeeded = false;
            var newEndNeeded = false;
            while (crumb) {
                var hidden = crumb.hasStyleClass("hidden");
                if (!hidden) {
                    var collapsed = crumb.hasStyleClass("collapsed");
                    if (collapsedRun && collapsed) {
                        crumb.addStyleClass("hidden");
                        crumb.removeStyleClass("compact");
                        crumb.removeStyleClass("collapsed");

                        if (crumb.hasStyleClass("start")) {
                            crumb.removeStyleClass("start");
                            newStartNeeded = true;
                        }

                        if (crumb.hasStyleClass("end")) {
                            crumb.removeStyleClass("end");
                            newEndNeeded = true;
                        }

                        // NOTE: continue without advancing deliberately
                        // re-examines this crumb; it is now "hidden", so the
                        // next pass takes the else branch and advances.
                        continue;
                    }

                    collapsedRun = collapsed;

                    if (newEndNeeded) {
                        newEndNeeded = false;
                        crumb.addStyleClass("end");
                    }
                } else
                    collapsedRun = true;
                crumb = crumb.nextSibling;
            }

            if (newStartNeeded) {
                crumb = crumbs.lastChild;
                while (crumb) {
                    if (!crumb.hasStyleClass("hidden")) {
                        crumb.addStyleClass("start");
                        break;
                    }
                    crumb = crumb.previousSibling;
                }
            }
        }

        // Shrinking functions passed to makeCrumbsSmaller.
        function compact(crumb)
        {
            if (crumb.hasStyleClass("hidden"))
                return;
            crumb.addStyleClass("compact");
        }

        function collapse(crumb, dontCoalesce)
        {
            if (crumb.hasStyleClass("hidden"))
                return;
            crumb.addStyleClass("collapsed");
            crumb.removeStyleClass("compact");
            if (!dontCoalesce)
                coalesceCollapsedCrumbs();
        }

        function compactDimmed(crumb)
        {
            if (crumb.hasStyleClass("dimmed"))
                compact(crumb);
        }

        function collapseDimmed(crumb)
        {
            if (crumb.hasStyleClass("dimmed"))
                collapse(crumb);
        }

        if (!focusedCrumb) {
            // When not focused on a crumb we can be biased and collapse less important
            // crumbs that the user might not care much about.

            // Compact child crumbs.
            if (makeCrumbsSmaller(compact, ChildSide))
                return;

            // Collapse child crumbs.
            if (makeCrumbsSmaller(collapse, ChildSide))
                return;

            // Compact dimmed ancestor crumbs.
            if (makeCrumbsSmaller(compactDimmed, AncestorSide))
                return;

            // Collapse dimmed ancestor crumbs.
            if (makeCrumbsSmaller(collapseDimmed, AncestorSide))
                return;
        }

        // Compact ancestor crumbs, or from both sides if focused.
        if (makeCrumbsSmaller(compact, (focusedCrumb ? BothSides : AncestorSide)))
            return;

        // Collapse ancestor crumbs, or from both sides if focused.
        if (makeCrumbsSmaller(collapse, (focusedCrumb ? BothSides : AncestorSide)))
            return;

        if (!selectedCrumb)
            return;

        // Compact the selected crumb.
        compact(selectedCrumb);
        if (crumbsAreSmallerThanContainer())
            return;

        // Collapse the selected crumb as a last resort. Pass true to prevent coalescing.
        collapse(selectedCrumb, true);
    },
    // The three update* methods below are lazy: each refreshes its sidebar
    // pane only when the pane is expanded AND flagged stale via needsUpdate.

    updateStyles: function(forceUpdate)
    {
        var stylesSidebarPane = this.sidebarPanes.styles;
        if (!stylesSidebarPane.expanded || !stylesSidebarPane.needsUpdate)
            return;

        stylesSidebarPane.update(this.focusedDOMNode, null, forceUpdate);
        stylesSidebarPane.needsUpdate = false;
    },

    updateMetrics: function()
    {
        var metricsSidebarPane = this.sidebarPanes.metrics;
        if (!metricsSidebarPane.expanded || !metricsSidebarPane.needsUpdate)
            return;

        metricsSidebarPane.update(this.focusedDOMNode);
        metricsSidebarPane.needsUpdate = false;
    },

    updateProperties: function()
    {
        var propertiesSidebarPane = this.sidebarPanes.properties;
        if (!propertiesSidebarPane.expanded || !propertiesSidebarPane.needsUpdate)
            return;

        propertiesSidebarPane.update(this.focusedDOMNode);
        propertiesSidebarPane.needsUpdate = false;
    },

    // Keyboard events are delegated straight to the tree outline.
    handleKeyEvent: function(event)
    {
        this.treeOutline.handleKeyEvent(event);
    },
    // Copy (Cmd/Ctrl-C) with no text selection copies a representation of the
    // focused DOM node to the clipboard.
    handleCopyEvent: function(event)
    {
        // Don't prevent the normal copy if the user has a selection.
        if (!window.getSelection().isCollapsed)
            return;

        switch (this.focusedDOMNode.nodeType) {
            case Node.ELEMENT_NODE:
                var data = this.focusedDOMNode.outerHTML;
                break;
            case Node.COMMENT_NODE:
                var data = "<!--" + this.focusedDOMNode.nodeValue + "-->";
                break;
            // Note: default and TEXT_NODE intentionally share this body, so
            // any other node type also copies its nodeValue (possibly null).
            default:
            case Node.TEXT_NODE:
                var data = this.focusedDOMNode.nodeValue;
        }

        event.clipboardData.clearData();
        event.preventDefault();

        if (data)
            event.clipboardData.setData("text/plain", data);
    },
    // Begin dragging the vertical divider; elementDragStart installs the
    // move/up handlers and sets the col-resize cursor.
    rightSidebarResizerDragStart: function(event)
    {
        WebInspector.elementDragStart(this.sidebarElement, this.rightSidebarResizerDrag.bind(this), this.rightSidebarResizerDragEnd.bind(this), event, "col-resize");
    },

    rightSidebarResizerDragEnd: function(event)
    {
        WebInspector.elementDragEnd(event);
    },

    // Resize the sidebar as the mouse moves, clamped between the configured
    // minimum width and two thirds of the window.
    rightSidebarResizerDrag: function(event)
    {
        var x = event.pageX;
        var newWidth = Number.constrain(window.innerWidth - x, Preferences.minElementsSidebarWidth, window.innerWidth * 0.66);

        this.sidebarElement.style.width = newWidth + "px";
        this.contentElement.style.right = newWidth + "px";
        // Keep the grab handle aligned with the new divider position.
        this.sidebarResizeElement.style.right = (newWidth - 3) + "px";

        this.treeOutline.updateSelection();

        event.preventDefault();
    },
_nodeSearchButtonClicked: function(event)
{
InspectorController.toggleNodeSearch();
if (InspectorController.searchingForNode())
this.nodeSearchButton.addStyleClass("toggled-on");
else
this.nodeSearchButton.removeStyleClass("toggled-on");
}
}
WebInspector.ElementsPanel.prototype.__proto__ = WebInspector.Panel.prototype;
| RLovelett/qt | src/3rdparty/webkit/WebCore/inspector/front-end/ElementsPanel.js | JavaScript | lgpl-2.1 | 45,561 |
/*
** 2014 December 16
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
*/
package info.ata4.disunity.cli.command;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
import info.ata4.disunity.cli.converters.PathConverter;
import info.ata4.io.util.PathUtils;
import info.ata4.unity.assetbundle.AssetBundleUtils;
import java.io.IOException;
import java.nio.file.Path;
/**
*
* @author Nico Bergemann <barracuda415 at yahoo.de>
*/
@Parameters(
    commandNames = "bundle-build",
    commandDescription = "Builds an asset bundle from a .json property file."
)
public class BundleBuildCommand extends SingleFileCommand {

    @Parameter(
        names = {"-o", "--output"},
        description = "Asset bundle output file",
        converter = PathConverter.class
    )
    private Path outFile;

    /**
     * Builds the asset bundle described by the given property file. When no
     * explicit output file was supplied via -o/--output, the bundle is
     * written next to the input file as "&lt;basename&gt;.unity3d".
     *
     * @param file .json property file describing the bundle
     * @throws IOException if reading the properties or writing the bundle fails
     */
    @Override
    public void handleFile(Path file) throws IOException {
        if (outFile == null) {
            String fileName = PathUtils.getBaseName(file);
            // file.getParent() is null for bare relative paths such as
            // "props.json"; fall back to the (relative) current directory
            // instead of failing with a NullPointerException.
            Path parent = file.getParent();
            Path baseDir = (parent != null) ? parent : file.getFileSystem().getPath("");
            outFile = baseDir.resolve(fileName + ".unity3d");
        }

        AssetBundleUtils.build(file, outFile);
    }
}
| catinred2/disunity | disunity-cli/src/main/java/info/ata4/disunity/cli/command/BundleBuildCommand.java | Java | unlicense | 1,357 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.slm;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Client-side view of Snapshot Lifecycle Management (SLM) statistics:
 * cumulative counters for the retention task plus per-policy snapshot
 * counters. Instances are parsed from X-Content responses via
 * {@link #parse(XContentParser)} and serialize themselves back to
 * X-Content via {@link #toXContent(XContentBuilder, Params)}.
 */
public class SnapshotLifecycleStats implements ToXContentObject {

    // Cumulative retention-task counters: runs, failures, timeouts, and the
    // total time (milliseconds) spent deleting snapshots.
    private final long retentionRunCount;
    private final long retentionFailedCount;
    private final long retentionTimedOut;
    private final long retentionTimeMs;
    // Per-policy statistics, keyed by policy id.
    private final Map<String, SnapshotPolicyStats> policyStats;

    // Field names used in the X-Content representation.
    public static final ParseField RETENTION_RUNS = new ParseField("retention_runs");
    public static final ParseField RETENTION_FAILED = new ParseField("retention_failed");
    public static final ParseField RETENTION_TIMED_OUT = new ParseField("retention_timed_out");
    public static final ParseField RETENTION_TIME = new ParseField("retention_deletion_time");
    public static final ParseField RETENTION_TIME_MILLIS = new ParseField("retention_deletion_time_millis");
    public static final ParseField POLICY_STATS = new ParseField("policy_stats");
    public static final ParseField TOTAL_TAKEN = new ParseField("total_snapshots_taken");
    public static final ParseField TOTAL_FAILED = new ParseField("total_snapshots_failed");
    public static final ParseField TOTAL_DELETIONS = new ParseField("total_snapshots_deleted");
    public static final ParseField TOTAL_DELETION_FAILURES = new ParseField("total_snapshot_deletion_failures");

    @SuppressWarnings("unchecked")
    private static final ConstructingObjectParser<SnapshotLifecycleStats, Void> PARSER =
        new ConstructingObjectParser<>("snapshot_policy_stats", true,
            a -> {
                long runs = (long) a[0];
                long failed = (long) a[1];
                long timedOut = (long) a[2];
                long timeMs = (long) a[3];
                // Re-key the parsed per-policy stats list by policy id.
                Map<String, SnapshotPolicyStats> policyStatsMap = ((List<SnapshotPolicyStats>) a[4]).stream()
                    .collect(Collectors.toMap(m -> m.policyId, Function.identity()));
                return new SnapshotLifecycleStats(runs, failed, timedOut, timeMs, policyStatsMap);
            });

    static {
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_RUNS);
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_FAILED);
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIMED_OUT);
        PARSER.declareLong(ConstructingObjectParser.constructorArg(), RETENTION_TIME_MILLIS);
        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshotPolicyStats.PARSER, POLICY_STATS);
    }

    // NOTE(review): instances are only created through parse(); the
    // constructor is private despite the upstream "package visible for
    // testing" remark.
    private SnapshotLifecycleStats(long retentionRuns, long retentionFailed, long retentionTimedOut, long retentionTimeMs,
                                   Map<String, SnapshotPolicyStats> policyStats) {
        this.retentionRunCount = retentionRuns;
        this.retentionFailedCount = retentionFailed;
        this.retentionTimedOut = retentionTimedOut;
        this.retentionTimeMs = retentionTimeMs;
        this.policyStats = policyStats;
    }

    /** Parses an instance from an X-Content response body. */
    public static SnapshotLifecycleStats parse(XContentParser parser) {
        return PARSER.apply(parser, null);
    }

    public long getRetentionRunCount() {
        return retentionRunCount;
    }

    public long getRetentionFailedCount() {
        return retentionFailedCount;
    }

    public long getRetentionTimedOut() {
        return retentionTimedOut;
    }

    public long getRetentionTimeMillis() {
        return retentionTimeMs;
    }

    /**
     * @return a map of per-policy stats for each SLM policy
     */
    public Map<String, SnapshotPolicyStats> getMetrics() {
        return Collections.unmodifiableMap(this.policyStats);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(RETENTION_RUNS.getPreferredName(), this.retentionRunCount);
        builder.field(RETENTION_FAILED.getPreferredName(), this.retentionFailedCount);
        builder.field(RETENTION_TIMED_OUT.getPreferredName(), this.retentionTimedOut);
        TimeValue retentionTime = TimeValue.timeValueMillis(this.retentionTimeMs);
        builder.field(RETENTION_TIME.getPreferredName(), retentionTime);
        builder.field(RETENTION_TIME_MILLIS.getPreferredName(), retentionTime.millis());

        // Totals are aggregated across all per-policy stats.
        Map<String, SnapshotPolicyStats> metrics = getMetrics();
        long totalTaken = metrics.values().stream().mapToLong(s -> s.snapshotsTaken).sum();
        long totalFailed = metrics.values().stream().mapToLong(s -> s.snapshotsFailed).sum();
        long totalDeleted = metrics.values().stream().mapToLong(s -> s.snapshotsDeleted).sum();
        long totalDeleteFailures = metrics.values().stream().mapToLong(s -> s.snapshotDeleteFailures).sum();
        builder.field(TOTAL_TAKEN.getPreferredName(), totalTaken);
        builder.field(TOTAL_FAILED.getPreferredName(), totalFailed);
        builder.field(TOTAL_DELETIONS.getPreferredName(), totalDeleted);
        builder.field(TOTAL_DELETION_FAILURES.getPreferredName(), totalDeleteFailures);

        // Per-policy stats are emitted as one nested object per policy id.
        builder.startObject(POLICY_STATS.getPreferredName());
        for (Map.Entry<String, SnapshotPolicyStats> policy : metrics.entrySet()) {
            SnapshotPolicyStats perPolicyMetrics = policy.getValue();
            builder.startObject(perPolicyMetrics.policyId);
            perPolicyMetrics.toXContent(builder, params);
            builder.endObject();
        }
        builder.endObject();
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(retentionRunCount, retentionFailedCount, retentionTimedOut, retentionTimeMs, policyStats);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (obj.getClass() != getClass()) {
            return false;
        }
        SnapshotLifecycleStats other = (SnapshotLifecycleStats) obj;
        return retentionRunCount == other.retentionRunCount &&
            retentionFailedCount == other.retentionFailedCount &&
            retentionTimedOut == other.retentionTimedOut &&
            retentionTimeMs == other.retentionTimeMs &&
            Objects.equals(policyStats, other.policyStats);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    /**
     * Snapshot counters for a single SLM policy: snapshots taken, failed,
     * deleted, and deletion failures. Serialized as an X-Content fragment
     * (no enclosing object) so the parent can wrap it keyed by policy id.
     */
    public static class SnapshotPolicyStats implements ToXContentFragment {
        private final String policyId;
        private final long snapshotsTaken;
        private final long snapshotsFailed;
        private final long snapshotsDeleted;
        private final long snapshotDeleteFailures;

        public static final ParseField POLICY_ID = new ParseField("policy");
        static final ParseField SNAPSHOTS_TAKEN = new ParseField("snapshots_taken");
        static final ParseField SNAPSHOTS_FAILED = new ParseField("snapshots_failed");
        static final ParseField SNAPSHOTS_DELETED = new ParseField("snapshots_deleted");
        static final ParseField SNAPSHOT_DELETION_FAILURES = new ParseField("snapshot_deletion_failures");

        private static final ConstructingObjectParser<SnapshotPolicyStats, Void> PARSER =
            new ConstructingObjectParser<>("snapshot_policy_stats", true,
                a -> {
                    String id = (String) a[0];
                    long taken = (long) a[1];
                    long failed = (long) a[2];
                    long deleted = (long) a[3];
                    long deleteFailed = (long) a[4];
                    return new SnapshotPolicyStats(id, taken, failed, deleted, deleteFailed);
                });

        static {
            PARSER.declareString(ConstructingObjectParser.constructorArg(), POLICY_ID);
            PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_TAKEN);
            PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_FAILED);
            PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOTS_DELETED);
            PARSER.declareLong(ConstructingObjectParser.constructorArg(), SNAPSHOT_DELETION_FAILURES);
        }

        public SnapshotPolicyStats(String policyId, long snapshotsTaken, long snapshotsFailed, long deleted, long failedDeletes) {
            this.policyId = policyId;
            this.snapshotsTaken = snapshotsTaken;
            this.snapshotsFailed = snapshotsFailed;
            this.snapshotsDeleted = deleted;
            this.snapshotDeleteFailures = failedDeletes;
        }

        // NOTE(review): the policyId argument is unused here; PARSER reads
        // the id from the "policy" field of the parsed object instead.
        public static SnapshotPolicyStats parse(XContentParser parser, String policyId) {
            return PARSER.apply(parser, null);
        }

        public String getPolicyId() {
            return policyId;
        }

        public long getSnapshotsTaken() {
            return snapshotsTaken;
        }

        public long getSnapshotsFailed() {
            return snapshotsFailed;
        }

        public long getSnapshotsDeleted() {
            return snapshotsDeleted;
        }

        public long getSnapshotDeleteFailures() {
            return snapshotDeleteFailures;
        }

        @Override
        public int hashCode() {
            return Objects.hash(policyId, snapshotsTaken, snapshotsFailed, snapshotsDeleted, snapshotDeleteFailures);
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == null) {
                return false;
            }
            if (obj.getClass() != getClass()) {
                return false;
            }
            SnapshotPolicyStats other = (SnapshotPolicyStats) obj;
            return Objects.equals(policyId, other.policyId) &&
                snapshotsTaken == other.snapshotsTaken &&
                snapshotsFailed == other.snapshotsFailed &&
                snapshotsDeleted == other.snapshotsDeleted &&
                snapshotDeleteFailures == other.snapshotDeleteFailures;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            // Fragment: the caller opens/closes the enclosing object, so the
            // policy id itself is not written here.
            builder.field(SnapshotPolicyStats.SNAPSHOTS_TAKEN.getPreferredName(), snapshotsTaken);
            builder.field(SnapshotPolicyStats.SNAPSHOTS_FAILED.getPreferredName(), snapshotsFailed);
            builder.field(SnapshotPolicyStats.SNAPSHOTS_DELETED.getPreferredName(), snapshotsDeleted);
            builder.field(SnapshotPolicyStats.SNAPSHOT_DELETION_FAILURES.getPreferredName(), snapshotDeleteFailures);
            return builder;
        }
    }
}
| gingerwizard/elasticsearch | client/rest-high-level/src/main/java/org/elasticsearch/client/slm/SnapshotLifecycleStats.java | Java | apache-2.0 | 11,928 |
package org.osmdroid.samplefragments;
/**
 * Lazily-created singleton registry of the demo fragments shown by the
 * sample browser. Samples are instantiated once, up front, in the private
 * constructor.
 */
public final class SampleFactory {

    private static SampleFactory _instance;

    private final BaseSampleFragment[] mSamples;

    public static SampleFactory getInstance() {
        if (_instance == null)
            _instance = new SampleFactory();
        return _instance;
    }

    private SampleFactory() {
        mSamples = new BaseSampleFragment[] {
                new SampleWithMinimapItemizedOverlayWithFocus(),
                new SampleLimitedScrollArea(),
                new SampleFragmentXmlLayout(),
                new SampleOsmPath(),
        };
    }

    /** Returns the sample at the given position; indices follow count(). */
    public BaseSampleFragment getSample(int index) {
        return mSamples[index];
    }

    /** Number of registered samples. */
    public int count() {
        return mSamples.length;
    }
}
| DT9/osmdroid | OpenStreetMapViewer/src/org/osmdroid/samplefragments/SampleFactory.java | Java | apache-2.0 | 642 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools;
import java.io.BufferedReader;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import java.util.Stack;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Reader;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.InvalidInputException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileRecordReader;
import org.apache.hadoop.mapreduce.JobSubmissionFiles;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
/**
* A Map-reduce program to recursively copy directories between
* different file-systems.
*/
@Deprecated
public class DistCpV1 implements Tool {
public static final Log LOG = LogFactory.getLog(DistCpV1.class);
private static final String NAME = "distcp";
private static final String usage = NAME
+ " [OPTIONS] <srcurl>* <desturl>" +
"\n\nOPTIONS:" +
"\n-p[rbugpt] Preserve status" +
"\n r: replication number" +
"\n b: block size" +
"\n u: user" +
"\n g: group" +
"\n p: permission" +
"\n t: modification and access times" +
"\n -p alone is equivalent to -prbugpt" +
"\n-i Ignore failures" +
"\n-basedir <basedir> Use <basedir> as the base directory when copying files from <srcurl>" +
"\n-log <logdir> Write logs to <logdir>" +
"\n-m <num_maps> Maximum number of simultaneous copies" +
"\n-overwrite Overwrite destination" +
"\n-update Overwrite if src size different from dst size" +
"\n-skipcrccheck Do not use CRC check to determine if src is " +
"\n different from dest. Relevant only if -update" +
"\n is specified" +
"\n-f <urilist_uri> Use list at <urilist_uri> as src list" +
"\n-filelimit <n> Limit the total number of files to be <= n" +
"\n-sizelimit <n> Limit the total size to be <= n bytes" +
"\n-delete Delete the files existing in the dst but not in src" +
"\n-dryrun Display count of files and total size of files" +
"\n in src and then exit. Copy is not done at all." +
"\n desturl should not be speicified with out -update." +
"\n-mapredSslConf <f> Filename of SSL configuration for mapper task" +
"\n\nNOTE 1: if -overwrite or -update are set, each source URI is " +
"\n interpreted as an isomorphic update to an existing directory." +
"\nFor example:" +
"\nhadoop " + NAME + " -p -update \"hdfs://A:8020/user/foo/bar\" " +
"\"hdfs://B:8020/user/foo/baz\"\n" +
"\n would update all descendants of 'baz' also in 'bar'; it would " +
"\n *not* update /user/foo/baz/bar" +
"\n\nNOTE 2: The parameter <n> in -filelimit and -sizelimit can be " +
"\n specified with symbolic representation. For examples," +
"\n 1230k = 1230 * 1024 = 1259520" +
"\n 891g = 891 * 1024^3 = 956703965184" +
"\n";
private static final long BYTES_PER_MAP = 256 * 1024 * 1024;
private static final int MAX_MAPS_PER_NODE = 20;
private static final int SYNC_FILE_MAX = 10;
private static final int DEFAULT_FILE_RETRIES = 3;
/** Map-reduce counters reported by the copy tasks. */
static enum Counter { COPY, SKIP, FAIL, BYTESCOPIED, BYTESEXPECTED }

/** Command-line flags and the configuration property each one maps to. */
static enum Options {
    DELETE("-delete", NAME + ".delete"),
    FILE_LIMIT("-filelimit", NAME + ".limit.file"),
    SIZE_LIMIT("-sizelimit", NAME + ".limit.size"),
    IGNORE_READ_FAILURES("-i", NAME + ".ignore.read.failures"),
    PRESERVE_STATUS("-p", NAME + ".preserve.status"),
    OVERWRITE("-overwrite", NAME + ".overwrite.always"),
    UPDATE("-update", NAME + ".overwrite.ifnewer"),
    SKIPCRC("-skipcrccheck", NAME + ".skip.crc.check");

    // Flag as typed on the command line, and the job property it sets.
    final String cmd, propertyname;

    private Options(String cmd, String propertyname) {
        this.cmd = cmd;
        this.propertyname = propertyname;
    }

    /**
     * Parses args[offset] as a positive long, accepting the symbolic
     * binary-prefix forms handled by TraditionalBinaryPrefix (e.g. "1230k").
     * Throws IllegalArgumentException when the value is missing or not > 0.
     */
    private long parseLong(String[] args, int offset) {
        if (offset == args.length) {
            throw new IllegalArgumentException("<n> not specified in " + cmd);
        }
        long n = StringUtils.TraditionalBinaryPrefix.string2long(args[offset]);
        if (n <= 0) {
            throw new IllegalArgumentException("n = " + n + " <= 0 in " + cmd);
        }
        return n;
    }
}
/** File attributes that -p can ask distcp to preserve on the destination. */
static enum FileAttribute {
    BLOCK_SIZE, REPLICATION, USER, GROUP, PERMISSION, TIMES;

    // Lower-case first letter of the attribute name; this is the character
    // users append to the -p flag.
    final char symbol;

    private FileAttribute() {
        symbol = StringUtils.toLowerCase(toString()).charAt(0);
    }

    /**
     * Parses the -p option value into the set of attributes to preserve.
     * A null or empty string selects every attribute; otherwise each
     * character must match exactly one attribute symbol and may appear
     * at most once.
     */
    static EnumSet<FileAttribute> parse(String s) {
        if (s == null || s.length() == 0) {
            return EnumSet.allOf(FileAttribute.class);
        }

        EnumSet<FileAttribute> set = EnumSet.noneOf(FileAttribute.class);
        FileAttribute[] attributes = values();
        for (char c : s.toCharArray()) {
            FileAttribute match = null;
            for (FileAttribute attribute : attributes) {
                if (c == attribute.symbol) {
                    match = attribute;
                    break;
                }
            }
            if (match == null) {
                throw new IllegalArgumentException("'" + c + "' in " + s
                    + " is undefined.");
            }
            // Set.add returns false when the element was already present,
            // which means the symbol was given twice.
            if (!set.add(match)) {
                throw new IllegalArgumentException("There are more than one '"
                    + match.symbol + "' in " + s);
            }
        }
        return set;
    }
}
static final String TMP_DIR_LABEL = NAME + ".tmp.dir";
static final String DST_DIR_LABEL = NAME + ".dest.path";
static final String JOB_DIR_LABEL = NAME + ".job.dir";
static final String MAX_MAPS_LABEL = NAME + ".max.map.tasks";
static final String SRC_LIST_LABEL = NAME + ".src.list";
static final String SRC_COUNT_LABEL = NAME + ".src.count";
static final String TOTAL_SIZE_LABEL = NAME + ".total.size";
static final String DST_DIR_LIST_LABEL = NAME + ".dst.dir.list";
static final String BYTES_PER_MAP_LABEL = NAME + ".bytes.per.map";
static final String PRESERVE_STATUS_LABEL
= Options.PRESERVE_STATUS.propertyname + ".value";
static final String FILE_RETRIES_LABEL = NAME + ".file.retries";
private JobConf conf;
/** Sets the job configuration, wrapping it in a JobConf when necessary. */
public void setConf(Configuration conf) {
    if (conf instanceof JobConf) {
        this.conf = (JobConf) conf;
    } else {
        this.conf = new JobConf(conf);
    }
}

/** Returns the current job configuration. */
public Configuration getConf() {
    return conf;
}

/** Creates a DistCpV1 tool bound to the given configuration. */
public DistCpV1(Configuration conf) {
    setConf(conf);
}
/**
 * An input/output pair of filenames: the source file's FileStatus plus the
 * destination path string, serialized as a Writable record in the src list
 * consumed by the copy mappers.
 */
static class FilePair implements Writable {
    FileStatus input = new FileStatus();
    String output;

    // No-arg constructor required for Writable deserialization.
    FilePair() { }

    FilePair(FileStatus input, String output) {
        this.input = input;
        this.output = output;
    }

    public void readFields(DataInput in) throws IOException {
        input.readFields(in);
        output = Text.readString(in);
    }

    public void write(DataOutput out) throws IOException {
        input.write(out);
        Text.writeString(out, output);
    }

    public String toString() {
        return input + " : " + output;
    }
}
/**
 * InputFormat of a distcp job responsible for generating splits of the src
 * file list.
 */
static class CopyInputFormat implements InputFormat<Text, Text> {

    /**
     * Produce splits such that each is no greater than the quotient of the
     * total size and the number of splits requested.
     * @param job The handle to the JobConf object
     * @param numSplits Number of splits requested
     */
    public InputSplit[] getSplits(JobConf job, int numSplits)
        throws IOException {
        // These values are written into the job by the driver; missing ones
        // mean the job was not set up through distcp.
        int cnfiles = job.getInt(SRC_COUNT_LABEL, -1);
        long cbsize = job.getLong(TOTAL_SIZE_LABEL, -1);
        String srcfilelist = job.get(SRC_LIST_LABEL, "");
        if (cnfiles < 0 || cbsize < 0 || "".equals(srcfilelist)) {
            throw new RuntimeException("Invalid metadata: #files(" + cnfiles +
                                       ") total_size(" + cbsize + ") listuri(" +
                                       srcfilelist + ")");
        }
        Path src = new Path(srcfilelist);
        FileSystem fs = src.getFileSystem(job);
        FileStatus srcst = fs.getFileStatus(src);

        ArrayList<FileSplit> splits = new ArrayList<FileSplit>(numSplits);
        LongWritable key = new LongWritable();   // record key: bytes for this file
        FilePair value = new FilePair();
        final long targetsize = cbsize / numSplits;
        long pos = 0L;    // byte offset in the list file where the current split starts
        long last = 0L;   // list-file position just after the last record read
        long acc = 0L;    // file bytes accumulated into the current split
        long cbrem = srcst.getLen();  // list-file bytes not yet assigned to a split
        try (SequenceFile.Reader sl =
            new SequenceFile.Reader(job, Reader.file(src))) {
            for (; sl.next(key, value); last = sl.getPosition()) {
                // if adding this split would put this split past the target size,
                // cut the last split and put this next file in the next split.
                if (acc + key.get() > targetsize && acc != 0) {
                    long splitsize = last - pos;
                    splits.add(new FileSplit(src, pos, splitsize, (String[])null));
                    cbrem -= splitsize;
                    pos = last;
                    acc = 0L;
                }
                acc += key.get();
            }
        }
        // Whatever remains of the list file becomes the final split.
        if (cbrem != 0) {
            splits.add(new FileSplit(src, pos, cbrem, (String[])null));
        }

        return splits.toArray(new FileSplit[splits.size()]);
    }

    /**
     * Returns a reader for this split of the src file list.
     */
    public RecordReader<Text, Text> getRecordReader(InputSplit split,
        JobConf job, Reporter reporter) throws IOException {
        return new SequenceFileRecordReader<Text, Text>(job, (FileSplit)split);
    }
}
/**
* FSCopyFilesMapper: The mapper for copying files between FileSystems.
*/
static class CopyFilesMapper
implements Mapper<LongWritable, FilePair, WritableComparable<?>, Text> {
// config
private int sizeBuf = 128 * 1024;
private FileSystem destFileSys = null;
private boolean ignoreReadFailures;
private boolean preserve_status;
private EnumSet<FileAttribute> preseved;
private boolean overwrite;
private boolean update;
private Path destPath = null;
private byte[] buffer = null;
private JobConf job;
private boolean skipCRCCheck = false;
// stats
private int failcount = 0;
private int skipcount = 0;
private int copycount = 0;
/** Human-readable summary of this task's copy/skip/fail counters. */
private String getCountString() {
    return "Copied: " + copycount + " Skipped: " + skipcount
        + " Failed: " + failcount;
}

/** Publishes the current counter summary as the task status string. */
private void updateStatus(Reporter reporter) {
    reporter.setStatus(getCountString());
}
/**
 * Return true if dst should be replaced by src and the update flag is set.
 * Right now, this merely checks that the src and dst len are not equal.
 * This should be improved on once modification times, CRCs, etc. can
 * be meaningful in this context.
 * @throws IOException if comparing the files fails
 */
private boolean needsUpdate(FileStatus srcstatus,
    FileSystem dstfs, Path dstpath) throws IOException {
    return update && !sameFile(srcstatus.getPath().getFileSystem(job),
        srcstatus, dstfs, dstpath, skipCRCCheck);
}

/**
 * Opens an output stream for the destination file, deleting any existing
 * file first. When -p was given, permission, replication and block size
 * are taken from the source for the attributes selected for preservation;
 * otherwise the destination filesystem defaults apply.
 */
private FSDataOutputStream create(Path f, Reporter reporter,
    FileStatus srcstat) throws IOException {
    if (destFileSys.exists(f)) {
        destFileSys.delete(f, false);
    }
    if (!preserve_status) {
        return destFileSys.create(f, true, sizeBuf, reporter);
    }

    // Pick each attribute from the source only when it is in the
    // preserved set; null permission means "use the fs default".
    FsPermission permission = preseved.contains(FileAttribute.PERMISSION)?
        srcstat.getPermission(): null;
    short replication = preseved.contains(FileAttribute.REPLICATION)?
        srcstat.getReplication(): destFileSys.getDefaultReplication(f);
    long blockSize = preseved.contains(FileAttribute.BLOCK_SIZE)?
        srcstat.getBlockSize(): destFileSys.getDefaultBlockSize(f);
    return destFileSys.create(f, permission, true, sizeBuf, replication,
        blockSize, reporter);
}
/**
 * Validates copy by checking the sizes of files first and then
 * checksums, if the filesystems support checksums.
 * @param srcstat src path and metadata
 * @param absdst dst path
 * @return true if src & destination files are same
 */
private boolean validateCopy(FileStatus srcstat, Path absdst)
    throws IOException {
    // The comparison is only meaningful once the destination exists;
    // sameFile() then compares the two files (honoring skipCRCCheck).
    return destFileSys.exists(absdst)
        && sameFile(srcstat.getPath().getFileSystem(job), srcstat,
                    destFileSys, absdst, skipCRCCheck);
}
/**
 * Increment number of files copied and bytes copied and then report status
 */
void updateCopyStatus(FileStatus srcstat, Reporter reporter) {
    copycount++;
    reporter.incrCounter(Counter.BYTESCOPIED, srcstat.getLen());
    reporter.incrCounter(Counter.COPY, 1);
    updateStatus(reporter);
}

/**
 * Skip copying this file if already exists at the destination.
 * Updates counters and copy status if skipping this file.
 * @return true if copy of this file can be skipped
 */
private boolean skipCopyFile(FileStatus srcstat, Path absdst,
    OutputCollector<WritableComparable<?>, Text> outc,
    Reporter reporter) throws IOException {
    // Skippable when the destination exists, -overwrite was not given, and
    // -update does not consider the files different.
    if (destFileSys.exists(absdst) && !overwrite
        && !needsUpdate(srcstat, destFileSys, absdst)) {
        outc.collect(null, new Text("SKIP: " + srcstat.getPath()));
        ++skipcount;
        reporter.incrCounter(Counter.SKIP, 1);
        updateStatus(reporter);
        return true;
    }
    return false;
}
/**
 * Copies single file to the path specified by tmpfile.
 * @param srcstat src path and metadata
 * @param tmpfile temporary file to which copy is to be done
 * @param absdst actual destination path to which copy is to be done
 * @param reporter progress/status reporter for the task
 * @return Number of bytes copied
 */
private long doCopyFile(FileStatus srcstat, Path tmpfile, Path absdst,
    Reporter reporter) throws IOException {
    long bytesCopied = 0L;
    Path srcPath = srcstat.getPath();
    // open src file
    try (FSDataInputStream in = srcPath.getFileSystem(job).open(srcPath)) {
        reporter.incrCounter(Counter.BYTESEXPECTED, srcstat.getLen());
        // open tmp file
        try (FSDataOutputStream out = create(tmpfile, reporter, srcstat)) {
            LOG.info("Copying file " + srcPath + " of size " +
                srcstat.getLen() + " bytes...");

            // copy file, updating the task status with the percentage done
            // and copied/expected byte counts after each buffer
            for(int bytesRead; (bytesRead = in.read(buffer)) >= 0; ) {
                out.write(buffer, 0, bytesRead);
                bytesCopied += bytesRead;
                reporter.setStatus(
                    String.format("%.2f ", bytesCopied*100.0/srcstat.getLen())
                    + absdst + " [ " +
                    TraditionalBinaryPrefix.long2String(bytesCopied, "", 1) + " / "
                    + TraditionalBinaryPrefix.long2String(srcstat.getLen(), "", 1)
                    + " ]");
            }
        }
    }
    return bytesCopied;
}
/**
 * Copy a file to a destination.
 * @param srcstat src path and metadata
 * @param relativedst relative dst path
 * @param outc Log of skipped files
 * @param reporter progress/status reporter for the task
 * @throws IOException if copy fails(even if the validation of copy fails)
 */
private void copy(FileStatus srcstat, Path relativedst,
    OutputCollector<WritableComparable<?>, Text> outc, Reporter reporter)
    throws IOException {
    Path absdst = new Path(destPath, relativedst);
    int totfiles = job.getInt(SRC_COUNT_LABEL, -1);
    assert totfiles >= 0 : "Invalid file count " + totfiles;

    if (totfiles == 1) {
        // Copying a single file; use dst path provided by user as
        // destination file rather than destination directory
        Path dstparent = absdst.getParent();
        if (!(destFileSys.exists(dstparent) &&
              destFileSys.getFileStatus(dstparent).isDirectory())) {
            absdst = dstparent;
        }
    }

    // if a directory, ensure created even if empty
    if (srcstat.isDirectory()) {
        if (destFileSys.exists(absdst)) {
            if (destFileSys.getFileStatus(absdst).isFile()) {
                throw new IOException("Failed to mkdirs: " + absdst+" is a file.");
            }
        }
        else if (!destFileSys.mkdirs(absdst)) {
            throw new IOException("Failed to mkdirs " + absdst);
        }
        // TODO: when modification times can be set, directories should be
        // emitted to reducers so they might be preserved. Also, mkdirs does
        // not currently return an error when the directory already exists;
        // if this changes, all directory work might as well be done in reduce
        return;
    }

    // Can we skip copying this file ?
    if (skipCopyFile(srcstat, absdst, outc, reporter)) {
        return;
    }

    // copy into a same-named file under the job's tmp dir first, then
    // rename into place once the copy is validated
    Path tmpfile = new Path(job.get(TMP_DIR_LABEL), relativedst);
    // do the actual copy to tmpfile
    long bytesCopied = doCopyFile(srcstat, tmpfile, absdst, reporter);

    if (bytesCopied != srcstat.getLen()) {
        throw new IOException("File size not matched: copied "
            + bytesString(bytesCopied) + " to tmpfile (=" + tmpfile
            + ") but expected " + bytesString(srcstat.getLen())
            + " from " + srcstat.getPath());
    }
    else {
        if (destFileSys.exists(absdst) &&
            destFileSys.getFileStatus(absdst).isDirectory()) {
            throw new IOException(absdst + " is a directory");
        }
        if (!destFileSys.mkdirs(absdst.getParent())) {
            throw new IOException("Failed to create parent dir: " + absdst.getParent());
        }
        rename(tmpfile, absdst);

        // delete the copy if it does not validate against the source
        if (!validateCopy(srcstat, absdst)) {
            destFileSys.delete(absdst, false);
            throw new IOException("Validation of copy of file "
                + srcstat.getPath() + " failed.");
        }
        updateDestStatus(srcstat, destFileSys.getFileStatus(absdst));
    }

    // report at least once for each file
    updateCopyStatus(srcstat, reporter);
}
/** rename tmp to dst, delete dst if already exists */
private void rename(Path tmp, Path dst) throws IOException {
    try {
        if (destFileSys.exists(dst)) {
            destFileSys.delete(dst, true);
        }
        if (!destFileSys.rename(tmp, dst)) {
            // rename() reports failure via a false return here; normalize
            // to an exception so the catch below wraps it with context.
            throw new IOException();
        }
    }
    catch(IOException cause) {
        throw (IOException)new IOException("Fail to rename tmp file (=" + tmp
            + ") to destination file (=" + dst + ")").initCause(cause);
    }
}
/** Applies the preserved attributes of src to dst when -p was given. */
private void updateDestStatus(FileStatus src, FileStatus dst
    ) throws IOException {
    if (preserve_status) {
        DistCpV1.updateDestStatus(src, dst, preseved, destFileSys);
    }
}

/** Formats a byte count together with its symbolic binary-prefix form. */
static String bytesString(long b) {
    return b + " bytes (" +
        TraditionalBinaryPrefix.long2String(b, "", 1) + ")";
}
/**
 * Copies a file and validates the copy by checking the checksums.
 * If validation fails, retries (max number of tries is distcp.file.retries)
 * to copy the file.
 */
void copyWithRetries(FileStatus srcstat, Path relativedst,
    OutputCollector<WritableComparable<?>, Text> out,
    Reporter reporter) throws IOException {

    // max tries to copy when validation of copy fails
    final int maxRetries = job.getInt(FILE_RETRIES_LABEL, DEFAULT_FILE_RETRIES);
    // save update flag for later copies within the same map task
    final boolean saveUpdate = update;

    int retryCnt = 1;
    for (; retryCnt <= maxRetries; retryCnt++) {
        try {
            //copy the file and validate copy
            copy(srcstat, relativedst, out, reporter);
            break;// copy successful
        } catch (IOException e) {
            LOG.warn("Copy of " + srcstat.getPath() + " failed.", e);
            if (retryCnt < maxRetries) {// copy failed and need to retry
                LOG.info("Retrying copy of file " + srcstat.getPath());
                // force the retry to re-copy even if the destination exists
                update = true; // set update flag for retries
            }
            else {// no more retries... Give up
                // restore the caller-visible update flag before propagating
                update = saveUpdate;
                throw new IOException("Copy of file failed even with " + retryCnt
                    + " tries.", e);
            }
        }
    }
}
/** Mapper configuration.
 * Extracts source and destination file system, as well as
 * top-level paths on source and destination directories.
 * Gets the named file systems, to be used later in map.
 */
public void configure(JobConf job)
{
  destPath = new Path(job.get(DST_DIR_LABEL, "/"));
  try {
    destFileSys = destPath.getFileSystem(job);
  } catch (IOException ex) {
    // configure() cannot throw IOException, so surface it unchecked
    throw new RuntimeException("Unable to get the named file system.", ex);
  }
  // copy buffer reused for every file handled by this map task
  sizeBuf = job.getInt("copy.buf.size", 128 * 1024);
  buffer = new byte[sizeBuf];
  ignoreReadFailures = job.getBoolean(Options.IGNORE_READ_FAILURES.propertyname, false);
  preserve_status = job.getBoolean(Options.PRESERVE_STATUS.propertyname, false);
  if (preserve_status) {
    // parse the -p attribute string (e.g. "ugp") into an EnumSet
    preseved = FileAttribute.parse(job.get(PRESERVE_STATUS_LABEL));
  }
  update = job.getBoolean(Options.UPDATE.propertyname, false);
  // overwrite is only honored when update is off (they are incompatible)
  overwrite = !update && job.getBoolean(Options.OVERWRITE.propertyname, false);
  skipCRCCheck = job.getBoolean(Options.SKIPCRC.propertyname, false);
  this.job = job;
}
/** Map method. Copies one file from source file system to destination.
 * On failure: records the failure in the job output, counts it, attempts
 * (best-effort, up to 3 tries) to remove the temporary file, and keeps the
 * task alive by updating status between attempts.
 * @param key src len
 * @param value FilePair (FileStatus src, Path dst)
 * @param out Log of failed copies
 * @param reporter progress reporter
 */
public void map(LongWritable key,
    FilePair value,
    OutputCollector<WritableComparable<?>, Text> out,
    Reporter reporter) throws IOException {
  final FileStatus srcstat = value.input;
  final Path relativedst = new Path(value.output);
  try {
    copyWithRetries(srcstat, relativedst, out, reporter);
  } catch (IOException e) {
    // copy failed even after retries: record and report the failure
    ++failcount;
    reporter.incrCounter(Counter.FAIL, 1);
    updateStatus(reporter);
    final String sfailure = "FAIL " + relativedst + " : " +
        StringUtils.stringifyException(e);
    out.collect(null, new Text(sfailure));
    LOG.info(sfailure);
    if (e instanceof FileNotFoundException) {
      final String s = "Possible Cause for failure: Either the filesystem "
          + srcstat.getPath().getFileSystem(job)
          + " is not accessible or the file is deleted";
      LOG.error(s);
      out.collect(null, new Text(s));
    }
    try {
      // best-effort cleanup of the partially-copied tmp file
      for (int i = 0; i < 3; ++i) {
        try {
          final Path tmp = new Path(job.get(TMP_DIR_LABEL), relativedst);
          if (destFileSys.delete(tmp, true))
            break;
        } catch (Throwable ex) {
          // ignore, we are just cleaning up
          LOG.debug("Ignoring cleanup exception", ex);
        }
        // update status, so we don't get timed out
        updateStatus(reporter);
        Thread.sleep(3 * 1000);
      }
    } catch (InterruptedException inte) {
      throw (IOException)new IOException().initCause(inte);
    }
  } finally {
    updateStatus(reporter);
  }
}
/**
 * Fail the task at the end of the map phase when copies failed and read
 * failures are not being ignored; otherwise complete normally.
 */
public void close() throws IOException {
  boolean mustFail = failcount != 0 && !ignoreReadFailures;
  if (mustFail) {
    throw new IOException(getCountString());
  }
}
}
/**
 * Read a list of source paths, one per line, from the given file.
 *
 * @param conf configuration used to resolve srcList's file system
 * @param srcList file containing one path per line, UTF-8 encoded
 * @return the paths in file order
 * @throws IOException if the list file cannot be opened or read
 */
private static List<Path> fetchFileList(Configuration conf, Path srcList)
    throws IOException {
  List<Path> result = new ArrayList<Path>();
  FileSystem fs = srcList.getFileSystem(conf);
  // StandardCharsets.UTF_8 avoids the Charset.forName name lookup and the
  // (impossible here) UnsupportedCharsetException path.
  try (BufferedReader input = new BufferedReader(new InputStreamReader(
      fs.open(srcList), java.nio.charset.StandardCharsets.UTF_8))) {
    String line;
    while ((line = input.readLine()) != null) {
      result.add(new Path(line));
    }
  }
  return result;
}
/**
 * Legacy entry point: copy either a single source path or (when
 * {@code srcAsList} is set) every path listed in the file at srcPath.
 * Delegates to {@link #copy(Configuration, Arguments)} with no limits.
 *
 * @deprecated retained for compatibility; use the Arguments-based copy.
 */
@Deprecated
public static void copy(Configuration conf, String srcPath,
    String destPath, Path logPath,
    boolean srcAsList, boolean ignoreReadFailures)
    throws IOException {
  final Path src = new Path(srcPath);
  List<Path> tmp = new ArrayList<Path>();
  if (srcAsList) {
    // srcPath names a file that lists the actual sources, one per line
    tmp.addAll(fetchFileList(conf, src));
  } else {
    tmp.add(src);
  }
  EnumSet<Options> flags = ignoreReadFailures
      ? EnumSet.of(Options.IGNORE_READ_FAILURES)
      : EnumSet.noneOf(Options.class);
  final Path dst = new Path(destPath);
  // no basedir, no preserved attributes, no file/size limits, no dryrun
  copy(conf, new Arguments(tmp, null, dst, logPath, flags, null,
      Long.MAX_VALUE, Long.MAX_VALUE, null, false));
}
/** Sanity check for srcPath: obtains delegation tokens for all source
 * namenodes, expands each source glob, and replaces the contents of
 * {@code srcPaths} (in place) with the unglobbed paths. Throws
 * InvalidInputException listing every source that matched nothing. */
private static void checkSrcPath(JobConf jobConf, List<Path> srcPaths)
    throws IOException {
  List<IOException> rslt = new ArrayList<IOException>();
  List<Path> unglobbed = new LinkedList<Path>();
  Path[] ps = new Path[srcPaths.size()];
  ps = srcPaths.toArray(ps);
  // fetch tokens up front so the job's map tasks can access the sources
  TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(), ps, jobConf);
  for (Path p : srcPaths) {
    FileSystem fs = p.getFileSystem(jobConf);
    FileStatus[] inputs = fs.globStatus(p);
    if(inputs != null && inputs.length > 0) {
      for (FileStatus onePath: inputs) {
        unglobbed.add(onePath.getPath());
      }
    } else {
      // collect all missing sources before failing, for one combined report
      rslt.add(new IOException("Input source " + p + " does not exist."));
    }
  }
  if (!rslt.isEmpty()) {
    throw new InvalidInputException(rslt);
  }
  // replace the globs with their expansions, preserving the caller's list
  srcPaths.clear();
  srcPaths.addAll(unglobbed);
}
/**
 * Driver to copy srcPath to destPath depending on required protocol.
 * Sets up the job, runs it, applies preserved attributes to directories
 * (finalize), and always cleans up the tmp and job directories.
 * @param conf configuration
 * @param args arguments
 */
static void copy(final Configuration conf, final Arguments args
    ) throws IOException {
  LOG.info("srcPaths=" + args.srcs);
  if (!args.dryrun || args.flags.contains(Options.UPDATE)) {
    LOG.info("destPath=" + args.dst);
  }
  JobConf job = createJobConf(conf);
  checkSrcPath(job, args.srcs);
  if (args.preservedAttributes != null) {
    job.set(PRESERVE_STATUS_LABEL, args.preservedAttributes);
  }
  if (args.mapredSslConf != null) {
    job.set("dfs.https.client.keystore.resource", args.mapredSslConf);
  }
  //Initialize the mapper
  try {
    // setup() returns false when there is nothing to copy (or dryrun)
    if (setup(conf, job, args)) {
      JobClient.runJob(job);
    }
    if(!args.dryrun) {
      // fix up directory ownership/permissions/times after all copies
      finalize(conf, job, args.dst, args.preservedAttributes);
    }
  } finally {
    if (!args.dryrun) {
      //delete tmp
      fullyDelete(job.get(TMP_DIR_LABEL), job);
    }
    //delete jobDirectory
    fullyDelete(job.get(JOB_DIR_LABEL), job);
  }
}
/**
 * Apply the preserved attributes from {@code src} onto {@code dst}:
 * owner/group (in one setOwner call), then permission, then times.
 * Owner and group are only set when they actually differ, to avoid
 * needless namenode operations.
 */
private static void updateDestStatus(FileStatus src, FileStatus dst,
    EnumSet<FileAttribute> preseved, FileSystem destFileSys
    ) throws IOException {
  String owner = null;
  String group = null;
  if (preseved.contains(FileAttribute.USER)
      && !src.getOwner().equals(dst.getOwner())) {
    owner = src.getOwner();
  }
  if (preseved.contains(FileAttribute.GROUP)
      && !src.getGroup().equals(dst.getGroup())) {
    group = src.getGroup();
  }
  if (owner != null || group != null) {
    // a null owner or group leaves that half unchanged
    destFileSys.setOwner(dst.getPath(), owner, group);
  }
  if (preseved.contains(FileAttribute.PERMISSION)
      && !src.getPermission().equals(dst.getPermission())) {
    destFileSys.setPermission(dst.getPath(), src.getPermission());
  }
  if (preseved.contains(FileAttribute.TIMES)) {
    destFileSys.setTimes(dst.getPath(), src.getModificationTime(), src.getAccessTime());
  }
}
/**
 * Post-job pass: re-apply preserved user/group/permission attributes to
 * every destination directory recorded in the job's dst-dir list. Needed
 * because directory attributes can be clobbered while files are copied
 * into them. No-op when -p was not given or only TIMES was requested.
 */
static private void finalize(Configuration conf, JobConf jobconf,
    final Path destPath, String presevedAttributes) throws IOException {
  if (presevedAttributes == null) {
    return;
  }
  EnumSet<FileAttribute> preseved = FileAttribute.parse(presevedAttributes);
  if (!preseved.contains(FileAttribute.USER)
      && !preseved.contains(FileAttribute.GROUP)
      && !preseved.contains(FileAttribute.PERMISSION)) {
    return;
  }
  FileSystem dstfs = destPath.getFileSystem(conf);
  Path dstdirlist = new Path(jobconf.get(DST_DIR_LIST_LABEL));
  try (SequenceFile.Reader in =
      new SequenceFile.Reader(jobconf, Reader.file(dstdirlist))) {
    Text dsttext = new Text();
    FilePair pair = new FilePair();
    // each record maps a relative dst dir to its source FileStatus
    for(; in.next(dsttext, pair); ) {
      Path absdst = new Path(destPath, pair.output);
      updateDestStatus(pair.input, dstfs.getFileStatus(absdst),
          preseved, dstfs);
    }
  }
}
/** Parsed command-line arguments for a distcp invocation. Immutable. */
static class Arguments {
  // List of source paths (globs are expanded later by checkSrcPath)
  final List<Path> srcs;
  // Base directory for -basedir relative-path computation; may be null
  final Path basedir;
  // Destination path
  final Path dst;
  // Log output directory; may be null (a default is chosen in setup)
  final Path log;
  // Command-line flags
  final EnumSet<Options> flags;
  // Raw -p attribute string (e.g. "ugp"); may be null
  final String preservedAttributes;
  // Maximum number of files to copy (-filelimit)
  final long filelimit;
  // Maximum total bytes to copy (-sizelimit)
  final long sizelimit;
  // ssl configuration resource name; may be null
  final String mapredSslConf;
  // true when -dryrun was given: plan only, copy nothing
  final boolean dryrun;
  /**
   * Arguments for distcp
   * @param srcs List of source paths
   * @param basedir Base directory for copy
   * @param dst Destination path
   * @param log Log output directory
   * @param flags Command-line flags
   * @param preservedAttributes Preserved attributes
   * @param filelimit File limit
   * @param sizelimit Size limit
   * @param mapredSslConf ssl configuration
   * @param dryrun plan only; do not copy
   */
  Arguments(List<Path> srcs, Path basedir, Path dst, Path log,
      EnumSet<Options> flags, String preservedAttributes,
      long filelimit, long sizelimit, String mapredSslConf,
      boolean dryrun) {
    this.srcs = srcs;
    this.basedir = basedir;
    this.dst = dst;
    this.log = log;
    this.flags = flags;
    this.preservedAttributes = preservedAttributes;
    this.filelimit = filelimit;
    this.sizelimit = sizelimit;
    this.mapredSslConf = mapredSslConf;
    this.dryrun = dryrun;
    if (LOG.isTraceEnabled()) {
      LOG.trace("this = " + this);
    }
  }
  /**
   * Parse a raw argv into an Arguments instance, validating mandatory
   * parameters and rejecting incompatible flag combinations.
   * The last non-flag argument is taken as the destination; all other
   * non-flag arguments are sources.
   */
  static Arguments valueOf(String[] args, Configuration conf
      ) throws IOException {
    List<Path> srcs = new ArrayList<Path>();
    Path dst = null;
    Path log = null;
    Path basedir = null;
    EnumSet<Options> flags = EnumSet.noneOf(Options.class);
    String presevedAttributes = null;
    String mapredSslConf = null;
    long filelimit = Long.MAX_VALUE;
    long sizelimit = Long.MAX_VALUE;
    boolean dryrun = false;
    for (int idx = 0; idx < args.length; idx++) {
      Options[] opt = Options.values();
      int i = 0;
      // find the first Options value whose cmd prefixes this argument
      for(; i < opt.length && !args[idx].startsWith(opt[i].cmd); i++);
      if (i < opt.length) {
        flags.add(opt[i]);
        if (opt[i] == Options.PRESERVE_STATUS) {
          // the attribute letters follow "-p" directly, e.g. -pugp
          presevedAttributes = args[idx].substring(2);
          FileAttribute.parse(presevedAttributes); //validation
        }
        else if (opt[i] == Options.FILE_LIMIT) {
          filelimit = Options.FILE_LIMIT.parseLong(args, ++idx);
        }
        else if (opt[i] == Options.SIZE_LIMIT) {
          sizelimit = Options.SIZE_LIMIT.parseLong(args, ++idx);
        }
      } else if ("-f".equals(args[idx])) {
        if (++idx == args.length) {
          throw new IllegalArgumentException("urilist_uri not specified in -f");
        }
        srcs.addAll(fetchFileList(conf, new Path(args[idx])));
      } else if ("-log".equals(args[idx])) {
        if (++idx == args.length) {
          throw new IllegalArgumentException("logdir not specified in -log");
        }
        log = new Path(args[idx]);
      } else if ("-basedir".equals(args[idx])) {
        if (++idx == args.length) {
          throw new IllegalArgumentException("basedir not specified in -basedir");
        }
        basedir = new Path(args[idx]);
      } else if ("-mapredSslConf".equals(args[idx])) {
        if (++idx == args.length) {
          throw new IllegalArgumentException("ssl conf file not specified in -mapredSslConf");
        }
        mapredSslConf = args[idx];
      } else if ("-dryrun".equals(args[idx])) {
        dryrun = true;
        dst = new Path("/tmp/distcp_dummy_dest");//dummy destination
      } else if ("-m".equals(args[idx])) {
        if (++idx == args.length) {
          throw new IllegalArgumentException("num_maps not specified in -m");
        }
        try {
          conf.setInt(MAX_MAPS_LABEL, Integer.parseInt(args[idx]));
        } catch (NumberFormatException e) {
          throw new IllegalArgumentException("Invalid argument to -m: " +
              args[idx]);
        }
      } else if ('-' == args[idx].codePointAt(0)) {
        throw new IllegalArgumentException("Invalid switch " + args[idx]);
      } else if (idx == args.length -1 &&
          (!dryrun || flags.contains(Options.UPDATE))) {
        // last positional argument is the destination (unless a plain
        // dryrun already substituted the dummy destination)
        dst = new Path(args[idx]);
      } else {
        srcs.add(new Path(args[idx]));
      }
    }
    // mandatory command-line parameters
    if (srcs.isEmpty() || dst == null) {
      throw new IllegalArgumentException("Missing "
          + (dst == null ? "dst path" : "src"));
    }
    // incompatible command-line flags
    final boolean isOverwrite = flags.contains(Options.OVERWRITE);
    final boolean isUpdate = flags.contains(Options.UPDATE);
    final boolean isDelete = flags.contains(Options.DELETE);
    final boolean skipCRC = flags.contains(Options.SKIPCRC);
    if (isOverwrite && isUpdate) {
      throw new IllegalArgumentException("Conflicting overwrite policies");
    }
    if (!isUpdate && skipCRC) {
      throw new IllegalArgumentException(
          Options.SKIPCRC.cmd + " is relevant only with the " +
          Options.UPDATE.cmd + " option");
    }
    if (isDelete && !isOverwrite && !isUpdate) {
      throw new IllegalArgumentException(Options.DELETE.cmd
          + " must be specified with " + Options.OVERWRITE + " or "
          + Options.UPDATE + ".");
    }
    return new Arguments(srcs, basedir, dst, log, flags, presevedAttributes,
        filelimit, sizelimit, mapredSslConf, dryrun);
  }
  /** {@inheritDoc} */
  public String toString() {
    return getClass().getName() + "{"
        + "\n  srcs = " + srcs
        + "\n  dst = " + dst
        + "\n  log = " + log
        + "\n  flags = " + flags
        + "\n  preservedAttributes = " + preservedAttributes
        + "\n  filelimit = " + filelimit
        + "\n  sizelimit = " + sizelimit
        + "\n  mapredSslConf = " + mapredSslConf
        + "\n}";
  }
}
/**
 * This is the main driver for recursively copying directories
 * across file systems. It takes at least two cmdline parameters. A source
 * URL and a destination URL. It then essentially does an "ls -lR" on the
 * source URL, and writes the output in a round-robin manner to all the map
 * input files. The mapper actually copies the files allotted to it. The
 * reduce is empty.
 *
 * @return 0 on success; -1 on bad arguments; -2 on duplicated sources;
 *         -3 on known remote errors; -999 on any other failure.
 */
public int run(String[] args) {
  try {
    copy(conf, Arguments.valueOf(args, conf));
    return 0;
  } catch (IllegalArgumentException e) {
    System.err.println(StringUtils.stringifyException(e) + "\n" + usage);
    ToolRunner.printGenericCommandUsage(System.err);
    return -1;
  } catch (DuplicationException e) {
    System.err.println(StringUtils.stringifyException(e));
    return DuplicationException.ERROR_CODE;
  } catch (RemoteException e) {
    // unwrap the server-side exception so the user sees the real cause
    final IOException unwrapped = e.unwrapRemoteException(
        FileNotFoundException.class,
        AccessControlException.class,
        QuotaExceededException.class);
    System.err.println(StringUtils.stringifyException(unwrapped));
    return -3;
  } catch (Exception e) {
    System.err.println("With failures, global counters are inaccurate; " +
        "consider running with -i");
    System.err.println("Copy failed: " + StringUtils.stringifyException(e));
    return -999;
  }
}
/** Command-line entry point: run DistCpV1 via ToolRunner and exit with
 *  its return code. */
public static void main(String[] args) throws Exception {
  JobConf job = new JobConf(DistCpV1.class);
  System.exit(ToolRunner.run(new DistCpV1(job), args));
}
/**
 * Make a path relative with respect to a root path.
 * absPath is always assumed to descend from root.
 * Otherwise returned path is null.
 *
 * @param root the prefix path
 * @param absPath an absolute path expected to descend from root
 * @return absPath relative to root; "." when they are equal; null when
 *         absPath does not descend from root
 * @throws IllegalArgumentException if absPath is not absolute
 */
static String makeRelative(Path root, Path absPath) {
  if (!absPath.isAbsolute()) {
    throw new IllegalArgumentException("!absPath.isAbsolute(), absPath="
        + absPath);
  }
  String p = absPath.toUri().getPath();
  StringTokenizer pathTokens = new StringTokenizer(p, "/");
  for(StringTokenizer rootTokens = new StringTokenizer(
      root.toUri().getPath(), "/"); rootTokens.hasMoreTokens(); ) {
    // If absPath has fewer components than root it cannot descend from it.
    // (Previously this fell through to a NoSuchElementException from
    // pathTokens.nextToken(); now it returns null as documented.)
    if (!pathTokens.hasMoreTokens()
        || !rootTokens.nextToken().equals(pathTokens.nextToken())) {
      return null;
    }
  }
  // Whatever tokens remain in absPath form the relative path.
  StringBuilder sb = new StringBuilder();
  for(; pathTokens.hasMoreTokens(); ) {
    sb.append(pathTokens.nextToken());
    if (pathTokens.hasMoreTokens()) { sb.append(Path.SEPARATOR); }
  }
  return sb.length() == 0? ".": sb.toString();
}
/**
 * Calculate how many maps to run.
 * Number of maps is bounded by a minimum of the cumulative size of the
 * copy / (distcp.bytes.per.map, default BYTES_PER_MAP or -m on the
 * command line) and at most (distcp.max.map.tasks, default
 * MAX_MAPS_PER_NODE * nodes in the cluster).
 * The result is clamped to at least 1 and written into the job.
 * @param totalBytes Count of total bytes for job
 * @param job The job to configure
 * @return Count of maps to run.
 */
private static int setMapCount(long totalBytes, JobConf job)
    throws IOException {
  int numMaps =
      (int)(totalBytes / job.getLong(BYTES_PER_MAP_LABEL, BYTES_PER_MAP));
  // cap by the configured maximum (default scales with cluster size)
  numMaps = Math.min(numMaps,
      job.getInt(MAX_MAPS_LABEL, MAX_MAPS_PER_NODE *
          new JobClient(job).getClusterStatus().getTaskTrackers()));
  numMaps = Math.max(numMaps, 1);
  job.setNumMapTasks(numMaps);
  return numMaps;
}
/** Recursively delete {@code dir} (if non-null) on its own file system,
 *  logging a warning when the delete does not fully succeed. */
static void fullyDelete(String dir, Configuration conf) throws IOException {
  if (dir == null) {
    return;
  }
  Path target = new Path(dir);
  FileSystem fs = target.getFileSystem(conf);
  if (!fs.delete(target, true)) {
    LOG.warn("Could not fully delete " + target);
  }
}
//Job configuration
/** Build the map-only JobConf used for the copy: custom input format,
 *  Text key/value output, no reducers, and speculative execution off. */
private static JobConf createJobConf(Configuration conf) {
  JobConf jobconf = new JobConf(conf, DistCpV1.class);
  jobconf.setJobName(conf.get("mapred.job.name", NAME));
  // turn off speculative execution, because DFS doesn't handle
  // multiple writers to the same file.
  jobconf.setMapSpeculativeExecution(false);
  jobconf.setInputFormat(CopyInputFormat.class);
  jobconf.setOutputKeyClass(Text.class);
  jobconf.setOutputValueClass(Text.class);
  jobconf.setMapperClass(CopyFilesMapper.class);
  jobconf.setNumReduceTasks(0);
  return jobconf;
}
// Shared RNG for generating per-run ids (not security sensitive).
private static final Random RANDOM = new Random();
/** Return a random non-negative id rendered in base 36, used to make
 *  per-run directory names (tmp, logs, job dir) unique. */
public static String getRandomId() {
  return Integer.toString(RANDOM.nextInt(Integer.MAX_VALUE), 36);
}
/**
 * Increase the replication factor of _distcp_src_files to
 * sqrt(min(maxMapsOnCluster, numMaps)). This is to reduce the chance of
 * failing of distcp because of "not having a replication of _distcp_src_files
 * available for reading for some maps".
 */
private static void setReplication(Configuration conf, JobConf jobConf,
    Path srcfilelist, int numMaps) throws IOException {
  int numMaxMaps = new JobClient(jobConf).getClusterStatus().getMaxMapTasks();
  short replication = (short) Math.ceil(
      Math.sqrt(Math.min(numMaxMaps, numMaps)));
  FileSystem fs = srcfilelist.getFileSystem(conf);
  FileStatus srcStatus = fs.getFileStatus(srcfilelist);
  // only ever raise replication, never lower it
  if (srcStatus.getReplication() < replication) {
    if (!fs.setReplication(srcfilelist, replication)) {
      throw new IOException("Unable to increase the replication of file " +
          srcfilelist);
    }
  }
}
/**
 * Does the dir already exist at destination ?
 * @return true if the dir already exists at destination; false when the
 *         path does not exist at all
 * @throws FileAlreadyExistsException if the path exists but is a file
 */
private static boolean dirExists(Configuration conf, Path dst)
    throws IOException {
  FileSystem destFileSys = dst.getFileSystem(conf);
  final FileStatus status;
  try {
    status = destFileSys.getFileStatus(dst);
  } catch (FileNotFoundException e) {
    return false;
  }
  if (status.isFile()) {
    throw new FileAlreadyExistsException("Not a dir: " + dst + " is a file.");
  }
  return true;
}
/**
 * Initialize DFSCopyFileMapper specific job-configuration.
 * Walks all sources, writes the src/dst/dir sequence-file lists consumed
 * by the map tasks, applies file/size limits and update/overwrite
 * semantics, sizes the job, and optionally deletes dst paths missing from
 * the sources (-delete).
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 * @return true if it is necessary to launch a job.
 */
static boolean setup(Configuration conf, JobConf jobConf,
                     final Arguments args)
    throws IOException {
  jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());
  //set boolean values
  final boolean update = args.flags.contains(Options.UPDATE);
  final boolean skipCRCCheck = args.flags.contains(Options.SKIPCRC);
  final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE)
      && !args.dryrun;
  jobConf.setBoolean(Options.UPDATE.propertyname, update);
  jobConf.setBoolean(Options.SKIPCRC.propertyname, skipCRCCheck);
  jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
  jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
      args.flags.contains(Options.IGNORE_READ_FAILURES));
  jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname,
      args.flags.contains(Options.PRESERVE_STATUS));
  final String randomId = getRandomId();
  JobClient jClient = new JobClient(jobConf);
  Path stagingArea;
  try {
    stagingArea =
        JobSubmissionFiles.getStagingDir(jClient.getClusterHandle(), conf);
  } catch (InterruptedException ie) {
    throw new IOException(ie);
  }
  // per-run private directory for the src/dst/dir list files
  Path jobDirectory = new Path(stagingArea + NAME + "_" + randomId);
  FsPermission mapredSysPerms =
      new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
  FileSystem.mkdirs(jClient.getFs(), jobDirectory, mapredSysPerms);
  jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());
  long maxBytesPerMap = conf.getLong(BYTES_PER_MAP_LABEL, BYTES_PER_MAP);
  FileSystem dstfs = args.dst.getFileSystem(conf);
  // get tokens for all the required FileSystems..
  TokenCache.obtainTokensForNamenodes(jobConf.getCredentials(),
      new Path[] {args.dst}, conf);
  boolean dstExists = dstfs.exists(args.dst);
  boolean dstIsDir = false;
  if (dstExists) {
    dstIsDir = dstfs.getFileStatus(args.dst).isDirectory();
  }
  // default logPath
  Path logPath = args.log;
  if (logPath == null) {
    String filename = "_distcp_logs_" + randomId;
    if (!dstExists || !dstIsDir) {
      Path parent = args.dst.getParent();
      if (null == parent) {
        // If dst is '/' on S3, it might not exist yet, but dst.getParent()
        // will return null. In this case, use '/' as its own parent to prevent
        // NPE errors below.
        parent = args.dst;
      }
      if (!dstfs.exists(parent)) {
        dstfs.mkdirs(parent);
      }
      logPath = new Path(parent, filename);
    } else {
      logPath = new Path(args.dst, filename);
    }
  }
  FileOutputFormat.setOutputPath(jobConf, logPath);
  // create src list, dst list
  FileSystem jobfs = jobDirectory.getFileSystem(jobConf);
  Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
  Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
  Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
  jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
  jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
  // cnsyncf/cbsyncs track files/bytes since the last writer sync point;
  // dirsyn does the same for the directory list writer
  int srcCount = 0, cnsyncf = 0, dirsyn = 0;
  long fileCount = 0L, dirCount = 0L, byteCount = 0L, cbsyncs = 0L,
      skipFileCount = 0L, skipByteCount = 0L;
  try (
      SequenceFile.Writer src_writer = SequenceFile.createWriter(jobConf,
          Writer.file(srcfilelist), Writer.keyClass(LongWritable.class),
          Writer.valueClass(FilePair.class), Writer.compression(
          SequenceFile.CompressionType.NONE));
      SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobConf,
          Writer.file(dstfilelist), Writer.keyClass(Text.class),
          Writer.valueClass(Text.class), Writer.compression(
          SequenceFile.CompressionType.NONE));
      SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobConf,
          Writer.file(dstdirlist), Writer.keyClass(Text.class),
          Writer.valueClass(FilePair.class), Writer.compression(
          SequenceFile.CompressionType.NONE));
  ) {
    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory OR we're updating/overwriting
    // the contents of the destination directory.
    final boolean special =
        (args.srcs.size() == 1 && !dstExists) || update || overwrite;
    Path basedir = null;
    HashSet<Path> parentDirsToCopy = new HashSet<Path>();
    if (args.basedir != null) {
      FileSystem basefs = args.basedir.getFileSystem(conf);
      basedir = args.basedir.makeQualified(
          basefs.getUri(), basefs.getWorkingDirectory());
      if (!basefs.isDirectory(basedir)) {
        throw new IOException("Basedir " + basedir + " is not a directory.");
      }
    }
    for(Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext(); ) {
      final Path src = srcItr.next();
      FileSystem srcfs = src.getFileSystem(conf);
      FileStatus srcfilestat = srcfs.getFileStatus(src);
      // root determines how destination-relative names are computed
      Path root = special && srcfilestat.isDirectory()? src: src.getParent();
      if (dstExists && !dstIsDir &&
          (args.srcs.size() > 1 || srcfilestat.isDirectory())) {
        // destination should not be a file
        throw new IOException("Destination " + args.dst + " should be a dir" +
            " if multiple source paths are there OR if" +
            " the source path is a dir");
      }
      if (basedir != null) {
        root = basedir;
        // record every ancestor dir between src and basedir exactly once
        Path parent = src.getParent().makeQualified(
            srcfs.getUri(), srcfs.getWorkingDirectory());
        while (parent != null && !parent.equals(basedir)) {
          if (!parentDirsToCopy.contains(parent)){
            parentDirsToCopy.add(parent);
            String dst = makeRelative(root, parent);
            FileStatus pst = srcfs.getFileStatus(parent);
            src_writer.append(new LongWritable(0), new FilePair(pst, dst));
            dst_writer.append(new Text(dst), new Text(parent.toString()));
            dir_writer.append(new Text(dst), new FilePair(pst, dst));
            if (++dirsyn > SYNC_FILE_MAX) {
              dirsyn = 0;
              dir_writer.sync();
            }
          }
          parent = parent.getParent();
        }
        if (parent == null) {
          throw new IOException("Basedir " + basedir +
              " is not a prefix of source path " + src);
        }
      }
      if (srcfilestat.isDirectory()) {
        ++srcCount;
        final String dst = makeRelative(root,src);
        if (!update || !dirExists(conf, new Path(args.dst, dst))) {
          ++dirCount;
          src_writer.append(new LongWritable(0),
              new FilePair(srcfilestat, dst));
        }
        dst_writer.append(new Text(dst), new Text(src.toString()));
      }
      // iterative depth-first walk of the source tree
      Stack<FileStatus> pathstack = new Stack<FileStatus>();
      for(pathstack.push(srcfilestat); !pathstack.empty(); ) {
        FileStatus cur = pathstack.pop();
        FileStatus[] children = srcfs.listStatus(cur.getPath());
        for(int i = 0; i < children.length; i++) {
          boolean skipPath = false;
          final FileStatus child = children[i];
          final String dst = makeRelative(root, child.getPath());
          ++srcCount;
          if (child.isDirectory()) {
            pathstack.push(child);
            if (!update || !dirExists(conf, new Path(args.dst, dst))) {
              ++dirCount;
            }
            else {
              skipPath = true; // skip creating dir at destination
            }
          }
          else {
            Path destPath = new Path(args.dst, dst);
            if (cur.isFile() && (args.srcs.size() == 1)) {
              // Copying a single file; use dst path provided by user as
              // destination file rather than destination directory
              Path dstparent = destPath.getParent();
              FileSystem destFileSys = destPath.getFileSystem(jobConf);
              if (!(destFileSys.exists(dstparent) &&
                  destFileSys.getFileStatus(dstparent).isDirectory())) {
                destPath = dstparent;
              }
            }
            //skip path if the src and the dst files are the same.
            skipPath = update &&
                sameFile(srcfs, child, dstfs, destPath, skipCRCCheck);
            //skip path if it exceed file limit or size limit
            skipPath |= fileCount == args.filelimit
                || byteCount + child.getLen() > args.sizelimit;
            if (!skipPath) {
              ++fileCount;
              byteCount += child.getLen();
              if (LOG.isTraceEnabled()) {
                LOG.trace("adding file " + child.getPath());
              }
              // periodically sync the writers so maps can split the lists
              ++cnsyncf;
              cbsyncs += child.getLen();
              if (cnsyncf > SYNC_FILE_MAX || cbsyncs > maxBytesPerMap) {
                src_writer.sync();
                dst_writer.sync();
                cnsyncf = 0;
                cbsyncs = 0L;
              }
            }
            else {
              ++skipFileCount;
              skipByteCount += child.getLen();
              if (LOG.isTraceEnabled()) {
                LOG.trace("skipping file " + child.getPath());
              }
            }
          }
          if (!skipPath) {
            src_writer.append(new LongWritable(child.isDirectory()? 0: child.getLen()),
                new FilePair(child, dst));
          }
          dst_writer.append(new Text(dst),
              new Text(child.getPath().toString()));
        }
        if (cur.isDirectory()) {
          String dst = makeRelative(root, cur.getPath());
          dir_writer.append(new Text(dst), new FilePair(cur, dst));
          if (++dirsyn > SYNC_FILE_MAX) {
            dirsyn = 0;
            dir_writer.sync();
          }
        }
      }
    }
  }
  LOG.info("sourcePathsCount(files+directories)=" + srcCount);
  LOG.info("filesToCopyCount=" + fileCount);
  LOG.info("bytesToCopyCount=" +
      TraditionalBinaryPrefix.long2String(byteCount, "", 1));
  if (update) {
    LOG.info("filesToSkipCopyCount=" + skipFileCount);
    LOG.info("bytesToSkipCopyCount=" +
        TraditionalBinaryPrefix.long2String(skipByteCount, "", 1));
  }
  if (args.dryrun) {
    // planning only: nothing to launch
    return false;
  }
  int mapCount = setMapCount(byteCount, jobConf);
  // Increase the replication of _distcp_src_files, if needed
  setReplication(conf, jobConf, srcfilelist, mapCount);
  FileStatus dststatus = null;
  try {
    dststatus = dstfs.getFileStatus(args.dst);
  } catch(FileNotFoundException fnfe) {
    LOG.info(args.dst + " does not exist.");
  }
  // create dest path dir if copying > 1 file
  if (dststatus == null) {
    if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
      throw new IOException("Failed to create" + args.dst);
    }
  }
  final Path sorted = new Path(jobDirectory, "_distcp_sorted");
  checkDuplication(jobfs, dstfilelist, sorted, conf);
  if (dststatus != null && args.flags.contains(Options.DELETE)) {
    long deletedPathsCount = deleteNonexisting(dstfs, dststatus, sorted,
        jobfs, jobDirectory, jobConf, conf);
    LOG.info("deletedPathsFromDestCount(files+directories)=" +
        deletedPathsCount);
  }
  Path tmpDir = new Path(
      (dstExists && !dstIsDir) || (!dstExists && srcCount == 1)?
      args.dst.getParent(): args.dst, "_distcp_tmp_" + randomId);
  jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
  // Explicitly create the tmpDir to ensure that it can be cleaned
  // up by fullyDelete() later.
  tmpDir.getFileSystem(conf).mkdirs(tmpDir);
  LOG.info("sourcePathsCount=" + srcCount);
  LOG.info("filesToCopyCount=" + fileCount);
  LOG.info("bytesToCopyCount=" +
      TraditionalBinaryPrefix.long2String(byteCount, "", 1));
  jobConf.setInt(SRC_COUNT_LABEL, srcCount);
  jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
  return (fileCount + dirCount) > 0;
}
/**
 * Check whether the contents of src and dst are the same.
 *
 * Return false if dstpath does not exist
 *
 * If the files have different sizes, return false.
 *
 * If the files have the same sizes, the file checksums will be compared.
 *
 * When file checksum is not supported in any of file systems,
 * two files are considered as the same if they have the same size.
 *
 * @param srcfs source file system
 * @param srcstatus status of the source file
 * @param dstfs destination file system
 * @param dstpath candidate destination path
 * @param skipCRCCheck when true, equal length alone implies "same"
 */
static private boolean sameFile(FileSystem srcfs, FileStatus srcstatus,
    FileSystem dstfs, Path dstpath, boolean skipCRCCheck) throws IOException {
  FileStatus dststatus;
  try {
    dststatus = dstfs.getFileStatus(dstpath);
  } catch(FileNotFoundException fnfe) {
    return false;
  }
  //same length?
  if (srcstatus.getLen() != dststatus.getLen()) {
    return false;
  }
  if (skipCRCCheck) {
    LOG.debug("Skipping the CRC check");
    return true;
  }
  //get src checksum
  final FileChecksum srccs;
  try {
    srccs = srcfs.getFileChecksum(srcstatus.getPath());
  } catch(FileNotFoundException fnfe) {
    /*
     * Two possible cases:
     * (1) src existed once but was deleted between the time period that
     *     srcstatus was obtained and the try block above.
     * (2) srcfs does not support file checksum and (incorrectly) throws
     *     FNFE, e.g. some previous versions of HftpFileSystem.
     * For case (1), it is okay to return true since src was already deleted.
     * For case (2), true should be returned.
     */
    return true;
  }
  //compare checksums
  try {
    final FileChecksum dstcs = dstfs.getFileChecksum(dststatus.getPath());
    //return true if checksum is not supported
    //(i.e. some of the checksums is null)
    return srccs == null || dstcs == null || srccs.equals(dstcs);
  } catch(FileNotFoundException fnfe) {
    // dst vanished after getFileStatus above: contents cannot match
    return false;
  }
}
/**
 * Delete the dst files/dirs which do not exist in src
 *
 * Implementation: does a recursive listing (lsr) of the destination tree,
 * sorts it, then merges it against the sorted list of planned destination
 * paths; any destination entry absent from the plan is moved to trash (or
 * deleted). Children of an already-deleted directory are skipped via the
 * isAncestorPath check.
 *
 * @return total count of files and directories deleted from destination
 *         (including entries counted but skipped as descendants of an
 *         already-removed directory)
 * @throws IOException
 */
static private long deleteNonexisting(
    FileSystem dstfs, FileStatus dstroot, Path dstsorted,
    FileSystem jobfs, Path jobdir, JobConf jobconf, Configuration conf
    ) throws IOException {
  if (dstroot.isFile()) {
    throw new IOException("dst must be a directory when option "
        + Options.DELETE.cmd + " is set, but dst (= " + dstroot.getPath()
        + ") is not a directory.");
  }
  //write dst lsr results
  final Path dstlsr = new Path(jobdir, "_distcp_dst_lsr");
  try (final SequenceFile.Writer writer = SequenceFile.createWriter(jobconf,
      Writer.file(dstlsr), Writer.keyClass(Text.class),
      Writer.valueClass(NullWritable.class), Writer.compression(
      SequenceFile.CompressionType.NONE))) {
    //do lsr to get all file statuses in dstroot
    final Stack<FileStatus> lsrstack = new Stack<FileStatus>();
    for(lsrstack.push(dstroot); !lsrstack.isEmpty(); ) {
      final FileStatus status = lsrstack.pop();
      if (status.isDirectory()) {
        for(FileStatus child : dstfs.listStatus(status.getPath())) {
          String relative = makeRelative(dstroot.getPath(), child.getPath());
          writer.append(new Text(relative), NullWritable.get());
          lsrstack.push(child);
        }
      }
    }
  }
  //sort lsr results
  final Path sortedlsr = new Path(jobdir, "_distcp_dst_lsr_sorted");
  SequenceFile.Sorter sorter = new SequenceFile.Sorter(jobfs,
      new Text.Comparator(), Text.class, NullWritable.class, jobconf);
  sorter.sort(dstlsr, sortedlsr);
  //compare lsr list and dst list
  long deletedPathsCount = 0;
  try (SequenceFile.Reader lsrin =
           new SequenceFile.Reader(jobconf, Reader.file(sortedlsr));
       SequenceFile.Reader dstin =
           new SequenceFile.Reader(jobconf, Reader.file(dstsorted))) {
    //compare sorted lsr list and sorted dst list
    final Text lsrpath = new Text();
    final Text dstpath = new Text();
    final Text dstfrom = new Text();
    final Trash trash = new Trash(dstfs, conf);
    Path lastpath = null;
    boolean hasnext = dstin.next(dstpath, dstfrom);
    while (lsrin.next(lsrpath, NullWritable.get())) {
      // advance the planned-dst cursor until it is >= the current lsr entry
      int dst_cmp_lsr = dstpath.compareTo(lsrpath);
      while (hasnext && dst_cmp_lsr < 0) {
        hasnext = dstin.next(dstpath, dstfrom);
        dst_cmp_lsr = dstpath.compareTo(lsrpath);
      }
      if (dst_cmp_lsr == 0) {
        //lsrpath exists in dst, skip it
        hasnext = dstin.next(dstpath, dstfrom);
      } else {
        //lsrpath does not exist, delete it
        final Path rmpath = new Path(dstroot.getPath(), lsrpath.toString());
        ++deletedPathsCount;
        if ((lastpath == null || !isAncestorPath(lastpath, rmpath))) {
          if (!(trash.moveToTrash(rmpath) || dstfs.delete(rmpath, true))) {
            throw new IOException("Failed to delete " + rmpath);
          }
          lastpath = rmpath;
        }
      }
    }
  }
  return deletedPathsCount;
}
//is x an ancestor path of y?
static private boolean isAncestorPath(Path xp, Path yp) {
final String x = xp.toString();
final String y = yp.toString();
if (!y.startsWith(x)) {
return false;
}
final int len = x.length();
return y.length() == len || y.charAt(len) == Path.SEPARATOR_CHAR;
}
/** Check whether the file list have duplication. */
static private void checkDuplication(FileSystem fs, Path file, Path sorted,
Configuration conf) throws IOException {
SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs,
new Text.Comparator(), Text.class, Text.class, conf);
sorter.sort(file, sorted);
try (SequenceFile.Reader in =
new SequenceFile.Reader(conf, Reader.file(sorted))) {
Text prevdst = null, curdst = new Text();
Text prevsrc = null, cursrc = new Text();
for(; in.next(curdst, cursrc); ) {
if (prevdst != null && curdst.equals(prevdst)) {
throw new DuplicationException(
"Invalid input, there are duplicated files in the sources: "
+ prevsrc + ", " + cursrc);
}
prevdst = curdst;
curdst = new Text();
prevsrc = cursrc;
cursrc = new Text();
}
}
}
/** An exception class for duplicated source files. */
public static class DuplicationException extends IOException {
private static final long serialVersionUID = 1L;
/** Error code for this exception */
public static final int ERROR_CODE = -2;
DuplicationException(String message) {super(message);}
}
}
| robzor92/hops | hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java | Java | apache-2.0 | 63,413 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly.mysvc.impl3;
import org.apache.aries.spifly.mysvc.SPIProvider;
public class SPIProviderImpl extends SPIProvider {
@Override
public String doit() {
return "Doing it as well!";
}
}
| fwassmer/aries | spi-fly/spi-fly-examples/spi-fly-example-provider3-bundle/src/main/java/org/apache/aries/spifly/mysvc/impl3/SPIProviderImpl.java | Java | apache-2.0 | 1,055 |
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax
(function() {
function foo(p) { return p.catch(); }
%PrepareFunctionForOptimization(foo);
foo(Promise.resolve(1));
foo(Promise.resolve(1));
%OptimizeFunctionOnNextCall(foo);
foo(Promise.resolve(1));
})();
(function() {
function foo(p) { return p.catch(foo); }
%PrepareFunctionForOptimization(foo);
foo(Promise.resolve(1));
foo(Promise.resolve(1));
%OptimizeFunctionOnNextCall(foo);
foo(Promise.resolve(1));
})();
(function() {
function foo(p) { return p.catch(foo, undefined); }
%PrepareFunctionForOptimization(foo);
foo(Promise.resolve(1));
foo(Promise.resolve(1));
%OptimizeFunctionOnNextCall(foo);
foo(Promise.resolve(1));
})();
| zero-rp/miniblink49 | v8_7_5/test/mjsunit/compiler/promise-prototype-catch.js | JavaScript | apache-2.0 | 872 |
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package namespace
import (
"fmt"
"sync"
"time"
"github.com/golang/glog"
"k8s.io/kubernetes/pkg/api/errors"
"k8s.io/kubernetes/pkg/api/v1"
metav1 "k8s.io/kubernetes/pkg/apis/meta/v1"
"k8s.io/kubernetes/pkg/apis/meta/v1/unstructured"
clientset "k8s.io/kubernetes/pkg/client/clientset_generated/clientset"
"k8s.io/kubernetes/pkg/client/typed/discovery"
"k8s.io/kubernetes/pkg/client/typed/dynamic"
"k8s.io/kubernetes/pkg/runtime/schema"
"k8s.io/kubernetes/pkg/util/sets"
)
// contentRemainingError is used to inform the caller that content is not fully removed from the namespace
type contentRemainingError struct {
Estimate int64
}
func (e *contentRemainingError) Error() string {
return fmt.Sprintf("some content remains in the namespace, estimate %d seconds before it is removed", e.Estimate)
}
// operation is used for caching if an operation is supported on a dynamic client.
type operation string
const (
operationDeleteCollection operation = "deleteCollection"
operationList operation = "list"
// assume a default estimate for finalizers to complete when found on items pending deletion.
finalizerEstimateSeconds int64 = int64(15)
)
// operationKey is an entry in a cache.
type operationKey struct {
op operation
gvr schema.GroupVersionResource
}
// operationNotSupportedCache is a simple cache to remember if an operation is not supported for a resource.
// if the operationKey maps to true, it means the operation is not supported.
type operationNotSupportedCache struct {
lock sync.RWMutex
m map[operationKey]bool
}
// isSupported returns true if the operation is supported
func (o *operationNotSupportedCache) isSupported(key operationKey) bool {
o.lock.RLock()
defer o.lock.RUnlock()
return !o.m[key]
}
func (o *operationNotSupportedCache) setNotSupported(key operationKey) {
o.lock.Lock()
defer o.lock.Unlock()
o.m[key] = true
}
// updateNamespaceFunc is a function that makes an update to a namespace
type updateNamespaceFunc func(kubeClient clientset.Interface, namespace *v1.Namespace) (*v1.Namespace, error)
// retryOnConflictError retries the specified fn if there was a conflict error
// it will return an error if the UID for an object changes across retry operations.
// TODO RetryOnConflict should be a generic concept in client code
func retryOnConflictError(kubeClient clientset.Interface, namespace *v1.Namespace, fn updateNamespaceFunc) (result *v1.Namespace, err error) {
latestNamespace := namespace
for {
result, err = fn(kubeClient, latestNamespace)
if err == nil {
return result, nil
}
if !errors.IsConflict(err) {
return nil, err
}
prevNamespace := latestNamespace
latestNamespace, err = kubeClient.Core().Namespaces().Get(latestNamespace.Name, metav1.GetOptions{})
if err != nil {
return nil, err
}
if prevNamespace.UID != latestNamespace.UID {
return nil, fmt.Errorf("namespace uid has changed across retries")
}
}
}
// updateNamespaceStatusFunc will verify that the status of the namespace is correct
func updateNamespaceStatusFunc(kubeClient clientset.Interface, namespace *v1.Namespace) (*v1.Namespace, error) {
if namespace.DeletionTimestamp.IsZero() || namespace.Status.Phase == v1.NamespaceTerminating {
return namespace, nil
}
newNamespace := v1.Namespace{}
newNamespace.ObjectMeta = namespace.ObjectMeta
newNamespace.Status = namespace.Status
newNamespace.Status.Phase = v1.NamespaceTerminating
return kubeClient.Core().Namespaces().UpdateStatus(&newNamespace)
}
// finalized returns true if the namespace.Spec.Finalizers is an empty list
func finalized(namespace *v1.Namespace) bool {
return len(namespace.Spec.Finalizers) == 0
}
// finalizeNamespaceFunc returns a function that knows how to finalize a namespace for specified token.
func finalizeNamespaceFunc(finalizerToken v1.FinalizerName) updateNamespaceFunc {
return func(kubeClient clientset.Interface, namespace *v1.Namespace) (*v1.Namespace, error) {
return finalizeNamespace(kubeClient, namespace, finalizerToken)
}
}
// finalizeNamespace removes the specified finalizerToken and finalizes the namespace
func finalizeNamespace(kubeClient clientset.Interface, namespace *v1.Namespace, finalizerToken v1.FinalizerName) (*v1.Namespace, error) {
namespaceFinalize := v1.Namespace{}
namespaceFinalize.ObjectMeta = namespace.ObjectMeta
namespaceFinalize.Spec = namespace.Spec
finalizerSet := sets.NewString()
for i := range namespace.Spec.Finalizers {
if namespace.Spec.Finalizers[i] != finalizerToken {
finalizerSet.Insert(string(namespace.Spec.Finalizers[i]))
}
}
namespaceFinalize.Spec.Finalizers = make([]v1.FinalizerName, 0, len(finalizerSet))
for _, value := range finalizerSet.List() {
namespaceFinalize.Spec.Finalizers = append(namespaceFinalize.Spec.Finalizers, v1.FinalizerName(value))
}
namespace, err := kubeClient.Core().Namespaces().Finalize(&namespaceFinalize)
if err != nil {
// it was removed already, so life is good
if errors.IsNotFound(err) {
return namespace, nil
}
}
return namespace, err
}
// deleteCollection is a helper function that will delete the collection of resources
// it returns true if the operation was supported on the server.
// it returns an error if the operation was supported on the server but was unable to complete.
func deleteCollection(
dynamicClient *dynamic.Client,
opCache *operationNotSupportedCache,
gvr schema.GroupVersionResource,
namespace string,
) (bool, error) {
glog.V(5).Infof("namespace controller - deleteCollection - namespace: %s, gvr: %v", namespace, gvr)
key := operationKey{op: operationDeleteCollection, gvr: gvr}
if !opCache.isSupported(key) {
glog.V(5).Infof("namespace controller - deleteCollection ignored since not supported - namespace: %s, gvr: %v", namespace, gvr)
return false, nil
}
apiResource := metav1.APIResource{Name: gvr.Resource, Namespaced: true}
// namespace controller does not want the garbage collector to insert the orphan finalizer since it calls
// resource deletions generically. it will ensure all resources in the namespace are purged prior to releasing
// namespace itself.
orphanDependents := false
err := dynamicClient.Resource(&apiResource, namespace).DeleteCollection(&v1.DeleteOptions{OrphanDependents: &orphanDependents}, &v1.ListOptions{})
if err == nil {
return true, nil
}
// this is strange, but we need to special case for both MethodNotSupported and NotFound errors
// TODO: https://github.com/kubernetes/kubernetes/issues/22413
// we have a resource returned in the discovery API that supports no top-level verbs:
// /apis/extensions/v1beta1/namespaces/default/replicationcontrollers
// when working with this resource type, we will get a literal not found error rather than expected method not supported
// remember next time that this resource does not support delete collection...
if errors.IsMethodNotSupported(err) || errors.IsNotFound(err) {
glog.V(5).Infof("namespace controller - deleteCollection not supported - namespace: %s, gvr: %v", namespace, gvr)
opCache.setNotSupported(key)
return false, nil
}
glog.V(5).Infof("namespace controller - deleteCollection unexpected error - namespace: %s, gvr: %v, error: %v", namespace, gvr, err)
return true, err
}
// listCollection will list the items in the specified namespace
// it returns the following:
// the list of items in the collection (if found)
// a boolean if the operation is supported
// an error if the operation is supported but could not be completed.
func listCollection(
dynamicClient *dynamic.Client,
opCache *operationNotSupportedCache,
gvr schema.GroupVersionResource,
namespace string,
) (*unstructured.UnstructuredList, bool, error) {
glog.V(5).Infof("namespace controller - listCollection - namespace: %s, gvr: %v", namespace, gvr)
key := operationKey{op: operationList, gvr: gvr}
if !opCache.isSupported(key) {
glog.V(5).Infof("namespace controller - listCollection ignored since not supported - namespace: %s, gvr: %v", namespace, gvr)
return nil, false, nil
}
apiResource := metav1.APIResource{Name: gvr.Resource, Namespaced: true}
obj, err := dynamicClient.Resource(&apiResource, namespace).List(&v1.ListOptions{})
if err == nil {
unstructuredList, ok := obj.(*unstructured.UnstructuredList)
if !ok {
return nil, false, fmt.Errorf("resource: %s, expected *unstructured.UnstructuredList, got %#v", apiResource.Name, obj)
}
return unstructuredList, true, nil
}
// this is strange, but we need to special case for both MethodNotSupported and NotFound errors
// TODO: https://github.com/kubernetes/kubernetes/issues/22413
// we have a resource returned in the discovery API that supports no top-level verbs:
// /apis/extensions/v1beta1/namespaces/default/replicationcontrollers
// when working with this resource type, we will get a literal not found error rather than expected method not supported
// remember next time that this resource does not support delete collection...
if errors.IsMethodNotSupported(err) || errors.IsNotFound(err) {
glog.V(5).Infof("namespace controller - listCollection not supported - namespace: %s, gvr: %v", namespace, gvr)
opCache.setNotSupported(key)
return nil, false, nil
}
return nil, true, err
}
// deleteEachItem is a helper function that will list the collection of resources and delete each item 1 by 1.
func deleteEachItem(
dynamicClient *dynamic.Client,
opCache *operationNotSupportedCache,
gvr schema.GroupVersionResource,
namespace string,
) error {
glog.V(5).Infof("namespace controller - deleteEachItem - namespace: %s, gvr: %v", namespace, gvr)
unstructuredList, listSupported, err := listCollection(dynamicClient, opCache, gvr, namespace)
if err != nil {
return err
}
if !listSupported {
return nil
}
apiResource := metav1.APIResource{Name: gvr.Resource, Namespaced: true}
for _, item := range unstructuredList.Items {
if err = dynamicClient.Resource(&apiResource, namespace).Delete(item.GetName(), nil); err != nil && !errors.IsNotFound(err) && !errors.IsMethodNotSupported(err) {
return err
}
}
return nil
}
// deleteAllContentForGroupVersionResource will use the dynamic client to delete each resource identified in gvr.
// It returns an estimate of the time remaining before the remaining resources are deleted.
// If estimate > 0, not all resources are guaranteed to be gone.
func deleteAllContentForGroupVersionResource(
kubeClient clientset.Interface,
clientPool dynamic.ClientPool,
opCache *operationNotSupportedCache,
gvr schema.GroupVersionResource,
namespace string,
namespaceDeletedAt metav1.Time,
) (int64, error) {
glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - namespace: %s, gvr: %v", namespace, gvr)
// estimate how long it will take for the resource to be deleted (needed for objects that support graceful delete)
estimate, err := estimateGracefulTermination(kubeClient, gvr, namespace, namespaceDeletedAt)
if err != nil {
glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - unable to estimate - namespace: %s, gvr: %v, err: %v", namespace, gvr, err)
return estimate, err
}
glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - estimate - namespace: %s, gvr: %v, estimate: %v", namespace, gvr, estimate)
// get a client for this group version...
dynamicClient, err := clientPool.ClientForGroupVersionResource(gvr)
if err != nil {
glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - unable to get client - namespace: %s, gvr: %v, err: %v", namespace, gvr, err)
return estimate, err
}
// first try to delete the entire collection
deleteCollectionSupported, err := deleteCollection(dynamicClient, opCache, gvr, namespace)
if err != nil {
return estimate, err
}
// delete collection was not supported, so we list and delete each item...
if !deleteCollectionSupported {
err = deleteEachItem(dynamicClient, opCache, gvr, namespace)
if err != nil {
return estimate, err
}
}
// verify there are no more remaining items
// it is not an error condition for there to be remaining items if local estimate is non-zero
glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - checking for no more items in namespace: %s, gvr: %v", namespace, gvr)
unstructuredList, listSupported, err := listCollection(dynamicClient, opCache, gvr, namespace)
if err != nil {
glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - error verifying no items in namespace: %s, gvr: %v, err: %v", namespace, gvr, err)
return estimate, err
}
if !listSupported {
return estimate, nil
}
glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - items remaining - namespace: %s, gvr: %v, items: %v", namespace, gvr, len(unstructuredList.Items))
if len(unstructuredList.Items) != 0 && estimate == int64(0) {
// if any item has a finalizer, we treat that as a normal condition, and use a default estimation to allow for GC to complete.
for _, item := range unstructuredList.Items {
if len(item.GetFinalizers()) > 0 {
glog.V(5).Infof("namespace controller - deleteAllContentForGroupVersionResource - items remaining with finalizers - namespace: %s, gvr: %v, finalizers: %v", namespace, gvr, item.GetFinalizers())
return finalizerEstimateSeconds, nil
}
}
// nothing reported a finalizer, so something was unexpected as it should have been deleted.
return estimate, fmt.Errorf("unexpected items still remain in namespace: %s for gvr: %v", namespace, gvr)
}
return estimate, nil
}
// deleteAllContent will use the dynamic client to delete each resource identified in groupVersionResources.
// It returns an estimate of the time remaining before the remaining resources are deleted.
// If estimate > 0, not all resources are guaranteed to be gone.
func deleteAllContent(
kubeClient clientset.Interface,
clientPool dynamic.ClientPool,
opCache *operationNotSupportedCache,
groupVersionResources map[schema.GroupVersionResource]struct{},
namespace string,
namespaceDeletedAt metav1.Time,
) (int64, error) {
estimate := int64(0)
glog.V(4).Infof("namespace controller - deleteAllContent - namespace: %s, gvrs: %v", namespace, groupVersionResources)
for gvr := range groupVersionResources {
gvrEstimate, err := deleteAllContentForGroupVersionResource(kubeClient, clientPool, opCache, gvr, namespace, namespaceDeletedAt)
if err != nil {
return estimate, err
}
if gvrEstimate > estimate {
estimate = gvrEstimate
}
}
glog.V(4).Infof("namespace controller - deleteAllContent - namespace: %s, estimate: %v", namespace, estimate)
return estimate, nil
}
// syncNamespace orchestrates deletion of a Namespace and its associated content.
func syncNamespace(
kubeClient clientset.Interface,
clientPool dynamic.ClientPool,
opCache *operationNotSupportedCache,
discoverResourcesFn func() ([]*metav1.APIResourceList, error),
namespace *v1.Namespace,
finalizerToken v1.FinalizerName,
) error {
if namespace.DeletionTimestamp == nil {
return nil
}
// multiple controllers may edit a namespace during termination
// first get the latest state of the namespace before proceeding
// if the namespace was deleted already, don't do anything
namespace, err := kubeClient.Core().Namespaces().Get(namespace.Name, metav1.GetOptions{})
if err != nil {
if errors.IsNotFound(err) {
return nil
}
return err
}
glog.V(5).Infof("namespace controller - syncNamespace - namespace: %s, finalizerToken: %s", namespace.Name, finalizerToken)
// ensure that the status is up to date on the namespace
// if we get a not found error, we assume the namespace is truly gone
namespace, err = retryOnConflictError(kubeClient, namespace, updateNamespaceStatusFunc)
if err != nil {
if errors.IsNotFound(err) {
return nil
}
return err
}
// the latest view of the namespace asserts that namespace is no longer deleting..
if namespace.DeletionTimestamp.IsZero() {
return nil
}
// if the namespace is already finalized, delete it
if finalized(namespace) {
var opts *v1.DeleteOptions
uid := namespace.UID
if len(uid) > 0 {
opts = &v1.DeleteOptions{Preconditions: &v1.Preconditions{UID: &uid}}
}
err = kubeClient.Core().Namespaces().Delete(namespace.Name, opts)
if err != nil && !errors.IsNotFound(err) {
return err
}
return nil
}
// there may still be content for us to remove
resources, err := discoverResourcesFn()
if err != nil {
return err
}
// TODO(sttts): get rid of opCache and pass the verbs (especially "deletecollection") down into the deleter
deletableResources := discovery.FilteredBy(discovery.SupportsAllVerbs{Verbs: []string{"delete"}}, resources)
groupVersionResources, err := discovery.GroupVersionResources(deletableResources)
if err != nil {
return err
}
estimate, err := deleteAllContent(kubeClient, clientPool, opCache, groupVersionResources, namespace.Name, *namespace.DeletionTimestamp)
if err != nil {
return err
}
if estimate > 0 {
return &contentRemainingError{estimate}
}
// we have removed content, so mark it finalized by us
result, err := retryOnConflictError(kubeClient, namespace, finalizeNamespaceFunc(finalizerToken))
if err != nil {
// in normal practice, this should not be possible, but if a deployment is running
// two controllers to do namespace deletion that share a common finalizer token it's
// possible that a not found could occur since the other controller would have finished the delete.
if errors.IsNotFound(err) {
return nil
}
return err
}
// now check if all finalizers have reported that we delete now
if finalized(result) {
err = kubeClient.Core().Namespaces().Delete(namespace.Name, nil)
if err != nil && !errors.IsNotFound(err) {
return err
}
}
return nil
}
// estimateGrracefulTermination will estimate the graceful termination required for the specific entity in the namespace
func estimateGracefulTermination(kubeClient clientset.Interface, groupVersionResource schema.GroupVersionResource, ns string, namespaceDeletedAt metav1.Time) (int64, error) {
groupResource := groupVersionResource.GroupResource()
glog.V(5).Infof("namespace controller - estimateGracefulTermination - group %s, resource: %s", groupResource.Group, groupResource.Resource)
estimate := int64(0)
var err error
switch groupResource {
case schema.GroupResource{Group: "", Resource: "pods"}:
estimate, err = estimateGracefulTerminationForPods(kubeClient, ns)
}
if err != nil {
return estimate, err
}
// determine if the estimate is greater than the deletion timestamp
duration := time.Since(namespaceDeletedAt.Time)
allowedEstimate := time.Duration(estimate) * time.Second
if duration >= allowedEstimate {
estimate = int64(0)
}
return estimate, nil
}
// estimateGracefulTerminationForPods determines the graceful termination period for pods in the namespace
func estimateGracefulTerminationForPods(kubeClient clientset.Interface, ns string) (int64, error) {
glog.V(5).Infof("namespace controller - estimateGracefulTerminationForPods - namespace %s", ns)
estimate := int64(0)
items, err := kubeClient.Core().Pods(ns).List(v1.ListOptions{})
if err != nil {
return estimate, err
}
for i := range items.Items {
// filter out terminal pods
phase := items.Items[i].Status.Phase
if v1.PodSucceeded == phase || v1.PodFailed == phase {
continue
}
if items.Items[i].Spec.TerminationGracePeriodSeconds != nil {
grace := *items.Items[i].Spec.TerminationGracePeriodSeconds
if grace > estimate {
estimate = grace
}
}
}
return estimate, nil
}
| ikropotov/kops | vendor/k8s.io/kubernetes/pkg/controller/namespace/namespace_controller_utils.go | GO | apache-2.0 | 20,246 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
var LoginController = function($scope, $log, $uibModal, authService, userService) {
$scope.credentials = {
username: '',
password: ''
};
$scope.login = function($event, credentials) {
var $btn = $($event.target);
$btn.prop('disabled', true); // disable the login button to prevent multiple clicks
authService.login(credentials.username, credentials.password)
.then(
function() {
$btn.prop('disabled', false); // re-enable it
}
);
};
$scope.resetPassword = function() {
var modalInstance = $uibModal.open({
templateUrl: 'common/modules/dialog/reset/dialog.reset.tpl.html',
controller: 'DialogResetController'
});
modalInstance.result.then(function(email) {
userService.resetPassword(email);
}, function () {
});
};
var init = function() {};
init();
};
LoginController.$inject = ['$scope', '$log', '$uibModal', 'authService', 'userService'];
module.exports = LoginController;
| jeffmart/incubator-trafficcontrol | traffic_portal/app/src/modules/public/login/LoginController.js | JavaScript | apache-2.0 | 1,916 |
import { j as _inherits, k as _createSuper, c as _classCallCheck, T as Type, b as _createClass, R as Range, N as Node, g as YAMLSemanticError, l as _get, m as _getPrototypeOf, Y as YAMLSyntaxError, C as Char, e as _defineProperty, P as PlainValue } from './PlainValue-b8036b75.js';
var BlankLine = /*#__PURE__*/function (_Node) {
_inherits(BlankLine, _Node);
var _super = _createSuper(BlankLine);
function BlankLine() {
_classCallCheck(this, BlankLine);
return _super.call(this, Type.BLANK_LINE);
}
/* istanbul ignore next */
_createClass(BlankLine, [{
key: "includesTrailingLines",
get: function get() {
// This is never called from anywhere, but if it were,
// this is the value it should return.
return true;
}
/**
* Parses a blank line from the source
*
* @param {ParseContext} context
* @param {number} start - Index of first \n character
* @returns {number} - Index of the character after this
*/
}, {
key: "parse",
value: function parse(context, start) {
this.context = context;
this.range = new Range(start, start + 1);
return start + 1;
}
}]);
return BlankLine;
}(Node);
var CollectionItem = /*#__PURE__*/function (_Node) {
_inherits(CollectionItem, _Node);
var _super = _createSuper(CollectionItem);
function CollectionItem(type, props) {
var _this;
_classCallCheck(this, CollectionItem);
_this = _super.call(this, type, props);
_this.node = null;
return _this;
}
_createClass(CollectionItem, [{
key: "includesTrailingLines",
get: function get() {
return !!this.node && this.node.includesTrailingLines;
}
/**
* @param {ParseContext} context
* @param {number} start - Index of first character
* @returns {number} - Index of the character after this
*/
}, {
key: "parse",
value: function parse(context, start) {
this.context = context;
var parseNode = context.parseNode,
src = context.src;
var atLineStart = context.atLineStart,
lineStart = context.lineStart;
if (!atLineStart && this.type === Type.SEQ_ITEM) this.error = new YAMLSemanticError(this, 'Sequence items must not have preceding content on the same line');
var indent = atLineStart ? start - lineStart : context.indent;
var offset = Node.endOfWhiteSpace(src, start + 1);
var ch = src[offset];
var inlineComment = ch === '#';
var comments = [];
var blankLine = null;
while (ch === '\n' || ch === '#') {
if (ch === '#') {
var _end = Node.endOfLine(src, offset + 1);
comments.push(new Range(offset, _end));
offset = _end;
} else {
atLineStart = true;
lineStart = offset + 1;
var wsEnd = Node.endOfWhiteSpace(src, lineStart);
if (src[wsEnd] === '\n' && comments.length === 0) {
blankLine = new BlankLine();
lineStart = blankLine.parse({
src: src
}, lineStart);
}
offset = Node.endOfIndent(src, lineStart);
}
ch = src[offset];
}
if (Node.nextNodeIsIndented(ch, offset - (lineStart + indent), this.type !== Type.SEQ_ITEM)) {
this.node = parseNode({
atLineStart: atLineStart,
inCollection: false,
indent: indent,
lineStart: lineStart,
parent: this
}, offset);
} else if (ch && lineStart > start + 1) {
offset = lineStart - 1;
}
if (this.node) {
if (blankLine) {
// Only blank lines preceding non-empty nodes are captured. Note that
// this means that collection item range start indices do not always
// increase monotonically. -- eemeli/yaml#126
var items = context.parent.items || context.parent.contents;
if (items) items.push(blankLine);
}
if (comments.length) Array.prototype.push.apply(this.props, comments);
offset = this.node.range.end;
} else {
if (inlineComment) {
var c = comments[0];
this.props.push(c);
offset = c.end;
} else {
offset = Node.endOfLine(src, start + 1);
}
}
var end = this.node ? this.node.valueRange.end : offset;
this.valueRange = new Range(start, end);
return offset;
}
}, {
key: "setOrigRanges",
value: function setOrigRanges(cr, offset) {
offset = _get(_getPrototypeOf(CollectionItem.prototype), "setOrigRanges", this).call(this, cr, offset);
return this.node ? this.node.setOrigRanges(cr, offset) : offset;
}
}, {
key: "toString",
value: function toString() {
var src = this.context.src,
node = this.node,
range = this.range,
value = this.value;
if (value != null) return value;
var str = node ? src.slice(range.start, node.range.start) + String(node) : src.slice(range.start, range.end);
return Node.addStringTerminator(src, range.end, str);
}
}]);
return CollectionItem;
}(Node);
var Comment = /*#__PURE__*/function (_Node) {
_inherits(Comment, _Node);
var _super = _createSuper(Comment);
function Comment() {
_classCallCheck(this, Comment);
return _super.call(this, Type.COMMENT);
}
/**
* Parses a comment line from the source
*
* @param {ParseContext} context
* @param {number} start - Index of first character
* @returns {number} - Index of the character after this scalar
*/
_createClass(Comment, [{
key: "parse",
value: function parse(context, start) {
this.context = context;
var offset = this.parseComment(start);
this.range = new Range(start, offset);
return offset;
}
}]);
return Comment;
}(Node);
function grabCollectionEndComments(node) {
var cnode = node;
while (cnode instanceof CollectionItem) {
cnode = cnode.node;
}
if (!(cnode instanceof Collection)) return null;
var len = cnode.items.length;
var ci = -1;
for (var i = len - 1; i >= 0; --i) {
var n = cnode.items[i];
if (n.type === Type.COMMENT) {
// Keep sufficiently indented comments with preceding node
var _n$context = n.context,
indent = _n$context.indent,
lineStart = _n$context.lineStart;
if (indent > 0 && n.range.start >= lineStart + indent) break;
ci = i;
} else if (n.type === Type.BLANK_LINE) ci = i;else break;
}
if (ci === -1) return null;
var ca = cnode.items.splice(ci, len - ci);
var prevEnd = ca[0].range.start;
while (true) {
cnode.range.end = prevEnd;
if (cnode.valueRange && cnode.valueRange.end > prevEnd) cnode.valueRange.end = prevEnd;
if (cnode === node) break;
cnode = cnode.context.parent;
}
return ca;
}
/**
 * CST node for a block collection (SEQ or MAP), holding its items together
 * with any interleaved comment and blank-line nodes.
 */
var Collection = /*#__PURE__*/function (_Node) {
_inherits(Collection, _Node);
var _super = _createSuper(Collection);
/**
 * @param {Node} firstItem - Already-parsed first item; its type (SEQ_ITEM or
 *   not) determines whether this collection is a SEQ or a MAP.
 */
function Collection(firstItem) {
var _this;
_classCallCheck(this, Collection);
_this = _super.call(this, firstItem.type === Type.SEQ_ITEM ? Type.SEQ : Type.MAP);
for (var i = firstItem.props.length - 1; i >= 0; --i) {
if (firstItem.props[i].start < firstItem.context.lineStart) {
// props on previous line are assumed by the collection
_this.props = firstItem.props.slice(0, i + 1);
firstItem.props = firstItem.props.slice(i + 1);
var itemRange = firstItem.props[0] || firstItem.valueRange;
firstItem.range.start = itemRange.start;
break;
}
}
_this.items = [firstItem];
// End-comments of the first item belong to the collection, not the item
var ec = grabCollectionEndComments(firstItem);
if (ec) Array.prototype.push.apply(_this.items, ec);
return _this;
}
_createClass(Collection, [{
key: "includesTrailingLines",
get: function get() {
return this.items.length > 0;
}
/**
 * @param {ParseContext} context
 * @param {number} start - Index of first character
 * @returns {number} - Index of the character after this
 */
}, {
key: "parse",
value: function parse(context, start) {
this.context = context;
var parseNode = context.parseNode,
src = context.src; // It's easier to recalculate lineStart here rather than tracking down the
// last context from which to read it -- eemeli/yaml#2
var lineStart = Node.startOfLine(src, start);
var firstItem = this.items[0]; // First-item context needs to be correct for later comment handling
// -- eemeli/yaml#17
firstItem.context.parent = this;
this.valueRange = Range.copy(firstItem.valueRange);
// Collection indent is derived from where the first item started
var indent = firstItem.range.start - firstItem.context.lineStart;
var offset = start;
offset = Node.normalizeOffset(src, offset);
var ch = src[offset];
var atLineStart = Node.endOfWhiteSpace(src, lineStart) === offset;
var prevIncludesTrailingLines = false;
while (ch) {
// Inner loop: consume blank lines and comment lines before the next item
while (ch === '\n' || ch === '#') {
if (atLineStart && ch === '\n' && !prevIncludesTrailingLines) {
var blankLine = new BlankLine();
offset = blankLine.parse({
src: src
}, offset);
this.valueRange.end = offset;
if (offset >= src.length) {
ch = null;
break;
}
this.items.push(blankLine);
offset -= 1; // blankLine.parse() consumes terminal newline
} else if (ch === '#') {
// A less-indented comment ends the collection, unless later content
// is indented enough to continue it
if (offset < lineStart + indent && !Collection.nextContentHasIndent(src, offset, indent)) {
return offset;
}
var comment = new Comment();
offset = comment.parse({
indent: indent,
lineStart: lineStart,
src: src
}, offset);
this.items.push(comment);
this.valueRange.end = offset;
if (offset >= src.length) {
ch = null;
break;
}
}
lineStart = offset + 1;
offset = Node.endOfIndent(src, lineStart);
if (Node.atBlank(src, offset)) {
var wsEnd = Node.endOfWhiteSpace(src, offset);
var next = src[wsEnd];
if (!next || next === '\n' || next === '#') {
offset = wsEnd;
}
}
ch = src[offset];
atLineStart = true;
}
if (!ch) {
break;
}
// Enforce that every item starts at the collection's column; a ':' on a
// continuation line is tolerated (implicit map value)
if (offset !== lineStart + indent && (atLineStart || ch !== ':')) {
if (offset < lineStart + indent) {
if (lineStart > start) offset = lineStart;
break;
} else if (!this.error) {
var msg = 'All collection items must start at the same column';
this.error = new YAMLSyntaxError(this, msg);
}
}
if (firstItem.type === Type.SEQ_ITEM) {
if (ch !== '-') {
if (lineStart > start) offset = lineStart;
break;
}
} else if (ch === '-' && !this.error) {
// map key may start with -, as long as it's followed by a non-whitespace char
var _next = src[offset + 1];
if (!_next || _next === '\n' || _next === '\t' || _next === ' ') {
var _msg = 'A collection cannot be both a mapping and a sequence';
this.error = new YAMLSyntaxError(this, _msg);
}
}
var node = parseNode({
atLineStart: atLineStart,
inCollection: true,
indent: indent,
lineStart: lineStart,
parent: this
}, offset);
if (!node) return offset; // at next document start
this.items.push(node);
this.valueRange.end = node.valueRange.end;
offset = Node.normalizeOffset(src, node.range.end);
ch = src[offset];
atLineStart = false;
prevIncludesTrailingLines = node.includesTrailingLines; // Need to reset lineStart and atLineStart here if preceding node's range
// has advanced to check the current line's indentation level
// -- eemeli/yaml#10 & eemeli/yaml#38
if (ch) {
var ls = offset - 1;
var prev = src[ls];
while (prev === ' ' || prev === '\t') {
prev = src[--ls];
}
if (prev === '\n') {
lineStart = ls + 1;
atLineStart = true;
}
}
var ec = grabCollectionEndComments(node);
if (ec) Array.prototype.push.apply(this.items, ec);
}
return offset;
}
}, {
key: "setOrigRanges",
value: function setOrigRanges(cr, offset) {
// Propagate original (pre-CR-normalization) offsets to self and items
offset = _get(_getPrototypeOf(Collection.prototype), "setOrigRanges", this).call(this, cr, offset);
this.items.forEach(function (node) {
offset = node.setOrigRanges(cr, offset);
});
return offset;
}
}, {
key: "toString",
value: function toString() {
var src = this.context.src,
items = this.items,
range = this.range,
value = this.value;
if (value != null) return value;
// Re-emit source text, re-indenting items that start at a line start
var str = src.slice(range.start, items[0].range.start) + String(items[0]);
for (var i = 1; i < items.length; ++i) {
var item = items[i];
var _item$context = item.context,
atLineStart = _item$context.atLineStart,
indent = _item$context.indent;
if (atLineStart) for (var _i = 0; _i < indent; ++_i) {
str += ' ';
}
str += String(item);
}
return Node.addStringTerminator(src, range.end, str);
}
}], [{
key: "nextContentHasIndent",
value: function nextContentHasIndent(src, offset, indent) {
// Recursively check whether any following non-blank, non-comment line is
// indented at least `indent` columns
var lineStart = Node.endOfLine(src, offset) + 1;
offset = Node.endOfWhiteSpace(src, lineStart);
var ch = src[offset];
if (!ch) return false;
if (offset >= lineStart + indent) return true;
if (ch !== '#' && ch !== '\n') return false;
return Collection.nextContentHasIndent(src, offset, indent);
}
}]);
return Collection;
}(Node);
/**
 * CST node for a %directive line (e.g. `%YAML 1.2` or `%TAG ...`).
 */
var Directive = /*#__PURE__*/function (_Node) {
_inherits(Directive, _Node);
var _super = _createSuper(Directive);
function Directive() {
var _this;
_classCallCheck(this, Directive);
_this = _super.call(this, Type.DIRECTIVE);
// Directive name, without the leading '%'; set by parseName()
_this.name = null;
return _this;
}
_createClass(Directive, [{
key: "parameters",
get: function get() {
// Whitespace-separated parameter tokens following the directive name
var raw = this.rawValue;
if (!raw) return [];
return raw.trim().split(/[ \t]+/);
}
}, {
key: "parseName",
value: function parseName(start) {
// Scan forward until whitespace, newline or end of source
var src = this.context.src;
var pos = start;
for (var c = src[pos]; c && c !== '\n' && c !== '\t' && c !== ' ';) {
c = src[pos += 1];
}
this.name = src.slice(start, pos);
return pos;
}
}, {
key: "parseParameters",
value: function parseParameters(start) {
// Everything up to a newline or comment marker belongs to the parameters
var src = this.context.src;
var pos = start;
for (var c = src[pos]; c && c !== '\n' && c !== '#';) {
c = src[pos += 1];
}
this.valueRange = new Range(start, pos);
return pos;
}
}, {
key: "parse",
value: function parse(context, start) {
// `start` points at the '%' indicator; the name begins right after it
this.context = context;
var pos = this.parseName(start + 1);
pos = this.parseParameters(pos);
pos = this.parseComment(pos);
this.range = new Range(start, pos);
return pos;
}
}]);
return Directive;
}(Node);
/**
 * CST node for a whole YAML document: its directives, the contents between
 * the optional `---` and `...` markers, and the markers themselves.
 */
var Document = /*#__PURE__*/function (_Node) {
_inherits(Document, _Node);
var _super = _createSuper(Document);
function Document() {
var _this;
_classCallCheck(this, Document);
_this = _super.call(this, Type.DOCUMENT);
// Nodes before the `---` marker (directives, comments, blank lines)
_this.directives = null;
// Nodes forming the document body
_this.contents = null;
// Range of the `---` marker, if present
_this.directivesEndMarker = null;
// Range of the `...` marker, if present
_this.documentEndMarker = null;
return _this;
}
_createClass(Document, [{
key: "parseDirectives",
value: function parseDirectives(start) {
var src = this.context.src;
this.directives = [];
var atLineStart = true;
var hasDirectives = false;
var offset = start;
// Consume blank lines, comments and %directives until the `---` marker
while (!Node.atDocumentBoundary(src, offset, Char.DIRECTIVES_END)) {
offset = Document.startCommentOrEndBlankLine(src, offset);
switch (src[offset]) {
case '\n':
if (atLineStart) {
var blankLine = new BlankLine();
offset = blankLine.parse({
src: src
}, offset);
if (offset < src.length) {
this.directives.push(blankLine);
}
} else {
offset += 1;
atLineStart = true;
}
break;
case '#':
{
var comment = new Comment();
offset = comment.parse({
src: src
}, offset);
this.directives.push(comment);
atLineStart = false;
}
break;
case '%':
{
var directive = new Directive();
offset = directive.parse({
parent: this,
src: src
}, offset);
this.directives.push(directive);
hasDirectives = true;
atLineStart = false;
}
break;
default:
// Content without a `---`: directives were required to be terminated;
// otherwise reinterpret what was collected as leading contents
if (hasDirectives) {
this.error = new YAMLSemanticError(this, 'Missing directives-end indicator line');
} else if (this.directives.length > 0) {
this.contents = this.directives;
this.directives = [];
}
return offset;
}
}
if (src[offset]) {
// Found the `---` marker (3 characters)
this.directivesEndMarker = new Range(offset, offset + 3);
return offset + 3;
}
if (hasDirectives) {
this.error = new YAMLSemanticError(this, 'Missing directives-end indicator line');
} else if (this.directives.length > 0) {
this.contents = this.directives;
this.directives = [];
}
return offset;
}
}, {
key: "parseContents",
value: function parseContents(start) {
var _this$context = this.context,
parseNode = _this$context.parseNode,
src = _this$context.src;
if (!this.contents) this.contents = [];
// Back up over a preceding `---` so lineStart covers the marker's line
var lineStart = start;
while (src[lineStart - 1] === '-') {
lineStart -= 1;
}
var offset = Node.endOfWhiteSpace(src, start);
var atLineStart = lineStart === start;
this.valueRange = new Range(offset);
// Consume body nodes until the `...` marker or end of source
while (!Node.atDocumentBoundary(src, offset, Char.DOCUMENT_END)) {
switch (src[offset]) {
case '\n':
if (atLineStart) {
var blankLine = new BlankLine();
offset = blankLine.parse({
src: src
}, offset);
if (offset < src.length) {
this.contents.push(blankLine);
}
} else {
offset += 1;
atLineStart = true;
}
lineStart = offset;
break;
case '#':
{
var comment = new Comment();
offset = comment.parse({
src: src
}, offset);
this.contents.push(comment);
atLineStart = false;
}
break;
default:
{
var iEnd = Node.endOfIndent(src, offset);
var context = {
atLineStart: atLineStart,
indent: -1,
inFlow: false,
inCollection: false,
lineStart: lineStart,
parent: this
};
var node = parseNode(context, iEnd);
if (!node) return this.valueRange.end = iEnd; // at next document start
this.contents.push(node);
offset = node.range.end;
atLineStart = false;
var ec = grabCollectionEndComments(node);
if (ec) Array.prototype.push.apply(this.contents, ec);
}
}
offset = Document.startCommentOrEndBlankLine(src, offset);
}
this.valueRange.end = offset;
if (src[offset]) {
// Found the `...` marker; only a comment may follow it on the same line
this.documentEndMarker = new Range(offset, offset + 3);
offset += 3;
if (src[offset]) {
offset = Node.endOfWhiteSpace(src, offset);
if (src[offset] === '#') {
var _comment = new Comment();
offset = _comment.parse({
src: src
}, offset);
this.contents.push(_comment);
}
switch (src[offset]) {
case '\n':
offset += 1;
break;
case undefined:
break;
default:
this.error = new YAMLSyntaxError(this, 'Document end marker line cannot have a non-comment suffix');
}
}
}
return offset;
}
/**
 * @param {ParseContext} context
 * @param {number} start - Index of first character
 * @returns {number} - Index of the character after this
 */
}, {
key: "parse",
value: function parse(context, start) {
context.root = this;
this.context = context;
var src = context.src;
var offset = src.charCodeAt(start) === 0xfeff ? start + 1 : start; // skip BOM
offset = this.parseDirectives(offset);
offset = this.parseContents(offset);
return offset;
}
}, {
key: "setOrigRanges",
value: function setOrigRanges(cr, offset) {
// Propagate original offsets through directives, markers and contents in
// source order
offset = _get(_getPrototypeOf(Document.prototype), "setOrigRanges", this).call(this, cr, offset);
this.directives.forEach(function (node) {
offset = node.setOrigRanges(cr, offset);
});
if (this.directivesEndMarker) offset = this.directivesEndMarker.setOrigRange(cr, offset);
this.contents.forEach(function (node) {
offset = node.setOrigRanges(cr, offset);
});
if (this.documentEndMarker) offset = this.documentEndMarker.setOrigRange(cr, offset);
return offset;
}
}, {
key: "toString",
value: function toString() {
var contents = this.contents,
directives = this.directives,
value = this.value;
if (value != null) return value;
var str = directives.join('');
if (contents.length > 0) {
// `---` is required after directives, or when the body would otherwise
// begin with a comment (which could be mistaken for a directive-area one)
if (directives.length > 0 || contents[0].type === Type.COMMENT) str += '---\n';
str += contents.join('');
}
if (str[str.length - 1] !== '\n') str += '\n';
return str;
}
}], [{
key: "startCommentOrEndBlankLine",
value: function startCommentOrEndBlankLine(src, start) {
// Skip whitespace only when it leads to a comment or end of line
var offset = Node.endOfWhiteSpace(src, start);
var ch = src[offset];
return ch === '#' || ch === '\n' ? offset : start;
}
}]);
return Document;
}(Node);
/**
 * CST node for an *alias reference to a previously anchored node.
 */
var Alias = /*#__PURE__*/function (_Node) {
_inherits(Alias, _Node);
var _super = _createSuper(Alias);
function Alias() {
_classCallCheck(this, Alias);
return _super.apply(this, arguments);
}
_createClass(Alias, [{
key: "parse",
value:
/**
 * Parses an *alias from the source
 *
 * @param {ParseContext} context
 * @param {number} start - Index of first character
 * @returns {number} - Index of the character after this scalar
 */
function parse(context, start) {
this.context = context;
var src = context.src;
// Skip the '*' indicator; the value is the identifier that follows it
var nameEnd = Node.endOfIdentifier(src, start + 1);
this.valueRange = new Range(start + 1, nameEnd);
// A comment may trail the alias on the same line
var pos = Node.endOfWhiteSpace(src, nameEnd);
return this.parseComment(pos);
}
}]);
return Alias;
}(Node);
// Block scalar chomping modes: CLIP (default) keeps a single trailing
// newline, KEEP ('+') retains all trailing newlines, STRIP ('-') removes them.
var Chomp = {
CLIP: 'CLIP',
KEEP: 'KEEP',
STRIP: 'STRIP'
};
/**
 * CST node for a block scalar (literal `|` or folded `>`), including its
 * header (chomping/indentation indicators) and value lines.
 */
var BlockValue = /*#__PURE__*/function (_Node) {
_inherits(BlockValue, _Node);
var _super = _createSuper(BlockValue);
function BlockValue(type, props) {
var _this;
_classCallCheck(this, BlockValue);
_this = _super.call(this, type, props);
// Explicit or detected content indent relative to the parent indent
_this.blockIndent = null;
// One of Chomp.CLIP / Chomp.KEEP / Chomp.STRIP
_this.chomping = Chomp.CLIP;
// Range of the `|`/`>` header, including its indicators
_this.header = null;
return _this;
}
_createClass(BlockValue, [{
key: "includesTrailingLines",
get: function get() {
return this.chomping === Chomp.KEEP;
}
}, {
key: "strValue",
get: function get() {
// Resolves the scalar's string value, applying indentation stripping,
// folding (for Type.BLOCK_FOLDED) and the chomping mode.
if (!this.valueRange || !this.context) return null;
var _this$valueRange = this.valueRange,
start = _this$valueRange.start,
end = _this$valueRange.end;
var _this$context = this.context,
indent = _this$context.indent,
src = _this$context.src;
if (this.valueRange.isEmpty()) return '';
var lastNewLine = null;
// Walk back over trailing whitespace to find the content end and the
// last newline (needed for chomping)
var ch = src[end - 1];
while (ch === '\n' || ch === '\t' || ch === ' ') {
end -= 1;
if (end <= start) {
if (this.chomping === Chomp.KEEP) break;else return ''; // probably never happens
}
if (ch === '\n') lastNewLine = end;
ch = src[end - 1];
}
var keepStart = end + 1;
if (lastNewLine) {
if (this.chomping === Chomp.KEEP) {
keepStart = lastNewLine;
end = this.valueRange.end;
} else {
end = lastNewLine;
}
}
// Total columns to strip from each content line
var bi = indent + this.blockIndent;
var folded = this.type === Type.BLOCK_FOLDED;
var atStart = true;
var str = '';
var sep = '';
var prevMoreIndented = false;
for (var i = start; i < end; ++i) {
// Skip up to `bi` leading spaces of the line's indentation
for (var j = 0; j < bi; ++j) {
if (src[i] !== ' ') break;
i += 1;
}
var _ch = src[i];
if (_ch === '\n') {
// Consecutive newlines collapse per folding rules
if (sep === '\n') str += '\n';else sep = '\n';
} else {
var lineEnd = Node.endOfLine(src, i);
var line = src.slice(i, lineEnd);
i = lineEnd;
if (folded && (_ch === ' ' || _ch === '\t') && i < keepStart) {
// More-indented lines in a folded scalar are kept literally
if (sep === ' ') sep = '\n';else if (!prevMoreIndented && !atStart && sep === '\n') sep = '\n\n';
str += sep + line; //+ ((lineEnd < end && src[lineEnd]) || '')
sep = lineEnd < end && src[lineEnd] || '';
prevMoreIndented = true;
} else {
str += sep + line;
sep = folded && i < keepStart ? ' ' : '\n';
prevMoreIndented = false;
}
if (atStart && line !== '') atStart = false;
}
}
return this.chomping === Chomp.STRIP ? str : str + '\n';
}
}, {
key: "parseBlockHeader",
value: function parseBlockHeader(start) {
// Reads the indicators after `|`/`>`: chomping (-/+) and an optional
// explicit indentation digit
var src = this.context.src;
var offset = start + 1;
var bi = '';
while (true) {
var ch = src[offset];
switch (ch) {
case '-':
this.chomping = Chomp.STRIP;
break;
case '+':
this.chomping = Chomp.KEEP;
break;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
bi += ch;
break;
default:
// '0' yields null here, i.e. no explicit indent indicator
this.blockIndent = Number(bi) || null;
this.header = new Range(start, offset);
return offset;
}
offset += 1;
}
}
}, {
key: "parseBlockValue",
value: function parseBlockValue(start) {
// Consumes the scalar's content lines, detecting the block indent from
// the first non-empty line when no explicit indicator was given
var _this$context2 = this.context,
indent = _this$context2.indent,
src = _this$context2.src;
var explicit = !!this.blockIndent;
var offset = start;
var valueEnd = start;
var minBlockIndent = 1;
for (var ch = src[offset]; ch === '\n'; ch = src[offset]) {
offset += 1;
if (Node.atDocumentBoundary(src, offset)) break;
var end = Node.endOfBlockIndent(src, indent, offset); // should not include tab?
if (end === null) break;
var _ch2 = src[end];
var lineIndent = end - (offset + indent);
if (!this.blockIndent) {
// no explicit block indent, none yet detected
if (src[end] !== '\n') {
// first line with non-whitespace content
if (lineIndent < minBlockIndent) {
var msg = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
this.error = new YAMLSemanticError(this, msg);
}
this.blockIndent = lineIndent;
} else if (lineIndent > minBlockIndent) {
// empty line with more whitespace
minBlockIndent = lineIndent;
}
} else if (_ch2 && _ch2 !== '\n' && lineIndent < this.blockIndent) {
// Less-indented content ends the scalar (comments are allowed after)
if (src[end] === '#') break;
if (!this.error) {
var _src = explicit ? 'explicit indentation indicator' : 'first line';
var _msg = "Block scalars must not be less indented than their ".concat(_src);
this.error = new YAMLSemanticError(this, _msg);
}
}
if (src[end] === '\n') {
offset = end;
} else {
offset = valueEnd = Node.endOfLine(src, end);
}
}
if (this.chomping !== Chomp.KEEP) {
offset = src[valueEnd] ? valueEnd + 1 : valueEnd;
}
this.valueRange = new Range(start + 1, offset);
return offset;
}
/**
 * Parses a block value from the source
 *
 * Accepted forms are:
 * ```
 * BS
 * block
 * lines
 *
 * BS #comment
 * block
 * lines
 * ```
 * where the block style BS matches the regexp `[|>][-+1-9]*` and block lines
 * are empty or have an indent level greater than `indent`.
 *
 * @param {ParseContext} context
 * @param {number} start - Index of first character
 * @returns {number} - Index of the character after this block
 */
}, {
key: "parse",
value: function parse(context, start) {
this.context = context;
var src = context.src;
var offset = this.parseBlockHeader(start);
offset = Node.endOfWhiteSpace(src, offset);
offset = this.parseComment(offset);
offset = this.parseBlockValue(offset);
return offset;
}
}, {
key: "setOrigRanges",
value: function setOrigRanges(cr, offset) {
offset = _get(_getPrototypeOf(BlockValue.prototype), "setOrigRanges", this).call(this, cr, offset);
return this.header ? this.header.setOrigRange(cr, offset) : offset;
}
}]);
return BlockValue;
}(Node);
/**
 * CST node for a flow collection ({...} map or [...] seq). `items` is a mixed
 * array of child Nodes and plain {char, offset} records for the punctuation
 * characters (braces/brackets, commas, '?' and ':').
 */
var FlowCollection = /*#__PURE__*/function (_Node) {
_inherits(FlowCollection, _Node);
var _super = _createSuper(FlowCollection);
function FlowCollection(type, props) {
var _this;
_classCallCheck(this, FlowCollection);
_this = _super.call(this, type, props);
_this.items = null;
return _this;
}
_createClass(FlowCollection, [{
key: "prevNodeIsJsonLike",
value: function prevNodeIsJsonLike() {
// True if the most recent non-comment item is "JSON-like" (quoted scalar
// or flow collection), which permits an unspaced ':' after it
var idx = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this.items.length;
var node = this.items[idx - 1];
return !!node && (node.jsonLike || node.type === Type.COMMENT && this.prevNodeIsJsonLike(idx - 1));
}
/**
 * @param {ParseContext} context
 * @param {number} start - Index of first character
 * @returns {number} - Index of the character after this
 */
}, {
key: "parse",
value: function parse(context, start) {
this.context = context;
var parseNode = context.parseNode,
src = context.src;
var indent = context.indent,
lineStart = context.lineStart;
var char = src[start]; // { or [
this.items = [{
char: char,
offset: start
}];
var offset = Node.endOfWhiteSpace(src, start + 1);
char = src[offset];
// Loop until the matching closer (or end of source)
while (char && char !== ']' && char !== '}') {
switch (char) {
case '\n':
{
lineStart = offset + 1;
var wsEnd = Node.endOfWhiteSpace(src, lineStart);
if (src[wsEnd] === '\n') {
var blankLine = new BlankLine();
lineStart = blankLine.parse({
src: src
}, lineStart);
this.items.push(blankLine);
}
offset = Node.endOfIndent(src, lineStart);
// Continuation lines must be indented past the collection's indent
if (offset <= lineStart + indent) {
char = src[offset];
if (offset < lineStart + indent || char !== ']' && char !== '}') {
var msg = 'Insufficient indentation in flow collection';
this.error = new YAMLSemanticError(this, msg);
}
}
}
break;
case ',':
{
this.items.push({
char: char,
offset: offset
});
offset += 1;
}
break;
case '#':
{
var comment = new Comment();
offset = comment.parse({
src: src
}, offset);
this.items.push(comment);
}
break;
case '?':
case ':':
{
var next = src[offset + 1];
if (next === '\n' || next === '\t' || next === ' ' || next === ',' || // in-flow : after JSON-like key does not need to be followed by whitespace
char === ':' && this.prevNodeIsJsonLike()) {
this.items.push({
char: char,
offset: offset
});
offset += 1;
break;
}
}
// fallthrough
default:
{
// Anything else starts a child node
var node = parseNode({
atLineStart: false,
inCollection: false,
inFlow: true,
indent: -1,
lineStart: lineStart,
parent: this
}, offset);
if (!node) {
// at next document start
this.valueRange = new Range(start, offset);
return offset;
}
this.items.push(node);
offset = Node.normalizeOffset(src, node.range.end);
}
}
offset = Node.endOfWhiteSpace(src, offset);
char = src[offset];
}
this.valueRange = new Range(start, offset + 1);
if (char) {
// Record the closing bracket/brace and any trailing comment
this.items.push({
char: char,
offset: offset
});
offset = Node.endOfWhiteSpace(src, offset + 1);
offset = this.parseComment(offset);
}
return offset;
}
}, {
key: "setOrigRanges",
value: function setOrigRanges(cr, offset) {
offset = _get(_getPrototypeOf(FlowCollection.prototype), "setOrigRanges", this).call(this, cr, offset);
this.items.forEach(function (node) {
if (node instanceof Node) {
offset = node.setOrigRanges(cr, offset);
} else if (cr.length === 0) {
node.origOffset = node.offset;
} else {
// Punctuation records: shift by the number of preceding \r removals
var i = offset;
while (i < cr.length) {
if (cr[i] > node.offset) break;else ++i;
}
node.origOffset = node.offset + i;
offset = i;
}
});
return offset;
}
}, {
key: "toString",
value: function toString() {
var src = this.context.src,
items = this.items,
range = this.range,
value = this.value;
if (value != null) return value;
// Re-emit child nodes with the original source text between them
var nodes = items.filter(function (item) {
return item instanceof Node;
});
var str = '';
var prevEnd = range.start;
nodes.forEach(function (node) {
var prefix = src.slice(prevEnd, node.range.start);
prevEnd = node.range.end;
str += prefix + String(node);
if (str[str.length - 1] === '\n' && src[prevEnd - 1] !== '\n' && src[prevEnd] === '\n') {
// Comment range does not include the terminal newline, but its
// stringified value does. Without this fix, newlines at comment ends
// get duplicated.
prevEnd += 1;
}
});
str += src.slice(prevEnd, range.end);
return Node.addStringTerminator(src, range.end, str);
}
}]);
return FlowCollection;
}(Node);
/**
 * CST node for a "double quoted" scalar, supporting the full set of YAML
 * escape sequences and multi-line folding.
 */
var QuoteDouble = /*#__PURE__*/function (_Node) {
_inherits(QuoteDouble, _Node);
var _super = _createSuper(QuoteDouble);
function QuoteDouble() {
_classCallCheck(this, QuoteDouble);
return _super.apply(this, arguments);
}
_createClass(QuoteDouble, [{
key: "strValue",
get:
/**
 * @returns {string | { str: string, errors: YAMLSyntaxError[] }}
 */
function get() {
if (!this.valueRange || !this.context) return null;
var errors = [];
var _this$valueRange = this.valueRange,
start = _this$valueRange.start,
end = _this$valueRange.end;
var _this$context = this.context,
indent = _this$context.indent,
src = _this$context.src;
if (src[end - 1] !== '"') errors.push(new YAMLSyntaxError(this, 'Missing closing "quote')); // Using String#replace is too painful with escaped newlines preceded by
// escaped backslashes; also, this should be faster.
var str = '';
// Iterate the characters between the quotes
for (var i = start + 1; i < end - 1; ++i) {
var ch = src[i];
if (ch === '\n') {
// Newlines fold per YAML rules; Node.foldNewline also reports
// insufficient-indent errors
if (Node.atDocumentBoundary(src, i + 1)) errors.push(new YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values'));
var _Node$foldNewline = Node.foldNewline(src, i, indent),
fold = _Node$foldNewline.fold,
offset = _Node$foldNewline.offset,
error = _Node$foldNewline.error;
str += fold;
i = offset;
if (error) errors.push(new YAMLSemanticError(this, 'Multi-line double-quoted string needs to be sufficiently indented'));
} else if (ch === '\\') {
// Decode an escape sequence
i += 1;
switch (src[i]) {
case '0':
str += '\0';
break;
// null character
case 'a':
str += '\x07';
break;
// bell character
case 'b':
str += '\b';
break;
// backspace
case 'e':
str += '\x1b';
break;
// escape character
case 'f':
str += '\f';
break;
// form feed
case 'n':
str += '\n';
break;
// line feed
case 'r':
str += '\r';
break;
// carriage return
case 't':
str += '\t';
break;
// horizontal tab
case 'v':
str += '\v';
break;
// vertical tab
case 'N':
str += "\x85";
break;
// Unicode next line
case '_':
str += "\xA0";
break;
// Unicode non-breaking space
case 'L':
str += "\u2028";
break;
// Unicode line separator
case 'P':
str += "\u2029";
break;
// Unicode paragraph separator
case ' ':
str += ' ';
break;
case '"':
str += '"';
break;
case '/':
str += '/';
break;
case '\\':
str += '\\';
break;
case '\t':
str += '\t';
break;
case 'x':
str += this.parseCharCode(i + 1, 2, errors);
i += 2;
break;
case 'u':
str += this.parseCharCode(i + 1, 4, errors);
i += 4;
break;
case 'U':
str += this.parseCharCode(i + 1, 8, errors);
i += 8;
break;
case '\n':
// skip escaped newlines, but still trim the following line
while (src[i + 1] === ' ' || src[i + 1] === '\t') {
i += 1;
}
break;
default:
errors.push(new YAMLSyntaxError(this, "Invalid escape sequence ".concat(src.substr(i - 1, 2))));
str += '\\' + src[i];
}
} else if (ch === ' ' || ch === '\t') {
// trim trailing whitespace
var wsStart = i;
var next = src[i + 1];
while (next === ' ' || next === '\t') {
i += 1;
next = src[i + 1];
}
if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch;
} else {
str += ch;
}
}
// Return plain string on success, or { errors, str } when problems found
return errors.length > 0 ? {
errors: errors,
str: str
} : str;
}
}, {
key: "parseCharCode",
value: function parseCharCode(offset, length, errors) {
// Decodes a \x/\u/\U hex escape of `length` digits starting at `offset`;
// on failure, records an error and returns the raw escape text
var src = this.context.src;
var cc = src.substr(offset, length);
var ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);
var code = ok ? parseInt(cc, 16) : NaN;
if (isNaN(code)) {
errors.push(new YAMLSyntaxError(this, "Invalid escape sequence ".concat(src.substr(offset - 2, length + 2))));
return src.substr(offset - 2, length + 2);
}
return String.fromCodePoint(code);
}
/**
 * Parses a "double quoted" value from the source
 *
 * @param {ParseContext} context
 * @param {number} start - Index of first character
 * @returns {number} - Index of the character after this scalar
 */
}, {
key: "parse",
value: function parse(context, start) {
this.context = context;
var src = context.src;
var offset = QuoteDouble.endOfQuote(src, start + 1);
this.valueRange = new Range(start, offset);
offset = Node.endOfWhiteSpace(src, offset);
offset = this.parseComment(offset);
return offset;
}
}], [{
key: "endOfQuote",
value: function endOfQuote(src, offset) {
// Scan to the closing '"', stepping over backslash-escaped characters
var ch = src[offset];
while (ch && ch !== '"') {
offset += ch === '\\' ? 2 : 1;
ch = src[offset];
}
return offset + 1;
}
}]);
return QuoteDouble;
}(Node);
/**
 * CST node for a 'single quoted' scalar; the only escape is '' for a literal
 * single quote, and newlines fold like in double-quoted scalars.
 */
var QuoteSingle = /*#__PURE__*/function (_Node) {
_inherits(QuoteSingle, _Node);
var _super = _createSuper(QuoteSingle);
function QuoteSingle() {
_classCallCheck(this, QuoteSingle);
return _super.apply(this, arguments);
}
_createClass(QuoteSingle, [{
key: "strValue",
get:
/**
 * @returns {string | { str: string, errors: YAMLSyntaxError[] }}
 */
function get() {
if (!this.valueRange || !this.context) return null;
var errors = [];
var _this$valueRange = this.valueRange,
start = _this$valueRange.start,
end = _this$valueRange.end;
var _this$context = this.context,
indent = _this$context.indent,
src = _this$context.src;
if (src[end - 1] !== "'") errors.push(new YAMLSyntaxError(this, "Missing closing 'quote"));
var str = '';
// Iterate the characters between the quotes
for (var i = start + 1; i < end - 1; ++i) {
var ch = src[i];
if (ch === '\n') {
// Newlines fold per YAML rules; Node.foldNewline also reports
// insufficient-indent errors
if (Node.atDocumentBoundary(src, i + 1)) errors.push(new YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values'));
var _Node$foldNewline = Node.foldNewline(src, i, indent),
fold = _Node$foldNewline.fold,
offset = _Node$foldNewline.offset,
error = _Node$foldNewline.error;
str += fold;
i = offset;
if (error) errors.push(new YAMLSemanticError(this, 'Multi-line single-quoted string needs to be sufficiently indented'));
} else if (ch === "'") {
// '' escapes to a single quote; a lone ' should have ended the scalar
str += ch;
i += 1;
if (src[i] !== "'") errors.push(new YAMLSyntaxError(this, 'Unescaped single quote? This should not happen.'));
} else if (ch === ' ' || ch === '\t') {
// trim trailing whitespace
var wsStart = i;
var next = src[i + 1];
while (next === ' ' || next === '\t') {
i += 1;
next = src[i + 1];
}
if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch;
} else {
str += ch;
}
}
// Return plain string on success, or { errors, str } when problems found
return errors.length > 0 ? {
errors: errors,
str: str
} : str;
}
/**
 * Parses a 'single quoted' value from the source
 *
 * @param {ParseContext} context
 * @param {number} start - Index of first character
 * @returns {number} - Index of the character after this scalar
 */
}, {
key: "parse",
value: function parse(context, start) {
this.context = context;
var src = context.src;
var offset = QuoteSingle.endOfQuote(src, start + 1);
this.valueRange = new Range(start, offset);
offset = Node.endOfWhiteSpace(src, offset);
offset = this.parseComment(offset);
return offset;
}
}], [{
key: "endOfQuote",
value: function endOfQuote(src, offset) {
// Scan to the closing quote, treating '' as an escaped quote
var ch = src[offset];
while (ch) {
if (ch === "'") {
if (src[offset + 1] !== "'") break;
ch = src[offset += 2];
} else {
ch = src[offset += 1];
}
}
return offset + 1;
}
}]);
return QuoteSingle;
}(Node);
/**
 * Instantiates the CST node implementation class matching `type`.
 *
 * @param {string} type - One of the Type.* constants
 * @param {Range[]} props - Anchor/tag/comment property ranges for the node
 * @returns {Node|null} - A new node, or null for an unknown type
 */
function createNewNode(type, props) {
if (type === Type.ALIAS) return new Alias(type, props);
// Literal and folded block scalars share one implementation
if (type === Type.BLOCK_FOLDED || type === Type.BLOCK_LITERAL) return new BlockValue(type, props);
if (type === Type.FLOW_MAP || type === Type.FLOW_SEQ) return new FlowCollection(type, props);
if (type === Type.MAP_KEY || type === Type.MAP_VALUE || type === Type.SEQ_ITEM) return new CollectionItem(type, props);
if (type === Type.COMMENT || type === Type.PLAIN) return new PlainValue(type, props);
if (type === Type.QUOTE_DOUBLE) return new QuoteDouble(type, props);
if (type === Type.QUOTE_SINGLE) return new QuoteSingle(type, props);
/* istanbul ignore next */
return null; // should never happen
}
/**
* @param {boolean} atLineStart - Node starts at beginning of line
* @param {boolean} inFlow - true if currently in a flow context
* @param {boolean} inCollection - true if currently in a collection context
* @param {number} indent - Current level of indentation
* @param {number} lineStart - Start of the current line
* @param {Node} parent - The parent of the node
* @param {string} src - Source of the YAML document
*/
var ParseContext = /*#__PURE__*/function () {
function ParseContext() {
var _this = this;
var orig = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
atLineStart = _ref.atLineStart,
inCollection = _ref.inCollection,
inFlow = _ref.inFlow,
indent = _ref.indent,
lineStart = _ref.lineStart,
parent = _ref.parent;
_classCallCheck(this, ParseContext);
_defineProperty(this, "parseNode", function (overlay, start) {
if (Node.atDocumentBoundary(_this.src, start)) return null;
var context = new ParseContext(_this, overlay);
var _context$parseProps = context.parseProps(start),
props = _context$parseProps.props,
type = _context$parseProps.type,
valueStart = _context$parseProps.valueStart;
var node = createNewNode(type, props);
var offset = node.parse(context, valueStart);
node.range = new Range(start, offset);
/* istanbul ignore if */
if (offset <= start) {
// This should never happen, but if it does, let's make sure to at least
// step one character forward to avoid a busy loop.
node.error = new Error("Node#parse consumed no characters");
node.error.parseEnd = offset;
node.error.source = node;
node.range.end = start + 1;
}
if (context.nodeStartsCollection(node)) {
if (!node.error && !context.atLineStart && context.parent.type === Type.DOCUMENT) {
node.error = new YAMLSyntaxError(node, 'Block collection must not have preceding content here (e.g. directives-end indicator)');
}
var collection = new Collection(node);
offset = collection.parse(new ParseContext(context), offset);
collection.range = new Range(start, offset);
return collection;
}
return node;
});
this.atLineStart = atLineStart != null ? atLineStart : orig.atLineStart || false;
this.inCollection = inCollection != null ? inCollection : orig.inCollection || false;
this.inFlow = inFlow != null ? inFlow : orig.inFlow || false;
this.indent = indent != null ? indent : orig.indent;
this.lineStart = lineStart != null ? lineStart : orig.lineStart;
this.parent = parent != null ? parent : orig.parent || {};
this.root = orig.root;
this.src = orig.src;
}
// Instance methods of ParseContext (first array) and static helpers
// (second array), wired up via Babel's _createClass helper.
_createClass(ParseContext, [{
key: "nodeStartsCollection",
// Returns true when `node` must be wrapped in a block Collection: either it
// is already a CollectionItem, or it is followed on the same line by ':'
// making it an implicit map key. Never true inside a collection or flow
// context.
value: function nodeStartsCollection(node) {
var inCollection = this.inCollection,
inFlow = this.inFlow,
src = this.src;
if (inCollection || inFlow) return false;
if (node instanceof CollectionItem) return true; // check for implicit key
var offset = node.range.end;
// A line break immediately before or after the node rules out an implicit key.
if (src[offset] === '\n' || src[offset - 1] === '\n') return false;
offset = Node.endOfWhiteSpace(src, offset);
return src[offset] === ':';
} // Anchor and tag are before type, which determines the node implementation
// class; hence this intermediate step.
}, {
key: "parseProps",
// Collects node properties (anchors, tags, comments) starting at `offset`,
// stepping over blank/indented continuation lines, then determines the
// node's Type from the first value character.
// Returns { props: Range[], type, valueStart: number }.
value: function parseProps(offset) {
var inFlow = this.inFlow,
parent = this.parent,
src = this.src;
var props = [];
var lineHasProps = false;
offset = this.atLineStart ? Node.endOfIndent(src, offset) : Node.endOfWhiteSpace(src, offset);
var ch = src[offset];
while (ch === Char.ANCHOR || ch === Char.COMMENT || ch === Char.TAG || ch === '\n') {
if (ch === '\n') {
// Skip runs of newline + indent to the first line with content.
var inEnd = offset;
var lineStart = void 0;
do {
lineStart = inEnd + 1;
inEnd = Node.endOfIndent(src, lineStart);
} while (src[inEnd] === '\n');
var indentDiff = inEnd - (lineStart + this.indent);
// For a SEQ_ITEM at line start, the '-' indicator itself must not be
// counted as indentation.
var noIndicatorAsIndent = parent.type === Type.SEQ_ITEM && parent.context.atLineStart;
if (src[inEnd] !== '#' && !Node.nextNodeIsIndented(src[inEnd], indentDiff, !noIndicatorAsIndent)) break;
this.atLineStart = true;
this.lineStart = lineStart;
lineHasProps = false;
offset = inEnd;
} else if (ch === Char.COMMENT) {
var end = Node.endOfLine(src, offset + 1);
props.push(new Range(offset, end));
offset = end;
} else {
// Anchor or tag property.
var _end = Node.endOfIdentifier(src, offset + 1);
if (ch === Char.TAG && src[_end] === ',' && /^[a-zA-Z0-9-]+\.[a-zA-Z0-9-]+,\d\d\d\d(-\d\d){0,2}\/\S/.test(src.slice(offset + 1, _end + 13))) {
// Let's presume we're dealing with a YAML 1.0 domain tag here, rather
// than an empty but 'foo.bar' private-tagged node in a flow collection
// followed without whitespace by a plain string starting with a year
// or date divided by something.
_end = Node.endOfIdentifier(src, _end + 5);
}
props.push(new Range(offset, _end));
lineHasProps = true;
offset = Node.endOfWhiteSpace(src, _end);
}
ch = src[offset];
} // '- &a : b' has an anchor on an empty node
if (lineHasProps && ch === ':' && Node.atBlank(src, offset + 1, true)) offset -= 1;
var type = ParseContext.parseType(src, offset, inFlow);
return {
props: props,
type: type,
valueStart: offset
};
}
/**
* Parses a node from the source
* @param {ParseContext} overlay
* @param {number} start - Index of first non-whitespace character for the node
* @returns {?Node} - null if at a document boundary
*/
}], [{
key: "parseType",
// Static: maps the character at `offset` to a node Type. The indicator
// characters '?', ':' and '-' only start a non-plain node outside flow
// context and when followed by a blank; otherwise the node is PLAIN.
value: function parseType(src, offset, inFlow) {
switch (src[offset]) {
case '*':
return Type.ALIAS;
case '>':
return Type.BLOCK_FOLDED;
case '|':
return Type.BLOCK_LITERAL;
case '{':
return Type.FLOW_MAP;
case '[':
return Type.FLOW_SEQ;
case '?':
return !inFlow && Node.atBlank(src, offset + 1, true) ? Type.MAP_KEY : Type.PLAIN;
case ':':
return !inFlow && Node.atBlank(src, offset + 1, true) ? Type.MAP_VALUE : Type.PLAIN;
case '-':
return !inFlow && Node.atBlank(src, offset + 1, true) ? Type.SEQ_ITEM : Type.PLAIN;
case '"':
return Type.QUOTE_DOUBLE;
case "'":
return Type.QUOTE_SINGLE;
default:
return Type.PLAIN;
}
}
}]);
return ParseContext;
}();
// Published as 'yaml/parse-cst'
function parse(src) {
  // Normalise CRLF / lone CR line endings to LF, remembering the offset of
  // each CRLF so that original source ranges can be recovered afterwards via
  // documents.setOrigRanges().
  var crOffsets = [];
  if (src.indexOf('\r') !== -1) {
    src = src.replace(/\r\n?/g, function (match, offset) {
      if (match.length > 1) crOffsets.push(offset);
      return '\n';
    });
  }
  // Parse one document at a time until the whole source is consumed; an empty
  // source still yields a single (empty) document.
  var documents = [];
  var pos = 0;
  while (true) {
    var doc = new Document();
    var context = new ParseContext({
      src: src
    });
    pos = doc.parse(context, pos);
    documents.push(doc);
    if (pos >= src.length) break;
  }
  documents.setOrigRanges = function () {
    if (crOffsets.length === 0) return false;
    // Each removed '\r' shifted later offsets left by one; compensate, then
    // let every document translate its ranges. Runs only once: the offsets
    // are cleared at the end.
    var i;
    for (i = 1; i < crOffsets.length; ++i) crOffsets[i] -= i;
    var crOffset = 0;
    for (i = 0; i < documents.length; ++i) {
      crOffset = documents[i].setOrigRanges(crOffsets, crOffset);
    }
    crOffsets.splice(0, crOffsets.length);
    return true;
  };
  documents.toString = function () {
    return documents.join('...\n');
  };
  return documents;
}
export { parse };
| GoogleCloudPlatform/prometheus-engine | third_party/prometheus_ui/base/web/ui/react-app/node_modules/yaml/browser/dist/parse-cst.js | JavaScript | apache-2.0 | 54,633 |
//-----------------------------------------------------------------------
// <copyright file="AkkaSpecExtensions.cs" company="Akka.NET Project">
// Copyright (C) 2009-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Akka.Util.Internal;
using Xunit;
// ReSharper disable once CheckNamespace
namespace Akka.TestKit
{
public static class AkkaSpecExtensions
{
    /// <summary>
    /// Asserts that <paramref name="self"/> satisfies the <paramref name="isValid"/> predicate.
    /// </summary>
    /// <param name="self">The value under test.</param>
    /// <param name="isValid">Predicate the value must satisfy.</param>
    /// <param name="message">Failure message; a default including the value is used when null.</param>
    public static void Should<T>(this T self, Func<T, bool> isValid, string message)
    {
        Assert.True(isValid(self), message ?? "Value did not meet criteria. Value: " + self);
    }

    /// <summary>Asserts that the collection holds exactly <paramref name="expectedCount"/> items.</summary>
    public static void ShouldHaveCount<T>(this IReadOnlyCollection<T> self, int expectedCount)
    {
        Assert.Equal(expectedCount, self.Count);
    }

    /// <summary>Asserts that both sequences contain equal elements in the same order.</summary>
    public static void ShouldBe<T>(this IEnumerable<T> self, IEnumerable<T> other)
    {
        // Materialize both sequences once so the comparison and the failure
        // message are guaranteed to see the same elements.
        var actual = self.ToList();
        var expected = other.ToList();
        Assert.True(actual.SequenceEqual(expected),
            "Expected " + string.Join(",", expected.Select(i => string.Format("'{0}'", i))) +
            " got " + string.Join(",", actual.Select(i => string.Format("'{0}'", i))));
    }

    /// <summary>Asserts equality via xUnit's default comparer.</summary>
    /// <remarks>
    /// xUnit's Assert.Equal has no message overload, so <paramref name="message"/>
    /// is unused; it is kept for source compatibility with existing call sites.
    /// </remarks>
    public static void ShouldBe<T>(this T self, T expected, string message = null)
    {
        Assert.Equal(expected, self);
    }

    /// <summary>Asserts inequality via xUnit's default comparer.</summary>
    public static void ShouldNotBe<T>(this T self, T expected, string message = null)
    {
        Assert.NotEqual(expected, self);
    }

    /// <summary>Asserts reference equality.</summary>
    public static void ShouldBeSame<T>(this T self, T expected, string message = null)
    {
        Assert.Same(expected, self);
    }

    /// <summary>Asserts reference inequality.</summary>
    public static void ShouldNotBeSame<T>(this T self, T expected, string message = null)
    {
        Assert.NotSame(expected, self);
    }

    /// <summary>Asserts that the value is true.</summary>
    public static void ShouldBeTrue(this bool b, string message = null)
    {
        // Fix: forward the caller-supplied message (it was silently ignored).
        Assert.True(b, message);
    }

    /// <summary>Asserts that the value is false.</summary>
    public static void ShouldBeFalse(this bool b, string message = null)
    {
        // Fix: forward the caller-supplied message (it was silently ignored).
        Assert.False(b, message);
    }

    /// <summary>Asserts that <paramref name="actual"/> is less than <paramref name="value"/>.</summary>
    public static void ShouldBeLessThan<T>(this T actual, T value, string message = null) where T : IComparable<T>
    {
        Assert.True(actual.CompareTo(value) < 0,
            message ?? "Expected Actual: " + actual + " to be less than " + value);
    }

    /// <summary>Asserts that <paramref name="actual"/> is less than or equal to <paramref name="value"/>.</summary>
    public static void ShouldBeLessOrEqualTo<T>(this T actual, T value, string message = null) where T : IComparable<T>
    {
        // Fix: copy-pasted failure message said "to be less than".
        Assert.True(actual.CompareTo(value) <= 0,
            message ?? "Expected Actual: " + actual + " to be less than or equal to " + value);
    }

    /// <summary>Asserts that <paramref name="actual"/> is greater than <paramref name="value"/>.</summary>
    public static void ShouldBeGreaterThan<T>(this T actual, T value, string message = null) where T : IComparable<T>
    {
        // Fix: copy-pasted failure message said "to be less than".
        Assert.True(actual.CompareTo(value) > 0,
            message ?? "Expected Actual: " + actual + " to be greater than " + value);
    }

    /// <summary>Asserts that <paramref name="actual"/> is greater than or equal to <paramref name="value"/>.</summary>
    public static void ShouldBeGreaterOrEqual<T>(this T actual, T value, string message = null) where T : IComparable<T>
    {
        // Fix: copy-pasted failure message said "to be less than".
        Assert.True(actual.CompareTo(value) >= 0,
            message ?? "Expected Actual: " + actual + " to be greater than or equal to " + value);
    }

    /// <summary>Asserts that <paramref name="s"/> starts with <paramref name="start"/>.</summary>
    public static void ShouldStartWith(this string s, string start, string message = null)
    {
        // Fix: expected value goes first in Assert.Equal so failure output
        // labels the operands correctly (arguments were reversed).
        Assert.Equal(start, s.Substring(0, Math.Min(s.Length, start.Length)));
    }

    /// <summary>Asserts that the sequence contains exactly <paramref name="expected"/>, in order.</summary>
    public static void ShouldOnlyContainInOrder<T>(this IEnumerable<T> actual, params T[] expected)
    {
        ShouldBe(actual, expected);
    }

    /// <summary>
    /// Asserts that awaiting <paramref name="func"/> throws exactly
    /// <typeparamref name="TException"/>; derived exception types do not satisfy
    /// the assertion, and throwing nothing fails it.
    /// </summary>
    public static async Task ThrowsAsync<TException>(Func<Task> func)
    {
        var expected = typeof(TException);
        Type actual = null;
        try
        {
            await func();
        }
        catch (Exception e)
        {
            actual = e.GetType();
        }
        // actual remains null when no exception was thrown, failing the Equal check.
        Assert.Equal(expected, actual);
    }
}
}
| nanderto/akka.net | src/core/Akka.Tests.Shared.Internals/AkkaSpecExtensions.cs | C# | apache-2.0 | 4,207 |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.ApiManagement;
using Microsoft.Azure.Management.ApiManagement.Models;
namespace Microsoft.Azure.Management.ApiManagement
{
/// <summary>
/// .Net client wrapper for the REST API for Azure ApiManagement Service
/// </summary>
public static partial class ResourceProviderOperationsExtensions
{
/// <summary>
/// Backs up an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Backup operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static ApiServiceLongRunningOperationResponse Backup(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
{
// Synchronous wrapper: runs BackupAsync on the default (thread-pool) task
// scheduler, unwraps the inner task, and blocks for its result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).BackupAsync(resourceGroupName, name, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Backs up an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Backup operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static Task<ApiServiceLongRunningOperationResponse> BackupAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.BackupAsync(resourceGroupName, name, parameters, CancellationToken.None);
}
/// <summary>
/// Begin backup operation of an Api Management service.To determine
/// whether the operation has finished processing the request, call
/// GetLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the BeginBackup operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static ApiServiceLongRunningOperationResponse BeginBackup(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
{
// Synchronous wrapper: runs BeginBackupAsync on the default (thread-pool)
// task scheduler, unwraps the inner task, and blocks for its result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).BeginBackupAsync(resourceGroupName, name, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Begin backup operation of an Api Management service.To determine
/// whether the operation has finished processing the request, call
/// GetLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the BeginBackup operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static Task<ApiServiceLongRunningOperationResponse> BeginBackupAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.BeginBackupAsync(resourceGroupName, name, parameters, CancellationToken.None);
}
/// <summary>
/// Begins creating new or updating existing Api Management service.To
/// determine whether the operation has finished processing the
/// request, call GetApiServiceLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the CreateOrUpdate Api Management
/// service operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static ApiServiceLongRunningOperationResponse BeginCreatingOrUpdating(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCreateOrUpdateParameters parameters)
{
// Synchronous wrapper: runs BeginCreatingOrUpdatingAsync on the default
// (thread-pool) task scheduler, unwraps the inner task, and blocks for the result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).BeginCreatingOrUpdatingAsync(resourceGroupName, name, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Begins creating new or updating existing Api Management service.To
/// determine whether the operation has finished processing the
/// request, call GetApiServiceLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the CreateOrUpdate Api Management
/// service operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static Task<ApiServiceLongRunningOperationResponse> BeginCreatingOrUpdatingAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCreateOrUpdateParameters parameters)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.BeginCreatingOrUpdatingAsync(resourceGroupName, name, parameters, CancellationToken.None);
}
/// <summary>
/// Begin to manage (CUD) deployments of an Api Management service.To
/// determine whether the operation has finished processing the
/// request, call GetLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the ManageDeployments operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static ApiServiceLongRunningOperationResponse BeginManagingDeployments(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceManageDeploymentsParameters parameters)
{
// Synchronous wrapper: runs BeginManagingDeploymentsAsync on the default
// (thread-pool) task scheduler, unwraps the inner task, and blocks for the result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).BeginManagingDeploymentsAsync(resourceGroupName, name, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Begin to manage (CUD) deployments of an Api Management service.To
/// determine whether the operation has finished processing the
/// request, call GetLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the ManageDeployments operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static Task<ApiServiceLongRunningOperationResponse> BeginManagingDeploymentsAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceManageDeploymentsParameters parameters)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.BeginManagingDeploymentsAsync(resourceGroupName, name, parameters, CancellationToken.None);
}
/// <summary>
/// Begin restore from backup operation of an Api Management service.To
/// determine whether the operation has finished processing the
/// request, call GetLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Restore Api Management service
/// from backup operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static ApiServiceLongRunningOperationResponse BeginRestoring(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
{
// Synchronous wrapper: runs BeginRestoringAsync on the default (thread-pool)
// task scheduler, unwraps the inner task, and blocks for its result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).BeginRestoringAsync(resourceGroupName, name, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Begin restore from backup operation of an Api Management service.To
/// determine whether the operation has finished processing the
/// request, call GetLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Restore Api Management service
/// from backup operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static Task<ApiServiceLongRunningOperationResponse> BeginRestoringAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.BeginRestoringAsync(resourceGroupName, name, parameters, CancellationToken.None);
}
/// <summary>
/// Begin updating hostname of an Api Management service.To determine
/// whether the operation has finished processing the request, call
/// GetLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the UpdateHostname operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static ApiServiceLongRunningOperationResponse BeginUpdatingHostname(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUpdateHostnameParameters parameters)
{
// Synchronous wrapper: runs BeginUpdatingHostnameAsync on the default
// (thread-pool) task scheduler, unwraps the inner task, and blocks for the result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).BeginUpdatingHostnameAsync(resourceGroupName, name, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Begin updating hostname of an Api Management service.To determine
/// whether the operation has finished processing the request, call
/// GetLongRunningOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the UpdateHostname operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static Task<ApiServiceLongRunningOperationResponse> BeginUpdatingHostnameAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUpdateHostnameParameters parameters)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.BeginUpdatingHostnameAsync(resourceGroupName, name, parameters, CancellationToken.None);
}
/// <summary>
/// Checks whether the custom host name maps to an Api Management
/// service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the CheckCustomHostname operation.
/// </param>
/// <returns>
/// The response of the CheckCustomHostname operation.
/// </returns>
public static ApiServiceCheckCustomHostnameResponse CheckCustomHostname(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCheckCustomHostnameParameters parameters)
{
// Synchronous wrapper: runs CheckCustomHostnameAsync on the default
// (thread-pool) task scheduler, unwraps the inner task, and blocks for the result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).CheckCustomHostnameAsync(resourceGroupName, name, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Checks whether the custom host name maps to an Api Management
/// service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the CheckCustomHostname operation.
/// </param>
/// <returns>
/// The response of the CheckCustomHostname operation.
/// </returns>
public static Task<ApiServiceCheckCustomHostnameResponse> CheckCustomHostnameAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCheckCustomHostnameParameters parameters)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.CheckCustomHostnameAsync(resourceGroupName, name, parameters, CancellationToken.None);
}
/// <summary>
/// Checks availability and correctness of a name for an Api Management
/// service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the CheckNameAvailability
/// operation.
/// </param>
/// <returns>
/// Response of the CheckNameAvailability operation.
/// </returns>
public static ApiServiceNameAvailabilityResponse CheckNameAvailability(this IResourceProviderOperations operations, ApiServiceCheckNameAvailabilityParameters parameters)
{
// Synchronous wrapper: runs CheckNameAvailabilityAsync on the default
// (thread-pool) task scheduler, unwraps the inner task, and blocks for the result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).CheckNameAvailabilityAsync(parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Checks availability and correctness of a name for an Api Management
/// service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the CheckNameAvailability
/// operation.
/// </param>
/// <returns>
/// Response of the CheckNameAvailability operation.
/// </returns>
public static Task<ApiServiceNameAvailabilityResponse> CheckNameAvailabilityAsync(this IResourceProviderOperations operations, ApiServiceCheckNameAvailabilityParameters parameters)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.CheckNameAvailabilityAsync(parameters, CancellationToken.None);
}
/// <summary>
/// Creates new or updates existing Api Management service
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the CreateOrUpdate Api Management
/// service operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static ApiServiceLongRunningOperationResponse CreateOrUpdate(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCreateOrUpdateParameters parameters)
{
// Synchronous wrapper: runs CreateOrUpdateAsync on the default (thread-pool)
// task scheduler, unwraps the inner task, and blocks for its result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).CreateOrUpdateAsync(resourceGroupName, name, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates new or updates existing Api Management service
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the CreateOrUpdate Api Management
/// service operation.
/// </param>
/// <returns>
/// The response of the CreateOrUpdate Api Management service long
/// running operation.
/// </returns>
public static Task<ApiServiceLongRunningOperationResponse> CreateOrUpdateAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceCreateOrUpdateParameters parameters)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.CreateOrUpdateAsync(resourceGroupName, name, parameters, CancellationToken.None);
}
/// <summary>
/// Deletes existing Api Management service
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Delete(this IResourceProviderOperations operations, string resourceGroupName, string name)
{
// Synchronous wrapper: runs DeleteAsync on the default (thread-pool) task
// scheduler, unwraps the inner task, and blocks for its result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).DeleteAsync(resourceGroupName, name);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes existing Api Management service
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> DeleteAsync(this IResourceProviderOperations operations, string resourceGroupName, string name)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.DeleteAsync(resourceGroupName, name, CancellationToken.None);
}
/// <summary>
/// Get an Api Management service resource description.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <returns>
/// The response of the Get Api Management service operation.
/// </returns>
public static ApiServiceGetResponse Get(this IResourceProviderOperations operations, string resourceGroupName, string name)
{
// Synchronous wrapper: runs GetAsync on the default (thread-pool) task
// scheduler, unwraps the inner task, and blocks for its result.
// NOTE: tool-generated code - manual logic edits are lost on regeneration.
return Task.Factory.StartNew((object s) =>
{
return ((IResourceProviderOperations)s).GetAsync(resourceGroupName, name);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get an Api Management service resource description.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='name'>
/// Required. The name of the Api Management service.
/// </param>
/// <returns>
/// The response of the Get Api Management service operation.
/// </returns>
public static Task<ApiServiceGetResponse> GetAsync(this IResourceProviderOperations operations, string resourceGroupName, string name)
{
// Convenience overload: forwards with CancellationToken.None.
return operations.GetAsync(resourceGroupName, name, CancellationToken.None);
}
/// <summary>
/// Returns the status of the create or update operation. Unlike
/// GetLongRunningOperationStatus, the response also carries the Api
/// Management service resource description.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='operationStatusLink'>Required. Location value returned by the Begin operation.</param>
/// <returns>The response of the CreateOrUpdate Api Management service long running operation.</returns>
public static ApiServiceLongRunningOperationResponse GetApiServiceLongRunningOperationStatus(this IResourceProviderOperations operations, string operationStatusLink)
{
    // Synchronous facade: queue the async call on the default scheduler,
    // unwrap the nested task and block for its outcome.
    Task<Task<ApiServiceLongRunningOperationResponse>> started = Task.Factory.StartNew(
        (object state) => ((IResourceProviderOperations)state).GetApiServiceLongRunningOperationStatusAsync(operationStatusLink),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return started.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns the status of the create or update operation. Unlike
/// GetLongRunningOperationStatus, the response also carries the Api
/// Management service resource description.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='operationStatusLink'>Required. Location value returned by the Begin operation.</param>
/// <returns>The response of the CreateOrUpdate Api Management service long running operation.</returns>
public static Task<ApiServiceLongRunningOperationResponse> GetApiServiceLongRunningOperationStatusAsync(this IResourceProviderOperations operations, string operationStatusLink)
{
    // Forward to the cancellable overload; default(CancellationToken) is CancellationToken.None.
    return operations.GetApiServiceLongRunningOperationStatusAsync(operationStatusLink, default(CancellationToken));
}
/// <summary>
/// Returns the status of the specified asynchronous operation: succeeded,
/// failed, or still in progress.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='operationStatusLink'>Required. Location value returned by the Begin operation.</param>
/// <returns>A standard service response for long running operations.</returns>
public static LongRunningOperationResponse GetLongRunningOperationStatus(this IResourceProviderOperations operations, string operationStatusLink)
{
    // Synchronous facade: queue the async call on the default scheduler,
    // unwrap the nested task and block for its outcome.
    Task<Task<LongRunningOperationResponse>> started = Task.Factory.StartNew(
        (object state) => ((IResourceProviderOperations)state).GetLongRunningOperationStatusAsync(operationStatusLink),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return started.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns the status of the specified asynchronous operation: succeeded,
/// failed, or still in progress.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='operationStatusLink'>Required. Location value returned by the Begin operation.</param>
/// <returns>A standard service response for long running operations.</returns>
public static Task<LongRunningOperationResponse> GetLongRunningOperationStatusAsync(this IResourceProviderOperations operations, string operationStatusLink)
{
    // Forward to the cancellable overload; default(CancellationToken) is CancellationToken.None.
    return operations.GetLongRunningOperationStatusAsync(operationStatusLink, default(CancellationToken));
}
/// <summary>
/// Gets SsoToken for an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <returns>The response of the GetSsoToken operation.</returns>
public static ApiServiceGetSsoTokenResponse GetSsoToken(this IResourceProviderOperations operations, string resourceGroupName, string name)
{
    // Synchronous facade: queue the async call on the default scheduler,
    // unwrap the nested task and block for its outcome.
    Task<Task<ApiServiceGetSsoTokenResponse>> started = Task.Factory.StartNew(
        (object state) => ((IResourceProviderOperations)state).GetSsoTokenAsync(resourceGroupName, name),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return started.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets SsoToken for an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <returns>The response of the GetSsoToken operation.</returns>
public static Task<ApiServiceGetSsoTokenResponse> GetSsoTokenAsync(this IResourceProviderOperations operations, string resourceGroupName, string name)
{
    // Forward to the cancellable overload; default(CancellationToken) is CancellationToken.None.
    return operations.GetSsoTokenAsync(resourceGroupName, name, default(CancellationToken));
}
/// <summary>
/// List all Api Management services within a resource group.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Optional. The name of the resource group. When null, all Api Management
/// services in the subscription are listed.
/// </param>
/// <returns>The response of the List Api Management services operation.</returns>
public static ApiServiceListResponse List(this IResourceProviderOperations operations, string resourceGroupName)
{
    // Synchronous facade: queue the async call on the default scheduler,
    // unwrap the nested task and block for its outcome.
    Task<Task<ApiServiceListResponse>> started = Task.Factory.StartNew(
        (object state) => ((IResourceProviderOperations)state).ListAsync(resourceGroupName),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return started.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// List all Api Management services within a resource group.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Optional. The name of the resource group. When null, all Api Management
/// services in the subscription are listed.
/// </param>
/// <returns>The response of the List Api Management services operation.</returns>
public static Task<ApiServiceListResponse> ListAsync(this IResourceProviderOperations operations, string resourceGroupName)
{
    // Forward to the cancellable overload; default(CancellationToken) is CancellationToken.None.
    return operations.ListAsync(resourceGroupName, default(CancellationToken));
}
/// <summary>
/// Manage (CUD) deployments of an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <param name='parameters'>Required. Parameters supplied to the ManageDeployments operation.</param>
/// <returns>The response of the CreateOrUpdate Api Management service long running operation.</returns>
public static ApiServiceLongRunningOperationResponse ManageDeployments(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceManageDeploymentsParameters parameters)
{
    // Synchronous facade: queue the async call on the default scheduler,
    // unwrap the nested task and block for its outcome.
    Task<Task<ApiServiceLongRunningOperationResponse>> started = Task.Factory.StartNew(
        (object state) => ((IResourceProviderOperations)state).ManageDeploymentsAsync(resourceGroupName, name, parameters),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return started.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Manage (CUD) deployments of an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <param name='parameters'>Required. Parameters supplied to the ManageDeployments operation.</param>
/// <returns>The response of the CreateOrUpdate Api Management service long running operation.</returns>
public static Task<ApiServiceLongRunningOperationResponse> ManageDeploymentsAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceManageDeploymentsParameters parameters)
{
    // Forward to the cancellable overload; default(CancellationToken) is CancellationToken.None.
    return operations.ManageDeploymentsAsync(resourceGroupName, name, parameters, default(CancellationToken));
}
/// <summary>
/// Restore an Api Management service from backup.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Restore Api Management service from
/// backup operation.
/// </param>
/// <returns>The response of the CreateOrUpdate Api Management service long running operation.</returns>
public static ApiServiceLongRunningOperationResponse Restore(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
{
    // Synchronous facade: queue the async call on the default scheduler,
    // unwrap the nested task and block for its outcome.
    Task<Task<ApiServiceLongRunningOperationResponse>> started = Task.Factory.StartNew(
        (object state) => ((IResourceProviderOperations)state).RestoreAsync(resourceGroupName, name, parameters),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return started.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Restore an Api Management service from backup.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Restore Api Management service from
/// backup operation.
/// </param>
/// <returns>The response of the CreateOrUpdate Api Management service long running operation.</returns>
public static Task<ApiServiceLongRunningOperationResponse> RestoreAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceBackupRestoreParameters parameters)
{
    // Forward to the cancellable overload; default(CancellationToken) is CancellationToken.None.
    return operations.RestoreAsync(resourceGroupName, name, parameters, default(CancellationToken));
}
/// <summary>
/// Update hostname of an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <param name='parameters'>Required. Parameters supplied to the UpdateHostname operation.</param>
/// <returns>The response of the CreateOrUpdate Api Management service long running operation.</returns>
public static ApiServiceLongRunningOperationResponse UpdateHostname(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUpdateHostnameParameters parameters)
{
    // Synchronous facade: queue the async call on the default scheduler,
    // unwrap the nested task and block for its outcome.
    Task<Task<ApiServiceLongRunningOperationResponse>> started = Task.Factory.StartNew(
        (object state) => ((IResourceProviderOperations)state).UpdateHostnameAsync(resourceGroupName, name, parameters),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return started.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Update hostname of an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <param name='parameters'>Required. Parameters supplied to the UpdateHostname operation.</param>
/// <returns>The response of the CreateOrUpdate Api Management service long running operation.</returns>
public static Task<ApiServiceLongRunningOperationResponse> UpdateHostnameAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUpdateHostnameParameters parameters)
{
    // Forward to the cancellable overload; default(CancellationToken) is CancellationToken.None.
    return operations.UpdateHostnameAsync(resourceGroupName, name, parameters, default(CancellationToken));
}
/// <summary>
/// Upload SSL certificate for an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Upload SSL certificate for an Api
/// Management service operation.
/// </param>
/// <returns>The response of the Upload SSL certificate for an Api Management service operation.</returns>
public static ApiServiceUploadCertificateResponse UploadCertificate(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUploadCertificateParameters parameters)
{
    // Synchronous facade: queue the async call on the default scheduler,
    // unwrap the nested task and block for its outcome.
    Task<Task<ApiServiceUploadCertificateResponse>> started = Task.Factory.StartNew(
        (object state) => ((IResourceProviderOperations)state).UploadCertificateAsync(resourceGroupName, name, parameters),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return started.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Upload SSL certificate for an Api Management service.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IResourceProviderOperations.
/// </param>
/// <param name='resourceGroupName'>Required. The name of the resource group.</param>
/// <param name='name'>Required. The name of the Api Management service.</param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Upload SSL certificate for an Api
/// Management service operation.
/// </param>
/// <returns>The response of the Upload SSL certificate for an Api Management service operation.</returns>
public static Task<ApiServiceUploadCertificateResponse> UploadCertificateAsync(this IResourceProviderOperations operations, string resourceGroupName, string name, ApiServiceUploadCertificateParameters parameters)
{
    // Forward to the cancellable overload; default(CancellationToken) is CancellationToken.None.
    return operations.UploadCertificateAsync(resourceGroupName, name, parameters, default(CancellationToken));
}
}
}
| nemanja88/azure-sdk-for-net | src/ResourceManagement/ApiManagement/ApiManagementManagement/Generated/ResourceProviderOperationsExtensions.cs | C# | apache-2.0 | 48,468 |
# Puppet parameter for the wls_jms_security_policy type: the kind of JMS
# destination the policy applies to.
newparam(:destinationtype) do
  include EasyType
  desc 'The destination type of a jms resource (queue or topic)'
  # Only the two JMS destination kinds are accepted.
  newvalues(:queue, :topic)
  # Maps the raw WLST attribute hash onto this parameter's value.
  to_translate_to_resource do | raw_resource |
    raw_resource['destinationtype']
  end
end
| rcompos/biemond-orawls | orawls/lib/puppet/type/wls_jms_security_policy/destinationtype.rb | Ruby | apache-2.0 | 239 |
// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package otel // import "go.opentelemetry.io/otel"
import (
"go.opentelemetry.io/otel/internal/global"
"go.opentelemetry.io/otel/trace"
)
// Tracer returns a Tracer with the given name from the registered global
// TracerProvider. An empty name makes the provider use its default name.
//
// It is shorthand for GetTracerProvider().Tracer(name, opts...).
func Tracer(name string, opts ...trace.TracerOption) trace.Tracer {
	provider := GetTracerProvider()
	return provider.Tracer(name, opts...)
}
// GetTracerProvider returns the registered global trace provider. If none
// has been registered, an instance of NoopTracerProvider is returned.
//
// Use the provider to create a named tracer:
//	tracer := otel.GetTracerProvider().Tracer("example.com/foo")
// which is equivalent to:
//	tracer := otel.Tracer("example.com/foo")
func GetTracerProvider() trace.TracerProvider {
	provider := global.TracerProvider()
	return provider
}
// SetTracerProvider registers `tp` as the global trace provider.
// Subsequent calls to GetTracerProvider (and hence otel.Tracer) observe tp.
func SetTracerProvider(tp trace.TracerProvider) {
	global.SetTracerProvider(tp)
}
| containerd/containerd | vendor/go.opentelemetry.io/otel/trace.go | GO | apache-2.0 | 1,617 |
package org.apache.lucene.benchmark.byTask.tasks;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.HashMap;
import java.util.Map;
import org.apache.lucene.benchmark.byTask.PerfRunData;
import org.apache.lucene.index.IndexWriter;
/**
 * Benchmark task that commits the {@link IndexWriter}, optionally attaching
 * commit user data supplied through the task parameters.
 */
public class CommitIndexTask extends PerfTask {
  // Commit user data recorded under OpenReaderTask.USER_DATA; null until
  // setParams is called.
  Map<String,String> commitUserData;

  public CommitIndexTask(PerfRunData runData) {
    super(runData);
  }

  @Override
  public boolean supportsParams() {
    return true;
  }

  @Override
  public void setParams(String params) {
    super.setParams(params);
    commitUserData = new HashMap<>();
    commitUserData.put(OpenReaderTask.USER_DATA, params);
  }

  @Override
  public int doLogic() throws Exception {
    IndexWriter writer = getRunData().getIndexWriter();
    if (writer == null) {
      return 1; // nothing to commit
    }
    if (commitUserData != null) {
      writer.setCommitData(commitUserData);
    }
    writer.commit();
    return 1;
  }
}
| smartan/lucene | src/main/java/org/apache/lucene/benchmark/byTask/tasks/CommitIndexTask.java | Java | apache-2.0 | 1,724 |
// IDE-inspection test fixture (intentionally non-compiling): the <error>
// markup asserts that D is flagged for inheriting unrelated default
// implementations of foo() from both B and C. Do not "fix" this file.
interface B { default void foo() {} }
interface C { default void foo() {} }
class <error descr="D inherits unrelated defaults for foo() from types B and C">D</error> implements B, C {}
| android-ia/platform_tools_idea | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/lambda/interfaceMethods/InheritUnrelatedDefaults.java | Java | apache-2.0 | 185 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Pdf
* @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id$
*/
/** Zend_Pdf_Color */
require_once 'Zend/Pdf/Color.php';
/**
 * HTML color implementation
 *
 * Factory class which vends Zend_Pdf_Color objects from typical HTML
 * representations: hexadecimal "#rrggbb" strings or one of the 140
 * well-known CSS/HTML color names.
 *
 * @category   Zend
 * @package    Zend_Pdf
 * @copyright  Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class Zend_Pdf_Color_Html extends Zend_Pdf_Color
{
    /**
     * Wrapped color implementation (grayscale or RGB).
     *
     * @var Zend_Pdf_Color
     */
    private $_color;

    /**
     * RGB components (0..1, rounded to 3 decimals) for the 140 well-known
     * HTML/CSS color names, keyed by lower-case name.
     *
     * Fixes vs the previous switch-based table: 'blanchedalmond' (#FFEBCD),
     * 'ivory' (#FFFFF0) and 'peachpuff' (#FFDAB9) were wrong ('peachpuff'
     * duplicated 'papayawhip'); they now match the CSS3 color keywords.
     *
     * @var array
     */
    private static $_namedColors = array(
        // The 16 basic HTML colors.
        'aqua'    => array(0.0, 1.0, 1.0),
        'black'   => array(0.0, 0.0, 0.0),
        'blue'    => array(0.0, 0.0, 1.0),
        'fuchsia' => array(1.0, 0.0, 1.0),
        'gray'    => array(0.502, 0.502, 0.502),
        'green'   => array(0.0, 0.502, 0.0),
        'lime'    => array(0.0, 1.0, 0.0),
        'maroon'  => array(0.502, 0.0, 0.0),
        'navy'    => array(0.0, 0.0, 0.502),
        'olive'   => array(0.502, 0.502, 0.0),
        'purple'  => array(0.502, 0.0, 0.502),
        'red'     => array(1.0, 0.0, 0.0),
        'silver'  => array(0.753, 0.753, 0.753),
        'teal'    => array(0.0, 0.502, 0.502),
        'white'   => array(1.0, 1.0, 1.0),
        'yellow'  => array(1.0, 1.0, 0.0),
        // Extended (X11/CSS3) color keywords.
        'aliceblue'            => array(0.941, 0.973, 1.0),
        'antiquewhite'         => array(0.980, 0.922, 0.843),
        'aquamarine'           => array(0.498, 1.0, 0.831),
        'azure'                => array(0.941, 1.0, 1.0),
        'beige'                => array(0.961, 0.961, 0.863),
        'bisque'               => array(1.0, 0.894, 0.769),
        'blanchedalmond'       => array(1.0, 0.922, 0.804),
        'blueviolet'           => array(0.541, 0.169, 0.886),
        'brown'                => array(0.647, 0.165, 0.165),
        'burlywood'            => array(0.871, 0.722, 0.529),
        'cadetblue'            => array(0.373, 0.620, 0.627),
        'chartreuse'           => array(0.498, 1.0, 0.0),
        'chocolate'            => array(0.824, 0.412, 0.118),
        'coral'                => array(1.0, 0.498, 0.314),
        'cornflowerblue'       => array(0.392, 0.584, 0.929),
        'cornsilk'             => array(1.0, 0.973, 0.863),
        'crimson'              => array(0.863, 0.078, 0.235),
        'cyan'                 => array(0.0, 1.0, 1.0),
        'darkblue'             => array(0.0, 0.0, 0.545),
        'darkcyan'             => array(0.0, 0.545, 0.545),
        'darkgoldenrod'        => array(0.722, 0.525, 0.043),
        'darkgray'             => array(0.663, 0.663, 0.663),
        'darkgreen'            => array(0.0, 0.392, 0.0),
        'darkkhaki'            => array(0.741, 0.718, 0.420),
        'darkmagenta'          => array(0.545, 0.0, 0.545),
        'darkolivegreen'       => array(0.333, 0.420, 0.184),
        'darkorange'           => array(1.0, 0.549, 0.0),
        'darkorchid'           => array(0.6, 0.196, 0.8),
        'darkred'              => array(0.545, 0.0, 0.0),
        'darksalmon'           => array(0.914, 0.588, 0.478),
        'darkseagreen'         => array(0.561, 0.737, 0.561),
        'darkslateblue'        => array(0.282, 0.239, 0.545),
        'darkslategray'        => array(0.184, 0.310, 0.310),
        'darkturquoise'        => array(0.0, 0.808, 0.820),
        'darkviolet'           => array(0.580, 0.0, 0.827),
        'deeppink'             => array(1.0, 0.078, 0.576),
        'deepskyblue'          => array(0.0, 0.749, 1.0),
        'dimgray'              => array(0.412, 0.412, 0.412),
        'dodgerblue'           => array(0.118, 0.565, 1.0),
        'firebrick'            => array(0.698, 0.133, 0.133),
        'floralwhite'          => array(1.0, 0.980, 0.941),
        'forestgreen'          => array(0.133, 0.545, 0.133),
        'gainsboro'            => array(0.863, 0.863, 0.863),
        'ghostwhite'           => array(0.973, 0.973, 1.0),
        'gold'                 => array(1.0, 0.843, 0.0),
        'goldenrod'            => array(0.855, 0.647, 0.125),
        'greenyellow'          => array(0.678, 1.0, 0.184),
        'honeydew'             => array(0.941, 1.0, 0.941),
        'hotpink'              => array(1.0, 0.412, 0.706),
        'indianred'            => array(0.804, 0.361, 0.361),
        'indigo'               => array(0.294, 0.0, 0.510),
        'ivory'                => array(1.0, 1.0, 0.941),
        'khaki'                => array(0.941, 0.902, 0.549),
        'lavender'             => array(0.902, 0.902, 0.980),
        'lavenderblush'        => array(1.0, 0.941, 0.961),
        'lawngreen'            => array(0.486, 0.988, 0.0),
        'lemonchiffon'         => array(1.0, 0.980, 0.804),
        'lightblue'            => array(0.678, 0.847, 0.902),
        'lightcoral'           => array(0.941, 0.502, 0.502),
        'lightcyan'            => array(0.878, 1.0, 1.0),
        'lightgoldenrodyellow' => array(0.980, 0.980, 0.824),
        'lightgreen'           => array(0.565, 0.933, 0.565),
        'lightgrey'            => array(0.827, 0.827, 0.827),
        'lightpink'            => array(1.0, 0.714, 0.757),
        'lightsalmon'          => array(1.0, 0.627, 0.478),
        'lightseagreen'        => array(0.125, 0.698, 0.667),
        'lightskyblue'         => array(0.529, 0.808, 0.980),
        'lightslategray'       => array(0.467, 0.533, 0.6),
        'lightsteelblue'       => array(0.690, 0.769, 0.871),
        'lightyellow'          => array(1.0, 1.0, 0.878),
        'limegreen'            => array(0.196, 0.804, 0.196),
        'linen'                => array(0.980, 0.941, 0.902),
        'magenta'              => array(1.0, 0.0, 1.0),
        'mediumaquamarine'     => array(0.4, 0.804, 0.667),
        'mediumblue'           => array(0.0, 0.0, 0.804),
        'mediumorchid'         => array(0.729, 0.333, 0.827),
        'mediumpurple'         => array(0.576, 0.439, 0.859),
        'mediumseagreen'       => array(0.235, 0.702, 0.443),
        'mediumslateblue'      => array(0.482, 0.408, 0.933),
        'mediumspringgreen'    => array(0.0, 0.980, 0.604),
        'mediumturquoise'      => array(0.282, 0.820, 0.8),
        'mediumvioletred'      => array(0.780, 0.082, 0.522),
        'midnightblue'         => array(0.098, 0.098, 0.439),
        'mintcream'            => array(0.961, 1.0, 0.980),
        'mistyrose'            => array(1.0, 0.894, 0.882),
        'moccasin'             => array(1.0, 0.894, 0.710),
        'navajowhite'          => array(1.0, 0.871, 0.678),
        'oldlace'              => array(0.992, 0.961, 0.902),
        'olivedrab'            => array(0.420, 0.557, 0.137),
        'orange'               => array(1.0, 0.647, 0.0),
        'orangered'            => array(1.0, 0.271, 0.0),
        'orchid'               => array(0.855, 0.439, 0.839),
        'palegoldenrod'        => array(0.933, 0.910, 0.667),
        'palegreen'            => array(0.596, 0.984, 0.596),
        'paleturquoise'        => array(0.686, 0.933, 0.933),
        'palevioletred'        => array(0.859, 0.439, 0.576),
        'papayawhip'           => array(1.0, 0.937, 0.835),
        'peachpuff'            => array(1.0, 0.855, 0.725),
        'peru'                 => array(0.804, 0.522, 0.247),
        'pink'                 => array(1.0, 0.753, 0.796),
        'plum'                 => array(0.867, 0.627, 0.867),
        'powderblue'           => array(0.690, 0.878, 0.902),
        'rosybrown'            => array(0.737, 0.561, 0.561),
        'royalblue'            => array(0.255, 0.412, 0.882),
        'saddlebrown'          => array(0.545, 0.271, 0.075),
        'salmon'               => array(0.980, 0.502, 0.447),
        'sandybrown'           => array(0.957, 0.643, 0.376),
        'seagreen'             => array(0.180, 0.545, 0.341),
        'seashell'             => array(1.0, 0.961, 0.933),
        'sienna'               => array(0.627, 0.322, 0.176),
        'skyblue'              => array(0.529, 0.808, 0.922),
        'slateblue'            => array(0.416, 0.353, 0.804),
        'slategray'            => array(0.439, 0.502, 0.565),
        'snow'                 => array(1.0, 0.980, 0.980),
        'springgreen'          => array(0.0, 1.0, 0.498),
        'steelblue'            => array(0.275, 0.510, 0.706),
        'tan'                  => array(0.824, 0.706, 0.549),
        'thistle'              => array(0.847, 0.749, 0.847),
        'tomato'               => array(0.992, 0.388, 0.278),
        'turquoise'            => array(0.251, 0.878, 0.816),
        'violet'               => array(0.933, 0.510, 0.933),
        'wheat'                => array(0.961, 0.871, 0.702),
        'whitesmoke'           => array(0.961, 0.961, 0.961),
        'yellowgreen'          => array(0.604, 0.804, 0.196),
    );

    /**
     * Class constructor.
     *
     * @param mixed $color Hexadecimal "#rrggbb" string or well-known name.
     * @throws Zend_Pdf_Exception
     */
    public function __construct($color)
    {
        $this->_color = self::color($color);
    }

    /**
     * Instructions, which can be directly inserted into content stream
     * to switch color.
     * Color set instructions differ for stroking and nonstroking operations.
     *
     * @param boolean $stroking
     * @return string
     */
    public function instructions($stroking)
    {
        return $this->_color->instructions($stroking);
    }

    /**
     * Get color components (color space dependent)
     *
     * @return array
     */
    public function getComponents()
    {
        return $this->_color->getComponents();
    }

    /**
     * Creates a Zend_Pdf_Color object from the HTML representation.
     *
     * @param string $color May either be a hexadecimal number of the form
     *    #rrggbb or one of the 140 well-known names (black, white, blue, etc.)
     * @return Zend_Pdf_Color
     * @throws Zend_Pdf_Exception When the value is neither #rrggbb nor a
     *    recognized color name.
     */
    public static function color($color)
    {
        $pattern = '/^#([A-Fa-f0-9]{2})([A-Fa-f0-9]{2})([A-Fa-f0-9]{2})$/';
        if (preg_match($pattern, $color, $matches)) {
            // Scale each 8-bit channel to 0..1, rounded to 3 decimals to
            // match the precision used in the named color table.
            $r = round((hexdec($matches[1]) / 255), 3);
            $g = round((hexdec($matches[2]) / 255), 3);
            $b = round((hexdec($matches[3]) / 255), 3);
            return self::_buildColor($r, $g, $b);
        }
        return Zend_Pdf_Color_Html::namedColor($color);
    }

    /**
     * Creates a Zend_Pdf_Color object from the named color.
     *
     * @param string $color One of the 140 well-known color names (black, white,
     *    blue, etc.), case-insensitive.
     * @return Zend_Pdf_Color
     * @throws Zend_Pdf_Exception When the name is not recognized.
     */
    public static function namedColor($color)
    {
        $name = strtolower($color);
        if (!isset(self::$_namedColors[$name])) {
            require_once 'Zend/Pdf/Exception.php';
            throw new Zend_Pdf_Exception('Unknown color name: ' . $color);
        }
        list($r, $g, $b) = self::$_namedColors[$name];
        return self::_buildColor($r, $g, $b);
    }

    /**
     * Vend the cheapest color object for the given components: a grayscale
     * color when r == g == b, an RGB color otherwise.
     *
     * @param float $r
     * @param float $g
     * @param float $b
     * @return Zend_Pdf_Color
     */
    private static function _buildColor($r, $g, $b)
    {
        if (($r == $g) && ($g == $b)) {
            require_once 'Zend/Pdf/Color/GrayScale.php';
            return new Zend_Pdf_Color_GrayScale($r);
        }
        require_once 'Zend/Pdf/Color/Rgb.php';
        return new Zend_Pdf_Color_Rgb($r, $g, $b);
    }
}
| svn2github/zend_framework | library/Zend/Pdf/Color/Html.php | PHP | bsd-3-clause | 16,236 |
#include "main.h"
#include "File.h"
#include "Geometry.h"
namespace ImageStack {
namespace FileTGA {
/*
typedef struct {
unsigned char identsize; // size of ID field that follows 18 byte header (0 usually)
unsigned char colormaptype; // type of colour map 0=none, 1=has palette
unsigned char imagetype; // type of image 0=none,1=indexed,2=rgb,3=grey,+8=rle packed
char colormap[5]; // crap to do with the color map
short xstart; // image x origin
short ystart; // image y origin
short width; // image width in pixels
short height; // image height in pixels
unsigned char bits; // image bits per pixel 8,16,24,32
unsigned char descriptor; // image descriptor bits (vh flip bits)
} Header;
*/
// Print the usage blurb for the TGA loader/saver.
void help() {
    fputs(".tga files. These can have 1, 3, or 4 channels, are run-length encoded, and\n"
          "are low dynamic range.\n",
          stdout);
}
// Load a .tga image. Supports uncompressed and run-length encoded files with
// 1 (gray), 3 (BGR), or 4 (BGRA) channels at 8 bits per channel. Color-mapped
// files are rejected. Pixel data is converted to HDR floats and the image is
// flipped vertically at the end (TGA rows are stored bottom-up by default).
Image load(string filename) {
    FILE *f = fopen(filename.c_str(), "rb");
    assert(f, "Could not open file %s\n", filename.c_str());

    // Parse the fixed 18-byte header (layout in the commented-out Header
    // struct above). Multi-byte fields are little-endian.
    unsigned char identsize, colormaptype, imagetype, bits;
    int width, height;
    identsize = fgetc(f);
    colormaptype = fgetc(f);
    imagetype = fgetc(f);

    // skip the colormap description
    for (int i = 0; i < 5; i++) { fgetc(f); }

    // skip xstart and ystart
    for (int i = 0; i < 4; i++) { fgetc(f); }

    width = fgetc(f);
    width += (fgetc(f) << 8);
    height = fgetc(f);
    height += (fgetc(f) << 8);
    bits = fgetc(f);

    // skip the descriptor byte (orientation flags are ignored; see vflip below)
    fgetc(f);

    // skip the image ident field
    for (int i = 0; i < identsize; i++) { fgetc(f); }

    // check the colormaptype
    assert(colormaptype == 0, "ImageStack can't read tgas with a color map");

    int channels = 0;
    bool rle = false;
    switch (imagetype) {
    case 2: // rgb
        channels = 3;
        rle = false;
        break;
    case 3: // gray
        channels = 1;
        rle = false;
        break;
    case 10: // rgb rle
        channels = 3;
        rle = true;
        break;
    case 11: // gray rle
        channels = 1;
        rle = true;
        break;
    default:
        panic("ImageStack can't load this type of tga (type %i)\n", imagetype);
    }

    // 32-bit rgb actually carries an alpha channel
    if (bits == 32 && channels == 3) { channels = 4; }

    assert(bits == 8 * channels, "ImageStack only supports 8 bits per channel tgas (this one has %i bits for %i channels)\n", bits, channels);

    Image im(width, height, 1, channels);

    // NOTE(review): the descriptor's vertical-flip bit is ignored and a flip
    // is always applied — presumably all files seen so far are bottom-up.
    bool vflip = true; //!(descriptor & 0x10);

    if (!rle && channels == 1) {
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                im(x, y) = LDRtoHDR(fgetc(f));
            }
        }
    } else if (!rle && channels == 3) {
        // Uncompressed pixels are stored B, G, R.
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                im(x, y, 2) = LDRtoHDR(fgetc(f));
                im(x, y, 1) = LDRtoHDR(fgetc(f));
                im(x, y, 0) = LDRtoHDR(fgetc(f));
            }
        }
    } else if (!rle && channels == 4) {
        // Uncompressed pixels are stored B, G, R, A.
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                im(x, y, 2) = LDRtoHDR(fgetc(f));
                im(x, y, 1) = LDRtoHDR(fgetc(f));
                im(x, y, 0) = LDRtoHDR(fgetc(f));
                im(x, y, 3) = LDRtoHDR(fgetc(f));
            }
        }
    } else if (rle && channels == 1) {
        for (int x = 0, y = 0; y < height;) {
            unsigned char ch = fgetc(f);
            // BUG FIX: per the TGA spec the low 7 bits of a packet header
            // store (pixel count - 1), so every packet covers 1..128 pixels.
            // The previous code looped only (ch & 0x7f) times, dropping one
            // pixel per packet. The high bit marks a run-length packet.
            int runlength = (ch & 0x7f) + 1;
            if (ch & 0x80) { // run-length packet: one value, repeated
                float val = LDRtoHDR(fgetc(f));
                for (int j = 0; j < runlength; j++) {
                    im(x, y) = val;
                    x++; if (x == width) {x = 0; y++;}
                }
            } else { // raw packet: runlength literal pixels
                for (int j = 0; j < runlength; j++) {
                    im(x, y) = LDRtoHDR(fgetc(f));
                    x++; if (x == width) {x = 0; y++;}
                }
            }
        }
    } else if (rle && channels == 3) {
        for (int x = 0, y = 0; y < height;) {
            unsigned char ch = fgetc(f);
            int runlength = (ch & 0x7f) + 1; // see note above: count is low 7 bits + 1
            if (ch & 0x80) { // run-length packet
                float b = LDRtoHDR(fgetc(f));
                float g = LDRtoHDR(fgetc(f));
                float r = LDRtoHDR(fgetc(f));
                for (int j = 0; j < runlength; j++) {
                    im(x, y, 0) = r;
                    im(x, y, 1) = g;
                    im(x, y, 2) = b;
                    x++; if (x == width) {x = 0; y++;}
                }
            } else { // raw packet
                for (int j = 0; j < runlength; j++) {
                    im(x, y, 0) = LDRtoHDR(fgetc(f));
                    im(x, y, 1) = LDRtoHDR(fgetc(f));
                    im(x, y, 2) = LDRtoHDR(fgetc(f));
                    x++; if (x == width) {x = 0; y++;}
                }
            }
        }
    } else if (rle && channels == 4) {
        for (int x = 0, y = 0; y < height;) {
            unsigned char ch = fgetc(f);
            int runlength = (ch & 0x7f) + 1; // see note above: count is low 7 bits + 1
            if (ch & 0x80) { // run-length packet
                float b = LDRtoHDR(fgetc(f));
                float g = LDRtoHDR(fgetc(f));
                float r = LDRtoHDR(fgetc(f));
                float a = LDRtoHDR(fgetc(f));
                for (int j = 0; j < runlength; j++) {
                    im(x, y, 0) = r;
                    im(x, y, 1) = g;
                    im(x, y, 2) = b;
                    im(x, y, 3) = a;
                    x++; if (x == width) {x = 0; y++;}
                }
            } else { // raw packet
                for (int j = 0; j < runlength; j++) {
                    im(x, y, 0) = LDRtoHDR(fgetc(f));
                    im(x, y, 1) = LDRtoHDR(fgetc(f));
                    im(x, y, 2) = LDRtoHDR(fgetc(f));
                    im(x, y, 3) = LDRtoHDR(fgetc(f));
                    x++; if (x == width) {x = 0; y++;}
                }
            }
        }
    }

    fclose(f);

    if (vflip) Flip::apply(im, 'y');

    return im;
}
// Write an uncompressed 8-bit-per-channel TGA file. Only single-frame
// images with 1, 3, or 4 channels are supported. Rows are written
// bottom-up and pixels in B, G, R(, A) order, as the format requires.
void save(Image im, string filename) {
    FILE *f = fopen(filename.c_str(), "wb");
    assert(f, "Could not open file %s\n", filename.c_str());

    assert(im.frames == 1, "can only save single frame tgas\n");
    assert(im.channels == 4 || im.channels == 3 || im.channels == 1,
           "can only save tgas with one, three, or four channels\n");

    // Fixed 18-byte header: no ident field, no colormap, image type 3
    // (gray) or 2 (rgb), zero origin, little-endian dimensions.
    const unsigned char header[18] = {
        0,                                            // identsize
        0,                                            // colormaptype
        (unsigned char)(im.channels == 1 ? 3 : 2),    // imagetype
        0, 0, 0, 0, 0,                                // colormap description
        0, 0,                                         // x origin
        0, 0,                                         // y origin
        (unsigned char)(im.width & 255),
        (unsigned char)((im.width >> 8) & 255),
        (unsigned char)(im.height & 255),
        (unsigned char)((im.height >> 8) & 255),
        (unsigned char)(im.channels * 8),             // bits per pixel
        0                                             // descriptor
    };
    fwrite(header, 1, sizeof(header), f);

    // Emit rows bottom-up; channel order on disk is BGR(A).
    for (int row = im.height-1; row >= 0; row--) {
        for (int col = 0; col < im.width; col++) {
            if (im.channels == 1) {
                fputc(HDRtoLDR(im(col, row)), f);
            } else {
                fputc(HDRtoLDR(im(col, row, 2)), f);
                fputc(HDRtoLDR(im(col, row, 1)), f);
                fputc(HDRtoLDR(im(col, row, 0)), f);
                if (im.channels == 4) {
                    fputc(HDRtoLDR(im(col, row, 3)), f);
                }
            }
        }
    }

    fclose(f);
}
}
}
| ricoyoung345/imagestack | src/FileTGA.cpp | C++ | bsd-3-clause | 8,011 |
/*******************************************************
* Copyright (c) 2014, ArrayFire
* All rights reserved.
*
* This file is distributed under 3-clause BSD license.
* The complete license agreement can be obtained at:
* http://arrayfire.com/licenses/BSD-3-Clause
********************************************************/
#include <af/features.h>
#include <af/array.h>
#include "error.hpp"
namespace af
{

// Every af_get_features_* call below yields a handle that is still owned by
// the features object, so it must be retained before being wrapped in an
// af::array (whose destructor releases its handle).
static array retainedArray(af_array borrowed)
{
    af_array tmp = 0;
    AF_THROW(af_retain_array(&tmp, borrowed));
    return array(tmp);
}

// Construct an empty feature set.
features::features()
{
    AF_THROW(af_create_features(&feat, 0));
}

// Construct a feature set with storage for n features.
features::features(const size_t n)
{
    AF_THROW(af_create_features(&feat, (int)n));
}

// Wrap an existing C handle. Ownership is taken as-is (not retained).
features::features(af_features f) : feat(f)
{
}

features& features::operator= (const features& other)
{
    if (this != &other) {
        // BUG FIX: retain the new handle *before* releasing the old one.
        // The previous order released `feat` first, so a failed retain left
        // this object holding a released handle that the destructor would
        // release again. With this order a failure leaves `feat` untouched.
        af_features tmp = 0;
        AF_THROW(af_retain_features(&tmp, other.get()));
        AF_THROW(af_release_features(feat));
        feat = tmp;
    }
    return *this;
}

features::~features()
{
    // THOU SHALL NOT THROW IN DESTRUCTORS
    if (feat) {
        af_release_features(feat);
    }
}

// Number of features currently stored.
size_t features::getNumFeatures() const
{
    dim_t n = 0;
    AF_THROW(af_get_features_num(&n, feat));
    return n;
}

array features::getX() const
{
    af_array x = 0;
    AF_THROW(af_get_features_xpos(&x, feat));
    return retainedArray(x);
}

array features::getY() const
{
    af_array y = 0;
    AF_THROW(af_get_features_ypos(&y, feat));
    return retainedArray(y);
}

array features::getScore() const
{
    af_array s = 0;
    AF_THROW(af_get_features_score(&s, feat));
    return retainedArray(s);
}

array features::getOrientation() const
{
    af_array ori = 0;
    AF_THROW(af_get_features_orientation(&ori, feat));
    return retainedArray(ori);
}

array features::getSize() const
{
    af_array s = 0;
    AF_THROW(af_get_features_size(&s, feat));
    return retainedArray(s);
}

// Expose the underlying C handle (still owned by this object).
af_features features::get() const
{
    return feat;
}

}
| ghisvail/arrayfire | src/api/cpp/features.cpp | C++ | bsd-3-clause | 2,405 |
package org.javasimon.jdbcx4;
import java.sql.SQLException;
import javax.sql.StatementEventListener;
import javax.sql.XAConnection;
import javax.transaction.xa.XAResource;
/**
* Simon implementation of <code>XAConnection</code>, needed for
* Simon XADataSource implementation.
* <p/>
* All method invokes its real implementation.
* <p/>
* See the {@link org.javasimon.jdbcx4 package description} for more
* information.
*
* @author Radovan Sninsky
* @author <a href="mailto:virgo47@gmail.com">Richard "Virgo" Richter</a>
* @since 2.4
*/
public final class SimonXAConnection extends SimonPooledConnection implements XAConnection {

	/** Wrapped vendor XA connection; all XA-specific calls are forwarded to it. */
	private final XAConnection delegate;

	/**
	 * Class constructor.
	 *
	 * @param connection real xa connection
	 * @param prefix Simon prefix
	 */
	public SimonXAConnection(XAConnection connection, String prefix) {
		super(connection, prefix);
		this.delegate = connection;
	}

	/**
	 * Returns the XA resource of the wrapped connection.
	 *
	 * @return the underlying {@link XAResource}
	 * @throws SQLException if the wrapped connection fails to provide it
	 */
	@Override
	public XAResource getXAResource() throws SQLException {
		return delegate.getXAResource();
	}

	/** Forwards statement-event listener registration to the wrapped connection. */
	@Override
	public void addStatementEventListener(StatementEventListener listener) {
		delegate.addStatementEventListener(listener);
	}

	/** Forwards statement-event listener removal to the wrapped connection. */
	@Override
	public void removeStatementEventListener(StatementEventListener listener) {
		delegate.removeStatementEventListener(listener);
	}
}
| karouani/javasimon | jdbc41/src/main/java/org/javasimon/jdbcx4/SimonXAConnection.java | Java | bsd-3-clause | 1,312 |
// 2005-04-26 Paolo Carlini <pcarlini@suse.de>
// Copyright (C) 2005 Free Software Foundation
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 2, or (at your option)
// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING. If not, write to the Free
// Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
// USA.
// 22.2.2.1.1 num_get members
#include <locale>
#include <sstream>
#include <limits>
#include <testsuite_hooks.h>
// Exercises num_get<wchar_t>::get for every integral type at the edge of
// its range: the exact max (or min) value must parse back, while the same
// digits with one extra digit appended must overflow, setting failbit and
// leaving the pre-set 0 in the output variable. A trailing separator
// yields goodbit; reading to the end of the buffer yields eofbit.
void test01()
{
  using namespace std;
  typedef istreambuf_iterator<wchar_t> iterator_type;

  bool test __attribute__((unused)) = true;

  wstringstream ss;
  const num_get<wchar_t>& ng = use_facet<num_get<wchar_t> >(ss.getloc());
  ios_base::iostate err;
  iterator_type end;

  // For each type: a *0 variable to parse into and a *1/*2 variable
  // holding the numeric limit used to build the input text.
  unsigned short us0, us1 = numeric_limits<unsigned short>::max();
  unsigned int ui0, ui1 = numeric_limits<unsigned int>::max();
  unsigned long ul0, ul1 = numeric_limits<unsigned long>::max();
  long l01, l1 = numeric_limits<long>::max();
  long l02, l2 = numeric_limits<long>::min();
#ifdef _GLIBCXX_USE_LONG_LONG
  unsigned long long ull0, ull1 = numeric_limits<unsigned long long>::max();
  long long ll01, ll1 = numeric_limits<long long>::max();
  long long ll02, ll2 = numeric_limits<long long>::min();
#endif
  const wstring empty;

  // unsigned short: exact max round-trips (input exhausted -> eofbit).
  us0 = 0;
  ss << us1;
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, us0);
  VERIFY( err == ios_base::eofbit );
  VERIFY( us0 == us1 );

  // unsigned short: appending a digit overflows -> failbit, us0 untouched.
  us0 = 0;
  ss.clear();
  ss.str(empty);
  ss << us1 << L'0';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, us0);
  VERIFY( err == (ios_base::failbit | ios_base::eofbit) );
  VERIFY( us0 == 0 );

  // unsigned int: exact max followed by a space -> goodbit.
  ui0 = 0U;
  ss.clear();
  ss.str(empty);
  ss << ui1 << ' ';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ui0);
  VERIFY( err == ios_base::goodbit );
  VERIFY( ui0 == ui1 );

  // unsigned int: one extra digit overflows.
  ui0 = 0U;
  ss.clear();
  ss.str(empty);
  ss << ui1 << L'1';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ui0);
  VERIFY( err == (ios_base::failbit | ios_base::eofbit) );
  VERIFY( ui0 == 0U );

  // unsigned long: exact max round-trips.
  ul0 = 0UL;
  ss.clear();
  ss.str(empty);
  ss << ul1;
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ul0);
  VERIFY( err == ios_base::eofbit );
  VERIFY( ul0 == ul1 );

  // unsigned long: one extra digit overflows.
  ul0 = 0UL;
  ss.clear();
  ss.str(empty);
  ss << ul1 << L'2';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ul0);
  VERIFY( err == (ios_base::failbit | ios_base::eofbit) );
  VERIFY( ul0 == 0UL );

  // long: exact max round-trips.
  l01 = 0L;
  ss.clear();
  ss.str(empty);
  ss << l1 << L' ';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, l01);
  VERIFY( err == ios_base::goodbit );
  VERIFY( l01 == l1 );

  // long: one extra digit overflows past max.
  l01 = 0L;
  ss.clear();
  ss.str(empty);
  ss << l1 << L'3';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, l01);
  VERIFY( err == (ios_base::failbit | ios_base::eofbit) );
  VERIFY( l01 == 0L );

  // long: exact (negative) min round-trips.
  l02 = 0L;
  ss.clear();
  ss.str(empty);
  ss << l2;
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, l02);
  VERIFY( err == ios_base::eofbit );
  VERIFY( l02 == l2 );

  // long: one extra digit underflows past min.
  l02 = 0L;
  ss.clear();
  ss.str(empty);
  ss << l2 << L'4';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, l02);
  VERIFY( err == (ios_base::failbit | ios_base::eofbit) );
  VERIFY( l02 == 0L );

#ifdef _GLIBCXX_USE_LONG_LONG
  // Same pattern for the 64-bit types when long long support is built in.
  ull0 = 0ULL;
  ss.clear();
  ss.str(empty);
  ss << ull1 << L' ';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ull0);
  VERIFY( err == ios_base::goodbit );
  VERIFY( ull0 == ull1 );

  ull0 = 0ULL;
  ss.clear();
  ss.str(empty);
  ss << ull1 << L'5';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ull0);
  VERIFY( err == (ios_base::failbit | ios_base::eofbit) );
  VERIFY( ull0 == 0ULL );

  ll01 = 0LL;
  ss.clear();
  ss.str(empty);
  ss << ll1;
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ll01);
  VERIFY( err == ios_base::eofbit );
  VERIFY( ll01 == ll1 );

  ll01 = 0LL;
  ss.clear();
  ss.str(empty);
  ss << ll1 << L'6';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ll01);
  VERIFY( err == (ios_base::failbit | ios_base::eofbit) );
  VERIFY( ll01 == 0LL );

  ll02 = 0LL;
  ss.clear();
  ss.str(empty);
  ss << ll2 << L' ';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ll02);
  VERIFY( err == ios_base::goodbit );
  VERIFY( ll02 == ll2 );

  ll02 = 0LL;
  ss.clear();
  ss.str(empty);
  ss << ll2 << L'7';
  err = ios_base::goodbit;
  end = ng.get(ss.rdbuf(), 0, ss, err, ll02);
  VERIFY( err == (ios_base::failbit | ios_base::eofbit) );
  VERIFY( ll02 == 0LL );
#endif
}
// Test driver: run the overflow/round-trip checks.
int main()
{
  test01();
  return 0;
}
| shaotuanchen/sunflower_exp | tools/source/gcc-4.2.4/libstdc++-v3/testsuite/22_locale/num_get/get/wchar_t/16.cc | C++ | bsd-3-clause | 5,193 |
import { Component, Input } from '@angular/core';
import { AllReadyEvent } from "../../pages/events/events";
import { NavController } from "ionic-angular";
import { EventDetailsPage } from "../../pages/eventdetails/eventdetails";
import { DateFilterPipe } from "../../pipes/datefilter/datefilter";
@Component({
  selector: 'event-card',
  templateUrl: 'event-card.html'
})
export class EventCardComponent {
  // The event to render; bound by the parent template.
  @Input('event') event: AllReadyEvent;

  // Pre-formatted start date/time, recomputed whenever the input changes.
  private formattedStartDateTime: string;

  constructor(public navCtrl: NavController, public dateFilterPipe: DateFilterPipe) {
    // BUG FIX: the injected DateFilterPipe was previously overwritten here
    // with a manually constructed instance, defeating dependency injection.
    // DI must already have succeeded for this constructor to run, so the
    // injected instance (assigned via the parameter property) is used as-is.
  }

  ngOnChanges() {
    // Guard: ngOnChanges can fire before the `event` input is bound.
    if (this.event && this.event.StartDateTime) {
      this.formattedStartDateTime =
        this.dateFilterPipe.transform(this.event.StartDateTime.toString(), null);
    }
  }

  // Navigate to the details page for the given event.
  showEventDetails(event: AllReadyEvent) {
    this.navCtrl.push(EventDetailsPage, { event: event });
  }
}
| stevejgordon/allReady | AllReadyApp/Mobile-App/src/components/event-card/event-card.ts | TypeScript | mit | 897 |
version https://git-lfs.github.com/spec/v1
oid sha256:d57f539203e9219f4c43532d12ca16d51ce822bf08d658cabd06aa84fc61dda2
size 661
| yogeshsaroya/new-cdnjs | ajax/libs/dojo/1.5.4/cldr/nls/sv/number.js | JavaScript | mit | 128 |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Runtime.Remoting.Channels.BaseChannelSinkWithProperties.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Runtime.Remoting.Channels
{
  /// <summary>
  /// Automatically generated contract stub mirroring
  /// System.Runtime.Remoting.Channels.BaseChannelSinkWithProperties.
  /// Adds no members of its own beyond the protected constructor.
  /// </summary>
  abstract public partial class BaseChannelSinkWithProperties : BaseChannelObjectWithProperties
  {
    #region Methods and constructors
    /// <summary>
    /// Protected because the type is abstract; only derived sinks construct it.
    /// </summary>
    protected BaseChannelSinkWithProperties()
    {
    }
    #endregion
  }
}
| Microsoft/CodeContracts | Microsoft.Research/Contracts/MsCorlib/Sources/System.Runtime.Remoting.Channels.BaseChannelSinkWithProperties.cs | C# | mit | 2,271 |
using System;
using System.Diagnostics;
using System.Net;
using System.Web;
using Umbraco.Core;
using Umbraco.Core.Logging;
namespace umbraco.presentation
{
/// <summary>
/// Makes a call to /umbraco/ping.aspx which is used to keep the web app alive
/// </summary>
public class keepAliveService
{
//NOTE: sender will be the umbraco ApplicationContext
public static void PingUmbraco(object sender)
{
if (sender == null || !(sender is ApplicationContext))
return;
var appContext = (ApplicationContext) sender;
var url = string.Format("http://{0}/ping.aspx", appContext.OriginalRequestUrl);
try
{
using (var wc = new WebClient())
{
wc.DownloadString(url);
}
}
catch(Exception ee)
{
LogHelper.Debug<keepAliveService>(string.Format("Error in ping({0}) -> {1}", url, ee));
}
}
}
} | Shazwazza/Umbraco-CMS | src/Umbraco.Web/umbraco.presentation/keepAliveService.cs | C# | mit | 909 |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Net.NetworkInformation.IPv4InterfaceProperties.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Net.NetworkInformation
{
  /// <summary>
  /// Automatically generated contract stub mirroring
  /// System.Net.NetworkInformation.IPv4InterfaceProperties: abstract,
  /// read-only IPv4 configuration properties of a network interface.
  /// </summary>
  abstract public partial class IPv4InterfaceProperties
  {
    #region Methods and constructors
    /// <summary>Protected because the type is abstract.</summary>
    protected IPv4InterfaceProperties()
    {
    }
    #endregion

    #region Properties and indexers
    /// <summary>Index of the interface (per the mirrored .NET API).</summary>
    public abstract int Index
    {
      get;
    }

    /// <summary>Whether automatic private addressing (APIPA) is currently active.</summary>
    public abstract bool IsAutomaticPrivateAddressingActive
    {
      get;
    }

    /// <summary>Whether automatic private addressing (APIPA) is enabled.</summary>
    public abstract bool IsAutomaticPrivateAddressingEnabled
    {
      get;
    }

    /// <summary>Whether the interface is configured via DHCP.</summary>
    public abstract bool IsDhcpEnabled
    {
      get;
    }

    /// <summary>Whether packet forwarding is enabled on the interface.</summary>
    public abstract bool IsForwardingEnabled
    {
      get;
    }

    /// <summary>Maximum transmission unit of the interface, in bytes.</summary>
    public abstract int Mtu
    {
      get;
    }

    /// <summary>Whether the interface uses WINS name resolution.</summary>
    public abstract bool UsesWins
    {
      get;
    }
    #endregion
  }
}
| Microsoft/CodeContracts | Microsoft.Research/Contracts/System/Sources/System.Net.NetworkInformation.IPv4InterfaceProperties.cs | C# | mit | 2,729 |
'use strict';
var Promise = require('../../lib/ext/promise');
var conf = require('ember-cli-internal-test-helpers/lib/helpers/conf');
var ember = require('../helpers/ember');
var fs = require('fs-extra');
var outputFile = Promise.denodeify(fs.outputFile);
var path = require('path');
var remove = Promise.denodeify(fs.remove);
var replaceFile = require('ember-cli-internal-test-helpers/lib/helpers/file-utils').replaceFile;
var root = process.cwd();
var tmproot = path.join(root, 'tmp');
var Blueprint = require('../../lib/models/blueprint');
var BlueprintNpmTask = require('ember-cli-internal-test-helpers/lib/helpers/disable-npm-on-blueprint');
var mkTmpDirIn = require('../../lib/utilities/mk-tmp-dir-in');
var chai = require('../chai');
var expect = chai.expect;
var file = chai.file;
describe('Acceptance: ember generate', function() {
this.timeout(20000);
var tmpdir;
before(function() {
BlueprintNpmTask.disableNPM(Blueprint);
conf.setup();
});
after(function() {
BlueprintNpmTask.restoreNPM(Blueprint);
conf.restore();
});
beforeEach(function() {
return mkTmpDirIn(tmproot).then(function(dir) {
tmpdir = dir;
process.chdir(tmpdir);
});
});
afterEach(function() {
process.chdir(root);
return remove(tmproot);
});
function initApp() {
return ember([
'init',
'--name=my-app',
'--skip-npm',
'--skip-bower'
]);
}
function generate(args) {
var generateArgs = ['generate'].concat(args);
return initApp().then(function() {
return ember(generateArgs);
});
}
it('component x-foo', function() {
return generate(['component', 'x-foo']).then(function() {
expect(file('app/components/x-foo.js'))
.to.contain("import Ember from 'ember';")
.to.contain("export default Ember.Component.extend({")
.to.contain("});");
expect(file('app/templates/components/x-foo.hbs'))
.to.contain("{{yield}}");
expect(file('tests/integration/components/x-foo-test.js'))
.to.contain("import { moduleForComponent, test } from 'ember-qunit';")
.to.contain("import hbs from 'htmlbars-inline-precompile';")
.to.contain("moduleForComponent('x-foo'")
.to.contain("integration: true")
.to.contain("{{x-foo}}")
.to.contain("{{#x-foo}}");
});
});
it('component foo/x-foo', function() {
return generate(['component', 'foo/x-foo']).then(function() {
expect(file('app/components/foo/x-foo.js'))
.to.contain("import Ember from 'ember';")
.to.contain("export default Ember.Component.extend({")
.to.contain("});");
expect(file('app/templates/components/foo/x-foo.hbs'))
.to.contain("{{yield}}");
expect(file('tests/integration/components/foo/x-foo-test.js'))
.to.contain("import { moduleForComponent, test } from 'ember-qunit';")
.to.contain("import hbs from 'htmlbars-inline-precompile';")
.to.contain("moduleForComponent('foo/x-foo'")
.to.contain("integration: true")
.to.contain("{{foo/x-foo}}")
.to.contain("{{#foo/x-foo}}");
});
});
it('component x-foo ignores --path option', function() {
return generate(['component', 'x-foo', '--path', 'foo']).then(function() {
expect(file('app/components/x-foo.js'))
.to.contain("import Ember from 'ember';")
.to.contain("export default Ember.Component.extend({")
.to.contain("});");
expect(file('app/templates/components/x-foo.hbs'))
.to.contain("{{yield}}");
expect(file('tests/integration/components/x-foo-test.js'))
.to.contain("import { moduleForComponent, test } from 'ember-qunit';")
.to.contain("import hbs from 'htmlbars-inline-precompile';")
.to.contain("moduleForComponent('x-foo'")
.to.contain("integration: true")
.to.contain("{{x-foo}}")
.to.contain("{{#x-foo}}");
});
});
it('blueprint foo', function() {
return generate(['blueprint', 'foo']).then(function() {
expect(file('blueprints/foo/index.js'))
.to.contain("module.exports = {\n" +
" description: ''\n" +
"\n" +
" // locals: function(options) {\n" +
" // // Return custom template variables here.\n" +
" // return {\n" +
" // foo: options.entity.options.foo\n" +
" // };\n" +
" // }\n" +
"\n" +
" // afterInstall: function(options) {\n" +
" // // Perform extra work here.\n" +
" // }\n" +
"};");
});
});
it('blueprint foo/bar', function() {
return generate(['blueprint', 'foo/bar']).then(function() {
expect(file('blueprints/foo/bar/index.js'))
.to.contain("module.exports = {\n" +
" description: ''\n" +
"\n" +
" // locals: function(options) {\n" +
" // // Return custom template variables here.\n" +
" // return {\n" +
" // foo: options.entity.options.foo\n" +
" // };\n" +
" // }\n" +
"\n" +
" // afterInstall: function(options) {\n" +
" // // Perform extra work here.\n" +
" // }\n" +
"};");
});
});
it('http-mock foo', function() {
return generate(['http-mock', 'foo']).then(function() {
expect(file('server/index.js'))
.to.contain("mocks.forEach(function(route) { route(app); });");
expect(file('server/mocks/foo.js'))
.to.contain("module.exports = function(app) {\n" +
" var express = require('express');\n" +
" var fooRouter = express.Router();\n" +
"\n" +
" fooRouter.get('/', function(req, res) {\n" +
" res.send({\n" +
" 'foo': []\n" +
" });\n" +
" });\n" +
"\n" +
" fooRouter.post('/', function(req, res) {\n" +
" res.status(201).end();\n" +
" });\n" +
"\n" +
" fooRouter.get('/:id', function(req, res) {\n" +
" res.send({\n" +
" 'foo': {\n" +
" id: req.params.id\n" +
" }\n" +
" });\n" +
" });\n" +
"\n" +
" fooRouter.put('/:id', function(req, res) {\n" +
" res.send({\n" +
" 'foo': {\n" +
" id: req.params.id\n" +
" }\n" +
" });\n" +
" });\n" +
"\n" +
" fooRouter.delete('/:id', function(req, res) {\n" +
" res.status(204).end();\n" +
" });\n" +
"\n" +
" // The POST and PUT call will not contain a request body\n" +
" // because the body-parser is not included by default.\n" +
" // To use req.body, run:\n" +
"\n" +
" // npm install --save-dev body-parser\n" +
"\n" +
" // After installing, you need to `use` the body-parser for\n" +
" // this mock uncommenting the following line:\n" +
" //\n" +
" //app.use('/api/foo', require('body-parser').json());\n" +
" app.use('/api/foo', fooRouter);\n" +
"};");
expect(file('server/.jshintrc'))
.to.contain('{\n "node": true\n}');
});
});
it('http-mock foo-bar', function() {
return generate(['http-mock', 'foo-bar']).then(function() {
expect(file('server/index.js'))
.to.contain("mocks.forEach(function(route) { route(app); });");
expect(file('server/mocks/foo-bar.js'))
.to.contain("module.exports = function(app) {\n" +
" var express = require('express');\n" +
" var fooBarRouter = express.Router();\n" +
"\n" +
" fooBarRouter.get('/', function(req, res) {\n" +
" res.send({\n" +
" 'foo-bar': []\n" +
" });\n" +
" });\n" +
"\n" +
" fooBarRouter.post('/', function(req, res) {\n" +
" res.status(201).end();\n" +
" });\n" +
"\n" +
" fooBarRouter.get('/:id', function(req, res) {\n" +
" res.send({\n" +
" 'foo-bar': {\n" +
" id: req.params.id\n" +
" }\n" +
" });\n" +
" });\n" +
"\n" +
" fooBarRouter.put('/:id', function(req, res) {\n" +
" res.send({\n" +
" 'foo-bar': {\n" +
" id: req.params.id\n" +
" }\n" +
" });\n" +
" });\n" +
"\n" +
" fooBarRouter.delete('/:id', function(req, res) {\n" +
" res.status(204).end();\n" +
" });\n" +
"\n" +
" // The POST and PUT call will not contain a request body\n" +
" // because the body-parser is not included by default.\n" +
" // To use req.body, run:\n" +
"\n" +
" // npm install --save-dev body-parser\n" +
"\n" +
" // After installing, you need to `use` the body-parser for\n" +
" // this mock uncommenting the following line:\n" +
" //\n" +
" //app.use('/api/foo-bar', require('body-parser').json());\n" +
" app.use('/api/foo-bar', fooBarRouter);\n" +
"};");
expect(file('server/.jshintrc'))
.to.contain('{\n "node": true\n}');
});
});
it('http-proxy foo', function() {
return generate(['http-proxy', 'foo', 'http://localhost:5000']).then(function() {
expect(file('server/index.js'))
.to.contain("proxies.forEach(function(route) { route(app); });");
expect(file('server/proxies/foo.js'))
.to.contain("var proxyPath = '/foo';\n" +
"\n" +
"module.exports = function(app) {\n" +
" // For options, see:\n" +
" // https://github.com/nodejitsu/node-http-proxy\n" +
" var proxy = require('http-proxy').createProxyServer({});\n" +
"\n" +
" proxy.on('error', function(err, req) {\n" +
" console.error(err, req.url);\n" +
" });\n" +
"\n" +
" app.use(proxyPath, function(req, res, next){\n" +
" // include root path in proxied request\n" +
" req.url = proxyPath + '/' + req.url;\n" +
" proxy.web(req, res, { target: 'http://localhost:5000' });\n" +
" });\n" +
"};");
expect(file('server/.jshintrc'))
.to.contain('{\n "node": true\n}');
});
});
it('uses blueprints from the project directory', function() {
return initApp()
.then(function() {
return outputFile(
'blueprints/foo/files/app/foos/__name__.js',
"import Ember from 'ember';\n" +
'export default Ember.Object.extend({ foo: true });\n'
);
})
.then(function() {
return ember(['generate', 'foo', 'bar']);
})
.then(function() {
expect(file('app/foos/bar.js')).to.contain('foo: true');
});
});
it('allows custom blueprints to override built-ins', function() {
return initApp()
.then(function() {
return outputFile(
'blueprints/controller/files/app/controllers/__name__.js',
"import Ember from 'ember';\n\n" +
"export default Ember.Controller.extend({ custom: true });\n"
);
})
.then(function() {
return ember(['generate', 'controller', 'foo']);
})
.then(function() {
expect(file('app/controllers/foo.js')).to.contain('custom: true');
});
});
it('passes custom cli arguments to blueprint options', function() {
return initApp()
.then(function() {
outputFile(
'blueprints/customblue/files/app/__name__.js',
"Q: Can I has custom command? A: <%= hasCustomCommand %>"
);
return outputFile(
'blueprints/customblue/index.js',
"module.exports = {\n" +
" locals: function(options) {\n" +
" var loc = {};\n" +
" loc.hasCustomCommand = (options.customCommand) ? 'Yes!' : 'No. :C';\n" +
" return loc;\n" +
" },\n" +
"};\n"
);
})
.then(function() {
return ember(['generate', 'customblue', 'foo', '--custom-command']);
})
.then(function() {
expect(file('app/foo.js')).to.contain('A: Yes!');
});
});
it('correctly identifies the root of the project', function() {
return initApp()
.then(function() {
return outputFile(
'blueprints/controller/files/app/controllers/__name__.js',
"import Ember from 'ember';\n\n" +
"export default Ember.Controller.extend({ custom: true });\n"
);
})
.then(function() {
process.chdir(path.join(tmpdir, 'app'));
})
.then(function() {
return ember(['generate', 'controller', 'foo']);
})
.then(function() {
process.chdir(tmpdir);
})
.then(function() {
expect(file('app/controllers/foo.js')).to.contain('custom: true');
});
});
it('route foo --dry-run does not change router.js', function() {
return generate(['route', 'foo', '--dry-run']).then(function() {
expect(file('app/router.js')).to.not.contain("route('foo')");
});
});
it('server', function() {
return generate(['server']).then(function() {
expect(file('server/index.js')).to.exist;
expect(file('server/.jshintrc')).to.exist;
});
});
it('availableOptions work with aliases.', function() {
return generate(['route', 'foo', '-d']).then(function() {
expect(file('app/router.js')).to.not.contain("route('foo')");
});
});
it('lib', function() {
return generate(['lib']).then(function() {
expect(file('lib/.jshintrc')).to.exist;
});
});
it('custom blueprint availableOptions', function() {
return initApp().then(function() {
return ember(['generate', 'blueprint', 'foo']).then(function() {
replaceFile('blueprints/foo/index.js', 'module.exports = {',
'module.exports = {\navailableOptions: [ \n' +
'{ name: \'foo\',\ntype: String, \n' +
'values: [\'one\', \'two\'],\n' +
'default: \'one\',\n' +
'aliases: [ {\'one\': \'one\'}, {\'two\': \'two\'} ] } ],\n' +
'locals: function(options) {\n' +
'return { foo: options.foo };\n' +
'},');
return outputFile(
'blueprints/foo/files/app/foos/__name__.js',
"import Ember from 'ember';\n" +
'export default Ember.Object.extend({ foo: <%= foo %> });\n'
).then(function() {
return ember(['generate','foo','bar','-two']);
});
});
}).then(function() {
expect(file('app/foos/bar.js')).to.contain('export default Ember.Object.extend({ foo: two });');
});
});
});
| lazybensch/ember-cli | tests/acceptance/generate-test.js | JavaScript | mit | 16,507 |
// Unit tests for the `confirmation` validator: `attribute` must equal its
// paired `attributeConfirmation` property for validation to pass.
import Ember from 'ember';
import { module, test } from 'qunit';
import Confirmation from 'ember-validations/validators/local/confirmation';
import Mixin from 'ember-validations/mixin';
import buildContainer from '../../../helpers/build-container';
var model, Model, options, validator;
var get = Ember.get;
var set = Ember.set;
var run = Ember.run;
module('Confirmation Validator', {
setup: function() {
// Fresh model class (with the validations mixin) and instance per test.
Model = Ember.Object.extend(Mixin, {
container: buildContainer()
});
run(function() {
model = Model.create();
});
}
});
// Errors should track live changes: appear when values diverge, clear when
// they converge again.
test('when values match', function(assert) {
options = { message: 'failed validation' };
run(function() {
validator = Confirmation.create({model: model, property: 'attribute', options: options});
set(model, 'attribute', 'test');
set(model, 'attributeConfirmation', 'test');
});
assert.deepEqual(validator.errors, []);
run(function() {
set(model, 'attributeConfirmation', 'newTest');
});
assert.deepEqual(validator.errors, ['failed validation']);
run(function() {
set(model, 'attribute', 'newTest');
});
assert.deepEqual(validator.errors, []);
});
// With only the original set, the (undefined) confirmation does not match.
test('when values do not match', function(assert) {
options = { message: 'failed validation' };
run(function() {
validator = Confirmation.create({model: model, property: 'attribute', options: options});
set(model, 'attribute', 'test');
});
assert.deepEqual(validator.errors, ['failed validation']);
});
// A null original produces no errors (nothing to confirm).
test('when original is null', function(assert) {
run(function() {
validator = Confirmation.create({model: model, property: 'attribute'});
model.set('attribute', null);
});
assert.ok(Ember.isEmpty(validator.errors));
});
// Likewise a null confirmation alone is not an error.
test('when confirmation is null', function(assert) {
run(function() {
validator = Confirmation.create({model: model, property: 'attribute'});
model.set('attributeConfirmation', null);
});
assert.ok(Ember.isEmpty(validator.errors));
});
// `confirmation: true` (no custom message) falls back to the default
// "doesn't match <property>" message.
test('when options is true', function(assert) {
options = true;
run(function() {
validator = Confirmation.create({model: model, property: 'attribute', options: options});
set(model, 'attribute', 'test');
});
assert.deepEqual(validator.errors, ["doesn't match attribute"]);
});
// Declared via the model's `validations` hash, the error is reported on the
// confirmation property, not on the original attribute.
test('message integration on model, prints message on Confirmation property', function(assert) {
var otherModel, OtherModel = Model.extend({
validations: {
attribute: {
confirmation: true
}
}
});
run(function() {
otherModel = OtherModel.create();
set(otherModel, 'attribute', 'test');
});
assert.deepEqual(get(otherModel, 'errors.attributeConfirmation'), ["doesn't match attribute"]);
assert.deepEqual(get(otherModel, 'errors.attribute'), []);
});
| meszike123/ember-validations | tests/unit/validators/local/confirmation-test.js | JavaScript | mit | 2,746 |
<?php
namespace Oro\Bundle\IntegrationBundle\Exception;
/**
 * Marker interface for all exceptions thrown by the IntegrationBundle,
 * allowing callers to catch bundle-specific failures with a single type.
 */
interface IntegrationException
{
}
| MarkThink/OROCRM | vendor/oro/platform/src/Oro/Bundle/IntegrationBundle/Exception/IntegrationException.php | PHP | mit | 93 |
<?php
namespace Sirius\Validation\Rule;
/**
 * Conditionally-required rule: the value under validation becomes required
 * only when another item in the validation context passes a given rule.
 *
 * Options:
 *  - item:         selector of the companion item in the context
 *  - rule:         rule class name (fully qualified or relative to
 *                  Sirius\Validation\Rule) or an AbstractValidator instance
 *  - rule_options: options used when the rule is built from a string
 */
class RequiredWhen extends Required
{
    const OPTION_ITEM = 'item';
    const OPTION_RULE = 'rule';
    const OPTION_RULE_OPTIONS = 'rule_options';
    protected static $defaultMessageTemplate = 'This field is required';
    /**
     * Builds the validator applied to the companion item.
     *
     * @return AbstractValidator
     * @throws \InvalidArgumentException when the rule option cannot be
     *         resolved to a validator instance
     */
    public function getItemRule()
    {
        /* @var $rule AbstractValidator */
        $rule = false;
        $ruleOptions = (isset($this->options[self::OPTION_RULE_OPTIONS])) ? (array)$this->options[self::OPTION_RULE_OPTIONS] : array();
        if (is_string($this->options[self::OPTION_RULE])) {
            $ruleClass = $this->options[self::OPTION_RULE];
            if (class_exists($ruleClass)) {
                $rule = new $ruleClass($ruleOptions);
            } elseif (class_exists('Sirius\\Validation\\Rule\\' . $ruleClass)) {
                // Allow short names resolved against the built-in rule namespace.
                $ruleClass = 'Sirius\\Validation\\Rule\\' . $ruleClass;
                $rule = new $ruleClass($ruleOptions);
            }
        } elseif (is_object(
            $this->options[self::OPTION_RULE]
        ) && $this->options[self::OPTION_RULE] instanceof AbstractValidator
        ) {
            $rule = $this->options[self::OPTION_RULE];
        }
        if (!$rule) {
            throw new \InvalidArgumentException(
                'Validator for the other item is not valid or cannot be constructed based on the data provided'
            );
        }
        // Share this validator's context so the rule can resolve sibling items.
        $context = $this->context ? $this->context : array();
        $rule->setContext($context);
        return $rule;
    }
    /**
     * Validates the value; it is required only if the companion item passes
     * the configured rule.
     *
     * @param mixed       $value
     * @param string|null $valueIdentifier
     *
     * @return bool
     */
    public function validate($value, $valueIdentifier = null)
    {
        $this->value = $value;
        if (!isset($this->options[self::OPTION_ITEM])) {
            // No companion item configured: nothing to be conditional on.
            $this->success = true;
        } else {
            $itemRule = $this->getItemRule();
            $itemValue = $this->context->getItemValue($this->options[self::OPTION_ITEM]);
            if ($itemRule->validate($itemValue, $this->options[self::OPTION_ITEM])) {
                // BUG FIX: this previously used `||`, so ANY non-null value —
                // including the empty string — satisfied "required". A required
                // value must be both non-null AND non-blank; `&&` also
                // short-circuits so trim() is never called on null.
                $this->success = ($value !== null && trim($value) !== '');
            } else {
                // Companion rule failed, so this field is not required.
                $this->success = true;
            }
        }
        return $this->success;
    }
}
| frisbeesport/frisbeesport.nl | vendor/siriusphp/validation/src/Rule/RequiredWhen.php | PHP | mit | 2,149 |
'use strict';
const existsSync = require('exists-sync');
const path = require('path');
const LiveReloadServer = require('./server/livereload-server');
const ExpressServer = require('./server/express-server');
const RSVP = require('rsvp');
const Task = require('../models/task');
const Watcher = require('../models/watcher');
const ServerWatcher = require('../models/server-watcher');
const Builder = require('../models/builder');
const Promise = RSVP.Promise;
// Task backing `ember serve`: wires a Builder into a Watcher, optionally a
// ServerWatcher for ./server, then boots the express + livereload servers and
// keeps the process alive until interrupted.
class ServeTask extends Task {
constructor(options) {
super(options);
this._runDeferred = null;
this._builder = null;
}
// Runs the serve pipeline. Options prefixed with `_` (e.g. options._builder,
// options._watcher) are injection points — presumably for tests — that
// replace the corresponding component; TODO confirm against callers.
run(options) {
let builder = this._builder = options._builder || new Builder({
ui: this.ui,
outputPath: options.outputPath,
project: this.project,
environment: options.environment,
});
let watcher = options._watcher || new Watcher({
ui: this.ui,
builder,
analytics: this.analytics,
options,
serving: true,
});
let serverRoot = './server';
let serverWatcher = null;
// Only watch the custom server directory when the app actually has one.
if (existsSync(serverRoot)) {
serverWatcher = new ServerWatcher({
ui: this.ui,
analytics: this.analytics,
watchedDir: path.resolve(serverRoot),
options,
});
}
let expressServer = options._expressServer || new ExpressServer({
ui: this.ui,
project: this.project,
watcher,
serverRoot,
serverWatcher,
});
let liveReloadServer = options._liveReloadServer || new LiveReloadServer({
ui: this.ui,
analytics: this.analytics,
project: this.project,
watcher,
expressServer,
});
/* hang until the user exits */
this._runDeferred = RSVP.defer();
// Resolves only when onInterrupt() resolves _runDeferred, so the task
// (and the process) stays alive while the servers run.
return Promise.all([
liveReloadServer.start(options),
expressServer.start(options),
]).then(() => this._runDeferred.promise);
}
/**
 * Exit silently
 *
 * @private
 * @method onInterrupt
 */
onInterrupt() {
return this._builder.cleanup().then(() => this._runDeferred.resolve());
}
}
module.exports = ServeTask;
| gabz75/ember-cli-deploy-redis-publish | node_modules/ember-cli/lib/tasks/serve.js | JavaScript | mit | 2,076 |
<?php
/**
 * @package     Joomla.Platform
 * @subpackage  String
 *
 * @copyright   Copyright (C) 2005 - 2016 Open Source Matters, Inc. All rights reserved.
 * @license     GNU General Public License version 2 or later; see LICENSE
 */
defined('JPATH_PLATFORM') or die;
use Joomla\Uri\UriHelper;
JLoader::register('idna_convert', JPATH_LIBRARIES . '/idna_convert/idna_convert.class.php');
/**
 * Joomla Platform String Punycode Class
 *
 * Class for handling UTF-8 URLs
 * Wraps the Punycode library
 * All functions assume the validity of utf-8 URLs.
 *
 * @since  3.1.2
 */
abstract class JStringPunycode
{
	/**
	 * Transforms a UTF-8 string to a Punycode string
	 *
	 * @param   string  $utfString  The UTF-8 string to transform
	 *
	 * @return  string  The punycode string
	 *
	 * @since   3.1.2
	 */
	public static function toPunycode($utfString)
	{
		$idn = new idna_convert;

		return $idn->encode($utfString);
	}

	/**
	 * Transforms a Punycode string to a UTF-8 string
	 *
	 * @param   string  $punycodeString  The Punycode string to transform
	 *
	 * @return  string  The UF-8 URL
	 *
	 * @since   3.1.2
	 */
	public static function fromPunycode($punycodeString)
	{
		$idn = new idna_convert;

		return $idn->decode($punycodeString);
	}

	/**
	 * Converts each dot-separated label of a domain using the given converter
	 * method (toPunycode or fromPunycode). Shared by the URL and email
	 * helpers below, which previously each duplicated this loop.
	 *
	 * @param   string  $domain  The domain whose labels should be converted
	 * @param   string  $method  Name of the conversion method on this class
	 *
	 * @return  string  The domain with every label converted
	 *
	 * @since   3.1.2
	 */
	protected static function convertDomain($domain, $method)
	{
		$converted = array();

		foreach (explode('.', $domain) as $label)
		{
			// call_user_func keeps late static binding intact for subclasses.
			$converted[] = call_user_func(array(get_called_class(), $method), $label);
		}

		return implode('.', $converted);
	}

	/**
	 * Reassembles a URL from parse_url() parts and a replacement host.
	 * Extracted from urlToPunycode/urlToUTF8, which previously duplicated
	 * this block verbatim.
	 *
	 * @param   array   $parsed   Parts as returned by UriHelper::parse_url
	 * @param   string  $newhost  The converted host to splice in
	 *
	 * @return  string  The rebuilt URL
	 *
	 * @since   3.1.2
	 */
	protected static function buildUri($parsed, $newhost)
	{
		$newuri = '';

		if (!empty($parsed['scheme']))
		{
			// Assume :// is required although it is not always.
			$newuri .= $parsed['scheme'] . '://';
		}

		if (!empty($newhost))
		{
			$newuri .= $newhost;
		}

		if (!empty($parsed['port']))
		{
			$newuri .= ':' . $parsed['port'];
		}

		if (!empty($parsed['path']))
		{
			$newuri .= $parsed['path'];
		}

		if (!empty($parsed['query']))
		{
			$newuri .= '?' . $parsed['query'];
		}

		if (!empty($parsed['fragment']))
		{
			$newuri .= '#' . $parsed['fragment'];
		}

		return $newuri;
	}

	/**
	 * Transforms a UTF-8 URL to a Punycode URL
	 *
	 * @param   string  $uri  The UTF-8 URL to transform
	 *
	 * @return  string  The punycode URL
	 *
	 * @since   3.1.2
	 */
	public static function urlToPunycode($uri)
	{
		$parsed = UriHelper::parse_url($uri);

		if (!isset($parsed['host']) || $parsed['host'] == '')
		{
			// If there is no host we do not need to convert it.
			return $uri;
		}

		return static::buildUri($parsed, static::convertDomain($parsed['host'], 'toPunycode'));
	}

	/**
	 * Transforms a Punycode URL to a UTF-8 URL
	 *
	 * @param   string  $uri  The Punycode URL to transform
	 *
	 * @return  string  The UTF-8 URL
	 *
	 * @since   3.1.2
	 */
	public static function urlToUTF8($uri)
	{
		if (empty($uri))
		{
			// Preserved behavior: an empty input yields null, not ''.
			return;
		}

		$parsed = UriHelper::parse_url($uri);

		if (!isset($parsed['host']) || $parsed['host'] == '')
		{
			// If there is no host we do not need to convert it.
			return $uri;
		}

		// Consistency fix: use static:: (late static binding) like the other
		// methods; this previously called self::fromPunycode.
		return static::buildUri($parsed, static::convertDomain($parsed['host'], 'fromPunycode'));
	}

	/**
	 * Transforms a UTF-8 email to a Punycode email
	 * This assumes a valid email address
	 *
	 * @param   string  $email  The UTF-8 email to transform
	 *
	 * @return  string  The punycode email
	 *
	 * @since   3.1.2
	 */
	public static function emailToPunycode($email)
	{
		$explodedAddress = explode('@', $email);

		// Not addressing UTF-8 user names
		$newEmail = $explodedAddress[0];

		if (!empty($explodedAddress[1]))
		{
			$newEmail .= '@' . static::convertDomain($explodedAddress[1], 'toPunycode');
		}

		return $newEmail;
	}

	/**
	 * Transforms a Punycode email to a UTF-8 email
	 * This assumes a valid email address
	 *
	 * @param   string  $email  The punycode email to transform
	 *
	 * @return  string  The punycode email
	 *
	 * @since   3.1.2
	 */
	public static function emailToUTF8($email)
	{
		$explodedAddress = explode('@', $email);

		// Not addressing UTF-8 user names
		$newEmail = $explodedAddress[0];

		if (!empty($explodedAddress[1]))
		{
			$newEmail .= '@' . static::convertDomain($explodedAddress[1], 'fromPunycode');
		}

		return $newEmail;
	}
}
| demis-palma/joomla-cms | libraries/joomla/string/punycode.php | PHP | gpl-2.0 | 5,195 |
<?php
/**
 * File containing the URLAliasCreate parser class.
 *
 * @copyright Copyright (C) eZ Systems AS. All rights reserved.
 * @license For full copyright and license information view LICENSE file distributed with this source code.
 *
 * @version //autogentag//
 */
namespace eZ\Publish\Core\REST\Server\Input\Parser;
use eZ\Publish\Core\REST\Common\Input\BaseParser;
use eZ\Publish\Core\REST\Common\Input\ParsingDispatcher;
use eZ\Publish\Core\REST\Common\Input\ParserTools;
use eZ\Publish\Core\REST\Common\Exceptions;
/**
 * Parser for URLAliasCreate.
 */
class URLAliasCreate extends BaseParser
{
/**
 * Parser tools.
 *
 * @var \eZ\Publish\Core\REST\Common\Input\ParserTools
 */
protected $parserTools;
/**
 * Construct.
 *
 * @param \eZ\Publish\Core\REST\Common\Input\ParserTools $parserTools
 */
public function __construct(ParserTools $parserTools)
{
$this->parserTools = $parserTools;
}
/**
 * Parse input structure.
 *
 * Validates the payload shape and normalizes the boolean flags; the checks
 * run in a fixed order and their exception messages are exact — both are
 * relied upon by callers/tests, so do not reorder or reword them.
 *
 * @param array $data
 * @param \eZ\Publish\Core\REST\Common\Input\ParsingDispatcher $parsingDispatcher
 *
 * @throws \eZ\Publish\Core\REST\Common\Exceptions\Parser on any missing or malformed key
 *
 * @return array The validated input, with 'alwaysAvailable' and 'forward'
 *               coerced to booleans (defaulting to false when absent)
 */
public function parse(array $data, ParsingDispatcher $parsingDispatcher)
{
if (!array_key_exists('_type', $data)) {
throw new Exceptions\Parser("Missing '_type' value for URLAliasCreate.");
}
// LOCATION aliases point at a location reference; every other type points
// at a free-form resource string.
if ($data['_type'] == 'LOCATION') {
if (!array_key_exists('location', $data)) {
throw new Exceptions\Parser("Missing 'location' value for URLAliasCreate.");
}
if (!is_array($data['location']) || !array_key_exists('_href', $data['location'])) {
throw new Exceptions\Parser("Missing 'location' > '_href' attribute for URLAliasCreate.");
}
} else {
if (!array_key_exists('resource', $data)) {
throw new Exceptions\Parser("Missing 'resource' value for URLAliasCreate.");
}
}
if (!array_key_exists('path', $data)) {
throw new Exceptions\Parser("Missing 'path' value for URLAliasCreate.");
}
if (!array_key_exists('languageCode', $data)) {
throw new Exceptions\Parser("Missing 'languageCode' value for URLAliasCreate.");
}
// Optional flags: parse "true"/"false" strings into booleans, default false.
if (array_key_exists('alwaysAvailable', $data)) {
$data['alwaysAvailable'] = $this->parserTools->parseBooleanValue($data['alwaysAvailable']);
} else {
$data['alwaysAvailable'] = false;
}
if (array_key_exists('forward', $data)) {
$data['forward'] = $this->parserTools->parseBooleanValue($data['forward']);
} else {
$data['forward'] = false;
}
return $data;
}
}
| flovntp/BikeTutorialWebsite | vendor/ezsystems/ezpublish-kernel/eZ/Publish/Core/REST/Server/Input/Parser/URLAliasCreate.php | PHP | gpl-2.0 | 2,782 |
__version_info__ = (0, 6, 1)
__version__ = '.'.join(map(str, __version_info__))
| NeuPhysics/NumSolTUn | docs/_themes/alabaster/_version.py | Python | gpl-2.0 | 80 |
<?php
/**
 * @package Joomla.Site
 * @subpackage Layout
 *
 * @copyright (C) 2016 Open Source Matters, Inc. <https://www.joomla.org>
 * @license GNU General Public License version 2 or later; see LICENSE.txt
 */
defined('_JEXEC') or die;
use Joomla\CMS\Form\Form;
extract($displayData);
/**
 * Layout variables
 * -----------------
 * @var Form $tmpl The Empty form for template
 * @var array $forms Array of JForm instances for render the rows
 * @var bool $multiple The multiple state for the form field
 * @var int $min Count of minimum repeating in multiple mode
 * @var int $max Count of maximum repeating in multiple mode
 * @var string $name Name of the input field.
 * @var string $fieldname The field name
 * @var string $fieldId The field ID
 * @var string $control The forms control
 * @var string $label The field label
 * @var string $description The field description
 * @var array $buttons Array of the buttons that will be rendered
 * @var bool $groupByFieldset Whether group the subform fields by it`s fieldset
 */
// Non-multiple layout: only the first (single) subform row is rendered.
$form = $forms[0];
?>
<div class="subform-wrapper">
<?php foreach ($form->getGroup('') as $field) : ?>
<?php echo $field->renderField(); ?>
<?php endforeach; ?>
</div>
| brianteeman/joomla-cms | layouts/joomla/form/field/subform/default.php | PHP | gpl-2.0 | 1,390 |
# Adds a per-user preference flag controlling whether posts from
# trust-level-0 users are included in digest emails; defaults to false
# (excluded) for existing and new rows.
class AddIncludeTl0InDigestsToUserOptions < ActiveRecord::Migration[4.2]
def change
add_column :user_options, :include_tl0_in_digests, :boolean, default: false
end
end
| gfvcastro/discourse | db/migrate/20160317201955_add_include_tl0_in_digests_to_user_options.rb | Ruby | gpl-2.0 | 176 |
<?php
/**
 * @file
 * Contains \Drupal\Console\Command\Generate\PluginTypeYamlCommand.
 */
namespace Drupal\Console\Command\Generate;
use Drupal\Console\Generator\PluginTypeYamlGenerator;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
use Drupal\Console\Command\ServicesTrait;
use Drupal\Console\Command\ModuleTrait;
use Drupal\Console\Command\FormTrait;
use Drupal\Console\Command\ConfirmationTrait;
use Drupal\Console\Command\GeneratorCommand;
use Drupal\Console\Style\DrupalStyle;
/**
 * Console command that scaffolds a YAML-based plugin type
 * (generate:plugin:type:yaml) inside an existing module.
 */
class PluginTypeYamlCommand extends GeneratorCommand
{
use ServicesTrait;
use ModuleTrait;
use FormTrait;
use ConfirmationTrait;
// Declares the command name, help text and its four options; option labels
// are resolved through the translation catalogue.
protected function configure()
{
$this
->setName('generate:plugin:type:yaml')
->setDescription($this->trans('commands.generate.plugin.type.yaml.description'))
->setHelp($this->trans('commands.generate.plugin.type.yaml.help'))
->addOption('module', '', InputOption::VALUE_REQUIRED, $this->trans('commands.common.options.module'))
->addOption(
'class',
'',
InputOption::VALUE_OPTIONAL,
$this->trans('commands.generate.plugin.type.yaml.options.class')
)
->addOption(
'plugin-name',
'',
InputOption::VALUE_OPTIONAL,
$this->trans('commands.generate.plugin.type.yaml.options.plugin-name')
)
->addOption(
'plugin-file-name',
'',
InputOption::VALUE_OPTIONAL,
$this->trans('commands.generate.plugin.type.yaml.options.plugin-file-name')
);
}
/**
 * {@inheritdoc}
 *
 * Reads the (now fully populated) options and delegates the actual file
 * generation to PluginTypeYamlGenerator.
 */
protected function execute(InputInterface $input, OutputInterface $output)
{
$module = $input->getOption('module');
$class_name = $input->getOption('class');
$plugin_name = $input->getOption('plugin-name');
$plugin_file_name = $input->getOption('plugin-file-name');
$generator = $this->getGenerator();
$generator->generate($module, $class_name, $plugin_name, $plugin_file_name);
}
// Interactively asks for every option the user did not pass on the command
// line; later defaults are derived from earlier answers (class -> plugin
// name -> file name).
protected function interact(InputInterface $input, OutputInterface $output)
{
$io = new DrupalStyle($input, $output);
// --module option
$module = $input->getOption('module');
if (!$module) {
// @see Drupal\Console\Command\ModuleTrait::moduleQuestion
$module = $this->moduleQuestion($output);
$input->setOption('module', $module);
}
// --class option
$class_name = $input->getOption('class');
if (!$class_name) {
$class_name = $io->ask(
$this->trans('commands.generate.plugin.type.yaml.options.class'),
'ExamplePlugin'
);
$input->setOption('class', $class_name);
}
// --plugin-name option; defaults to the snake_case form of the class name.
$plugin_name = $input->getOption('plugin-name');
if (!$plugin_name) {
$plugin_name = $io->ask(
$this->trans('commands.generate.plugin.type.yaml.options.plugin-name'),
$this->getStringHelper()->camelCaseToUnderscore($class_name)
);
$input->setOption('plugin-name', $plugin_name);
}
// --plugin-file-name option; defaults to the plugin name with _ and -
// replaced by dots (YAML discovery file naming).
$plugin_file_name = $input->getOption('plugin-file-name');
if (!$plugin_file_name) {
$plugin_file_name = $io->ask(
$this->trans('commands.generate.plugin.type.yaml.options.plugin-file-name'),
strtr($plugin_name, '_-', '..')
);
$input->setOption('plugin-file-name', $plugin_file_name);
}
}
// Factory hook used by GeneratorCommand::getGenerator().
protected function createGenerator()
{
return new PluginTypeYamlGenerator();
}
}
| sgrichards/BrightonDrupal | vendor/drupal/console/src/Command/Generate/PluginTypeYamlCommand.php | PHP | gpl-2.0 | 3,969 |
<?php
use Action_Scheduler\WP_CLI\Migration_Command;
use Action_Scheduler\Migration\Controller;
/**
 * Class ActionScheduler
 *
 * Static facade/bootstrapper for the Action Scheduler library: exposes the
 * shared factory/store/logger/runner singletons, implements the class
 * autoloader, and wires everything up on plugin init.
 *
 * @codeCoverageIgnore
 */
abstract class ActionScheduler {
private static $plugin_file = '';
/** @var ActionScheduler_ActionFactory */
private static $factory = NULL;
/** @var bool */
private static $data_store_initialized = false;
// Lazily creates and caches the shared action factory.
public static function factory() {
if ( !isset(self::$factory) ) {
self::$factory = new ActionScheduler_ActionFactory();
}
return self::$factory;
}
public static function store() {
return ActionScheduler_Store::instance();
}
public static function lock() {
return ActionScheduler_Lock::instance();
}
public static function logger() {
return ActionScheduler_Logger::instance();
}
public static function runner() {
return ActionScheduler_QueueRunner::instance();
}
public static function admin_view() {
return ActionScheduler_AdminView::instance();
}
/**
 * Get the absolute system path to the plugin directory, or a file therein
 * @static
 * @param string $path
 * @return string
 */
public static function plugin_path( $path ) {
$base = dirname(self::$plugin_file);
if ( $path ) {
return trailingslashit($base).$path;
} else {
return untrailingslashit($base);
}
}
/**
 * Get the absolute URL to the plugin directory, or a file therein
 * @static
 * @param string $path
 * @return string
 */
public static function plugin_url( $path ) {
return plugins_url($path, self::$plugin_file);
}
// SPL autoloader: maps a class name to its file by inspecting the class
// name's namespace, suffix (Deprecated/Schedule/Action/Schema) and known
// prefixes; silently returns for classes it does not own.
public static function autoload( $class ) {
$d = DIRECTORY_SEPARATOR;
$classes_dir = self::plugin_path( 'classes' . $d );
$separator = strrpos( $class, '\\' );
if ( false !== $separator ) {
// Namespaced classes are only handled inside the Action_Scheduler namespace.
if ( 0 !== strpos( $class, 'Action_Scheduler' ) ) {
return;
}
$class = substr( $class, $separator + 1 );
}
if ( 'Deprecated' === substr( $class, -10 ) ) {
$dir = self::plugin_path( 'deprecated' . $d );
} elseif ( self::is_class_abstract( $class ) ) {
$dir = $classes_dir . 'abstracts' . $d;
} elseif ( self::is_class_migration( $class ) ) {
$dir = $classes_dir . 'migration' . $d;
} elseif ( 'Schedule' === substr( $class, -8 ) ) {
$dir = $classes_dir . 'schedules' . $d;
} elseif ( 'Action' === substr( $class, -6 ) ) {
$dir = $classes_dir . 'actions' . $d;
} elseif ( 'Schema' === substr( $class, -6 ) ) {
$dir = $classes_dir . 'schema' . $d;
} elseif ( strpos( $class, 'ActionScheduler' ) === 0 ) {
$segments = explode( '_', $class );
$type = isset( $segments[ 1 ] ) ? $segments[ 1 ] : '';
switch ( $type ) {
case 'WPCLI':
$dir = $classes_dir . 'WP_CLI' . $d;
break;
case 'DBLogger':
case 'DBStore':
case 'HybridStore':
case 'wpPostStore':
case 'wpCommentLogger':
$dir = $classes_dir . 'data-stores' . $d;
break;
default:
$dir = $classes_dir;
break;
}
} elseif ( self::is_class_cli( $class ) ) {
$dir = $classes_dir . 'WP_CLI' . $d;
} elseif ( strpos( $class, 'CronExpression' ) === 0 ) {
$dir = self::plugin_path( 'lib' . $d . 'cron-expression' . $d );
} elseif ( strpos( $class, 'WP_Async_Request' ) === 0 ) {
$dir = self::plugin_path( 'lib' . $d );
} else {
return;
}
if ( file_exists( "{$dir}{$class}.php" ) ) {
include( "{$dir}{$class}.php" );
return;
}
}
/**
 * Initialize the plugin
 *
 * Registers the autoloader, boots the data controller, and initializes the
 * store/logger/runner/admin view — immediately if 'init' has already fired,
 * otherwise on the 'init' hook (admin view at priority 0 so it runs before
 * the store). Also registers WP-CLI commands and post-migration cleanup.
 *
 * @static
 * @param string $plugin_file
 */
public static function init( $plugin_file ) {
self::$plugin_file = $plugin_file;
spl_autoload_register( array( __CLASS__, 'autoload' ) );
/**
 * Fires in the early stages of Action Scheduler init hook.
 */
do_action( 'action_scheduler_pre_init' );
require_once( self::plugin_path( 'functions.php' ) );
ActionScheduler_DataController::init();
$store = self::store();
$logger = self::logger();
$runner = self::runner();
$admin_view = self::admin_view();
// Ensure initialization on plugin activation.
if ( ! did_action( 'init' ) ) {
add_action( 'init', array( $admin_view, 'init' ), 0, 0 ); // run before $store::init()
add_action( 'init', array( $store, 'init' ), 1, 0 );
add_action( 'init', array( $logger, 'init' ), 1, 0 );
add_action( 'init', array( $runner, 'init' ), 1, 0 );
} else {
$admin_view->init();
$store->init();
$logger->init();
$runner->init();
}
if ( apply_filters( 'action_scheduler_load_deprecated_functions', true ) ) {
require_once( self::plugin_path( 'deprecated/functions.php' ) );
}
if ( defined( 'WP_CLI' ) && WP_CLI ) {
WP_CLI::add_command( 'action-scheduler', 'ActionScheduler_WPCLI_Scheduler_command' );
// The migration command is only needed while data still lives in the old store.
if ( ! ActionScheduler_DataController::is_migration_complete() && Controller::instance()->allow_migration() ) {
$command = new Migration_Command();
$command->register();
}
}
self::$data_store_initialized = true;
/**
 * Handle WP comment cleanup after migration.
 */
if ( is_a( $logger, 'ActionScheduler_DBLogger' ) && ActionScheduler_DataController::is_migration_complete() && ActionScheduler_WPCommentCleaner::has_logs() ) {
ActionScheduler_WPCommentCleaner::init();
}
add_action( 'action_scheduler/migration_complete', 'ActionScheduler_WPCommentCleaner::maybe_schedule_cleanup' );
}
/**
 * Check whether the AS data store has been initialized.
 *
 * Logs a warning when called too early with a function name for context.
 *
 * @param string $function_name The name of the function being called. Optional. Default `null`.
 * @return bool
 */
public static function is_initialized( $function_name = null ) {
if ( ! self::$data_store_initialized && ! empty( $function_name ) ) {
$message = sprintf( __( '%s() was called before the Action Scheduler data store was initialized', 'action-scheduler' ), esc_attr( $function_name ) );
// NOTE(review): error_log()'s second argument is a message *type*
// (0/1/3/4), not an error level; passing E_WARNING (2) looks
// unintended — confirm, or drop the second argument.
error_log( $message, E_WARNING );
}
return self::$data_store_initialized;
}
/**
 * Determine if the class is one of our abstract classes.
 *
 * @since 3.0.0
 *
 * @param string $class The class name.
 *
 * @return bool
 */
protected static function is_class_abstract( $class ) {
static $abstracts = array(
'ActionScheduler' => true,
'ActionScheduler_Abstract_ListTable' => true,
'ActionScheduler_Abstract_QueueRunner' => true,
'ActionScheduler_Abstract_Schedule' => true,
'ActionScheduler_Abstract_RecurringSchedule' => true,
'ActionScheduler_Lock' => true,
'ActionScheduler_Logger' => true,
'ActionScheduler_Abstract_Schema' => true,
'ActionScheduler_Store' => true,
'ActionScheduler_TimezoneHelper' => true,
);
return isset( $abstracts[ $class ] ) && $abstracts[ $class ];
}
/**
 * Determine if the class is one of our migration classes.
 *
 * Matches on the second underscore-delimited segment of the class name.
 *
 * @since 3.0.0
 *
 * @param string $class The class name.
 *
 * @return bool
 */
protected static function is_class_migration( $class ) {
static $migration_segments = array(
'ActionMigrator' => true,
'BatchFetcher' => true,
'DBStoreMigrator' => true,
'DryRun' => true,
'LogMigrator' => true,
'Config' => true,
'Controller' => true,
'Runner' => true,
'Scheduler' => true,
);
$segments = explode( '_', $class );
$segment = isset( $segments[ 1 ] ) ? $segments[ 1 ] : $class;
return isset( $migration_segments[ $segment ] ) && $migration_segments[ $segment ];
}
/**
 * Determine if the class is one of our WP CLI classes.
 *
 * @since 3.0.0
 *
 * @param string $class The class name.
 *
 * @return bool
 */
protected static function is_class_cli( $class ) {
static $cli_segments = array(
'QueueRunner' => true,
'Command' => true,
'ProgressBar' => true,
);
$segments = explode( '_', $class );
$segment = isset( $segments[ 1 ] ) ? $segments[ 1 ] : $class;
return isset( $cli_segments[ $segment ] ) && $cli_segments[ $segment ];
}
// Singleton guards: the facade must never be cloned or unserialized.
final public function __clone() {
trigger_error("Singleton. No cloning allowed!", E_USER_ERROR);
}
final public function __wakeup() {
trigger_error("Singleton. No serialization allowed!", E_USER_ERROR);
}
final private function __construct() {}
/** Deprecated **/
public static function get_datetime_object( $when = null, $timezone = 'UTC' ) {
_deprecated_function( __METHOD__, '2.0', 'wcs_add_months()' );
return as_get_datetime_object( $when, $timezone );
}
/**
 * Issue deprecated warning if an Action Scheduler function is called in the shutdown hook.
 *
 * @param string $function_name The name of the function being called.
 * @deprecated 3.1.6.
 */
public static function check_shutdown_hook( $function_name ) {
_deprecated_function( __FUNCTION__, '3.1.6' );
}
}
| rasken2003/fuga-it-business | wp-content/plugins/wp-mail-smtp/vendor/woocommerce/action-scheduler/classes/abstracts/ActionScheduler.php | PHP | gpl-2.0 | 8,752 |
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* This file defines the quiz overview report class.
*
* @package quiz_overview
* @copyright 1999 onwards Martin Dougiamas and others {@link http://moodle.com}
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
defined('MOODLE_INTERNAL') || die();
require_once($CFG->dirroot . '/mod/quiz/report/attemptsreport.php');
require_once($CFG->dirroot . '/mod/quiz/report/overview/overview_options.php');
require_once($CFG->dirroot . '/mod/quiz/report/overview/overview_form.php');
require_once($CFG->dirroot . '/mod/quiz/report/overview/overview_table.php');
/**
* Quiz report subclass for the overview (grades) report.
*
* @copyright 1999 onwards Martin Dougiamas and others {@link http://moodle.com}
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
class quiz_overview_report extends quiz_attempts_report {
/**
* @var bool whether there are actually students to show, given the options.
*/
protected $hasgroupstudents;
public function display($quiz, $cm, $course) {
global $DB, $OUTPUT, $PAGE;
list($currentgroup, $studentsjoins, $groupstudentsjoins, $allowedjoins) = $this->init(
'overview', 'quiz_overview_settings_form', $quiz, $cm, $course);
$options = new quiz_overview_options('overview', $quiz, $cm, $course);
if ($fromform = $this->form->get_data()) {
$options->process_settings_from_form($fromform);
} else {
$options->process_settings_from_params();
}
$this->form->set_data($options->get_initial_form_data());
// Load the required questions.
$questions = quiz_report_get_significant_questions($quiz);
// Prepare for downloading, if applicable.
$courseshortname = format_string($course->shortname, true,
array('context' => context_course::instance($course->id)));
$table = new quiz_overview_table($quiz, $this->context, $this->qmsubselect,
$options, $groupstudentsjoins, $studentsjoins, $questions, $options->get_url());
$filename = quiz_report_download_filename(get_string('overviewfilename', 'quiz_overview'),
$courseshortname, $quiz->name);
$table->is_downloading($options->download, $filename,
$courseshortname . ' ' . format_string($quiz->name, true));
if ($table->is_downloading()) {
raise_memory_limit(MEMORY_EXTRA);
}
$this->hasgroupstudents = false;
if (!empty($groupstudentsjoins->joins)) {
$sql = "SELECT DISTINCT u.id
FROM {user} u
$groupstudentsjoins->joins
WHERE $groupstudentsjoins->wheres";
$this->hasgroupstudents = $DB->record_exists_sql($sql, $groupstudentsjoins->params);
}
$hasstudents = false;
if (!empty($studentsjoins->joins)) {
$sql = "SELECT DISTINCT u.id
FROM {user} u
$studentsjoins->joins
WHERE $studentsjoins->wheres";
$hasstudents = $DB->record_exists_sql($sql, $studentsjoins->params);
}
if ($options->attempts == self::ALL_WITH) {
// This option is only available to users who can access all groups in
// groups mode, so setting allowed to empty (which means all quiz attempts
// are accessible, is not a security porblem.
$allowedjoins = new \core\dml\sql_join();
}
$this->course = $course; // Hack to make this available in process_actions.
$this->process_actions($quiz, $cm, $currentgroup, $groupstudentsjoins, $allowedjoins, $options->get_url());
$hasquestions = quiz_has_questions($quiz->id);
// Start output.
if (!$table->is_downloading()) {
// Only print headers if not asked to download data.
$this->print_standard_header_and_messages($cm, $course, $quiz,
$options, $currentgroup, $hasquestions, $hasstudents);
// Print the display options.
$this->form->display();
}
$hasstudents = $hasstudents && (!$currentgroup || $this->hasgroupstudents);
if ($hasquestions && ($hasstudents || $options->attempts == self::ALL_WITH)) {
// Construct the SQL.
$table->setup_sql_queries($allowedjoins);
if (!$table->is_downloading()) {
// Output the regrade buttons.
if (has_capability('mod/quiz:regrade', $this->context)) {
$regradesneeded = $this->count_question_attempts_needing_regrade(
$quiz, $groupstudentsjoins);
if ($currentgroup) {
$a= new stdClass();
$a->groupname = groups_get_group_name($currentgroup);
$a->coursestudents = get_string('participants');
$a->countregradeneeded = $regradesneeded;
$regradealldrydolabel =
get_string('regradealldrydogroup', 'quiz_overview', $a);
$regradealldrylabel =
get_string('regradealldrygroup', 'quiz_overview', $a);
$regradealllabel =
get_string('regradeallgroup', 'quiz_overview', $a);
} else {
$regradealldrydolabel =
get_string('regradealldrydo', 'quiz_overview', $regradesneeded);
$regradealldrylabel =
get_string('regradealldry', 'quiz_overview');
$regradealllabel =
get_string('regradeall', 'quiz_overview');
}
$displayurl = new moodle_url($options->get_url(), array('sesskey' => sesskey()));
echo '<div class="mdl-align">';
echo '<form action="'.$displayurl->out_omit_querystring().'">';
echo '<div>';
echo html_writer::input_hidden_params($displayurl);
echo '<input type="submit" class="btn btn-secondary" name="regradeall" value="'.$regradealllabel.'"/>';
echo '<input type="submit" class="btn btn-secondary ml-1" name="regradealldry" value="' .
$regradealldrylabel . '"/>';
if ($regradesneeded) {
echo '<input type="submit" class="btn btn-secondary ml-1" name="regradealldrydo" value="' .
$regradealldrydolabel . '"/>';
}
echo '</div>';
echo '</form>';
echo '</div>';
}
// Print information on the grading method.
if ($strattempthighlight = quiz_report_highlighting_grading_method(
$quiz, $this->qmsubselect, $options->onlygraded)) {
echo '<div class="quizattemptcounts">' . $strattempthighlight . '</div>';
}
}
// Define table columns.
$columns = array();
$headers = array();
if (!$table->is_downloading() && $options->checkboxcolumn) {
$columnname = 'checkbox';
$columns[] = $columnname;
$headers[] = $table->checkbox_col_header($columnname);
}
$this->add_user_columns($table, $columns, $headers);
$this->add_state_column($columns, $headers);
$this->add_time_columns($columns, $headers);
$this->add_grade_columns($quiz, $options->usercanseegrades, $columns, $headers, false);
if (!$table->is_downloading() && has_capability('mod/quiz:regrade', $this->context) &&
$this->has_regraded_questions($table->sql->from, $table->sql->where, $table->sql->params)) {
$columns[] = 'regraded';
$headers[] = get_string('regrade', 'quiz_overview');
}
if ($options->slotmarks) {
foreach ($questions as $slot => $question) {
// Ignore questions of zero length.
$columns[] = 'qsgrade' . $slot;
$header = get_string('qbrief', 'quiz', $question->number);
if (!$table->is_downloading()) {
$header .= '<br />';
} else {
$header .= ' ';
}
$header .= '/' . quiz_rescale_grade($question->maxmark, $quiz, 'question');
$headers[] = $header;
}
}
$this->set_up_table_columns($table, $columns, $headers, $this->get_base_url(), $options, false);
$table->set_attribute('class', 'generaltable generalbox grades');
$table->out($options->pagesize, true);
}
if (!$table->is_downloading() && $options->usercanseegrades) {
$output = $PAGE->get_renderer('mod_quiz');
list($bands, $bandwidth) = self::get_bands_count_and_width($quiz);
$labels = self::get_bands_labels($bands, $bandwidth, $quiz);
if ($currentgroup && $this->hasgroupstudents) {
$sql = "SELECT qg.id
FROM {quiz_grades} qg
JOIN {user} u on u.id = qg.userid
{$groupstudentsjoins->joins}
WHERE qg.quiz = $quiz->id AND {$groupstudentsjoins->wheres}";
if ($DB->record_exists_sql($sql, $groupstudentsjoins->params)) {
$data = quiz_report_grade_bands($bandwidth, $bands, $quiz->id, $groupstudentsjoins);
$chart = self::get_chart($labels, $data);
$graphname = get_string('overviewreportgraphgroup', 'quiz_overview', groups_get_group_name($currentgroup));
echo $output->chart($chart, $graphname);
}
}
if ($DB->record_exists('quiz_grades', array('quiz'=> $quiz->id))) {
$data = quiz_report_grade_bands($bandwidth, $bands, $quiz->id, new \core\dml\sql_join());
$chart = self::get_chart($labels, $data);
$graphname = get_string('overviewreportgraph', 'quiz_overview');
echo $output->chart($chart, $graphname);
}
}
return true;
}
/**
 * Extends the parent function, processing any submitted regrade actions.
 *
 * Four actions are handled here, on top of whatever the parent handles:
 *  - 'regrade'         regrade just the selected attempts (checkbox column);
 *  - 'regradeall'      regrade every attempt for real;
 *  - 'regradealldry'   dry-run regrade of every attempt (nothing is saved);
 *  - 'regradealldrydo' really regrade the attempts that a previous dry run
 *                      flagged as needing it.
 * Every action requires a valid sesskey. Each one ends by calling
 * finish_regrade(), which never returns (it calls die()).
 *
 * @param object $quiz the quiz settings.
 * @param object $cm the course_module record for this quiz.
 * @param int $currentgroup the currently selected group id, or 0 for all.
 * @param \core\dml\sql_join $groupstudentsjoins (joins, wheres, params) restricting to the current group's members.
 * @param \core\dml\sql_join $allowedjoins (joins, wheres, params) restricting to users this user may see.
 * @param moodle_url $redirecturl where to send the user after the regrade.
 */
protected function process_actions($quiz, $cm, $currentgroup, \core\dml\sql_join $groupstudentsjoins,
        \core\dml\sql_join $allowedjoins, $redirecturl) {
    parent::process_actions($quiz, $cm, $currentgroup, $groupstudentsjoins, $allowedjoins, $redirecturl);

    // Regrading selected attempts only makes sense when we can see some
    // students: either no group is selected, or the group has members.
    if (empty($currentgroup) || $this->hasgroupstudents) {
        if (optional_param('regrade', 0, PARAM_BOOL) && confirm_sesskey()) {
            if ($attemptids = optional_param_array('attemptid', array(), PARAM_INT)) {
                $this->start_regrade($quiz, $cm);
                $this->regrade_attempts($quiz, false, $groupstudentsjoins, $attemptids);
                $this->finish_regrade($redirecturl);
            }
        }
    }

    // The whole-quiz regrade buttons. Only one of these can be pressed at
    // a time, hence the else-if chain.
    if (optional_param('regradeall', 0, PARAM_BOOL) && confirm_sesskey()) {
        $this->start_regrade($quiz, $cm);
        $this->regrade_attempts($quiz, false, $groupstudentsjoins);
        $this->finish_regrade($redirecturl);
    } else if (optional_param('regradealldry', 0, PARAM_BOOL) && confirm_sesskey()) {
        $this->start_regrade($quiz, $cm);
        $this->regrade_attempts($quiz, true, $groupstudentsjoins);
        $this->finish_regrade($redirecturl);
    } else if (optional_param('regradealldrydo', 0, PARAM_BOOL) && confirm_sesskey()) {
        $this->start_regrade($quiz, $cm);
        $this->regrade_attempts_needing_it($quiz, $groupstudentsjoins);
        $this->finish_regrade($redirecturl);
    }
}
/**
 * Check necessary capabilities, and start the display of the regrade progress page.
 *
 * @param object $quiz the quiz settings.
 * @param object $cm the cm object for the quiz.
 */
protected function start_regrade($quiz, $cm) {
    // Regrading is destructive, so it has its own capability on top of
    // being able to view the report.
    require_capability('mod/quiz:regrade', $this->context);
    $this->print_header_and_tabs($cm, $this->course, $quiz, $this->mode);
}
/**
 * Finish displaying the regrade progress page.
 *
 * @param moodle_url $nexturl where to send the user after the regrade.
 * @uses exit. This method never returns.
 */
protected function finish_regrade($nexturl) {
    global $OUTPUT;
    \core\notification::success(get_string('regradecomplete', 'quiz_overview'));
    echo $OUTPUT->continue_button($nexturl);
    echo $OUTPUT->footer();
    // Deliberately halt here: the progress page is complete and we must not
    // fall back into normal report output.
    die();
}
/**
 * Unlock the session and allow the regrading process to run in the background.
 *
 * Without releasing the session lock, the user's other requests would block
 * for as long as the (potentially slow) regrade runs.
 */
protected function unlock_session() {
    \core\session\manager::write_close();
    // Keep regrading even if the user navigates away or closes the page.
    ignore_user_abort(true);
}
/**
 * Regrade a particular quiz attempt. Either for real ($dryrun = false), or
 * as a pretend regrade to see which fractions would change. The outcome is
 * stored in the quiz_overview_regrades table.
 *
 * Note, $attempt is not updated in the database here. The caller needs to
 * recompute attempt sumgrades and quiz grades afterwards (see
 * update_overall_grades()).
 *
 * @param object $attempt the quiz attempt to regrade.
 * @param bool $dryrun if true, do a pretend regrade, otherwise do it for real.
 * @param array $slots if null, regrade all questions, otherwise, just regrade
 *      the questions with those slots.
 */
protected function regrade_attempt($attempt, $dryrun = false, $slots = null) {
    global $DB;
    // Need more time for a quiz with many questions.
    core_php_time_limit::raise(300);

    // All the regrade records for one attempt are written atomically.
    $transaction = $DB->start_delegated_transaction();

    $quba = question_engine::load_questions_usage_by_activity($attempt->uniqueid);

    if (is_null($slots)) {
        $slots = $quba->get_slots();
    }

    // Finished attempts get their grading finalised as part of the regrade.
    $finished = $attempt->state == quiz_attempt::FINISHED;
    foreach ($slots as $slot) {
        $qqr = new stdClass();
        $qqr->oldfraction = $quba->get_question_fraction($slot);

        $quba->regrade_question($slot, $finished);

        $qqr->newfraction = $quba->get_question_fraction($slot);

        // Only record the slot if the fraction actually changed, allowing
        // for floating-point noise.
        if (abs($qqr->oldfraction - $qqr->newfraction) > 1e-7) {
            $qqr->questionusageid = $quba->get_id();
            $qqr->slot = $slot;
            // A dry run computes the change but does not save it, so the
            // record is marked as not-yet-regraded in that case.
            $qqr->regraded = empty($dryrun);
            $qqr->timemodified = time();
            $DB->insert_record('quiz_overview_regrades', $qqr, false);
        }
    }

    if (!$dryrun) {
        question_engine::save_questions_usage_by_activity($quba);
    }

    $transaction->allow_commit();

    // Really, PHP should not need this hint, but without this, we just run out of memory.
    $quba = null;
    $transaction = null;
    gc_collect_cycles();
}
/**
 * Regrade attempts for this quiz. Exactly which attempts are regraded is
 * controlled by the parameters.
 *
 * @param object $quiz the quiz settings.
 * @param bool $dryrun if true, do a pretend regrade, otherwise do it for real.
 * @param \core\dml\sql_join|null $groupstudentsjoins empty for all attempts, otherwise
 *      only regrade attempts belonging to these users.
 * @param array $attemptids blank for all attempts, otherwise only regrade
 *      attempts whose id is in this list.
 */
protected function regrade_attempts($quiz, $dryrun = false,
        \core\dml\sql_join $groupstudentsjoins = null, $attemptids = array()) {
    global $DB;
    $this->unlock_session();

    $sql = "SELECT quiza.*, " . get_all_user_name_fields(true, 'u') . "
              FROM {quiz_attempts} quiza
              JOIN {user} u ON u.id = quiza.userid";
    // Qualify the columns with the table alias, so the query stays
    // unambiguous however many tables end up joined in.
    $where = "quiza.quiz = :qid AND quiza.preview = 0";
    $params = array('qid' => $quiz->id);

    // Restrict to the current group's students, if a group is in effect.
    // Guard against a null $groupstudentsjoins (its default) before
    // touching its properties.
    if ($this->hasgroupstudents && $groupstudentsjoins !== null && !empty($groupstudentsjoins->joins)) {
        $sql .= "\n{$groupstudentsjoins->joins}";
        $where .= " AND {$groupstudentsjoins->wheres}";
        $params += $groupstudentsjoins->params;
    }

    // Restrict to specific attempts, if a selection was made.
    if ($attemptids) {
        list($attemptidcondition, $attemptidparams) = $DB->get_in_or_equal($attemptids, SQL_PARAMS_NAMED);
        $where .= " AND quiza.id $attemptidcondition";
        $params += $attemptidparams;
    }

    $sql .= "\nWHERE {$where}";
    $attempts = $DB->get_records_sql($sql, $params);
    if (!$attempts) {
        // Nothing matched, so there is nothing to regrade.
        return;
    }

    $this->regrade_batch_of_attempts($quiz, $attempts, $dryrun, $groupstudentsjoins);
}
/**
 * Regrade those questions in those attempts that are marked as needing regrading
 * in the quiz_overview_regrades table.
 *
 * This runs in two stages: first collect, per question-usage, the list of
 * slots that a previous dry run flagged (regraded = 0); then fetch the
 * matching attempts and regrade only those slots for real.
 *
 * @param object $quiz the quiz settings.
 * @param \core\dml\sql_join $groupstudentsjoins empty for all attempts, otherwise regrade attempts
 *      for these users.
 */
protected function regrade_attempts_needing_it($quiz, \core\dml\sql_join $groupstudentsjoins) {
    global $DB;
    $this->unlock_session();

    $join = '{quiz_overview_regrades} qqr ON qqr.questionusageid = quiza.uniqueid';
    $where = "quiza.quiz = :qid AND quiza.preview = 0 AND qqr.regraded = 0";
    $params = array('qid' => $quiz->id);

    // Fetch all attempts that need regrading.
    if ($this->hasgroupstudents && !empty($groupstudentsjoins->joins)) {
        $join .= "\nJOIN {user} u ON u.id = quiza.userid
{$groupstudentsjoins->joins}";
        $where .= " AND {$groupstudentsjoins->wheres}";
        $params += $groupstudentsjoins->params;
    }

    // Stage 1: map each question usage id to the slots that need regrading.
    $toregrade = $DB->get_recordset_sql("
SELECT quiza.uniqueid, qqr.slot
FROM {quiz_attempts} quiza
JOIN $join
WHERE $where", $params);

    $attemptquestions = array();
    foreach ($toregrade as $row) {
        $attemptquestions[$row->uniqueid][] = $row->slot;
    }
    $toregrade->close();

    if (!$attemptquestions) {
        // Nothing flagged, so nothing to do.
        return;
    }

    // Stage 2: load the attempts themselves (with user name fields for the
    // progress display) and attach the per-attempt slot lists.
    list($uniqueidcondition, $params) = $DB->get_in_or_equal(array_keys($attemptquestions));
    $attempts = $DB->get_records_sql("
SELECT quiza.*, " . get_all_user_name_fields(true, 'u') . "
FROM {quiz_attempts} quiza
JOIN {user} u ON u.id = quiza.userid
WHERE quiza.uniqueid $uniqueidcondition
", $params);

    foreach ($attempts as $attempt) {
        // regrade_batch_of_attempts() uses this to limit the regrade to
        // just the flagged slots.
        $attempt->regradeonlyslots = $attemptquestions[$attempt->uniqueid];
    }

    $this->regrade_batch_of_attempts($quiz, $attempts, false, $groupstudentsjoins);
}
/**
 * Helper used by {@link regrade_attempts()} and
 * {@link regrade_attempts_needing_it()}.
 *
 * Given an array of attempts, it regrades them all, or does a dry run,
 * showing a progress bar as it goes. Each object in the attempts array
 * must be a row from the quiz_attempts table, with the
 * get_all_user_name_fields from the user table joined in. In addition, if
 * $attempt->regradeonlyslots is set, then only those slots are regraded,
 * otherwise all slots are regraded.
 *
 * @param object $quiz the quiz settings.
 * @param array $attempts of data from the quiz_attempts table, with extra data as above.
 * @param bool $dryrun if true, do a pretend regrade, otherwise do it for real.
 * @param \core\dml\sql_join $groupstudentsjoins empty for all attempts, otherwise regrade attempts
 */
protected function regrade_batch_of_attempts($quiz, array $attempts,
        bool $dryrun, \core\dml\sql_join $groupstudentsjoins) {
    // Throw away any stale regrade records before creating new ones.
    $this->clear_regrade_table($quiz, $groupstudentsjoins);

    $progressbar = new progress_bar('quiz_overview_regrade', 500, true);
    $a = array('count' => count($attempts), 'done' => 0);

    foreach ($attempts as $attempt) {
        // Normalise: no explicit slot list means "regrade everything".
        if (!isset($attempt->regradeonlyslots)) {
            $attempt->regradeonlyslots = null;
        }

        $a['done'] += 1;
        $a['attemptnum'] = $attempt->attempt;
        $a['name'] = fullname($attempt);
        $a['attemptid'] = $attempt->id;
        $progressbar->update($a['done'], $a['count'],
                get_string('regradingattemptxofywithdetails', 'quiz_overview', $a));

        $this->regrade_attempt($attempt, $dryrun, $attempt->regradeonlyslots);
    }

    $progressbar->update($a['done'], $a['count'],
            get_string('regradedsuccessfullyxofy', 'quiz_overview', $a));

    // A real regrade changes question marks, so the overall grades must be
    // recomputed; a dry run saves nothing and can skip this.
    if (!$dryrun) {
        $this->update_overall_grades($quiz);
    }
}
/**
 * Count the number of attempts in need of a regrade.
 *
 * An attempt needs a regrade when it has at least one row in
 * quiz_overview_regrades with regraded = 0 (i.e. flagged by a dry run but
 * not yet regraded for real).
 *
 * @param object $quiz the quiz settings.
 * @param \core\dml\sql_join $groupstudentsjoins (joins, wheres, params) If this is given, only data relating
 *      to these users is counted.
 * @return int the number of attempts.
 */
protected function count_question_attempts_needing_regrade($quiz, \core\dml\sql_join $groupstudentsjoins) {
    global $DB;

    // Only restrict by group membership when a group is actually in effect.
    $userjoin = '';
    $usertest = '';
    $params = array();
    if ($this->hasgroupstudents) {
        $userjoin = "JOIN {user} u ON u.id = quiza.userid
{$groupstudentsjoins->joins}";
        $usertest = "{$groupstudentsjoins->wheres} AND u.id = quiza.userid AND ";
        $params = $groupstudentsjoins->params;
    }

    $params['cquiz'] = $quiz->id;
    // DISTINCT because one attempt may have several flagged slots.
    $sql = "SELECT COUNT(DISTINCT quiza.id)
FROM {quiz_attempts} quiza
JOIN {quiz_overview_regrades} qqr ON quiza.uniqueid = qqr.questionusageid
$userjoin
WHERE
$usertest
quiza.quiz = :cquiz AND
quiza.preview = 0 AND
qqr.regraded = 0";
    return $DB->count_records_sql($sql, $params);
}
/**
 * Are there any pending regrades in the table we are going to show?
 *
 * @param string $from tables used by the main query.
 * @param string $where where clause used by the main query.
 * @param array $params required by the SQL.
 * @return bool whether there are pending regrades.
 */
protected function has_regraded_questions($from, $where, $params) {
    global $DB;
    // Reuse the main query's FROM/WHERE, just joining on the regrades table.
    $sql = "
SELECT 1
FROM {$from}
JOIN {quiz_overview_regrades} qor ON qor.questionusageid = quiza.uniqueid
WHERE {$where}";
    return $DB->record_exists_sql($sql, $params);
}
/**
 * Remove all information about pending/complete regrades from the database.
 *
 * @param object $quiz the quiz settings.
 * @param \core\dml\sql_join $groupstudentsjoins (joins, wheres, params). If this is given, only data relating
 *      to these users is cleared.
 */
protected function clear_regrade_table($quiz, \core\dml\sql_join $groupstudentsjoins) {
    global $DB;

    // Build a subquery selecting the question-usage ids of this quiz's
    // attempts (optionally limited to the current group's students), and
    // delete the regrade rows that reference them.
    $select = "questionusageid IN (
SELECT uniqueid
FROM {quiz_attempts} quiza";
    $where = "WHERE quiza.quiz = :qid";
    $params = array('qid' => $quiz->id);
    if ($this->hasgroupstudents && !empty($groupstudentsjoins->joins)) {
        $select .= "\nJOIN {user} u ON u.id = quiza.userid
{$groupstudentsjoins->joins}";
        $where .= " AND {$groupstudentsjoins->wheres}";
        $params += $groupstudentsjoins->params;
    }
    $select .= "\n$where)";

    $DB->delete_records_select('quiz_overview_regrades', $select, $params);
}
/**
 * Update the final grades for all attempts. This method is used following
 * a regrade: it recomputes each attempt's sumgrades, then the quiz final
 * grades, then pushes the results through to the gradebook.
 *
 * @param object $quiz the quiz settings.
 */
protected function update_overall_grades($quiz) {
    quiz_update_all_attempt_sumgrades($quiz);
    quiz_update_all_final_grades($quiz);
    quiz_update_grades($quiz);
}
/**
 * Get the bands configuration for the quiz.
 *
 * This returns the configuration for having between 11 and 20 bars in
 * a chart based on the maximum grade to be given on a quiz. The width of
 * a band is the number of grade points it encapsulates.
 *
 * @param object $quiz The quiz object (only ->grade is used).
 * @return array Contains the number of bands, and their width.
 */
public static function get_bands_count_and_width($quiz) {
    // A non-positive maximum grade would make the loop below spin forever
    // (multiplying 0 or a negative number never escapes the (10, 20]
    // range), and would later divide by zero. Fall back to a single band.
    if ($quiz->grade <= 0) {
        return [1, $quiz->grade];
    }

    // Repeatedly scale the candidate band count until it lands in (10, 20].
    $bands = $quiz->grade;
    while ($bands > 20 || $bands <= 10) {
        if ($bands > 50) {
            $bands /= 5;
        } else if ($bands > 20) {
            $bands /= 2;
        }
        if ($bands < 4) {
            $bands *= 5;
        } else if ($bands <= 10) {
            $bands *= 2;
        }
    }
    // See MDL-34589. Using doubles as array keys causes problems in PHP 5.4, hence the explicit cast to int.
    $bands = (int) ceil($bands);
    return [$bands, $quiz->grade / $bands];
}
/**
 * Get the bands labels.
 *
 * Each label shows the (formatted) lower and upper grade bound of one band.
 *
 * @param int $bands The number of bands.
 * @param int $bandwidth The band width.
 * @param object $quiz The quiz object.
 * @return string[] The labels.
 */
public static function get_bands_labels($bands, $bandwidth, $quiz) {
    $bandlabels = [];
    for ($band = 1; $band <= $bands; $band++) {
        $lower = quiz_format_grade($quiz, ($band - 1) * $bandwidth);
        $upper = quiz_format_grade($quiz, $band * $bandwidth);
        $bandlabels[] = $lower . ' - ' . $upper;
    }
    return $bandlabels;
}
/**
 * Get a chart.
 *
 * Builds a bar chart of participant counts per grade band.
 *
 * @param string[] $labels Chart labels.
 * @param int[] $data The data.
 * @return \core\chart_base
 */
protected static function get_chart($labels, $data) {
    $chart = new \core\chart_bar();
    $chart->set_labels($labels);
    $chart->get_xaxis(0, true)->set_label(get_string('grade'));

    $yaxis = $chart->get_yaxis(0, true);
    $yaxis->set_label(get_string('participants'));
    // Aim for roughly ten steps on the y-axis, but never less than one.
    $stepsize = max(1, round(max($data) / 10));
    $yaxis->set_stepsize($stepsize);

    $chart->add_series(new \core\chart_series(get_string('participants'), $data));
    return $chart;
}
}
| evltuma/moodle | mod/quiz/report/overview/report.php | PHP | gpl-3.0 | 28,104 |
import { ElementRef, EventEmitter, NgZone, Renderer } from '@angular/core';
import { Config } from '../../config/config';
import { Ion } from '../ion';
import { Platform } from '../../platform/platform';
import { SlideContainer, SlideElement, SlideTouchEvents, SlideTouches, SlideZoom } from './swiper/swiper-interfaces';
import { ViewController } from '../../navigation/view-controller';
/**
* @name Slides
* @description
* The Slides component is a multi-section container. Each section can be swiped
* or dragged between. It contains any number of [Slide](../Slide) components.
*
*
* ### Creating
* You should use a template to create slides and listen to slide events. The template
* should contain the slide container, an `<ion-slides>` element, and any number of
* [Slide](../Slide) components, written as `<ion-slide>`. Basic configuration
* values can be set as input properties, which are listed below. Slides events
* can also be listened to such as the slide changing by placing the event on the
* `<ion-slides>` element. See [Usage](#usage) below for more information.
*
*
* ### Navigating
* After creating and configuring the slides, you can navigate between them
* by swiping or calling methods on the `Slides` instance. You can call `slideTo()` to
* navigate to a specific slide, or `slideNext()` to change to the slide that follows
* the active slide. All of the [methods](#instance-members) provided by the `Slides`
* instance are listed below. See [Usage](#usage) below for more information on
* navigating between slides.
*
*
* @usage
*
* You can add slides to a `@Component` using the following template:
*
* ```html
* <ion-slides>
* <ion-slide>
* <h1>Slide 1</h1>
* </ion-slide>
* <ion-slide>
* <h1>Slide 2</h1>
* </ion-slide>
* <ion-slide>
* <h1>Slide 3</h1>
* </ion-slide>
* </ion-slides>
* ```
*
* Next, we can use `ViewChild` to assign the Slides instance to
* your `slides` property. Now we can call any of the `Slides`
* [methods](#instance-members), for example we can use the Slide's
* `slideTo()` method in order to navigate to a specific slide on
* a button click. Below we call the `goToSlide()` method and it
* navigates to the 3rd slide:
*
* ```ts
* import { ViewChild } from '@angular/core';
*
* class MyPage {
* @ViewChild(Slides) slides: Slides;
*
* goToSlide() {
* this.slides.slideTo(2, 500);
* }
* }
* ```
*
* We can also add events to listen to on the `<ion-slides>` element.
* Let's add the `ionSlideDidChange` event and call a method when the slide changes:
*
* ```html
* <ion-slides (ionSlideDidChange)="slideChanged()">
* ```
*
* In our class, we add the `slideChanged()` method which gets the active
* index and prints it:
*
* ```ts
* class MyPage {
* ...
*
* slideChanged() {
* let currentIndex = this.slides.getActiveIndex();
* console.log("Current index is", currentIndex);
* }
* }
* ```
*
* @demo /docs/v2/demos/src/slides/
* @see {@link /docs/v2/components#slides Slides Component Docs}
*
* Adopted from Swiper.js:
* The most modern mobile touch slider and framework with
* hardware accelerated transitions.
*
* http://www.idangero.us/swiper/
*
* Copyright 2016, Vladimir Kharlampidi
* The iDangero.us
* http://www.idangero.us/
*
* Licensed under MIT
*/
export declare class Slides extends Ion {
    private _plt;
    /**
     * @input {number} Delay between transitions (in milliseconds). If this
     * parameter is not passed, autoplay is disabled. Default does
     * not have a value and does not autoplay.
     * Default: `null`.
     */
    autoplay: any;
    private _autoplayMs;
    /**
     * @input {string} Could be `slide`, `fade`, `cube`, `coverflow` or `flip`.
     * Default: `slide`.
     */
    effect: string;
    private _effectName;
    /**
     * @input {string} Swipe direction: 'horizontal' or 'vertical'.
     * Default: `horizontal`.
     */
    direction: string;
    private _direction;
    /**
     * @input {number} Index number of initial slide. Default: `0`.
     */
    initialSlide: any;
    private _initialSlide;
    /**
     * @input {boolean} Whether to continuously loop from the last slide to the
     * first slide. Default: `false`.
     */
    loop: boolean;
    private _isLoop;
    /**
     * @input {boolean} If true, show the pager (pagination indicator).
     * Default: `false`.
     */
    pager: boolean;
    private _pager;
    /**
     * @input {string} Type of pagination. Can be
     * `bullets`, `fraction`, `progress`. Default: `bullets`.
     * (Note that the pager will not show unless `pager` input
     * is set to true).
     */
    paginationType: string;
    private _paginationType;
    /**
     * @input {boolean} Enable, if you want to use "parallaxed" elements inside of
     * slider. Default: `false`.
     */
    parallax: boolean;
    private _isParallax;
    /**
     * @input {number} Duration of transition between slides
     * (in milliseconds). Default: `300`.
     */
    speed: any;
    private _speedMs;
    /**
     * @input {boolean} Set to `true` to enable zooming functionality.
     * Default: `false`.
     */
    zoom: boolean;
    private _isZoom;
    /**
     * @private
     * Height of container.
     */
    height: number;
    /**
     * @private
     * Width of container.
     */
    width: number;
    /**
     * @private
     * Enabled this option and swiper will be operated as usual except it will
     * not move, real translate values on wrapper will not be set. Useful when
     * you may need to create custom slide transition.
     */
    virtualTranslate: boolean;
    /**
     * @private
     * Set to true to round values of slides width and height to prevent blurry
     * texts on usual resolution screens (if you have such)
     */
    roundLengths: boolean;
    /**
     * @private
     */
    spaceBetween: number;
    /**
     * @private
     */
    slidesPerView: number | string;
    /**
     * @private
     */
    slidesPerColumn: number;
    /**
     * @private
     */
    slidesPerColumnFill: string;
    /**
     * @private
     */
    slidesPerGroup: number;
    /**
     * @private
     */
    centeredSlides: boolean;
    /**
     * @private
     */
    slidesOffsetBefore: number;
    /**
     * @private
     */
    slidesOffsetAfter: number;
    /**
     * @private
     */
    touchEventsTarget: 'container';
    /**
     * @private
     */
    autoplayDisableOnInteraction: boolean;
    /**
     * @private
     */
    autoplayStopOnLast: boolean;
    /**
     * @private
     */
    freeMode: boolean;
    /**
     * @private
     */
    freeModeMomentum: boolean;
    /**
     * @private
     */
    freeModeMomentumRatio: number;
    /**
     * @private
     */
    freeModeMomentumBounce: boolean;
    /**
     * @private
     */
    freeModeMomentumBounceRatio: number;
    /**
     * @private
     */
    freeModeMomentumVelocityRatio: number;
    /**
     * @private
     */
    freeModeSticky: boolean;
    /**
     * @private
     */
    freeModeMinimumVelocity: number;
    /**
     * @private
     */
    autoHeight: boolean;
    /**
     * @private
     */
    setWrapperSize: boolean;
    /**
     * @private
     */
    zoomMax: number;
    /**
     * @private
     */
    zoomMin: number;
    /**
     * @private
     */
    zoomToggle: boolean;
    /**
     * @private
     */
    touchRatio: number;
    /**
     * @private
     */
    touchAngle: number;
    /**
     * @private
     */
    simulateTouch: boolean;
    /**
     * @private
     */
    shortSwipes: boolean;
    /**
     * @private
     */
    longSwipes: boolean;
    /**
     * @private
     */
    longSwipesRatio: number;
    /**
     * @private
     */
    longSwipesMs: number;
    /**
     * @private
     */
    followFinger: boolean;
    /**
     * @private
     */
    onlyExternal: boolean;
    /**
     * @private
     */
    threshold: number;
    /**
     * @private
     */
    touchMoveStopPropagation: boolean;
    /**
     * @private
     */
    touchReleaseOnEdges: boolean;
    /**
     * @private
     */
    iOSEdgeSwipeDetection: boolean;
    /**
     * @private
     */
    iOSEdgeSwipeThreshold: number;
    /**
     * @private
     */
    paginationClickable: boolean;
    /**
     * @private
     */
    paginationHide: boolean;
    resistance: boolean;
    resistanceRatio: number;
    watchSlidesProgress: boolean;
    watchSlidesVisibility: boolean;
    /**
     * @private
     */
    preventClicks: boolean;
    /**
     * @private
     */
    preventClicksPropagation: boolean;
    /**
     * @private
     */
    slideToClickedSlide: boolean;
    /**
     * @private
     */
    loopAdditionalSlides: number;
    /**
     * @private
     */
    loopedSlides: any;
    /**
     * @private
     */
    swipeHandler: any;
    /**
     * @private
     */
    noSwiping: boolean;
    runCallbacksOnInit: boolean;
    /**
     * @private
     */
    keyboardControl: boolean;
    /**
     * @private
     */
    coverflow: {
        rotate: number;
        stretch: number;
        depth: number;
        modifier: number;
        slideShadows: boolean;
    };
    /**
     * @private
     */
    flip: {
        slideShadows: boolean;
        limitRotation: boolean;
    };
    /**
     * @private
     */
    cube: {
        slideShadows: boolean;
        shadow: boolean;
        shadowOffset: number;
        shadowScale: number;
    };
    /**
     * @private
     */
    fade: {
        crossFade: boolean;
    };
    /**
     * @private
     */
    prevSlideMessage: string;
    /**
     * @private
     */
    nextSlideMessage: string;
    /**
     * @private
     */
    firstSlideMessage: string;
    /**
     * @private
     */
    lastSlideMessage: string;
    /**
     * @private
     */
    originalEvent: any;
    /**
     * @output {Slides} Expression to evaluate when a slide change starts.
     */
    ionSlideWillChange: EventEmitter<Slides>;
    /**
     * @output {Slides} Expression to evaluate when a slide change ends.
     */
    ionSlideDidChange: EventEmitter<Slides>;
    /**
     * @output {Slides} Expression to evaluate when a slide moves.
     */
    ionSlideDrag: EventEmitter<Slides>;
    /**
     * @output {Slides} When the slider reaches its beginning (initial position).
     */
    ionSlideReachStart: EventEmitter<Slides>;
    /**
     * @output {Slides} When the slider reaches its last slide.
     */
    ionSlideReachEnd: EventEmitter<Slides>;
    /**
     * @output {Slides} Expression to evaluate when a slide moves.
     */
    ionSlideAutoplay: EventEmitter<Slides>;
    /**
     * @output {Slides} Same as `ionSlideWillChange` but caused by autoplay.
     */
    ionSlideAutoplayStart: EventEmitter<Slides>;
    /**
     * @output {Slides} Expression to evaluate when autoplay stops.
     */
    ionSlideAutoplayStop: EventEmitter<Slides>;
    /**
     * @output {Slides} Same as `ionSlideWillChange` but for "forward" direction only.
     */
    ionSlideNextStart: EventEmitter<Slides>;
    /**
     * @output {Slides} Same as `ionSlideWillChange` but for "backward" direction only.
     */
    ionSlidePrevStart: EventEmitter<Slides>;
    /**
     * @output {Slides} Same as `ionSlideDidChange` but for "forward" direction only.
     */
    ionSlideNextEnd: EventEmitter<Slides>;
    /**
     * @output {Slides} Same as `ionSlideDidChange` but for "backward" direction only.
     */
    ionSlidePrevEnd: EventEmitter<Slides>;
    /**
     * @output {Slides} When the user taps/clicks on the slide's container.
     */
    ionSlideTap: EventEmitter<Slides>;
    /**
     * @output {Slides} When the user double taps on the slide's container.
     */
    ionSlideDoubleTap: EventEmitter<Slides>;
    /** @private */
    ionSlideProgress: EventEmitter<number>;
    /** @private */
    ionSlideTransitionStart: EventEmitter<Slides>;
    /** @private */
    ionSlideTransitionEnd: EventEmitter<Slides>;
    /** @private */
    ionSlideTouchStart: EventEmitter<TouchEvent>;
    /** @private */
    ionSlideTouchEnd: EventEmitter<TouchEvent>;
    /**
     * @private
     * Deprecated
     */
    options: any;
    /**
     * @private
     * Deprecated: Use "ionSlideWillChange" instead.
     * Added 2016-12-29
     */
    readonly ionWillChange: EventEmitter<{}>;
    /**
     * @private
     * Deprecated: Use "ionSlideDidChange" instead.
     * Added 2016-12-29
     */
    readonly ionDidChange: EventEmitter<{}>;
    /**
     * @private
     * Deprecated: Use "ionSlideDrag" instead.
     * Added 2016-12-29
     */
    readonly ionDrag: EventEmitter<{}>;
    /**
     * Private properties only useful to this class.
     * ------------------------------------
     */
    private _init;
    private _tmr;
    private _unregs;
    /**
     * Properties that are exposed publicly but have no docs.
     * ------------------------------------
     */
    /** @private */
    clickedIndex: number;
    /** @private */
    clickedSlide: SlideElement;
    /** @private */
    container: SlideContainer;
    /** @private */
    id: number;
    /** @private */
    progress: number;
    /** @private */
    realIndex: number;
    /** @private */
    renderedHeight: number;
    /** @private */
    renderedWidth: number;
    /** @private */
    slideId: string;
    /** @private */
    swipeDirection: string;
    /** @private */
    velocity: number;
    /**
     * Properties which are for internal use only
     * and not exposed to the public
     * ------------------------------------
     */
    /** @internal */
    _activeIndex: number;
    /** @internal */
    _allowClick: boolean;
    /** @internal */
    _allowSwipeToNext: boolean;
    /** @internal */
    _allowSwipeToPrev: boolean;
    /** @internal */
    _animating: boolean;
    /** @internal */
    _autoplaying: boolean;
    /** @internal */
    _autoplayPaused: boolean;
    /** @internal */
    _autoplayTimeoutId: number;
    /** @internal */
    _bullets: HTMLElement[];
    /** @internal */
    _classNames: string[];
    /** @internal */
    _isBeginning: boolean;
    /** @internal */
    _isEnd: boolean;
    /** @internal */
    _keyboardUnReg: Function;
    /** @internal */
    _liveRegion: HTMLElement;
    /** @internal */
    _paginationContainer: HTMLElement;
    /** @internal */
    _previousIndex: number;
    /** @internal */
    _renderedSize: number;
    /** @internal */
    _rtl: boolean;
    /** @internal */
    _slides: SlideElement[];
    /** @internal */
    _snapGrid: any;
    /** @internal */
    _slidesGrid: any;
    /** @internal */
    _snapIndex: number;
    /** @internal */
    _slidesSizesGrid: any;
    /** @internal */
    _supportTouch: boolean;
    /** @internal */
    _supportGestures: boolean;
    /** @internal */
    _touches: SlideTouches;
    /** @internal */
    _touchEvents: SlideTouchEvents;
    /** @internal */
    _touchEventsDesktop: SlideTouchEvents;
    /** @internal */
    _translate: number;
    /** @internal */
    _virtualSize: any;
    /** @internal */
    _wrapper: HTMLElement;
    /** @internal */
    _zone: NgZone;
    /** @internal */
    _zoom: SlideZoom;
    nextButton: HTMLElement;
    prevButton: HTMLElement;
    constructor(config: Config, _plt: Platform, zone: NgZone, viewCtrl: ViewController, elementRef: ElementRef, renderer: Renderer);
    private _initSlides();
    /**
     * @private
     */
    ngAfterContentInit(): void;
    /**
     * @private
     * Update the underlying slider implementation. Call this if you've added or removed
     * child slides.
     */
    update(debounce?: number): void;
    /**
     * Transition to the specified slide.
     *
     * @param {number} index The index number of the slide.
     * @param {number} [speed] Transition duration (in ms).
     * @param {boolean} [runCallbacks] Whether or not to emit the `ionWillChange`/`ionDidChange` events. Default true.
     */
    slideTo(index: number, speed?: number, runCallbacks?: boolean): void;
    /**
     * Transition to the next slide.
     *
     * @param {number} [speed] Transition duration (in ms).
     * @param {boolean} [runCallbacks] Whether or not to emit the `ionWillChange`/`ionDidChange` events. Default true.
     */
    slideNext(speed?: number, runCallbacks?: boolean): void;
    /**
     * Transition to the previous slide.
     *
     * @param {number} [speed] Transition duration (in ms).
     * @param {boolean} [runCallbacks] Whether or not to emit the `ionWillChange`/`ionDidChange` events. Default true.
     */
    slidePrev(speed?: number, runCallbacks?: boolean): void;
    /**
     * Get the index of the active slide.
     *
     * @returns {number} The index number of the current slide.
     */
    getActiveIndex(): number;
    /**
     * Get the index of the previous slide.
     *
     * @returns {number} The index number of the previous slide.
     */
    getPreviousIndex(): number;
    /**
     * Get the total number of slides.
     *
     * @returns {number} The total number of slides.
     */
    length(): number;
    /**
     * Get whether or not the current slide is the last slide.
     *
     * @returns {boolean} If the slide is the last slide or not.
     */
    isEnd(): boolean;
    /**
     * Get whether or not the current slide is the first slide.
     *
     * @returns {boolean} If the slide is the first slide or not.
     */
    isBeginning(): boolean;
    /**
     * Start auto play.
     */
    startAutoplay(): void;
    /**
     * Stop auto play.
     */
    stopAutoplay(): void;
    /**
     * Lock or unlock the ability to slide to the next slides.
     */
    lockSwipeToNext(shouldLockSwipeToNext: boolean): void;
    /**
     * Lock or unlock the ability to slide to the previous slides.
     */
    lockSwipeToPrev(shouldLockSwipeToPrev: boolean): void;
    /**
     * Lock or unlock the ability to slide to change slides.
     */
    lockSwipes(shouldLockSwipes: boolean): void;
    /**
     * Enable or disable keyboard control.
     */
    enableKeyboardControl(shouldEnableKeyboard: boolean): void;
    /**
     * @private
     */
    ngOnDestroy(): void;
    /**
     * @private
     * Deprecated, please use the instance of ion-slides.
     */
    getSlider(): void;
}
| chitranshi21/home_service_ionic | node_modules/ionic-angular/umd/components/slides/slides.d.ts | TypeScript | gpl-3.0 | 18,338 |
<?php
/**
* interface/therapy_groups/therapy_groups_controllers/participants_controller.php contains the participants controller for therapy groups.
*
* This is the controller for the groups' participant view.
*
* Copyright (C) 2016 Shachar Zilbershlag <shaharzi@matrix.co.il>
* Copyright (C) 2016 Amiel Elboim <amielel@matrix.co.il>
*
* LICENSE: This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 3
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://opensource.org/licenses/gpl-license.php>;.
*
* @package OpenEMR
* @author Shachar Zilbershlag <shaharzi@matrix.co.il>
* @author Amiel Elboim <amielel@matrix.co.il>
* @link http://www.open-emr.org
*/
require_once dirname(__FILE__) . '/base_controller.php';
require_once dirname(__FILE__) . '/therapy_groups_controller.php';
require_once("{$GLOBALS['srcdir']}/pid.inc");
class ParticipantsController extends BaseController
{
    public function __construct()
    {
        // Models for the group<->patient relation, group events and the group itself.
        $this->groupParticipantsModel = $this->loadModel('therapy_groups_participants');
        $this->groupEventsModel = $this->loadModel('Therapy_Groups_Events');
        $this->groupModel = $this->loadModel('therapy_groups');
    }

    /**
     * Renders the participants view for a therapy group.
     *
     * Handles two inline actions before rendering:
     *  - 'save' (POST): updates every posted participant row;
     *  - 'deleteParticipant' (GET): removes one participant from the group.
     *
     * @param int   $groupId id of the therapy group
     * @param array $data    extra values merged into the view model
     */
    public function index($groupId, $data = array())
    {
        if (isset($_POST['save'])) {
            // Sanitization rules are identical for every row, so build them once.
            $filters = array(
                'group_patient_status' => FILTER_VALIDATE_INT,
                'group_patient_start' => FILTER_DEFAULT,
                'group_patient_end' => FILTER_SANITIZE_SPECIAL_CHARS,
                'group_patient_comment' => FILTER_DEFAULT,
            );
            for ($k = 0; $k < count($_POST['pid']); $k++) {
                $patient['pid'] = $_POST['pid'][$k];
                $patient['group_patient_status'] = $_POST['group_patient_status'][$k];
                $patient['group_patient_start'] = DateToYYYYMMDD($_POST['group_patient_start'][$k]);
                $patient['group_patient_end'] = DateToYYYYMMDD($_POST['group_patient_end'][$k]);
                $patient['group_patient_comment'] = $_POST['group_patient_comment'][$k];
                // Filter and sanitize all post data before persisting.
                $participant = filter_var_array($patient, $filters);
                $this->groupParticipantsModel->updateParticipant($participant, $patient['pid'], $_POST['group_id']);
            }
            // Leave edit mode once the posted changes have been stored.
            unset($_GET['editParticipants']);
        }

        if (isset($_GET['deleteParticipant'])) {
            $this->groupParticipantsModel->removeParticipant($_GET['group_id'], $_GET['pid']);
        }

        $data['events'] = $this->groupEventsModel->getGroupEvents($groupId);
        $data['readonly'] = 'disabled';
        $data['participants'] = $this->groupParticipantsModel->getParticipants($groupId);

        // Sort participants by status first, then by "lastname firstname".
        $statuses = array();
        $names = array();
        foreach ($data['participants'] as $key => $row) {
            $statuses[$key] = $row['group_patient_status'];
            $names[$key] = $row['lname'] . ' ' . $row['fname'];
        }
        array_multisort($statuses, SORT_ASC, $names, SORT_ASC, $data['participants']);

        $data['statuses'] = TherapyGroupsController::prepareParticipantStatusesList();
        $data['groupId'] = $groupId;
        $groupData = $this->groupModel->getGroup($groupId);
        $data['groupName'] = $groupData['group_name'];
        if (isset($_GET['editParticipants'])) {
            $data['readonly'] = '';
        }

        TherapyGroupsController::setSession($groupId);
        $this->loadView('groupDetailsParticipants', $data);
    }

    /**
     * Adds a new participant to the group (on 'save_new' POST) and then
     * renders the participants view.
     *
     * @param int $groupId id of the therapy group
     */
    public function add($groupId)
    {
        if (isset($_POST['save_new'])) {
            $_POST['group_patient_start'] = DateToYYYYMMDD($_POST['group_patient_start']);
            $alreadyRegistered = $this->groupParticipantsModel->isAlreadyRegistered($_POST['pid'], $groupId);
            if ($alreadyRegistered) {
                $this->index($groupId, array('participant_data' => $_POST, 'addStatus' => 'failed','message' => xlt('The patient already registered to the group')));
                // Bug fix: without this return the duplicate participant was
                // saved anyway and the view was rendered a second time below.
                return;
            }

            // adding group id to $_POST
            $_POST = array('group_id' => $groupId) + $_POST;
            $filters = array(
                'group_id' => FILTER_VALIDATE_INT,
                'pid' => FILTER_VALIDATE_INT,
                'group_patient_start' => FILTER_DEFAULT,
                'group_patient_comment' => FILTER_DEFAULT,
            );
            $participant_data = filter_var_array($_POST, $filters);
            // 10 is the default "active" status code; end date is unset for a
            // newly added participant.
            $participant_data['group_patient_status'] = 10;
            $participant_data['group_patient_end'] = 'NULL';
            $this->groupParticipantsModel->saveParticipant($participant_data);
        }

        $this->index($groupId, array('participant_data' => null));
    }
}
| vaibhavgupta3110/openemr | interface/therapy_groups/therapy_groups_controllers/participants_controller.php | PHP | gpl-3.0 | 5,277 |
// { dg-do compile }
// { dg-options "-D__STDCPP_WANT_MATH_SPEC_FUNCS__" }
// Copyright (C) 2016 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
// 8.1.11 ellint_1
#include <math.h>
// Compile-only smoke test: checks that ellint_1 (incomplete elliptic integral
// of the first kind) and its f/l suffixed variants are declared by <math.h>
// when __STDCPP_WANT_MATH_SPEC_FUNCS__ is defined, for all three
// floating-point types. Results are deliberately discarded.
void
test01()
{
  float kf = 0.5F, phif = atan2(1.0F, 1.0F);       // phi = pi/4
  double kd = 0.5, phid = atan2(1.0, 1.0);
  long double kl = 0.5L, phil = atan2(1.0L, 1.0L);

  ellint_1(kf, phif);    // float overload
  ellint_1f(kf, phif);   // f-suffixed variant
  ellint_1(kd, phid);    // double overload
  ellint_1(kl, phil);    // long double overload
  ellint_1l(kl, phil);   // l-suffixed variant
  return;
}
| selmentdev/selment-toolchain | source/gcc-latest/libstdc++-v3/testsuite/special_functions/11_ellint_1/compile_2.cc | C++ | gpl-3.0 | 1,160 |
/**
* Copyright © 2002 Instituto Superior Técnico
*
* This file is part of FenixEdu Academic.
*
* FenixEdu Academic is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* FenixEdu Academic is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
*/
package org.fenixedu.academic.domain.reports;
import java.util.Collections;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.fenixedu.academic.domain.ExecutionYear;
import org.fenixedu.academic.domain.Person;
import org.fenixedu.academic.domain.SchoolLevelType;
import org.fenixedu.academic.domain.contacts.PhysicalAddress;
import org.fenixedu.academic.domain.student.Registration;
import org.fenixedu.academic.domain.student.curriculum.ConclusionProcess;
import org.fenixedu.commons.spreadsheet.Spreadsheet;
import org.fenixedu.commons.spreadsheet.Spreadsheet.Row;
import org.joda.time.LocalDate;
/**
 * Report job producing a spreadsheet listing all graduates, one row per
 * concluded {@link ConclusionProcess}, optionally restricted to a single
 * execution year and a degree type (both configured on the base class).
 */
public class GraduationReportFile extends GraduationReportFile_Base {

    public GraduationReportFile() {
        super();
    }

    /** Human-readable job name shown to users (Portuguese: "graduates listing"). */
    @Override
    public String getJobName() {
        return "Listagem de diplomados";
    }

    /** Prefix used when naming the generated report file. */
    @Override
    protected String getPrefix() {
        return "diplomados";
    }

    /**
     * Writes the header row and then one data row per concluded conclusion
     * process of the selected execution year(s) that matches the configured
     * degree type.
     *
     * NOTE(review): the header order below must stay exactly in sync with the
     * cell order written by reportGraduate/setPersonCells.
     */
    @Override
    public void renderReport(Spreadsheet spreadsheet) {
        spreadsheet.setHeader("número aluno");
        spreadsheet.setHeader("nome");
        setDegreeHeaders(spreadsheet);
        spreadsheet.setHeader("ciclo");
        spreadsheet.setHeader("Nota Conclusão Secundário");
        spreadsheet.setHeader("Nota Seriação");
        spreadsheet.setHeader("ano de ingresso");
        spreadsheet.setHeader("ano lectivo conclusão");
        spreadsheet.setHeader("data conclusão");
        spreadsheet.setHeader("número de anos para conclusão");
        spreadsheet.setHeader("média final");
        spreadsheet.setHeader("morada");
        spreadsheet.setHeader("código postal");
        spreadsheet.setHeader("cidade");
        spreadsheet.setHeader("país");
        spreadsheet.setHeader("telefone");
        spreadsheet.setHeader("telemovel");
        spreadsheet.setHeader("email");
        spreadsheet.setHeader("sexo");
        spreadsheet.setHeader("data nascimento");

        // No configured year means: inspect every execution year known to the system.
        final Set<ExecutionYear> toInspectSet =
                getExecutionYear() == null ? getRootDomainObject().getExecutionYearsSet() : Collections
                        .singleton(getExecutionYear());

        for (final ExecutionYear toInspect : toInspectSet) {
            for (final ConclusionProcess conclusionProcess : toInspect.getConclusionProcessesConcludedSet()) {
                if (checkDegreeType(getDegreeType(), conclusionProcess)) {
                    reportGraduate(spreadsheet, conclusionProcess);
                }
            }
        }
    }

    /** Emits one row describing a single graduate's registration and conclusion data. */
    private void reportGraduate(final Spreadsheet sheet, final ConclusionProcess conclusionProcess) {
        final Row row = sheet.addRow();
        final Registration registration = conclusionProcess.getRegistration();
        final ExecutionYear ingression = conclusionProcess.getIngressionYear();
        final ExecutionYear conclusion = conclusionProcess.getConclusionYear();
        final LocalDate conclusionDate = conclusionProcess.getConclusionDate();

        row.setCell(registration.getNumber());
        row.setCell(registration.getName());
        setDegreeCells(row, registration.getDegree());
        row.setCell(conclusionProcess.getName().getContent());
        // NOTE(review): header says "Nota Conclusão Secundário" but the school
        // level queried is SECOND_CYCLE_BASIC_SCHOOL — confirm this is intended.
        row.setCell(registration.getPrecedentDegreeConclusionGrade(SchoolLevelType.SECOND_CYCLE_BASIC_SCHOOL));
        row.setCell(registration.getEntryGrade() != null ? registration.getEntryGrade().toString() : StringUtils.EMPTY);
        row.setCell(ingression.getYear());
        row.setCell(conclusion == null ? StringUtils.EMPTY : conclusion.getYear());
        row.setCell(conclusionDate == null ? StringUtils.EMPTY : conclusionDate.toString("yyyy-MM-dd"));
        // +1 so that starting and concluding in the same civil year counts as one year.
        row.setCell(conclusion == null ? StringUtils.EMPTY : String.valueOf(ingression.getDistanceInCivilYears(conclusion) + 1));
        row.setCell(conclusionProcess.getFinalGrade().getValue());
        setPersonCells(registration, row);
    }

    /**
     * Appends the personal/contact columns (address, phones, email, gender,
     * birth date) for the registration's person; missing address fields are
     * written as empty cells to keep the column layout stable.
     */
    private void setPersonCells(final Registration registration, final Row row) {
        final Person person = registration.getPerson();

        final PhysicalAddress defaultPhysicalAddress = person.getDefaultPhysicalAddress();
        if (defaultPhysicalAddress != null) {
            row.setCell(defaultPhysicalAddress.getAddress());
            row.setCell(defaultPhysicalAddress.getPostalCode());
            row.setCell(defaultPhysicalAddress.getArea());
            row.setCell(defaultPhysicalAddress.getCountryOfResidence() == null ? StringUtils.EMPTY : defaultPhysicalAddress
                    .getCountryOfResidence().getName());
        } else {
            row.setCell(StringUtils.EMPTY);
            row.setCell(StringUtils.EMPTY);
            row.setCell(StringUtils.EMPTY);
            row.setCell(StringUtils.EMPTY);
        }

        row.setCell(person.getDefaultPhoneNumber());
        row.setCell(person.getDefaultMobilePhoneNumber());
        row.setCell(person.getInstitutionalOrDefaultEmailAddressValue());
        // NOTE(review): assumes gender is always set — NPE otherwise; confirm.
        row.setCell(person.getGender().toLocalizedString());
        row.setCell(person.getDateOfBirthYearMonthDay() != null ? person.getDateOfBirthYearMonthDay().toString("yyyy-MM-dd") : StringUtils.EMPTY);
    }
}
| jcarvalho/fenixedu-academic | src/main/java/org/fenixedu/academic/domain/reports/GraduationReportFile.java | Java | lgpl-3.0 | 5,943 |
/**
*
*/
package org.sword.wechat4j;
import javax.servlet.http.HttpServletRequest;
/**
* @author ChengNing
* @date 2014年12月7日
*/
/**
 * Minimal concrete {@link WechatSupport} implementation.
 * <p>
 * Every message/event callback inherited from {@code WechatSupport} is
 * implemented as an intentional no-op. Subclass this class (or edit it
 * directly) and override only the callbacks your application needs to
 * react to.
 */
public class Wechat extends WechatSupport{

    public Wechat(HttpServletRequest request) {
        super(request);
    }

    // ---- All callbacks below are intentionally left empty. ----

    @Override
    protected void onText() {
    }

    @Override
    protected void onImage() {
    }

    @Override
    protected void onVoice() {
    }

    @Override
    protected void onVideo() {
    }

    @Override
    protected void onLocation() {
    }

    @Override
    protected void onLink() {
    }

    @Override
    protected void onUnknown() {
    }

    @Override
    protected void click() {
    }

    @Override
    protected void subscribe() {
    }

    @Override
    protected void unSubscribe() {
    }

    @Override
    protected void scan() {
    }

    @Override
    protected void location() {
    }

    @Override
    protected void view() {
    }

    @Override
    protected void templateMsgCallback() {
    }

    @Override
    protected void scanCodePush() {
    }

    @Override
    protected void scanCodeWaitMsg() {
    }

    @Override
    protected void picSysPhoto() {
    }

    @Override
    protected void picPhotoOrAlbum() {
    }

    @Override
    protected void picWeixin() {
    }

    @Override
    protected void locationSelect() {
    }

    @Override
    protected void onShortVideo() {
    }

    @Override
    protected void kfCreateSession() {
    }

    @Override
    protected void kfCloseSession() {
    }

    @Override
    protected void kfSwitchSession() {
    }
}
| liwanwei/wechat4j | test/org/sword/wechat4j/Wechat.java | Java | apache-2.0 | 2,327 |
/*
Copyright (c) 2011 Eli Grey, http://eligrey.com
This file is based on:
https://github.com/eligrey/FileSaver.js/blob/master/FileSaver.js ,
licensed under X11/MIT.
See https://github.com/eligrey/FileSaver.js/blob/master/LICENSE.md
This file is part of SwitchySharp.
SwitchySharp is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SwitchySharp is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SwitchySharp. If not, see <http://www.gnu.org/licenses/>.
*/
// saveAs(blob, name): client-side "save file" shim. Strategy order:
//   1. <a download> attribute + synthetic click (if supported),
//   2. the (WebKit) temporary filesystem API,
//   3. plain object-URL navigation as the last resort.
var saveAs = saveAs || (function (view) {
    "use strict";
    var
        doc = view.document
        // only get URL when necessary in case BlobBuilder.js hasn't overridden it yet
        , get_URL = function () {
            return view.URL || view.webkitURL || view;
        }
        , URL = view.URL || view.webkitURL || view
        , save_link = doc.createElementNS("http://www.w3.org/1999/xhtml", "a")
        , can_use_save_link = "download" in save_link
        // Dispatches a synthetic click on the given node.
        , click = function (node) {
            var event = doc.createEvent("MouseEvents");
            event.initMouseEvent(
                "click", true, false, view, 0, 0, 0, 0, 0
                , false, false, false, false, 0, null
            );
            return node.dispatchEvent(event); // false if event was cancelled
        }
        , webkit_req_fs = view.webkitRequestFileSystem
        , req_fs = view.requestFileSystem || webkit_req_fs || view.mozRequestFileSystem
        // Re-throws an exception asynchronously so a listener error cannot
        // break the save flow.
        , throw_outside = function (ex) {
            (view.setImmediate || view.setTimeout)(function () {
                throw ex;
            }, 0);
        }
        , force_saveable_type = "application/octet-stream"
        , fs_min_size = 0
        // Object URLs and temp files awaiting cleanup on page unload.
        , deletion_queue = []
        , process_deletion_queue = function () {
            var i = deletion_queue.length;
            while (i--) {
                var file = deletion_queue[i];
                if (typeof file === "string") { // file is an object URL
                    URL.revokeObjectURL(file);
                } else { // file is a File
                    file.remove();
                }
            }
            deletion_queue.length = 0; // clear queue
        }
        // Invokes every "on<type>" handler set on the FileSaver instance;
        // handler exceptions are deferred via throw_outside.
        , dispatch = function (filesaver, event_types, event) {
            event_types = [].concat(event_types);
            var i = event_types.length;
            while (i--) {
                var listener = filesaver["on" + event_types[i]];
                if (typeof listener === "function") {
                    try {
                        listener.call(filesaver, event || filesaver);
                    } catch (ex) {
                        throw_outside(ex);
                    }
                }
            }
        }
        , FileSaver = function (blob, name) {
            // First try a.download, then web filesystem, then object URLs
            var
                filesaver = this
                , type = blob.type
                , blob_changed = false
                , object_url
                , target_view
                , get_object_url = function (blob) {
                    var object_url = get_URL().createObjectURL(blob);
                    deletion_queue.push(object_url);
                    return object_url;
                }
                , dispatch_all = function () {
                    dispatch(filesaver, "writestart progress write writeend".split(" "));
                }
                // on any filesys errors revert to saving with object URLs
                , fs_error = function () {
                    // don't create more object URLs than needed
                    if (blob_changed || !object_url) {
                        object_url = get_object_url(blob);
                    }
                    target_view.location.href = object_url;
                    filesaver.readyState = filesaver.DONE;
                    dispatch_all();
                }
                // Wraps a callback so it becomes a no-op once the save is DONE
                // (i.e. after abort() or completion).
                , abortable = function (func) {
                    return function () {
                        if (filesaver.readyState !== filesaver.DONE) {
                            return func.apply(this, arguments);
                        }
                    };
                }
                , create_if_not_found = {create:true, exclusive:false}
                , slice
            ;
            filesaver.readyState = filesaver.INIT;
            if (!name) {
                name = "download";
            }
            // Strategy 1: anchor element with the download attribute.
            if (can_use_save_link) {
                object_url = get_object_url(blob);
                save_link.href = object_url;
                save_link.download = name;
                if (click(save_link)) {
                    filesaver.readyState = filesaver.DONE;
                    dispatch_all();
                    return;
                }
            }
            // Object and web filesystem URLs have a problem saving in Google Chrome when
            // viewed in a tab, so I force save with application/octet-stream
            // http://code.google.com/p/chromium/issues/detail?id=91158
            if (view.chrome && type && type !== force_saveable_type) {
                slice = blob.slice || blob.webkitSlice;
                blob = slice.call(blob, 0, blob.size, force_saveable_type);
                blob_changed = true;
            }
            // Since I can't be sure that the guessed media type will trigger a download
            // in WebKit, I append .download to the filename.
            // https://bugs.webkit.org/show_bug.cgi?id=65440
            //if (webkit_req_fs && name !== "download") {
            //	name += ".download";
            //}
            if (type === force_saveable_type || webkit_req_fs) {
                target_view = view;
            } else {
                target_view = view.open();
            }
            // Strategy 3 (fallback) when no filesystem API is available.
            if (!req_fs) {
                fs_error();
                return;
            }
            // Strategy 2: write the blob into the temporary filesystem, then
            // navigate the target view to the resulting file URL.
            fs_min_size += blob.size;
            req_fs(view.TEMPORARY, fs_min_size, abortable(function (fs) {
                fs.root.getDirectory("saved", create_if_not_found, abortable(function (dir) {
                    var save = function () {
                        dir.getFile(name, create_if_not_found, abortable(function (file) {
                            file.createWriter(abortable(function (writer) {
                                writer.onwriteend = function (event) {
                                    target_view.location.href = file.toURL();
                                    deletion_queue.push(file);
                                    filesaver.readyState = filesaver.DONE;
                                    dispatch(filesaver, "writeend", event);
                                };
                                writer.onerror = function () {
                                    var error = writer.error;
                                    if (error.code !== error.ABORT_ERR) {
                                        fs_error();
                                    }
                                };
                                "writestart progress write abort".split(" ").forEach(function (event) {
                                    writer["on" + event] = filesaver["on" + event];
                                });
                                writer.write(blob);
                                filesaver.abort = function () {
                                    writer.abort();
                                    filesaver.readyState = filesaver.DONE;
                                };
                                filesaver.readyState = filesaver.WRITING;
                            }), fs_error);
                        }), fs_error);
                    };
                    dir.getFile(name, {create:false}, abortable(function (file) {
                        // delete file if it already exists
                        file.remove();
                        save();
                    }), abortable(function (ex) {
                        if (ex.code === ex.NOT_FOUND_ERR) {
                            save();
                        } else {
                            fs_error();
                        }
                    }));
                }), fs_error);
            }), fs_error);
        }
        , FS_proto = FileSaver.prototype
        , saveAs = function (blob, name) {
            return new FileSaver(blob, name);
        }
    ;
    // Public abort(): marks the save as finished and fires "abort" handlers.
    FS_proto.abort = function () {
        var filesaver = this;
        filesaver.readyState = filesaver.DONE;
        dispatch(filesaver, "abort");
    };
    // readyState constants mirror the FileSaver/FileWriter spec.
    FS_proto.readyState = FS_proto.INIT = 0;
    FS_proto.WRITING = 1;
    FS_proto.DONE = 2;

    FS_proto.error =
    FS_proto.onwritestart =
    FS_proto.onprogress =
    FS_proto.onwrite =
    FS_proto.onabort =
    FS_proto.onerror =
    FS_proto.onwriteend =
        null;

    // Revoke object URLs / remove temp files when the page goes away.
    view.addEventListener("unload", process_deletion_queue, false);
    return saveAs;
}(self));
| FrankLiu/nscrapy | plugin/SwitchySharp/assets/libs/FileSaver.js | JavaScript | apache-2.0 | 9,450 |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.dmn.model.v1_3;
import java.util.ArrayList;
import org.kie.dmn.model.api.InformationItem;
import org.kie.dmn.model.api.List;
import org.kie.dmn.model.api.Relation;
/**
 * DMN 1.3 model element for a relation expression: an ordered list of column
 * declarations plus a list of rows, each row being a DMN {@link List}
 * expression.
 * <p>
 * Both collections are created lazily on first access and returned live
 * (mutable), following the JAXB-style conventions of this model package.
 */
public class TRelation extends TExpression implements Relation {

    protected java.util.List<InformationItem> column;
    protected java.util.List<List> row;

    /** @return the live list of column declarations, never {@code null}. */
    @Override
    public java.util.List<InformationItem> getColumn() {
        return this.column != null ? this.column : (this.column = new ArrayList<>());
    }

    /** @return the live list of rows, never {@code null}. */
    @Override
    public java.util.List<List> getRow() {
        return this.row != null ? this.row : (this.row = new ArrayList<>());
    }
}
| droolsjbpm/drools | kie-dmn/kie-dmn-model/src/main/java/org/kie/dmn/model/v1_3/TRelation.java | Java | apache-2.0 | 1,338 |
// +build linux
package main
import (
"os"
"path/filepath"
"strings"
"syscall"
"github.com/docker/docker/integration-cli/checker"
"github.com/docker/docker/pkg/mount"
icmd "github.com/docker/docker/pkg/testutil/cmd"
"github.com/go-check/check"
)
// TestDaemonRestartWithPluginEnabled tests state restore for an enabled plugin:
// after installing a plugin and restarting the daemon, `plugin ls` must still
// report it as present and enabled ("true").
func (s *DockerDaemonSuite) TestDaemonRestartWithPluginEnabled(c *check.C) {
	testRequires(c, IsAmd64, Network)
	s.d.Start(c)

	if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName); err != nil {
		c.Fatalf("Could not install plugin: %v %s", err, out)
	}

	// Best-effort cleanup so later tests start from a clean daemon.
	defer func() {
		if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
			c.Fatalf("Could not disable plugin: %v %s", err, out)
		}
		if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
			c.Fatalf("Could not remove plugin: %v %s", err, out)
		}
	}()

	s.d.Restart(c)

	out, err := s.d.Cmd("plugin", "ls")
	if err != nil {
		c.Fatalf("Could not list plugins: %v %s", err, out)
	}
	c.Assert(out, checker.Contains, pName)
	c.Assert(out, checker.Contains, "true")
}
// TestDaemonRestartWithPluginDisabled tests state restore for a disabled plugin:
// a plugin installed with --disable must survive a daemon restart and still be
// listed as disabled ("false").
func (s *DockerDaemonSuite) TestDaemonRestartWithPluginDisabled(c *check.C) {
	testRequires(c, IsAmd64, Network)
	s.d.Start(c)

	if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName, "--disable"); err != nil {
		c.Fatalf("Could not install plugin: %v %s", err, out)
	}

	// No disable step needed in cleanup: the plugin was never enabled.
	defer func() {
		if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
			c.Fatalf("Could not remove plugin: %v %s", err, out)
		}
	}()

	s.d.Restart(c)

	out, err := s.d.Cmd("plugin", "ls")
	if err != nil {
		c.Fatalf("Could not list plugins: %v %s", err, out)
	}
	c.Assert(out, checker.Contains, pName)
	c.Assert(out, checker.Contains, "false")
}
// TestDaemonKillLiveRestoreWithPlugins SIGKILLs daemon started with --live-restore.
// Plugins should continue to run: the plugin process must still be found via
// pgrep after the daemon is gone.
func (s *DockerDaemonSuite) TestDaemonKillLiveRestoreWithPlugins(c *check.C) {
	testRequires(c, IsAmd64, Network)
	s.d.Start(c, "--live-restore")
	if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName); err != nil {
		c.Fatalf("Could not install plugin: %v %s", err, out)
	}
	// Cleanup needs a running daemon again, hence the restart first.
	defer func() {
		s.d.Restart(c, "--live-restore")
		if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
			c.Fatalf("Could not disable plugin: %v %s", err, out)
		}
		if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
			c.Fatalf("Could not remove plugin: %v %s", err, out)
		}
	}()

	if err := s.d.Kill(); err != nil {
		c.Fatalf("Could not kill daemon: %v", err)
	}

	icmd.RunCommand("pgrep", "-f", pluginProcessName).Assert(c, icmd.Success)
}
// TestDaemonShutdownLiveRestoreWithPlugins SIGTERMs daemon started with --live-restore.
// Plugins should continue to run after a graceful daemon shutdown, same as in
// the SIGKILL variant above but using Interrupt instead of Kill.
func (s *DockerDaemonSuite) TestDaemonShutdownLiveRestoreWithPlugins(c *check.C) {
	testRequires(c, IsAmd64, Network)
	s.d.Start(c, "--live-restore")
	if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName); err != nil {
		c.Fatalf("Could not install plugin: %v %s", err, out)
	}
	// Cleanup needs a running daemon again, hence the restart first.
	defer func() {
		s.d.Restart(c, "--live-restore")
		if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
			c.Fatalf("Could not disable plugin: %v %s", err, out)
		}
		if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
			c.Fatalf("Could not remove plugin: %v %s", err, out)
		}
	}()

	if err := s.d.Interrupt(); err != nil {
		c.Fatalf("Could not kill daemon: %v", err)
	}

	icmd.RunCommand("pgrep", "-f", pluginProcessName).Assert(c, icmd.Success)
}
// TestDaemonShutdownWithPlugins shuts down running plugins: without
// --live-restore, a graceful daemon shutdown must also stop the plugin
// process; restarting with --live-restore must bring it back.
func (s *DockerDaemonSuite) TestDaemonShutdownWithPlugins(c *check.C) {
	testRequires(c, IsAmd64, Network, SameHostDaemon)
	s.d.Start(c)
	if out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName); err != nil {
		c.Fatalf("Could not install plugin: %v %s", err, out)
	}

	defer func() {
		s.d.Restart(c)
		if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
			c.Fatalf("Could not disable plugin: %v %s", err, out)
		}
		if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
			c.Fatalf("Could not remove plugin: %v %s", err, out)
		}
	}()

	if err := s.d.Interrupt(); err != nil {
		c.Fatalf("Could not kill daemon: %v", err)
	}

	// Busy-wait until the daemon PID is actually gone (signal 0 probe).
	for {
		if err := syscall.Kill(s.d.Pid(), 0); err == syscall.ESRCH {
			break
		}
	}

	// pgrep exiting 1 means no matching process: the plugin was stopped too.
	icmd.RunCommand("pgrep", "-f", pluginProcessName).Assert(c, icmd.Expected{
		ExitCode: 1,
		Error:    "exit status 1",
	})

	s.d.Start(c, "--live-restore")
	icmd.RunCommand("pgrep", "-f", pluginProcessName).Assert(c, icmd.Success)
}
// TestVolumePlugin tests volume creation using a plugin: a volume created with
// the plugin driver must be listed, usable from a container, and backed by a
// mountpoint inside the plugin's rootfs that disappears once the plugin is
// removed.
func (s *DockerDaemonSuite) TestVolumePlugin(c *check.C) {
	testRequires(c, IsAmd64, Network)
	volName := "plugin-volume"
	destDir := "/tmp/data/"
	destFile := "foo"

	s.d.Start(c)
	out, err := s.d.Cmd("plugin", "install", pName, "--grant-all-permissions")
	if err != nil {
		c.Fatalf("Could not install plugin: %v %s", err, out)
	}
	pluginID, err := s.d.Cmd("plugin", "inspect", "-f", "{{.Id}}", pName)
	pluginID = strings.TrimSpace(pluginID)
	if err != nil {
		c.Fatalf("Could not retrieve plugin ID: %v %s", err, pluginID)
	}
	// All of the plugin's volume data lives under its rootfs directory.
	mountpointPrefix := filepath.Join(s.d.RootDir(), "plugins", pluginID, "rootfs")
	defer func() {
		if out, err := s.d.Cmd("plugin", "disable", pName); err != nil {
			c.Fatalf("Could not disable plugin: %v %s", err, out)
		}

		if out, err := s.d.Cmd("plugin", "remove", pName); err != nil {
			c.Fatalf("Could not remove plugin: %v %s", err, out)
		}

		// After removal no mountpoint under the plugin rootfs may remain.
		exists, err := existsMountpointWithPrefix(mountpointPrefix)
		c.Assert(err, checker.IsNil)
		c.Assert(exists, checker.Equals, false)

	}()

	out, err = s.d.Cmd("volume", "create", "-d", pName, volName)
	if err != nil {
		c.Fatalf("Could not create volume: %v %s", err, out)
	}
	defer func() {
		if out, err := s.d.Cmd("volume", "remove", volName); err != nil {
			c.Fatalf("Could not remove volume: %v %s", err, out)
		}
	}()

	out, err = s.d.Cmd("volume", "ls")
	if err != nil {
		c.Fatalf("Could not list volume: %v %s", err, out)
	}
	c.Assert(out, checker.Contains, volName)
	c.Assert(out, checker.Contains, pName)

	mountPoint, err := s.d.Cmd("volume", "inspect", volName, "--format", "{{.Mountpoint}}")
	if err != nil {
		c.Fatalf("Could not inspect volume: %v %s", err, mountPoint)
	}
	mountPoint = strings.TrimSpace(mountPoint)

	// Write a file through a container, then verify it landed inside the
	// plugin's rootfs at the reported mountpoint.
	out, err = s.d.Cmd("run", "--rm", "-v", volName+":"+destDir, "busybox", "touch", destDir+destFile)
	c.Assert(err, checker.IsNil, check.Commentf(out))

	path := filepath.Join(s.d.RootDir(), "plugins", pluginID, "rootfs", mountPoint, destFile)
	_, err = os.Lstat(path)
	c.Assert(err, checker.IsNil)

	exists, err := existsMountpointWithPrefix(mountpointPrefix)
	c.Assert(err, checker.IsNil)
	c.Assert(exists, checker.Equals, true)
}
// TestGraphdriverPlugin verifies the daemon can use a graphdriver plugin as
// its storage driver: install the plugin, restart with -s <plugin>, and run a
// container (which pulls busybox through the plugin).
func (s *DockerDaemonSuite) TestGraphdriverPlugin(c *check.C) {
	testRequires(c, Network, IsAmd64, DaemonIsLinux, overlay2Supported, ExperimentalDaemon)

	s.d.Start(c)

	// install the plugin
	plugin := "cpuguy83/docker-overlay2-graphdriver-plugin"
	out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", plugin)
	c.Assert(err, checker.IsNil, check.Commentf(out))

	// restart the daemon with the plugin set as the storage driver
	s.d.Restart(c, "-s", plugin, "--storage-opt", "overlay2.override_kernel_check=1")

	// run a container
	out, err = s.d.Cmd("run", "--rm", "busybox", "true") // this will pull busybox using the plugin
	c.Assert(err, checker.IsNil, check.Commentf(out))
}
// TestPluginVolumeRemoveOnRestart checks that after a live-restore daemon
// restart a plugin that still backs a volume cannot be disabled ("in use"),
// and becomes disable/removable once the volume is deleted.
func (s *DockerDaemonSuite) TestPluginVolumeRemoveOnRestart(c *check.C) {
	testRequires(c, DaemonIsLinux, Network, IsAmd64)

	s.d.Start(c, "--live-restore=true")

	out, err := s.d.Cmd("plugin", "install", "--grant-all-permissions", pName)
	c.Assert(err, checker.IsNil, check.Commentf(out))
	c.Assert(strings.TrimSpace(out), checker.Contains, pName)

	out, err = s.d.Cmd("volume", "create", "--driver", pName, "test")
	c.Assert(err, checker.IsNil, check.Commentf(out))

	s.d.Restart(c, "--live-restore=true")

	// Disabling must fail while the volume still references the plugin.
	out, err = s.d.Cmd("plugin", "disable", pName)
	c.Assert(err, checker.NotNil, check.Commentf(out))
	c.Assert(out, checker.Contains, "in use")

	out, err = s.d.Cmd("volume", "rm", "test")
	c.Assert(err, checker.IsNil, check.Commentf(out))

	out, err = s.d.Cmd("plugin", "disable", pName)
	c.Assert(err, checker.IsNil, check.Commentf(out))

	out, err = s.d.Cmd("plugin", "rm", pName)
	c.Assert(err, checker.IsNil, check.Commentf(out))
}
// existsMountpointWithPrefix reports whether any mountpoint currently known
// to the system starts with the given path prefix.
func existsMountpointWithPrefix(mountpointPrefix string) (bool, error) {
	entries, err := mount.GetMounts()
	if err != nil {
		return false, err
	}

	found := false
	for _, entry := range entries {
		if strings.HasPrefix(entry.Mountpoint, mountpointPrefix) {
			found = true
			break
		}
	}
	return found, nil
}
| jgsqware/clairctl | vendor/github.com/docker/docker/integration-cli/docker_cli_daemon_plugins_test.go | GO | apache-2.0 | 8,839 |
from lib.mmonit import MmonitBaseAction
class MmonitGetUptimeHost(MmonitBaseAction):
    """Fetch the uptime report for a single host from an M/Monit server."""

    def run(self, host_id, uptime_range=0, datefrom=0, dateto=0):
        """Query ``/reports/uptime/get`` for ``host_id``.

        :param host_id: id of the host in M/Monit
        :param uptime_range: report range selector; must be 12 when a custom
            ``datefrom`` is supplied
        :param datefrom: start of a custom interval (0 = not set)
        :param dateto: end of a custom interval (0 = not set)
        :return: decoded JSON payload returned by M/Monit
        :raises ValueError: if ``datefrom`` is set but ``uptime_range`` is not 12
        """
        self.login()

        if datefrom != 0 and uptime_range != 12:
            # Fix: the original raised here without logging out, leaking the
            # session. ValueError subclasses Exception, so existing callers
            # catching Exception are unaffected.
            self.logout()
            raise ValueError("If datefrom is set, range should be 12")

        data = {"id": host_id, "range": uptime_range, "datefrom": datefrom, "dateto": dateto}

        try:
            # The request itself is now inside the try block, so the session is
            # released even when the HTTP call (not only the JSON decode) fails.
            # The original `except Exception: raise` was a no-op and is dropped.
            req = self.session.post("{}/reports/uptime/get".format(self.url), data=data)
            return req.json()
        finally:
            self.logout()
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.rest;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.jasig.portal.fragment.subscribe.IUserFragmentSubscription;
import org.jasig.portal.fragment.subscribe.dao.IUserFragmentSubscriptionDao;
import org.jasig.portal.layout.dlm.ConfigurationLoader;
import org.jasig.portal.layout.dlm.Evaluator;
import org.jasig.portal.layout.dlm.FragmentDefinition;
import org.jasig.portal.layout.dlm.providers.SubscribedTabEvaluatorFactory;
import org.jasig.portal.security.IAuthorizationPrincipal;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.services.AuthorizationService;
import org.jasig.portal.user.IUserInstance;
import org.jasig.portal.user.IUserInstanceManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.MessageSource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.support.RequestContextUtils;
/**
 * Returns JSON representing the list of subscribable fragments for the
 * currently-authenticated user.
 *
 * @author Mary Hunt
 * @author Jen Bourey
 * @version $Revision$ $Date$
 */
@Controller
public class SubscribableTabsRESTController {

    @Autowired
    @Qualifier("userInstanceManager")
    private IUserInstanceManager userInstanceManager;

    public void setUserInstanceManager(IUserInstanceManager userInstanceManager) {
        this.userInstanceManager = userInstanceManager;
    }

    private IUserFragmentSubscriptionDao userFragmentSubscriptionDao;

    @Autowired(required = true)
    public void setUserFragmentSubscriptionDao(IUserFragmentSubscriptionDao userFragmentSubscriptionDao) {
        this.userFragmentSubscriptionDao = userFragmentSubscriptionDao;
    }

    @Autowired
    @Qualifier("dlmConfigurationLoader")
    private ConfigurationLoader configurationLoader;

    @Autowired
    private MessageSource messageSource;

    /**
     * Builds the model for the subscribable-tabs JSON view: every DLM fragment
     * the authenticated user is allowed to subscribe to, each flagged with
     * whether the user is already actively subscribed.
     *
     * @param request current HTTP request, used to resolve the user and locale
     * @return a "json" view whose model contains a "fragments" list of
     *         {@link SubscribableFragment} beans
     */
    @RequestMapping(value = "/subscribableTabs.json", method = RequestMethod.GET)
    public ModelAndView getSubscriptionList(HttpServletRequest request) {
        Map<String, Object> model = new HashMap<String, Object>();

        // Retrieve the IPerson and IAuthorizationPrincipal for the currently
        // authenticated user.
        IUserInstance userInstance = userInstanceManager.getUserInstance(request);
        IPerson person = userInstance.getPerson();
        AuthorizationService authService = AuthorizationService.instance();
        IAuthorizationPrincipal principal = authService.newPrincipal(person.getUserName(), IPerson.class);

        // Build the set of owner IDs for the fragments to which the
        // authenticated user is currently (actively) subscribed.
        List<IUserFragmentSubscription> subscriptions = userFragmentSubscriptionDao
                .getUserFragmentInfo(person);
        Set<String> subscribedOwners = new HashSet<String>();
        for (IUserFragmentSubscription subscription : subscriptions) {
            if (subscription.isActive()) {
                subscribedOwners.add(subscription.getFragmentOwner());
            }
        }

        // Iterate through the list of all currently defined DLM fragments.
        // Any subscribable fragment is transformed into a JSON-friendly bean
        // and added to the model.
        final List<SubscribableFragment> jsonFragments = new ArrayList<SubscribableFragment>();
        final List<FragmentDefinition> fragmentDefinitions = configurationLoader.getFragments();
        final Locale locale = RequestContextUtils.getLocale(request);
        for (FragmentDefinition fragmentDefinition : fragmentDefinitions) {
            // Fix: isSubscribable() already performs the FRAGMENT_SUBSCRIBE
            // permission check for this owner, so the second, identical
            // hasPermission() call that used to follow here was redundant
            // and has been removed.
            if (isSubscribable(fragmentDefinition, principal)) {
                String owner = fragmentDefinition.getOwnerId();
                boolean subscribed = subscribedOwners.contains(owner);
                final String name = getMessage("fragment." + owner + ".name", fragmentDefinition.getName(), locale);
                final String description = getMessage("fragment." + owner + ".description", fragmentDefinition.getDescription(), locale);
                jsonFragments.add(new SubscribableFragment(name, description, owner, subscribed));
            }
        }

        model.put("fragments", jsonFragments);
        return new ModelAndView("json", model);
    }

    /**
     * A fragment is subscribable when it declares an evaluator produced by
     * {@code SubscribedTabEvaluatorFactory} and the principal holds the
     * FRAGMENT_SUBSCRIBE permission for the fragment's owner.
     */
    protected boolean isSubscribable(FragmentDefinition definition, IAuthorizationPrincipal principal) {
        String owner = definition.getOwnerId();
        for (Evaluator evaluator : definition.getEvaluators()) {
            if (evaluator.getFactoryClass().equals(SubscribedTabEvaluatorFactory.class)) {
                return principal.hasPermission("UP_FRAGMENT", "FRAGMENT_SUBSCRIBE", owner);
            }
        }
        return false;
    }

    /** Resolves a localized message for {@code key}, falling back to {@code defaultName}. */
    protected String getMessage(String key, String defaultName, Locale locale) {
        return messageSource.getMessage(key, new Object[] {}, defaultName, locale);
    }

    /**
     * Convenience class for representing fragment information in JSON.
     */
    public class SubscribableFragment {

        private String name = null;
        private String ownerID = null;
        private String description;
        private boolean subscribed;

        public SubscribableFragment(String name, String description, String ownerId, boolean subscribed) {
            this.name = name;
            this.description = description;
            this.ownerID = ownerId;
            this.subscribed = subscribed;
        }

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public String getOwnerID() {
            return ownerID;
        }

        public void setOwnerID(String ownerID) {
            this.ownerID = ownerID;
        }

        public String getDescription() {
            return description;
        }

        public void setDescription(String description) {
            this.description = description;
        }

        public boolean isSubscribed() {
            return subscribed;
        }

        public void setSubscribed(boolean subscribed) {
            this.subscribed = subscribed;
        }
    }
}
| ASU-Capstone/uPortal-Forked | uportal-war/src/main/java/org/jasig/portal/rest/SubscribableTabsRESTController.java | Java | apache-2.0 | 8,245 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.text;
import com.intellij.openapi.util.text.CharSequenceWithStringHash;
import com.intellij.openapi.util.text.Strings;
import org.jetbrains.annotations.NotNull;
/**
 * A {@link CharSequence} implementation backed directly by a (sub)range of a
 * {@code char[]}, avoiding the copy a {@link String} would require.
 * The backing array is NOT copied; callers must not mutate it while this
 * sequence is in use.
 */
public class CharArrayCharSequence implements CharSequenceBackedByArray, CharSequenceWithStringHash {
  // Backing storage plus the [myStart, myEnd) window this sequence exposes.
  protected final char[] myChars;
  protected final int myStart;
  protected final int myEnd;

  /** Wraps the whole array. */
  public CharArrayCharSequence(char @NotNull ... chars) {
    this(chars, 0, chars.length);
  }

  /**
   * Wraps the half-open range {@code [start, end)} of {@code chars}.
   *
   * @throws IndexOutOfBoundsException if the range is invalid; validating here
   *         lets later accessors skip per-call bounds checks.
   */
  public CharArrayCharSequence(char @NotNull [] chars, int start, int end) {
    if (start < 0 || end > chars.length || start > end) {
      throw new IndexOutOfBoundsException("chars.length:" + chars.length + ", start:" + start + ", end:" + end);
    }
    myChars = chars;
    myStart = start;
    myEnd = end;
  }

  @Override
  public final int length() {
    return myEnd - myStart;
  }

  // NOTE(review): deliberately no range check (hot path) — an index >= length()
  // silently reads past the logical end of this sequence; callers must pass
  // valid indices.
  @Override
  public final char charAt(int index) {
    return myChars[index + myStart];
  }

  // Shares the backing array with the sub-sequence instead of copying;
  // returns this instance unchanged when the requested range is the whole sequence.
  @NotNull
  @Override
  public CharSequence subSequence(int start, int end) {
    return start == 0 && end == length() ? this : new CharArrayCharSequence(myChars, myStart + start, myStart + end);
  }

  @Override
  @NotNull
  public String toString() {
    return new String(myChars, myStart, myEnd - myStart); //TODO StringFactory
  }

  // NOTE(review): when myStart == 0 the raw backing array is returned even if
  // it is longer than length(); callers are expected to respect length().
  @Override
  public char @NotNull [] getChars() {
    if (myStart == 0) return myChars;
    char[] chars = new char[length()];
    getChars(chars, 0);
    return chars;
  }

  /** Copies this sequence's characters into {@code dst} starting at {@code dstOffset}. */
  @Override
  public void getChars(char @NotNull [] dst, int dstOffset) {
    System.arraycopy(myChars, myStart, dst, dstOffset, length());
  }

  // Equality requires the exact same class (not any CharSequence) plus equal
  // length and character content.
  @Override
  public boolean equals(Object anObject) {
    if (this == anObject) {
      return true;
    }
    if (anObject == null || getClass() != anObject.getClass() || length() != ((CharSequence)anObject).length()) {
      return false;
    }
    return CharArrayUtil.regionMatches(myChars, myStart, myEnd, (CharSequence)anObject);
  }

  /**
   * See {@link java.io.Reader#read(char[], int, int)};
   * returns -1 when {@code start} is at or beyond the end of this sequence.
   */
  public int readCharsTo(int start, char[] cbuf, int off, int len) {
    final int readChars = Math.min(len, length() - start);
    if (readChars <= 0) return -1;
    System.arraycopy(myChars, myStart + start, cbuf, off, readChars);
    return readChars;
  }

  // Lazily computed String-compatible hash; the benign race (multiple threads
  // may compute the same value) is the standard String.hashCode caching idiom.
  private transient int hash;

  @Override
  public int hashCode() {
    int h = hash;
    if (h == 0) {
      hash = h = Strings.stringHashCode(myChars, myStart, myEnd);
    }
    return h;
  }
}
| siosio/intellij-community | platform/util/strings/src/com/intellij/util/text/CharArrayCharSequence.java | Java | apache-2.0 | 2,636 |
module Api
  class TenantsController < BaseController
    # Attributes that clients may never supply when creating or updating a
    # tenant (they are server-managed).
    INVALID_TENANT_ATTRS = %w(id href ancestry).freeze

    include Subcollections::Tags
    include Subcollections::Quotas

    # Creates a tenant from the request body, after rejecting server-managed
    # attributes and resolving an optional "parent" reference.
    def create_resource(_type, _id, data)
      bad_attrs = data_includes_invalid_attrs(data)

      if bad_attrs.present?
        raise BadRequestError,
              "Attribute(s) #{bad_attrs} should not be specified for creating a new tenant resource"
      end

      parse_set_parent(data)

      tenant = Tenant.create(data)
      if tenant.invalid?
        raise BadRequestError, "Failed to add a new tenant resource - #{tenant.errors.full_messages.join(', ')}"
      end
      tenant
    end

    # Updates a tenant, applying the same attribute screening and parent
    # resolution as +create_resource+ before delegating to the base handler.
    def edit_resource(type, id, data)
      bad_attrs = data_includes_invalid_attrs(data)

      if bad_attrs.present?
        raise BadRequestError, "Attributes #{bad_attrs} should not be specified for updating a tenant resource"
      end

      parse_set_parent(data)
      super
    end

    private

    # Replaces a "parent" reference in +data+ with the Tenant it resolves to;
    # leaves +data+ untouched when no parent was supplied.
    def parse_set_parent(data)
      parent = parse_fetch_tenant(data.delete("parent"))
      data["parent"] = parent if parent
    end

    # Returns a comma-separated list of disallowed attributes present in
    # +data+, or nil when +data+ itself is nil.
    def data_includes_invalid_attrs(data)
      # Fix: dropped the no-op `.compact` (select over string keys never
      # yields nil) and expressed the filter as a set intersection, which
      # preserves the key order of +data+.
      (data.keys & INVALID_TENANT_ATTRS).join(", ") if data
    end
  end
end
| fbladilo/manageiq | app/controllers/api/tenants_controller.rb | Ruby | apache-2.0 | 1,268 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.impl.source.xml;
import com.intellij.lang.html.HTMLLanguage;
import com.intellij.lang.xhtml.XHTMLLanguage;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.psi.impl.source.html.ScriptSupportUtil;
import com.intellij.psi.scope.PsiScopeProcessor;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.ProjectScope;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.xml.XmlDocument;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import org.jetbrains.annotations.NotNull;
/**
 * Default {@link XmlFile} PSI implementation, also used for HTML/XHTML files
 * (where script support is additionally wired in).
 */
public class XmlFileImpl extends PsiFileImpl implements XmlFile {
  public XmlFileImpl(FileViewProvider viewProvider, IElementType elementType) {
    super(elementType, elementType, viewProvider);
  }

  // Scans direct children for the first XmlDocument; returns null when the
  // file has none (e.g. while the tree is incomplete).
  @Override
  public XmlDocument getDocument() {
    PsiElement child = getFirstChild();
    while (child != null) {
      if (child instanceof XmlDocument) return (XmlDocument)child;
      child = child.getNextSibling();
    }

    return null;
  }

  /** Root tag of the document, or null when there is no document/root tag. */
  @Override
  public XmlTag getRootTag() {
    XmlDocument document = getDocument();
    return document == null ? null : document.getRootTag();
  }

  // Delegates element processing to the document; a file without a document
  // trivially reports success.
  @Override
  public boolean processElements(PsiElementProcessor processor, PsiElement place){
    final XmlDocument document = getDocument();
    return document == null || document.processElements(processor, place);
  }

  @Override
  public void accept(@NotNull PsiElementVisitor visitor) {
    if (visitor instanceof XmlElementVisitor) {
      ((XmlElementVisitor)visitor).visitXmlFile(this);
    }
    else {
      visitor.visitFile(this);
    }
  }

  @Override
  public String toString() {
    return "XmlFile:" + getName();
  }

  // Lazily resolved file type, cached until clearCaches(); falls back from the
  // language's associated type to the virtual file's type, then to the name.
  private FileType myType = null;

  @Override
  @NotNull
  public FileType getFileType() {
    if (myType == null) {
      myType = getLanguage().getAssociatedFileType();
      if (myType == null) {
        VirtualFile virtualFile = getOriginalFile().getVirtualFile();
        myType = virtualFile == null ? FileTypeRegistry.getInstance().getFileTypeByFileName(getName()) : virtualFile.getFileType();
      }
    }
    return myType;
  }

  @Override
  public void clearCaches() {
    super.clearCaches();
    // HTML/XHTML files additionally cache script data that must be dropped.
    if (isWebFileType()) {
      ScriptSupportUtil.clearCaches(this);
    }
  }

  // True for HTML/XHTML, which get extra script-declaration handling below.
  private boolean isWebFileType() {
    return getLanguage() == XHTMLLanguage.INSTANCE || getLanguage() == HTMLLanguage.INSTANCE;
  }

  // Standard declaration processing, extended with script declarations for
  // web file types; short-circuits as soon as a processor stops the walk.
  @Override
  public boolean processDeclarations(@NotNull PsiScopeProcessor processor, @NotNull ResolveState state, PsiElement lastParent, @NotNull PsiElement place) {
    return super.processDeclarations(processor, state, lastParent, place) &&
           (!isWebFileType() || ScriptSupportUtil.processDeclarations(this, processor, state, lastParent, place));
  }

  // XML references may resolve anywhere in the project, not just in the
  // file's module scope.
  @NotNull
  @Override
  public GlobalSearchScope getFileResolveScope() {
    return ProjectScope.getAllScope(getProject());
  }

  @Override
  public boolean ignoreReferencedElementAccessibility() {
    return true;
  }
}
| smmribeiro/intellij-community | xml/xml-psi-impl/src/com/intellij/psi/impl/source/xml/XmlFileImpl.java | Java | apache-2.0 | 3,408 |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.core.processors;
/**
 * Immutable value object pairing a fully-qualified class name with the
 * identifier assigned to it during annotation processing.
 */
public class ProcessingEntity {

    /** Fully-qualified name of the processed class. */
    private final String className;

    /** Identifier associated with that class. */
    private final String id;

    /**
     * @param className fully-qualified class name being processed
     * @param id        identifier to associate with the class
     */
    public ProcessingEntity(final String className,
                            final String id) {
        this.className = className;
        this.id = id;
    }

    /** @return the fully-qualified class name */
    public String getClassName() {
        return className;
    }

    /** @return the identifier for the class */
    public String getId() {
        return id;
    }
}
| romartin/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-core/kie-wb-common-stunner-processors/src/main/java/org/kie/workbench/common/stunner/core/processors/ProcessingEntity.java | Java | apache-2.0 | 1,066 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.history.integration;
import com.intellij.history.LocalHistoryAction;
import com.intellij.openapi.util.NlsContexts;
/**
 * {@link LocalHistoryAction} that delegates start/finish bookkeeping to a
 * {@link LocalHistoryEventDispatcher}, recording the action under a
 * user-visible name.
 */
public class LocalHistoryActionImpl implements LocalHistoryAction {
  private final @NlsContexts.Label String myName;
  private final LocalHistoryEventDispatcher myDispatcher;

  public LocalHistoryActionImpl(LocalHistoryEventDispatcher dispatcher, @NlsContexts.Label String name) {
    myDispatcher = dispatcher;
    myName = name;
  }

  /** Notifies the dispatcher that the tracked action has begun. */
  public void start() {
    myDispatcher.startAction();
  }

  /** Notifies the dispatcher that the action is complete, labeling it with the action name. */
  @Override
  public void finish() {
    myDispatcher.finishAction(myName);
  }
}
| siosio/intellij-community | platform/lvcs-impl/src/com/intellij/history/integration/LocalHistoryActionImpl.java | Java | apache-2.0 | 743 |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.lookup;
import com.google.common.collect.ImmutableMap;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.core.ClassNamesResourceConfig;
import com.sun.jersey.spi.container.servlet.WebComponent;
import com.sun.jersey.spi.inject.SingletonTypeInjectableProvider;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;
import com.sun.jersey.test.framework.spi.container.TestContainerFactory;
import com.sun.jersey.test.framework.spi.container.grizzly2.GrizzlyTestContainerFactory;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import javax.ws.rs.core.Context;
import javax.ws.rs.ext.Provider;
/**
 * Jersey integration test for {@code LookupIntrospectionResource}: spins up an
 * in-memory Grizzly container with a mocked {@link LookupReferencesManager}
 * and exercises the keys/values/map introspection endpoints over HTTP.
 */
public class LookupIntrospectionResourceImplTest extends JerseyTest
{
  // Shared mock injected into the resource via MockTodoServiceProvider; it is
  // static because Jersey instantiates the provider reflectively.
  static LookupReferencesManager lookupReferencesManager = EasyMock.createMock(LookupReferencesManager.class);

  @Before
  public void setUp() throws Exception
  {
    super.setUp();
    // Re-program the mock for each test: "lookupId1" resolves to a fixed
    // two-entry map-backed lookup.
    EasyMock.reset(lookupReferencesManager);
    LookupExtractorFactory lookupExtractorFactory1 = new MapLookupExtractorFactory(ImmutableMap.of(
        "key",
        "value",
        "key2",
        "value2"
    ), false);
    EasyMock.expect(lookupReferencesManager.get("lookupId1")).andReturn(lookupExtractorFactory1).anyTimes();
    EasyMock.replay(lookupReferencesManager);
  }

  /** Makes the static mock injectable wherever the resource asks for a LookupReferencesManager. */
  @Provider
  public static class MockTodoServiceProvider extends
      SingletonTypeInjectableProvider<Context, LookupReferencesManager>
  {
    public MockTodoServiceProvider()
    {
      super(LookupReferencesManager.class, lookupReferencesManager);
    }
  }

  // Registers the resource, the mock provider, and the introspection handler
  // with the embedded test container by class name.
  public LookupIntrospectionResourceImplTest()
  {
    super(new WebAppDescriptor.Builder().initParam(
        WebComponent.RESOURCE_CONFIG_CLASS,
        ClassNamesResourceConfig.class.getName()
    )
              .initParam(
                  ClassNamesResourceConfig.PROPERTY_CLASSNAMES,
                  LookupIntrospectionResource.class.getName()
                  + ';'
                  + MockTodoServiceProvider.class.getName()
                  + ';'
                  + LookupIntrospectHandler.class.getName()
              )
              .build());
  }

  @Override
  protected TestContainerFactory getTestContainerFactory()
  {
    // Grizzly runs the test container in-process, no external server needed.
    return new GrizzlyTestContainerFactory();
  }

  @Test
  public void testGetKey()
  {
    WebResource r = resource().path("/druid/v1/lookups/introspect/lookupId1/keys");
    String s = r.get(String.class);
    Assert.assertEquals("[key, key2]", s);
  }

  @Test
  public void testGetValue()
  {
    WebResource r = resource().path("/druid/v1/lookups/introspect/lookupId1/values");
    String s = r.get(String.class);
    Assert.assertEquals("[value, value2]", s);
  }

  @Test
  public void testGetMap()
  {
    WebResource r = resource().path("/druid/v1/lookups/introspect/lookupId1/");
    String s = r.get(String.class);
    Assert.assertEquals("{\"key\":\"value\",\"key2\":\"value2\"}", s);
  }
}
| tubemogul/druid | server/src/test/java/io/druid/query/lookup/LookupIntrospectionResourceImplTest.java | Java | apache-2.0 | 4,150 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.documentation.docstrings;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.jetbrains.python.psi.PyIndentUtil;
import com.jetbrains.python.toolbox.Substring;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Collects textual modifications (insert/replace/remove) against a parsed
 * docstring and applies them in a single pass in {@link #getDocStringText()}.
 * Offsets always refer to the ORIGINAL docstring content; modifications are
 * applied back-to-front so earlier offsets stay valid.
 *
 * @author Mikhail Golubev
 */
public abstract class DocStringUpdater<T extends DocStringLineParser> {
  protected final T myOriginalDocString;
  private final StringBuilder myBuilder;
  // Pending edits; nothing is mutated until getDocStringText() runs.
  private final List<Modification> myUpdates = new ArrayList<>();
  // Smallest indentation any content line is allowed to have.
  protected final String myMinContentIndent;

  public DocStringUpdater(@NotNull T docString, @NotNull String minContentIndent) {
    myBuilder = new StringBuilder(docString.getDocStringContent().getSuperString());
    myOriginalDocString = docString;
    myMinContentIndent = minContentIndent;
  }

  /** Schedules replacement of {@code range} (original offsets) with {@code text}. */
  protected final void replace(@NotNull TextRange range, @NotNull String text) {
    myUpdates.add(new Modification(range, text));
  }

  protected final void replace(int startOffset, int endOffset, @NotNull String text) {
    replace(new TextRange(startOffset, endOffset), text);
  }

  /** Schedules insertion of {@code text} at {@code offset} (zero-length replace). */
  protected final void insert(int offset, @NotNull String text) {
    replace(offset, offset, text);
  }

  /** Inserts {@code text} on a new line directly after line {@code lineNumber}. */
  protected final void insertAfterLine(int lineNumber, @NotNull String text) {
    final Substring line = myOriginalDocString.getLines().get(lineNumber);
    insert(line.getEndOffset(), '\n' + text);
  }

  protected final void remove(int startOffset, int endOffset) {
    replace(startOffset, endOffset, "");
  }

  /**
   * Removes whole lines.
   *
   * @param startLine inclusive
   * @param endLine exclusive
   */
  protected final void removeLines(int startLine, int endLine) {
    final List<Substring> lines = myOriginalDocString.getLines();
    final int startOffset = lines.get(startLine).getStartOffset();
    // For the last line there is no following line start to cut up to, so cut
    // to the end of the final removed line instead.
    final int endOffset = endLine < lines.size() ? lines.get(endLine).getStartOffset() : lines.get(endLine - 1).getEndOffset();
    remove(startOffset, endOffset);
  }

  /** Removes lines [startLine, endLine) plus any blank lines that follow. */
  protected final void removeLinesAndSpacesAfter(int startLine, int endLine) {
    removeLines(startLine, skipEmptyLines(endLine));
  }

  private int skipEmptyLines(int startLine) {
    return Math.min(myOriginalDocString.consumeEmptyLines(startLine), myOriginalDocString.getLineCount() - 1);
  }

  protected final void removeLine(int line) {
    removeLines(line, line + 1);
  }

  /** Inserts {@code text} on its own line directly before line {@code lineNumber}. */
  protected final void insertBeforeLine(int lineNumber, @NotNull String text) {
    final Substring line = myOriginalDocString.getLines().get(lineNumber);
    insert(line.getStartOffset(), text + '\n');
  }

  /**
   * Applies all scheduled modifications and returns the resulting docstring
   * text. Updates are applied in reverse offset order so that each edit's
   * original offsets remain valid.
   */
  @NotNull
  public final String getDocStringText() {
    beforeApplyingModifications();
    // Move closing quotes to the next line, if new lines are going to be inserted
    if (myOriginalDocString.getLineCount() == 1 && !myUpdates.isEmpty()) {
      insertAfterLine(0, myMinContentIndent);
    }
    // If several updates insert in one place (e.g. new field), insert them in backward order,
    // so the first added is placed above
    Collections.reverse(myUpdates);
    myUpdates.sort(Collections.reverseOrder());
    for (final Modification update : myUpdates) {
      final TextRange updateRange = update.range;
      if (updateRange.getStartOffset() == updateRange.getEndOffset()) {
        myBuilder.insert(updateRange.getStartOffset(), update.text);
      }
      else {
        myBuilder.replace(updateRange.getStartOffset(), updateRange.getEndOffset(), update.text);
      }
    }
    return myBuilder.toString();
  }

  /** Hook for subclasses to queue final edits before the batch is applied. */
  protected void beforeApplyingModifications() {
  }

  @NotNull
  public T getOriginalDocString() {
    return myOriginalDocString;
  }

  /** Indent of {@code lineNum}, but never shallower than {@link #myMinContentIndent}. */
  @NotNull
  protected String getLineIndent(int lineNum) {
    final String lastLineIndent = myOriginalDocString.getLineIndent(lineNum);
    if (PyIndentUtil.getLineIndentSize(lastLineIndent) < PyIndentUtil.getLineIndentSize(myMinContentIndent)) {
      return myMinContentIndent;
    }
    return lastLineIndent;
  }

  protected int getLineIndentSize(int lineNum) {
    return PyIndentUtil.getLineIndentSize(getLineIndent(lineNum));
  }

  /** Index of the last non-blank line, or 0 when every line is blank. */
  protected int findLastNonEmptyLine() {
    for (int i = myOriginalDocString.getLineCount() - 1; i >= 0; i--) {
      if (!StringUtil.isEmptyOrSpaces(myOriginalDocString.getLine(i))) {
        return i;
      }
    }
    return 0;
  }

  public abstract void addParameter(@NotNull String name, @Nullable String type);

  public abstract void addReturnValue(@Nullable String type);

  public abstract void removeParameter(@NotNull String name);

  /** A single pending edit: replace {@code range} (original offsets) with {@code text}. */
  private static class Modification implements Comparable<Modification> {
    @NotNull final TextRange range;
    @NotNull final String text;

    Modification(@NotNull TextRange range, @NotNull String newText) {
      this.range = range;
      this.text = newText;
    }

    @Override
    public int compareTo(Modification o) {
      // Fix: compare explicitly instead of subtracting offsets, which can
      // overflow for extreme values and violate the Comparable contract.
      return Integer.compare(range.getStartOffset(), o.range.getStartOffset());
    }
  }
}
| jwren/intellij-community | python/python-psi-impl/src/com/jetbrains/python/documentation/docstrings/DocStringUpdater.java | Java | apache-2.0 | 5,669 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.tasks.jira.jql;
import com.intellij.lang.ASTNode;
import com.intellij.lang.ParserDefinition;
import com.intellij.lang.PsiParser;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.tree.IFileElementType;
import com.intellij.psi.tree.TokenSet;
import org.jetbrains.annotations.NotNull;
/**
 * Wires the JQL lexer, parser, and PSI element factory into the platform's
 * {@link ParserDefinition} extension point.
 *
 * @author Mikhail Golubev
 */
public class JqlParserDefinition implements ParserDefinition {
  private static final Logger LOG = Logger.getInstance(JqlParserDefinition.class);

  @Override
  public @NotNull Lexer createLexer(Project project) {
    return new JqlLexer();
  }

  @Override
  public @NotNull PsiParser createParser(Project project) {
    return new JqlParser();
  }

  @Override
  public @NotNull IFileElementType getFileNodeType() {
    return JqlElementTypes.FILE;
  }

  @Override
  public @NotNull TokenSet getWhitespaceTokens() {
    return JqlTokenTypes.WHITESPACES;
  }

  @Override
  public @NotNull TokenSet getCommentTokens() {
    // JQL has no comment syntax.
    return TokenSet.EMPTY;
  }

  @Override
  public @NotNull TokenSet getStringLiteralElements() {
    return TokenSet.create(JqlTokenTypes.STRING_LITERAL);
  }

  @Override
  public @NotNull PsiElement createElement(ASTNode node) {
    // Generated factory maps each element type to its PSI implementation.
    return JqlElementTypes.Factory.createElement(node);
  }

  @Override
  public @NotNull PsiFile createFile(@NotNull FileViewProvider viewProvider) {
    return new JqlFile(viewProvider);
  }

  @Override
  public @NotNull SpaceRequirements spaceExistenceTypeBetweenTokens(ASTNode left, ASTNode right) {
    return SpaceRequirements.MAY;
  }
}
| smmribeiro/intellij-community | plugins/tasks/tasks-core/jira/src/com/intellij/tasks/jira/jql/JqlParserDefinition.java | Java | apache-2.0 | 1,904 |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.scope;
import com.intellij.openapi.util.Key;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashMap;
import java.util.Map;
/**
 * Base processor which stores hints in a map keyed by their {@link Key}.
 * Because {@link #hint} is the only writer and it pairs a {@code Key<H>}
 * with an {@code H} value, lookups by key are type-safe by construction.
 */
public abstract class ProcessorWithHints implements PsiScopeProcessor {
  private final Map<Key<?>, Object> myHints = new HashMap<>();

  /** Registers {@code hint} under {@code key}; the only mutation point of the map. */
  protected final <H> void hint(@NotNull Key<H> key, @NotNull H hint) {
    myHints.put(key, hint);
  }

  @Nullable
  @Override
  // Fix: the unchecked cast previously produced an unsuppressed compiler
  // warning; it is safe because hint() guarantees key/value type pairing.
  @SuppressWarnings("unchecked")
  public <T> T getHint(@NotNull Key<T> hintKey) {
    return (T)myHints.get(hintKey);
  }
}
| siosio/intellij-community | platform/core-impl/src/com/intellij/psi/scope/ProcessorWithHints.java | Java | apache-2.0 | 758 |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.jps.devkit.threadingModelHelper;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jps.incremental.BuilderService;
import org.jetbrains.jps.incremental.ModuleLevelBuilder;
import java.util.Collections;
import java.util.List;
/**
 * Registers the threading-model-helper instrumenting builder with the JPS
 * build process.
 */
public class TMHBuilderService extends BuilderService {
  @Override
  public @NotNull List<? extends ModuleLevelBuilder> createModuleLevelBuilders() {
    // A single module-level builder performs the TMH instrumentation.
    return Collections.singletonList(new TMHInstrumentingBuilder());
  }
}
| jwren/intellij-community | plugins/devkit/jps-plugin/src/org/jetbrains/jps/devkit/threadingModelHelper/TMHBuilderService.java | Java | apache-2.0 | 632 |
#!/usr/bin/env python
#
# Copyright (c) 2016 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Yun, Liu<yunx.liu@intel.com>
import os
import sys
import stat
import shutil
import urllib2
import subprocess
import time
import json
# Absolute path of this script and the directory that holds the test
# configuration files (platforms.txt, arch.txt, mode.txt, VERSION).
SCRIPT_PATH = os.path.realpath(__file__)
ConstPath = os.path.dirname(SCRIPT_PATH)
# Default wall-clock limit (seconds) for commands run via getstatusoutput().
DEFAULT_CMD_TIMEOUT = 600
def setUp():
    """Populate the module-level test configuration globals.

    Reads platforms.txt / arch.txt / mode.txt / VERSION from ConstPath and
    the DEVICE_ID environment variable, then derives the host command
    prefix, target package type, Android ABI targets, crosswalk mode, and
    the x86/arm device ids. Exits the process on any misconfiguration.
    """
    global device_x86, device_arm, crosswalkVersion, ARCH_ARM, ARCH_X86, PLATFORMS, HOST_PREFIX, SHELL_FLAG, MODE, ANDROID_MODE, BIT, TARGETS, apptools, apktype
    ARCH_ARM = ""
    ARCH_X86 = ""
    BIT = "32"
    device_x86 = ""
    device_arm = ""
    TARGETS = ""
    # Target platform (android/ios/deb/windows) comes from platforms.txt.
    host = open(ConstPath + "/platforms.txt", 'r')
    PLATFORMS = host.read().strip("\n\t")
    if PLATFORMS != "windows":
        HOST_PREFIX = ""
        SHELL_FLAG = "True"
    else:
        # On Windows the tool must be launched through node.
        HOST_PREFIX = "node "
        SHELL_FLAG = "False"
    host.close()
    if HOST_PREFIX != "":
        apptools = "%crosswalk-pkg%"
    else:
        apptools = "crosswalk-pkg"
    # Sanity check that the crosswalk-pkg tool is on the PATH.
    if os.system(HOST_PREFIX + apptools) != 0:
        print "crosswalk-pkg is not work, Please set the env"
        sys.exit(1)
    # Package extension produced for the chosen platform.
    if PLATFORMS == "android":
        apktype = ".apk"
    elif PLATFORMS == "ios":
        apktype = ".ipa"
    elif PLATFORMS == "deb":
        apktype = ".deb"
    else:
        apktype = ".msi"
    if PLATFORMS == "android":
        # arch.txt selects the ABIs (x86/arm) and bitness to build for.
        fp = open(ConstPath + "/arch.txt", 'r')
        fp_arch = fp.read().strip("\n\t")
        if "x86" in fp_arch:
            ARCH_X86 = "x86"
        if "arm" in fp_arch:
            ARCH_ARM = "arm"
        if "64" in fp_arch:
            BIT = "64"
        fp.close()
        # Map the requested ABIs to crosswalk target strings.
        if BIT == "32":
            if ARCH_X86 == "x86" and ARCH_ARM == "":
                TARGETS = "x86"
            elif ARCH_ARM == "arm" and ARCH_X86 == "":
                TARGETS = "armeabi-v7a"
            elif ARCH_ARM == "arm" and ARCH_X86 == "x86":
                TARGETS = "armeabi-v7a x86"
        else:
            if ARCH_X86 == "x86" and ARCH_ARM == "":
                TARGETS = "x86_64"
            elif ARCH_ARM == "arm" and ARCH_X86 == "":
                TARGETS = "arm64-v8a"
            elif ARCH_ARM == "arm" and ARCH_X86 == "x86":
                TARGETS = "arm64-v8a x86_64"
        # mode.txt selects the crosswalk packaging mode (embedded/shared/lite).
        mode = open(ConstPath + "/mode.txt", 'r')
        mode_type = mode.read().strip("\n\t")
        if mode_type == "embedded":
            MODE = ""
            ANDROID_MODE = "embedded"
        elif mode_type == "shared":
            MODE = " --android-shared"
            ANDROID_MODE = "shared"
        else:
            MODE = " --android-lite"
            ANDROID_MODE = "lite"
        mode.close()
    device = ""
    if PLATFORMS == "android":
        # DEVICE_ID holds one adb serial, or two comma-separated serials when
        # both x86 and arm devices are required.
        #device = "Medfield61809467,066e11baf0ecb889"
        device = os.environ.get('DEVICE_ID')
        if not device:
            print ("Get DEVICE_ID env error\n")
            sys.exit(1)
        if device:
            if ARCH_ARM != "" and ARCH_X86 != "":
                # Both ABIs requested: classify each serial by its CPU ABI.
                if "," in device:
                    if getDeviceCpuAbi(device.split(',')[0]) == "x86":
                        device_x86 = device.split(',')[0]
                    else:
                        device_arm = device.split(',')[0]
                    if getDeviceCpuAbi(device.split(',')[1]) == "x86":
                        device_x86 = device.split(',')[1]
                    else:
                        device_arm = device.split(',')[1]
                    if not device_x86 or not device_arm:
                        print ("Need x86 and arm architecture devices id\n")
                        sys.exit(1)
                else:
                    print ("Need x86 and arm architecture devices id\n")
                    sys.exit(1)
            elif ARCH_ARM != "" and ARCH_X86 == "":
                if getDeviceCpuAbi(device) == "arm":
                    device_arm = device
                if not device_arm:
                    print ("Need arm architecture devices id\n")
                    sys.exit(1)
            elif ARCH_ARM == "" and ARCH_X86 != "":
                if getDeviceCpuAbi(device) == "x86":
                    device_x86 = device
                if not device_x86:
                    print ("Need x86 architecture devices id\n")
                    sys.exit(1)
    if PLATFORMS == "android" or PLATFORMS == "windows":
        # VERSION lives next to this script in packaged runs, or two levels
        # up in a source checkout.
        if not os.path.exists(ConstPath + "/VERSION"):
            version_path = ConstPath + "/../../VERSION"
        else:
            version_path = ConstPath + "/VERSION"
        with open(version_path) as json_file:
            data = json.load(json_file)
            crosswalkVersion = data['main-version'].strip(os.linesep)
def getstatusoutput(cmd, time_out=DEFAULT_CMD_TIMEOUT):
    """Run *cmd* in a subprocess, streaming its output to stdout.

    Returns a ``(return_code, output_chunks)`` tuple on normal completion,
    or ``False`` when the command exceeds *time_out* seconds. The mixed
    return type is the legacy contract expected by callers, so it is kept.
    """
    pre_time = time.time()
    output = []
    cmd_return_code = 1
    cmd_proc = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=SHELL_FLAG)
    while True:
        # NOTE(review): stdout.read() blocks until EOF, so the timeout is
        # only noticed once the process stops producing output.
        output_line = cmd_proc.stdout.read()
        cmd_return_code = cmd_proc.poll()
        elapsed_time = time.time() - pre_time
        if cmd_return_code is None:
            if elapsed_time >= time_out:
                # Fix: the original called killProcesses(), which is not
                # defined anywhere in this module and raised NameError at
                # the exact moment a hung command needed to be reaped.
                cmd_proc.kill()
                return False
        elif output_line == '' and cmd_return_code is not None:
            break
        sys.stdout.write(output_line)
        sys.stdout.flush()
        output.append(output_line)
    return (cmd_return_code, output)
def getDeviceCpuAbi(device):
    """Return the CPU ABI family ("x86" or "arm") of an adb *device*.

    Queries ``adb shell getprop`` and scans the output for an
    ``[ro.product.cpu.abi]`` line mentioning x86; anything else is
    reported as "arm".
    """
    cmd = 'adb -s ' + device + ' shell getprop'
    (return_code, output) = getstatusoutput(cmd)
    # Fix: the original split on the literal two-character string '/n'
    # (instead of the newline escape '\n') and returned from the very first
    # chunk regardless of content; scan every line and only fall back to
    # "arm" after the whole output has been examined.
    for line in output[0].split('\n'):
        if "[ro.product.cpu.abi]" in line and "x86" in line:
            return "x86"
    return "arm"
def overwriteCopy(src, dest, symlinks=False, ignore=None):
    """Recursively copy the *src* directory onto *dest*, merging into any
    existing tree (unlike shutil.copytree, which requires dest to be absent).

    symlinks -- when True, recreate symbolic links instead of following them.
    ignore   -- optional callable with the shutil.copytree ignore signature.
    """
    if not os.path.exists(dest):
        os.makedirs(dest)
    shutil.copystat(src, dest)
    entries = os.listdir(src)
    if ignore:
        excluded = ignore(src, entries)
        entries = [name for name in entries if name not in excluded]
    for name in entries:
        src_path = os.path.join(src, name)
        dest_path = os.path.join(dest, name)
        if symlinks and os.path.islink(src_path):
            # Re-create the link itself rather than copying its target,
            # replacing any pre-existing entry at the destination.
            if os.path.lexists(dest_path):
                os.remove(dest_path)
            os.symlink(os.readlink(src_path), dest_path)
            try:
                link_stat = os.lstat(src_path)
                link_mode = stat.S_IMODE(link_stat.st_mode)
                os.lchmod(dest_path, link_mode)
            except Exception:
                # os.lchmod is unavailable on most platforms; best effort only.
                pass
        elif os.path.isdir(src_path):
            overwriteCopy(src_path, dest_path, symlinks, ignore)
        else:
            shutil.copy2(src_path, dest_path)
def doCopy(src_item=None, dest_item=None):
    """Copy a file or directory tree from src_item to dest_item.

    Directories are merged via overwriteCopy (symlinks preserved). For
    files, missing parent directories of dest_item are created first.
    Returns True on success, False on any error (deliberate best-effort
    contract: callers only check the boolean).
    """
    try:
        if os.path.isdir(src_item):
            overwriteCopy(src_item, dest_item, symlinks=True)
        else:
            dest_dir = os.path.dirname(dest_item)
            # Guard the empty-dirname case (bare filename destination),
            # which previously made makedirs('') raise and return False.
            if dest_dir and not os.path.exists(dest_dir):
                os.makedirs(dest_dir)
            shutil.copy2(src_item, dest_item)
    except Exception:
        return False
    return True
| ibelem/crosswalk-test-suite | apptools/apptools-manifest-tests/comm.py | Python | bsd-3-clause | 8,359 |
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.webui.jsptag;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.BodyTagSupport;
import javax.servlet.jsp.tagext.TagSupport;
/**
 * Tag for including a "sidebar" - a column on the right-hand side of the page.
 * Must be used within a dspace:layout tag.
 *
 * @author Peter Breton
 * @version $Revision$
 */
public class SidebarTag extends BodyTagSupport
{
    /** Creates a new sidebar tag. */
    public SidebarTag()
    {
        super();
    }

    /**
     * Hands the rendered body content to the enclosing layout tag, which
     * places it in the sidebar column instead of the main body.
     *
     * @return SKIP_BODY, since the layout tag emits the content itself
     * @throws JspException if there is no enclosing dspace:layout tag
     */
    public int doAfterBody() throws JspException
    {
        LayoutTag layout = (LayoutTag) TagSupport.findAncestorWithClass(this,
                LayoutTag.class);

        if (layout == null)
        {
            throw new JspException(
                    "Sidebar tag must be in an enclosing Layout tag");
        }

        layout.setSidebar(getBodyContent().getString());

        return SKIP_BODY;
    }
}
| jamie-dryad/dryad-repo | dspace-jspui/dspace-jspui-api/src/main/java/org/dspace/app/webui/jsptag/SidebarTag.java | Java | bsd-3-clause | 1,080 |
<?php
/**
* Base static class for performing query and update operations on the 'plugin_data' table.
*
*
*
* @package propel.generator.datawrapper.om
*/
abstract class BasePluginDataPeer
{
    /** the default database name for this class */
    const DATABASE_NAME = 'datawrapper';
    /** the table name for this class */
    const TABLE_NAME = 'plugin_data';
    /** the related Propel class for this table */
    const OM_CLASS = 'PluginData';
    /** the related TableMap class for this table */
    const TM_CLASS = 'PluginDataTableMap';
    /** The total number of columns. */
    const NUM_COLUMNS = 5;
    /** The number of lazy-loaded columns. */
    const NUM_LAZY_LOAD_COLUMNS = 0;
    /** The number of columns to hydrate (NUM_COLUMNS - NUM_LAZY_LOAD_COLUMNS) */
    const NUM_HYDRATE_COLUMNS = 5;
    /** the column name for the id field */
    const ID = 'plugin_data.id';
    /** the column name for the plugin_id field */
    const PLUGIN_ID = 'plugin_data.plugin_id';
    /** the column name for the stored_at field */
    const STORED_AT = 'plugin_data.stored_at';
    /** the column name for the key field */
    const KEY = 'plugin_data.key';
    /** the column name for the data field */
    const DATA = 'plugin_data.data';
    /** The default string format for model objects of the related table **/
    const DEFAULT_STRING_FORMAT = 'YAML';
    /**
     * An identity map to hold any loaded instances of PluginData objects.
     * This must be public so that other peer classes can access this when hydrating from JOIN
     * queries.
     * @var array PluginData[]
     */
    public static $instances = array();
    /**
     * holds an array of fieldnames
     *
     * first dimension keys are the type constants
     * e.g. PluginDataPeer::$fieldNames[PluginDataPeer::TYPE_PHPNAME][0] = 'Id'
     */
    protected static $fieldNames = array (
        BasePeer::TYPE_PHPNAME => array ('Id', 'PluginId', 'StoredAt', 'Key', 'Data', ),
        BasePeer::TYPE_STUDLYPHPNAME => array ('id', 'pluginId', 'storedAt', 'key', 'data', ),
        BasePeer::TYPE_COLNAME => array (PluginDataPeer::ID, PluginDataPeer::PLUGIN_ID, PluginDataPeer::STORED_AT, PluginDataPeer::KEY, PluginDataPeer::DATA, ),
        BasePeer::TYPE_RAW_COLNAME => array ('ID', 'PLUGIN_ID', 'STORED_AT', 'KEY', 'DATA', ),
        BasePeer::TYPE_FIELDNAME => array ('id', 'plugin_id', 'stored_at', 'key', 'data', ),
        BasePeer::TYPE_NUM => array (0, 1, 2, 3, 4, )
    );
    /**
     * holds an array of keys for quick access to the fieldnames array
     *
     * first dimension keys are the type constants
     * e.g. PluginDataPeer::$fieldNames[BasePeer::TYPE_PHPNAME]['Id'] = 0
     */
    protected static $fieldKeys = array (
        BasePeer::TYPE_PHPNAME => array ('Id' => 0, 'PluginId' => 1, 'StoredAt' => 2, 'Key' => 3, 'Data' => 4, ),
        BasePeer::TYPE_STUDLYPHPNAME => array ('id' => 0, 'pluginId' => 1, 'storedAt' => 2, 'key' => 3, 'data' => 4, ),
        BasePeer::TYPE_COLNAME => array (PluginDataPeer::ID => 0, PluginDataPeer::PLUGIN_ID => 1, PluginDataPeer::STORED_AT => 2, PluginDataPeer::KEY => 3, PluginDataPeer::DATA => 4, ),
        BasePeer::TYPE_RAW_COLNAME => array ('ID' => 0, 'PLUGIN_ID' => 1, 'STORED_AT' => 2, 'KEY' => 3, 'DATA' => 4, ),
        BasePeer::TYPE_FIELDNAME => array ('id' => 0, 'plugin_id' => 1, 'stored_at' => 2, 'key' => 3, 'data' => 4, ),
        BasePeer::TYPE_NUM => array (0, 1, 2, 3, 4, )
    );
/**
* Translates a fieldname to another type
*
* @param string $name field name
* @param string $fromType One of the class type constants BasePeer::TYPE_PHPNAME, BasePeer::TYPE_STUDLYPHPNAME
* BasePeer::TYPE_COLNAME, BasePeer::TYPE_FIELDNAME, BasePeer::TYPE_NUM
* @param string $toType One of the class type constants
* @return string translated name of the field.
* @throws PropelException - if the specified name could not be found in the fieldname mappings.
*/
public static function translateFieldName($name, $fromType, $toType)
{
$toNames = PluginDataPeer::getFieldNames($toType);
$key = isset(PluginDataPeer::$fieldKeys[$fromType][$name]) ? PluginDataPeer::$fieldKeys[$fromType][$name] : null;
if ($key === null) {
throw new PropelException("'$name' could not be found in the field names of type '$fromType'. These are: " . print_r(PluginDataPeer::$fieldKeys[$fromType], true));
}
return $toNames[$key];
}
/**
* Returns an array of field names.
*
* @param string $type The type of fieldnames to return:
* One of the class type constants BasePeer::TYPE_PHPNAME, BasePeer::TYPE_STUDLYPHPNAME
* BasePeer::TYPE_COLNAME, BasePeer::TYPE_FIELDNAME, BasePeer::TYPE_NUM
* @return array A list of field names
* @throws PropelException - if the type is not valid.
*/
public static function getFieldNames($type = BasePeer::TYPE_PHPNAME)
{
if (!array_key_exists($type, PluginDataPeer::$fieldNames)) {
throw new PropelException('Method getFieldNames() expects the parameter $type to be one of the class constants BasePeer::TYPE_PHPNAME, BasePeer::TYPE_STUDLYPHPNAME, BasePeer::TYPE_COLNAME, BasePeer::TYPE_FIELDNAME, BasePeer::TYPE_NUM. ' . $type . ' was given.');
}
return PluginDataPeer::$fieldNames[$type];
}
/**
* Convenience method which changes table.column to alias.column.
*
* Using this method you can maintain SQL abstraction while using column aliases.
* <code>
* $c->addAlias("alias1", TablePeer::TABLE_NAME);
* $c->addJoin(TablePeer::alias("alias1", TablePeer::PRIMARY_KEY_COLUMN), TablePeer::PRIMARY_KEY_COLUMN);
* </code>
* @param string $alias The alias for the current table.
* @param string $column The column name for current table. (i.e. PluginDataPeer::COLUMN_NAME).
* @return string
*/
public static function alias($alias, $column)
{
return str_replace(PluginDataPeer::TABLE_NAME.'.', $alias.'.', $column);
}
/**
* Add all the columns needed to create a new object.
*
* Note: any columns that were marked with lazyLoad="true" in the
* XML schema will not be added to the select list and only loaded
* on demand.
*
* @param Criteria $criteria object containing the columns to add.
* @param string $alias optional table alias
* @throws PropelException Any exceptions caught during processing will be
* rethrown wrapped into a PropelException.
*/
public static function addSelectColumns(Criteria $criteria, $alias = null)
{
if (null === $alias) {
$criteria->addSelectColumn(PluginDataPeer::ID);
$criteria->addSelectColumn(PluginDataPeer::PLUGIN_ID);
$criteria->addSelectColumn(PluginDataPeer::STORED_AT);
$criteria->addSelectColumn(PluginDataPeer::KEY);
$criteria->addSelectColumn(PluginDataPeer::DATA);
} else {
$criteria->addSelectColumn($alias . '.id');
$criteria->addSelectColumn($alias . '.plugin_id');
$criteria->addSelectColumn($alias . '.stored_at');
$criteria->addSelectColumn($alias . '.key');
$criteria->addSelectColumn($alias . '.data');
}
}
    /**
     * Returns the number of rows matching criteria.
     *
     * @param Criteria $criteria
     * @param boolean $distinct Whether to select only distinct columns; deprecated: use Criteria->setDistinct() instead.
     * @param PropelPDO $con
     * @return int Number of matching rows.
     */
    public static function doCount(Criteria $criteria, $distinct = false, PropelPDO $con = null)
    {
        // we may modify criteria, so copy it first
        $criteria = clone $criteria;
        // We need to set the primary table name, since in the case that there are no WHERE columns
        // it will be impossible for the BasePeer::createSelectSql() method to determine which
        // tables go into the FROM clause.
        $criteria->setPrimaryTableName(PluginDataPeer::TABLE_NAME);
        if ($distinct && !in_array(Criteria::DISTINCT, $criteria->getSelectModifiers())) {
            $criteria->setDistinct();
        }
        // A select clause is still required so COUNT has columns to wrap.
        if (!$criteria->hasSelectClause()) {
            PluginDataPeer::addSelectColumns($criteria);
        }
        $criteria->clearOrderByColumns(); // ORDER BY won't ever affect the count
        $criteria->setDbName(PluginDataPeer::DATABASE_NAME); // Set the correct dbName
        if ($con === null) {
            $con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_READ);
        }
        // BasePeer returns a PDOStatement
        $stmt = BasePeer::doCount($criteria, $con);
        if ($row = $stmt->fetch(PDO::FETCH_NUM)) {
            $count = (int) $row[0];
        } else {
            $count = 0; // no rows returned; we infer that means 0 matches.
        }
        $stmt->closeCursor();
        return $count;
    }
/**
* Selects one object from the DB.
*
* @param Criteria $criteria object used to create the SELECT statement.
* @param PropelPDO $con
* @return PluginData
* @throws PropelException Any exceptions caught during processing will be
* rethrown wrapped into a PropelException.
*/
public static function doSelectOne(Criteria $criteria, PropelPDO $con = null)
{
$critcopy = clone $criteria;
$critcopy->setLimit(1);
$objects = PluginDataPeer::doSelect($critcopy, $con);
if ($objects) {
return $objects[0];
}
return null;
}
/**
* Selects several row from the DB.
*
* @param Criteria $criteria The Criteria object used to build the SELECT statement.
* @param PropelPDO $con
* @return array Array of selected Objects
* @throws PropelException Any exceptions caught during processing will be
* rethrown wrapped into a PropelException.
*/
public static function doSelect(Criteria $criteria, PropelPDO $con = null)
{
return PluginDataPeer::populateObjects(PluginDataPeer::doSelectStmt($criteria, $con));
}
/**
* Prepares the Criteria object and uses the parent doSelect() method to execute a PDOStatement.
*
* Use this method directly if you want to work with an executed statement directly (for example
* to perform your own object hydration).
*
* @param Criteria $criteria The Criteria object used to build the SELECT statement.
* @param PropelPDO $con The connection to use
* @throws PropelException Any exceptions caught during processing will be
* rethrown wrapped into a PropelException.
* @return PDOStatement The executed PDOStatement object.
* @see BasePeer::doSelect()
*/
public static function doSelectStmt(Criteria $criteria, PropelPDO $con = null)
{
if ($con === null) {
$con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_READ);
}
if (!$criteria->hasSelectClause()) {
$criteria = clone $criteria;
PluginDataPeer::addSelectColumns($criteria);
}
// Set the correct dbName
$criteria->setDbName(PluginDataPeer::DATABASE_NAME);
// BasePeer returns a PDOStatement
return BasePeer::doSelect($criteria, $con);
}
/**
* Adds an object to the instance pool.
*
* Propel keeps cached copies of objects in an instance pool when they are retrieved
* from the database. In some cases -- especially when you override doSelect*()
* methods in your stub classes -- you may need to explicitly add objects
* to the cache in order to ensure that the same objects are always returned by doSelect*()
* and retrieveByPK*() calls.
*
* @param PluginData $obj A PluginData object.
* @param string $key (optional) key to use for instance map (for performance boost if key was already calculated externally).
*/
public static function addInstanceToPool($obj, $key = null)
{
if (Propel::isInstancePoolingEnabled()) {
if ($key === null) {
$key = (string) $obj->getId();
} // if key === null
PluginDataPeer::$instances[$key] = $obj;
}
}
/**
* Removes an object from the instance pool.
*
* Propel keeps cached copies of objects in an instance pool when they are retrieved
* from the database. In some cases -- especially when you override doDelete
* methods in your stub classes -- you may need to explicitly remove objects
* from the cache in order to prevent returning objects that no longer exist.
*
* @param mixed $value A PluginData object or a primary key value.
*
* @return void
* @throws PropelException - if the value is invalid.
*/
public static function removeInstanceFromPool($value)
{
if (Propel::isInstancePoolingEnabled() && $value !== null) {
if (is_object($value) && $value instanceof PluginData) {
$key = (string) $value->getId();
} elseif (is_scalar($value)) {
// assume we've been passed a primary key
$key = (string) $value;
} else {
$e = new PropelException("Invalid value passed to removeInstanceFromPool(). Expected primary key or PluginData object; got " . (is_object($value) ? get_class($value) . ' object.' : var_export($value,true)));
throw $e;
}
unset(PluginDataPeer::$instances[$key]);
}
} // removeInstanceFromPool()
/**
* Retrieves a string version of the primary key from the DB resultset row that can be used to uniquely identify a row in this table.
*
* For tables with a single-column primary key, that simple pkey value will be returned. For tables with
* a multi-column primary key, a serialize()d version of the primary key will be returned.
*
* @param string $key The key (@see getPrimaryKeyHash()) for this instance.
* @return PluginData Found object or null if 1) no instance exists for specified key or 2) instance pooling has been disabled.
* @see getPrimaryKeyHash()
*/
public static function getInstanceFromPool($key)
{
if (Propel::isInstancePoolingEnabled()) {
if (isset(PluginDataPeer::$instances[$key])) {
return PluginDataPeer::$instances[$key];
}
}
return null; // just to be explicit
}
/**
* Clear the instance pool.
*
* @return void
*/
public static function clearInstancePool($and_clear_all_references = false)
{
if ($and_clear_all_references)
{
foreach (PluginDataPeer::$instances as $instance)
{
$instance->clearAllReferences(true);
}
}
PluginDataPeer::$instances = array();
}
    /**
     * Method to invalidate the instance pool of all tables related to plugin_data
     * by a foreign key with ON DELETE CASCADE
     */
    public static function clearRelatedInstancePool()
    {
        // No table references plugin_data with ON DELETE CASCADE, so there
        // are no related pools to invalidate; intentionally empty.
    }
/**
* Retrieves a string version of the primary key from the DB resultset row that can be used to uniquely identify a row in this table.
*
* For tables with a single-column primary key, that simple pkey value will be returned. For tables with
* a multi-column primary key, a serialize()d version of the primary key will be returned.
*
* @param array $row PropelPDO resultset row.
* @param int $startcol The 0-based offset for reading from the resultset row.
* @return string A string version of PK or null if the components of primary key in result array are all null.
*/
public static function getPrimaryKeyHashFromRow($row, $startcol = 0)
{
// If the PK cannot be derived from the row, return null.
if ($row[$startcol] === null) {
return null;
}
return (string) $row[$startcol];
}
    /**
     * Retrieves the primary key from the DB resultset row
     * For tables with a single-column primary key, that simple pkey value will be returned. For tables with
     * a multi-column primary key, an array of the primary key columns will be returned.
     *
     * @param array $row PropelPDO resultset row.
     * @param int $startcol The 0-based offset for reading from the resultset row.
     * @return mixed The primary key of the row (here: the integer id, single-column PK)
     */
    public static function getPrimaryKeyFromRow($row, $startcol = 0)
    {
        return (int) $row[$startcol];
    }
    /**
     * Hydrates PluginData objects from an executed statement, reusing pooled instances.
     *
     * The returned array will contain objects of the default type or
     * objects that inherit from the default.
     *
     * @param PDOStatement $stmt executed statement positioned before the first row
     * @return array PluginData[] one object per fetched row
     * @throws PropelException Any exceptions caught during processing will be
     *		 rethrown wrapped into a PropelException.
     */
    public static function populateObjects(PDOStatement $stmt)
    {
        $results = array();
        // set the class once to avoid overhead in the loop
        $cls = PluginDataPeer::getOMClass();
        // populate the object(s)
        while ($row = $stmt->fetch(PDO::FETCH_NUM)) {
            $key = PluginDataPeer::getPrimaryKeyHashFromRow($row, 0);
            if (null !== ($obj = PluginDataPeer::getInstanceFromPool($key))) {
                // We no longer rehydrate the object, since this can cause data loss.
                // See http://www.propelorm.org/ticket/509
                // $obj->hydrate($row, 0, true); // rehydrate
                $results[] = $obj;
            } else {
                $obj = new $cls();
                $obj->hydrate($row);
                $results[] = $obj;
                PluginDataPeer::addInstanceToPool($obj, $key);
            } // if key exists
        }
        $stmt->closeCursor();
        return $results;
    }
    /**
     * Populates an object of the default type or an object that inherit from the default.
     *
     * @param array $row PropelPDO resultset row.
     * @param int $startcol The 0-based offset for reading from the resultset row.
     * @throws PropelException Any exceptions caught during processing will be
     *		 rethrown wrapped into a PropelException.
     * @return array (PluginData object, last column rank)
     */
    public static function populateObject($row, $startcol = 0)
    {
        $key = PluginDataPeer::getPrimaryKeyHashFromRow($row, $startcol);
        if (null !== ($obj = PluginDataPeer::getInstanceFromPool($key))) {
            // We no longer rehydrate the object, since this can cause data loss.
            // See http://www.propelorm.org/ticket/509
            // $obj->hydrate($row, $startcol, true); // rehydrate
            // Advance past this table's columns without re-reading them.
            $col = $startcol + PluginDataPeer::NUM_HYDRATE_COLUMNS;
        } else {
            $cls = PluginDataPeer::OM_CLASS;
            $obj = new $cls();
            $col = $obj->hydrate($row, $startcol);
            PluginDataPeer::addInstanceToPool($obj, $key);
        }
        return array($obj, $col);
    }
    /**
     * Returns the number of rows matching criteria, joining the related Plugin table
     *
     * @param Criteria $criteria
     * @param boolean $distinct Whether to select only distinct columns; deprecated: use Criteria->setDistinct() instead.
     * @param PropelPDO $con
     * @param String   $join_behavior the type of joins to use, defaults to Criteria::LEFT_JOIN
     * @return int Number of matching rows.
     */
    public static function doCountJoinPlugin(Criteria $criteria, $distinct = false, PropelPDO $con = null, $join_behavior = Criteria::LEFT_JOIN)
    {
        // we're going to modify criteria, so copy it first
        $criteria = clone $criteria;
        // We need to set the primary table name, since in the case that there are no WHERE columns
        // it will be impossible for the BasePeer::createSelectSql() method to determine which
        // tables go into the FROM clause.
        $criteria->setPrimaryTableName(PluginDataPeer::TABLE_NAME);
        if ($distinct && !in_array(Criteria::DISTINCT, $criteria->getSelectModifiers())) {
            $criteria->setDistinct();
        }
        if (!$criteria->hasSelectClause()) {
            PluginDataPeer::addSelectColumns($criteria);
        }
        $criteria->clearOrderByColumns(); // ORDER BY won't ever affect the count
        // Set the correct dbName
        $criteria->setDbName(PluginDataPeer::DATABASE_NAME);
        if ($con === null) {
            $con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_READ);
        }
        // Join against Plugin so the WHERE clause may reference its columns.
        $criteria->addJoin(PluginDataPeer::PLUGIN_ID, PluginPeer::ID, $join_behavior);
        $stmt = BasePeer::doCount($criteria, $con);
        if ($row = $stmt->fetch(PDO::FETCH_NUM)) {
            $count = (int) $row[0];
        } else {
            $count = 0; // no rows returned; we infer that means 0 matches.
        }
        $stmt->closeCursor();
        return $count;
    }
    /**
     * Selects a collection of PluginData objects pre-filled with their Plugin objects.
     * @param Criteria $criteria
     * @param PropelPDO $con
     * @param String   $join_behavior the type of joins to use, defaults to Criteria::LEFT_JOIN
     * @return array Array of PluginData objects.
     * @throws PropelException Any exceptions caught during processing will be
     *		 rethrown wrapped into a PropelException.
     */
    public static function doSelectJoinPlugin(Criteria $criteria, $con = null, $join_behavior = Criteria::LEFT_JOIN)
    {
        $criteria = clone $criteria;
        // Set the correct dbName if it has not been overridden
        if ($criteria->getDbName() == Propel::getDefaultDB()) {
            $criteria->setDbName(PluginDataPeer::DATABASE_NAME);
        }
        PluginDataPeer::addSelectColumns($criteria);
        // Plugin's columns are appended after this table's, starting here.
        $startcol = PluginDataPeer::NUM_HYDRATE_COLUMNS;
        PluginPeer::addSelectColumns($criteria);
        $criteria->addJoin(PluginDataPeer::PLUGIN_ID, PluginPeer::ID, $join_behavior);
        $stmt = BasePeer::doSelect($criteria, $con);
        $results = array();
        while ($row = $stmt->fetch(PDO::FETCH_NUM)) {
            $key1 = PluginDataPeer::getPrimaryKeyHashFromRow($row, 0);
            if (null !== ($obj1 = PluginDataPeer::getInstanceFromPool($key1))) {
                // We no longer rehydrate the object, since this can cause data loss.
                // See http://www.propelorm.org/ticket/509
                // $obj1->hydrate($row, 0, true); // rehydrate
            } else {
                $cls = PluginDataPeer::getOMClass();
                $obj1 = new $cls();
                $obj1->hydrate($row);
                PluginDataPeer::addInstanceToPool($obj1, $key1);
            } // if $obj1 already loaded
            $key2 = PluginPeer::getPrimaryKeyHashFromRow($row, $startcol);
            if ($key2 !== null) {
                $obj2 = PluginPeer::getInstanceFromPool($key2);
                if (!$obj2) {
                    $cls = PluginPeer::getOMClass();
                    $obj2 = new $cls();
                    $obj2->hydrate($row, $startcol);
                    PluginPeer::addInstanceToPool($obj2, $key2);
                } // if obj2 already loaded
                // Add the $obj1 (PluginData) to $obj2 (Plugin)
                $obj2->addPluginData($obj1);
            } // if joined row was not null
            $results[] = $obj1;
        }
        $stmt->closeCursor();
        return $results;
    }
    /**
     * Returns the number of rows matching criteria, joining all related tables
     *
     * @param Criteria $criteria
     * @param boolean $distinct Whether to select only distinct columns; deprecated: use Criteria->setDistinct() instead.
     * @param PropelPDO $con
     * @param String   $join_behavior the type of joins to use, defaults to Criteria::LEFT_JOIN
     * @return int Number of matching rows.
     */
    public static function doCountJoinAll(Criteria $criteria, $distinct = false, PropelPDO $con = null, $join_behavior = Criteria::LEFT_JOIN)
    {
        // we're going to modify criteria, so copy it first
        $criteria = clone $criteria;
        // We need to set the primary table name, since in the case that there are no WHERE columns
        // it will be impossible for the BasePeer::createSelectSql() method to determine which
        // tables go into the FROM clause.
        $criteria->setPrimaryTableName(PluginDataPeer::TABLE_NAME);
        if ($distinct && !in_array(Criteria::DISTINCT, $criteria->getSelectModifiers())) {
            $criteria->setDistinct();
        }
        if (!$criteria->hasSelectClause()) {
            PluginDataPeer::addSelectColumns($criteria);
        }
        $criteria->clearOrderByColumns(); // ORDER BY won't ever affect the count
        // Set the correct dbName
        $criteria->setDbName(PluginDataPeer::DATABASE_NAME);
        if ($con === null) {
            $con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_READ);
        }
        // Plugin is the only related table, so "all" is this single join.
        $criteria->addJoin(PluginDataPeer::PLUGIN_ID, PluginPeer::ID, $join_behavior);
        $stmt = BasePeer::doCount($criteria, $con);
        if ($row = $stmt->fetch(PDO::FETCH_NUM)) {
            $count = (int) $row[0];
        } else {
            $count = 0; // no rows returned; we infer that means 0 matches.
        }
        $stmt->closeCursor();
        return $count;
    }
    /**
     * Selects a collection of PluginData objects pre-filled with all related objects.
     *
     * @param Criteria $criteria
     * @param PropelPDO $con
     * @param String   $join_behavior the type of joins to use, defaults to Criteria::LEFT_JOIN
     * @return array Array of PluginData objects.
     * @throws PropelException Any exceptions caught during processing will be
     *		 rethrown wrapped into a PropelException.
     */
    public static function doSelectJoinAll(Criteria $criteria, $con = null, $join_behavior = Criteria::LEFT_JOIN)
    {
        $criteria = clone $criteria;
        // Set the correct dbName if it has not been overridden
        if ($criteria->getDbName() == Propel::getDefaultDB()) {
            $criteria->setDbName(PluginDataPeer::DATABASE_NAME);
        }
        PluginDataPeer::addSelectColumns($criteria);
        // Column offsets: $startcol2 begins the Plugin columns, $startcol3
        // would begin the next joined table's columns (none here).
        $startcol2 = PluginDataPeer::NUM_HYDRATE_COLUMNS;
        PluginPeer::addSelectColumns($criteria);
        $startcol3 = $startcol2 + PluginPeer::NUM_HYDRATE_COLUMNS;
        $criteria->addJoin(PluginDataPeer::PLUGIN_ID, PluginPeer::ID, $join_behavior);
        $stmt = BasePeer::doSelect($criteria, $con);
        $results = array();
        while ($row = $stmt->fetch(PDO::FETCH_NUM)) {
            $key1 = PluginDataPeer::getPrimaryKeyHashFromRow($row, 0);
            if (null !== ($obj1 = PluginDataPeer::getInstanceFromPool($key1))) {
                // We no longer rehydrate the object, since this can cause data loss.
                // See http://www.propelorm.org/ticket/509
                // $obj1->hydrate($row, 0, true); // rehydrate
            } else {
                $cls = PluginDataPeer::getOMClass();
                $obj1 = new $cls();
                $obj1->hydrate($row);
                PluginDataPeer::addInstanceToPool($obj1, $key1);
            } // if obj1 already loaded
            // Add objects for joined Plugin rows
            $key2 = PluginPeer::getPrimaryKeyHashFromRow($row, $startcol2);
            if ($key2 !== null) {
                $obj2 = PluginPeer::getInstanceFromPool($key2);
                if (!$obj2) {
                    $cls = PluginPeer::getOMClass();
                    $obj2 = new $cls();
                    $obj2->hydrate($row, $startcol2);
                    PluginPeer::addInstanceToPool($obj2, $key2);
                } // if obj2 loaded
                // Add the $obj1 (PluginData) to the collection in $obj2 (Plugin)
                $obj2->addPluginData($obj1);
            } // if joined row not null
            $results[] = $obj1;
        }
        $stmt->closeCursor();
        return $results;
    }
/**
* Returns the TableMap related to this peer.
* This method is not needed for general use but a specific application could have a need.
* @return TableMap
* @throws PropelException Any exceptions caught during processing will be
* rethrown wrapped into a PropelException.
*/
public static function getTableMap()
{
return Propel::getDatabaseMap(PluginDataPeer::DATABASE_NAME)->getTable(PluginDataPeer::TABLE_NAME);
}
/**
* Add a TableMap instance to the database for this peer class.
*/
public static function buildTableMap()
{
$dbMap = Propel::getDatabaseMap(BasePluginDataPeer::DATABASE_NAME);
if (!$dbMap->hasTable(BasePluginDataPeer::TABLE_NAME)) {
$dbMap->addTableObject(new PluginDataTableMap());
}
}
    /**
     * The class that the Peer will make instances of.
     *
     * @param array $row    unused here; kept for single-table-inheritance peers
     * @param int   $colnum unused here; kept for single-table-inheritance peers
     * @return string ClassName
     */
    public static function getOMClass($row = 0, $colnum = 0)
    {
        // No inheritance on this table: the model class never varies per row.
        return PluginDataPeer::OM_CLASS;
    }
    /**
     * Performs an INSERT on the database, given a PluginData or Criteria object.
     *
     * @param mixed $values Criteria or PluginData object containing data that is used to create the INSERT statement.
     * @param PropelPDO $con the PropelPDO connection to use
     * @return mixed The new primary key.
     * @throws PropelException Any exceptions caught during processing will be
     *		 rethrown wrapped into a PropelException.
     */
    public static function doInsert($values, PropelPDO $con = null)
    {
        if ($con === null) {
            $con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_WRITE);
        }
        if ($values instanceof Criteria) {
            $criteria = clone $values; // rename for clarity
        } else {
            $criteria = $values->buildCriteria(); // build Criteria from PluginData object
        }
        // id is auto-increment; refuse explicit values so the DB assigns it.
        if ($criteria->containsKey(PluginDataPeer::ID) && $criteria->keyContainsValue(PluginDataPeer::ID) ) {
            throw new PropelException('Cannot insert a value for auto-increment primary key ('.PluginDataPeer::ID.')');
        }
        // Set the correct dbName
        $criteria->setDbName(PluginDataPeer::DATABASE_NAME);
        try {
            // use transaction because $criteria could contain info
            // for more than one table (I guess, conceivably)
            $con->beginTransaction();
            $pk = BasePeer::doInsert($criteria, $con);
            $con->commit();
        } catch (PropelException $e) {
            $con->rollBack();
            throw $e;
        }
        return $pk;
    }
    /**
     * Performs an UPDATE on the database, given a PluginData or Criteria object.
     *
     * @param mixed $values Criteria or PluginData object containing data that is used to create the UPDATE statement.
     * @param PropelPDO $con The connection to use (specify PropelPDO connection object to exert more control over transactions).
     * @return int The number of affected rows (if supported by underlying database driver).
     * @throws PropelException Any exceptions caught during processing will be
     *		 rethrown wrapped into a PropelException.
     */
    public static function doUpdate($values, PropelPDO $con = null)
    {
        if ($con === null) {
            $con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_WRITE);
        }
        $selectCriteria = new Criteria(PluginDataPeer::DATABASE_NAME);
        if ($values instanceof Criteria) {
            $criteria = clone $values; // rename for clarity
            // Move the PK constraint (if any) out of the value criteria and
            // into the WHERE criteria, so the PK itself is not updated.
            $comparison = $criteria->getComparison(PluginDataPeer::ID);
            $value = $criteria->remove(PluginDataPeer::ID);
            if ($value) {
                $selectCriteria->add(PluginDataPeer::ID, $value, $comparison);
            } else {
                $selectCriteria->setPrimaryTableName(PluginDataPeer::TABLE_NAME);
            }
        } else { // $values is PluginData object
            $criteria = $values->buildCriteria(); // gets full criteria
            $selectCriteria = $values->buildPkeyCriteria(); // gets criteria w/ primary key(s)
        }
        // set the correct dbName
        $criteria->setDbName(PluginDataPeer::DATABASE_NAME);
        return BasePeer::doUpdate($selectCriteria, $criteria, $con);
    }
    /**
     * Deletes all rows from the plugin_data table.
     *
     * @param PropelPDO $con the connection to use
     * @return int The number of affected rows (if supported by underlying database driver).
     * @throws PropelException
     */
    public static function doDeleteAll(PropelPDO $con = null)
    {
        if ($con === null) {
            $con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_WRITE);
        }
        $affectedRows = 0; // initialize var to track total num of affected rows
        try {
            // use transaction because $criteria could contain info
            // for more than one table or we could emulating ON DELETE CASCADE, etc.
            $con->beginTransaction();
            $affectedRows += BasePeer::doDeleteAll(PluginDataPeer::TABLE_NAME, $con, PluginDataPeer::DATABASE_NAME);
            // Because this db requires some delete cascade/set null emulation, we have to
            // clear the cached instance *after* the emulation has happened (since
            // instances get re-added by the select statement contained therein).
            PluginDataPeer::clearInstancePool();
            PluginDataPeer::clearRelatedInstancePool();
            $con->commit();
            return $affectedRows;
        } catch (PropelException $e) {
            $con->rollBack();
            throw $e;
        }
    }
    /**
     * Performs a DELETE on the database, given a PluginData or Criteria object OR a primary key value.
     *
     * @param mixed $values Criteria or PluginData object or primary key or array of primary keys
     *				which is used to create the DELETE statement
     * @param PropelPDO $con the connection to use
     * @return int The number of affected rows (if supported by underlying database driver). This includes CASCADE-related rows
     *				if supported by native driver or if emulated using Propel.
     * @throws PropelException Any exceptions caught during processing will be
     *		 rethrown wrapped into a PropelException.
     */
    public static function doDelete($values, PropelPDO $con = null)
    {
        if ($con === null) {
            $con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_WRITE);
        }
        if ($values instanceof Criteria) {
            // invalidate the cache for all objects of this type, since we have no
            // way of knowing (without running a query) what objects should be invalidated
            // from the cache based on this Criteria.
            PluginDataPeer::clearInstancePool();
            // rename for clarity
            $criteria = clone $values;
        } elseif ($values instanceof PluginData) { // it's a model object
            // invalidate the cache for this single object
            PluginDataPeer::removeInstanceFromPool($values);
            // create criteria based on pk values
            $criteria = $values->buildPkeyCriteria();
        } else { // it's a primary key, or an array of pks
            $criteria = new Criteria(PluginDataPeer::DATABASE_NAME);
            $criteria->add(PluginDataPeer::ID, (array) $values, Criteria::IN);
            // invalidate the cache for this object(s)
            foreach ((array) $values as $singleval) {
                PluginDataPeer::removeInstanceFromPool($singleval);
            }
        }
        // Set the correct dbName
        $criteria->setDbName(PluginDataPeer::DATABASE_NAME);
        $affectedRows = 0; // initialize var to track total num of affected rows
        try {
            // use transaction because $criteria could contain info
            // for more than one table or we could emulating ON DELETE CASCADE, etc.
            $con->beginTransaction();
            $affectedRows += BasePeer::doDelete($criteria, $con);
            PluginDataPeer::clearRelatedInstancePool();
            $con->commit();
            return $affectedRows;
        } catch (PropelException $e) {
            $con->rollBack();
            throw $e;
        }
    }
/**
 * Validates all modified columns of given PluginData object.
 * If parameter $columns is either a single column name or an array of column names
 * than only those columns are validated.
 *
 * NOTICE: This does not apply to primary or foreign keys for now.
 *
 * @param PluginData $obj The object to validate.
 * @param mixed $cols Column name or array of column names.
 *
 * @return mixed TRUE if all columns are valid or the error message of the first invalid column.
 */
public static function doValidate($obj, $cols = null)
{
    $columns = array();
    if ($cols) {
        $dbMap = Propel::getDatabaseMap(PluginDataPeer::DATABASE_NAME);
        $tableMap = $dbMap->getTable(PluginDataPeer::TABLE_NAME);
        if (! is_array($cols)) {
            $cols = array($cols);
        }
        // Collect the current value of every requested column that actually
        // exists on the table; unknown column names are silently skipped.
        foreach ($cols as $colName) {
            if ($tableMap->hasColumn($colName)) {
                $get = 'get' . $tableMap->getColumn($colName)->getPhpName();
                $columns[$colName] = $obj->$get();
            }
        }
    }
    // NOTE: the original code carried a dead, empty `else {}` branch here;
    // when no columns are requested an empty set is passed through unchanged.
    return BasePeer::doValidate(PluginDataPeer::DATABASE_NAME, PluginDataPeer::TABLE_NAME, $columns);
}
/**
 * Retrieve a single object by pkey.
 *
 * Checks the instance pool first so repeated lookups of the same primary
 * key do not hit the database.
 *
 * @param int $pk the primary key.
 * @param PropelPDO $con the connection to use
 * @return PluginData|null the matching object, or null if none exists.
 */
public static function retrieveByPK($pk, PropelPDO $con = null)
{
    if (null !== ($obj = PluginDataPeer::getInstanceFromPool((string) $pk))) {
        return $obj;
    }
    if ($con === null) {
        $con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_READ);
    }
    $criteria = new Criteria(PluginDataPeer::DATABASE_NAME);
    $criteria->add(PluginDataPeer::ID, $pk);
    $v = PluginDataPeer::doSelect($criteria, $con);
    // BUGFIX: was "!empty($v) > 0", which compares a boolean against 0 and
    // only works by accident; use an explicit element count instead.
    return count($v) > 0 ? $v[0] : null;
}
/**
 * Retrieve multiple objects by pkey.
 *
 * @param array $pks List of primary keys
 * @param PropelPDO $con the connection to use
 * @return PluginData[]
 * @throws PropelException Any exceptions caught during processing will be
 * rethrown wrapped into a PropelException.
 */
public static function retrieveByPKs($pks, PropelPDO $con = null)
{
    if ($con === null) {
        $con = Propel::getConnection(PluginDataPeer::DATABASE_NAME, Propel::CONNECTION_READ);
    }
    // Nothing requested: skip the round-trip entirely.
    if (empty($pks)) {
        return array();
    }
    $criteria = new Criteria(PluginDataPeer::DATABASE_NAME);
    $criteria->add(PluginDataPeer::ID, $pks, Criteria::IN);
    return PluginDataPeer::doSelect($criteria, $con);
}
} // BasePluginDataPeer
// This is the static code needed to register the TableMap for this table with the main Propel class.
//
BasePluginDataPeer::buildTableMap();
| kendrick-k/datawrapper | lib/core/build/classes/datawrapper/om/BasePluginDataPeer.php | PHP | mit | 40,214 |
<?php
use Illuminate\Database\Seeder;
use Illuminate\Database\Eloquent\Model;
use App\Models\Role, App\Models\User, App\Models\Contact, App\Models\Post, App\Models\Tag, App\Models\PostTag, App\Models\Comment;
use App\Services\LoremIpsumGenerator;
/**
 * Seeds the database with a complete demo data set: roles, users, contact
 * messages, tags, posts, post/tag links and comments.
 *
 * NOTE(review): assumes the migrations for every referenced table have
 * already been run, and that the role/user/post ids assigned below start
 * at 1 (fresh database) -- confirm before seeding an existing database.
 */
class DatabaseSeeder extends Seeder {
/**
 * Run the database seeds.
 *
 * @return void
 */
public function run()
{
// Disable mass-assignment protection for the duration of the seeding.
Model::unguard();
// Filler-text generator used for post/comment bodies below.
$lipsum = new LoremIpsumGenerator;
// --- Roles (ids 1..3; referenced by the users' role_id below) ---
Role::create([
'title' => 'Administrator',
'slug' => 'admin'
]);
Role::create([
'title' => 'Redactor',
'slug' => 'redac'
]);
Role::create([
'title' => 'User',
'slug' => 'user'
]);
// --- Users: one admin, one redactor and two plain users ---
User::create([
'username' => 'GreatAdmin',
'email' => 'admin@la.fr',
'password' => bcrypt('admin'),
'seen' => true,
'role_id' => 1,
'confirmed' => true
]);
User::create([
'username' => 'GreatRedactor',
'email' => 'redac@la.fr',
'password' => bcrypt('redac'),
'seen' => true,
'role_id' => 2,
'valid' => true,
'confirmed' => true
]);
User::create([
'username' => 'Walker',
'email' => 'walker@la.fr',
'password' => bcrypt('walker'),
'role_id' => 3,
'confirmed' => true
]);
User::create([
'username' => 'Slacker',
'email' => 'slacker@la.fr',
'password' => bcrypt('slacker'),
'role_id' => 3,
'confirmed' => true
]);
// --- Contact-form messages (the third one is marked as already read) ---
Contact::create([
'name' => 'Dupont',
'email' => 'dupont@la.fr',
'text' => 'Lorem ipsum inceptos malesuada leo fusce tortor sociosqu semper, facilisis semper class tempus faucibus tristique duis eros, cubilia quisque habitasse aliquam fringilla orci non. Vel laoreet dolor enim justo facilisis neque accumsan, in ad venenatis hac per dictumst nulla ligula, donec mollis massa porttitor ullamcorper risus. Eu platea fringilla, habitasse.'
]);
Contact::create([
'name' => 'Durand',
'email' => 'durand@la.fr',
'text' => ' Lorem ipsum erat non elit ultrices placerat, netus metus feugiat non conubia fusce porttitor, sociosqu diam commodo metus in. Himenaeos vitae aptent consequat luctus purus eleifend enim, sollicitudin eleifend porta malesuada ac class conubia, condimentum mauris facilisis conubia quis scelerisque. Lacinia tempus nullam felis fusce ac potenti netus ornare semper molestie, iaculis fermentum ornare curabitur tincidunt imperdiet scelerisque imperdiet euismod.'
]);
Contact::create([
'name' => 'Martin',
'email' => 'martin@la.fr',
'text' => 'Lorem ipsum tempor netus aenean ligula habitant vehicula tempor ultrices, placerat sociosqu ultrices consectetur ullamcorper tincidunt quisque tellus, ante nostra euismod nec suspendisse sem curabitur elit. Malesuada lacus viverra sagittis sit ornare orci, augue nullam adipiscing pulvinar libero aliquam vestibulum, platea cursus pellentesque leo dui. Lectus curabitur euismod ad, erat.',
'seen' => true
]);
// --- Tags (ids 1..4; referenced by the PostTag pivot rows below) ---
Tag::create([
'tag' => 'Tag1'
]);
Tag::create([
'tag' => 'Tag2'
]);
Tag::create([
'tag' => 'Tag3'
]);
Tag::create([
'tag' => 'Tag4'
]);
// --- Posts: post 1 by the admin, posts 2-4 by the redactor ---
Post::create([
'title' => 'Post 1',
'slug' => 'post-1',
'summary' => '<img alt="" src="/filemanager/userfiles/greatredactor/mega-champignon-icone-8453-128.png" style="float:left; height:128px; width:128px" />' . $lipsum->getContent(50),
'content' => $lipsum->getContent(500),
'active' => true,
'user_id' => 1
]);
// Post 2 uses a hand-written HTML body (with a <pre> code sample) instead
// of generated filler text.
Post::create([
'title' => 'Post 2',
'slug' => 'post-2',
'summary' => '<img alt="" src="/filemanager/userfiles/greatredactor/goomba-icone-7704-128.png" style="float:left; height:128px; width:128px" />' . $lipsum->getContent(50),
'content' => '<p>Lorem ipsum convallis ac curae non elit ultrices placerat netus metus feugiat, non conubia fusce porttitor sociosqu diam commodo metus in himenaeos, vitae aptent consequat luctus purus eleifend enim sollicitudin eleifend porta. Malesuada ac class conubia condimentum mauris facilisis conubia quis scelerisque lacinia, tempus nullam felis fusce ac potenti netus ornare semper. Molestie iaculis fermentum ornare curabitur tincidunt imperdiet scelerisque, imperdiet euismod scelerisque torquent curae rhoncus, sollicitudin tortor placerat aptent hac nec. Posuere suscipit sed tortor neque urna hendrerit vehicula duis litora tristique congue nec auctor felis libero, ornare habitasse nec elit felis inceptos tellus inceptos cubilia quis mattis faucibus sem non.</p>
<p>Odio fringilla class aliquam metus ipsum lorem luctus pharetra dictum, vehicula tempus in venenatis gravida ut gravida proin orci, quis sed platea mi quisque hendrerit semper hendrerit. Facilisis ante sapien faucibus ligula commodo vestibulum rutrum pretium, varius sem aliquet himenaeos dolor cursus nunc habitasse, aliquam ut curabitur ipsum luctus ut rutrum. Odio condimentum donec suscipit molestie est etiam sit rutrum dui nostra, sem aliquet conubia nullam sollicitudin rhoncus venenatis vivamus rhoncus netus, risus tortor non mauris turpis eget integer nibh dolor. Commodo venenatis ut molestie semper adipiscing amet cras, class donec sapien malesuada auctor sapien arcu inceptos, aenean consequat metus litora mattis vivamus.</p>
<pre>
<code class="language-php">protected function getUserByRecaller($recaller)
{
if ($this->validRecaller($recaller) && ! $this->tokenRetrievalAttempted)
{
$this->tokenRetrievalAttempted = true;
list($id, $token) = explode("|", $recaller, 2);
$this->viaRemember = ! is_null($user = $this->provider->retrieveByToken($id, $token));
return $user;
}
}</code></pre>
<p>Feugiat arcu adipiscing mauris primis ante ullamcorper ad nisi, lobortis arcu per orci malesuada blandit metus tortor, urna turpis consectetur porttitor egestas sed eleifend. Eget tincidunt pharetra varius tincidunt morbi malesuada elementum mi torquent mollis, eu lobortis curae purus amet vivamus amet nulla torquent, nibh eu diam aliquam pretium donec aliquam tempus lacus. Tempus feugiat lectus cras non velit mollis sit et integer, egestas habitant auctor integer sem at nam massa himenaeos, netus vel dapibus nibh malesuada leo fusce tortor. Sociosqu semper facilisis semper class tempus faucibus tristique duis eros, cubilia quisque habitasse aliquam fringilla orci non vel, laoreet dolor enim justo facilisis neque accumsan in.</p>
<p>Ad venenatis hac per dictumst nulla ligula donec, mollis massa porttitor ullamcorper risus eu platea, fringilla habitasse suscipit pellentesque donec est. Habitant vehicula tempor ultrices placerat sociosqu ultrices consectetur ullamcorper tincidunt quisque tellus, ante nostra euismod nec suspendisse sem curabitur elit malesuada lacus. Viverra sagittis sit ornare orci augue nullam adipiscing pulvinar libero aliquam vestibulum platea cursus pellentesque leo dui lectus, curabitur euismod ad erat curae non elit ultrices placerat netus metus feugiat non conubia fusce porttitor. Sociosqu diam commodo metus in himenaeos vitae aptent consequat luctus purus eleifend enim sollicitudin eleifend, porta malesuada ac class conubia condimentum mauris facilisis conubia quis scelerisque lacinia.</p>
<p>Tempus nullam felis fusce ac potenti netus ornare semper molestie iaculis, fermentum ornare curabitur tincidunt imperdiet scelerisque imperdiet euismod. Scelerisque torquent curae rhoncus sollicitudin tortor placerat aptent hac, nec posuere suscipit sed tortor neque urna hendrerit, vehicula duis litora tristique congue nec auctor. Felis libero ornare habitasse nec elit felis, inceptos tellus inceptos cubilia quis mattis, faucibus sem non odio fringilla. Class aliquam metus ipsum lorem luctus pharetra dictum vehicula, tempus in venenatis gravida ut gravida proin orci, quis sed platea mi quisque hendrerit semper.</p>
',
'active' => true,
'user_id' => 2
]);
Post::create([
'title' => 'Post 3',
'slug' => 'post-3',
'summary' => '<img alt="" src="/filemanager/userfiles/greatredactor/rouge-shell--icone-5599-128.png" style="float:left; height:128px; width:128px" />' . $lipsum->getContent(50),
'content' => $lipsum->getContent(500),
'active' => true,
'user_id' => 2
]);
Post::create([
'title' => 'Post 4',
'slug' => 'post-4',
'summary' => '<img alt="" src="/filemanager/userfiles/greatredactor/rouge-shyguy-icone-6870-128.png" style="float:left; height:128px; width:128px" />' . $lipsum->getContent(50),
'content' => $lipsum->getContent(500),
'active' => true,
'user_id' => 2
]);
// --- Post/Tag pivot rows (post 4 intentionally has no tags) ---
PostTag::create([
'post_id' => 1,
'tag_id' => 1
]);
PostTag::create([
'post_id' => 1,
'tag_id' => 2
]);
PostTag::create([
'post_id' => 2,
'tag_id' => 1
]);
PostTag::create([
'post_id' => 2,
'tag_id' => 2
]);
PostTag::create([
'post_id' => 2,
'tag_id' => 3
]);
PostTag::create([
'post_id' => 3,
'tag_id' => 1
]);
PostTag::create([
'post_id' => 3,
'tag_id' => 2
]);
PostTag::create([
'post_id' => 3,
'tag_id' => 4
]);
// --- Comments on posts 1 and 2 ---
Comment::create([
'content' => $lipsum->getContent(200),
'user_id' => 2,
'post_id' => 1
]);
Comment::create([
'content' => $lipsum->getContent(200),
'user_id' => 2,
'post_id' => 2
]);
Comment::create([
'content' => $lipsum->getContent(200),
'user_id' => 3,
'post_id' => 1
]);
}
}
| sdlyhu/laravel5-example | database/seeds/DatabaseSeeder.php | PHP | mit | 9,256 |
(function(__global) {
// Environment detection: web worker, browser window, and Windows
// (the latter drives file:// path normalization further down).
var isWorker = typeof window == 'undefined' && typeof self != 'undefined' && typeof importScripts != 'undefined';
var isBrowser = typeof window != 'undefined' && typeof document != 'undefined';
var isWindows = typeof process != 'undefined' && typeof process.platform != 'undefined' && !!process.platform.match(/^win/);
// Stub console.assert on engines without a console object.
if (!__global.console)
__global.console = { assert: function() {} };
// IE8 support
var indexOf = Array.prototype.indexOf || function(item) {
for (var i = 0, thisLen = this.length; i < thisLen; i++) {
if (this[i] === item) {
return i;
}
}
return -1;
};
// Feature-detect Object.defineProperty; fall back to a plain assignment
// (note: a getter is then evaluated eagerly, once) on engines with a
// broken or missing implementation.
var defineProperty;
(function () {
try {
if (!!Object.defineProperty({}, 'a', {}))
defineProperty = Object.defineProperty;
}
catch (e) {
defineProperty = function(obj, prop, opt) {
try {
obj[prop] = opt.value || opt.get.call(obj);
}
catch(e) {}
}
}
})();
// Attach extra context to an error while keeping the original stack usable.
// Error inputs are cloned and augmented; anything else is stringified with
// the context appended.
function addToError(err, msg) {
  if (!(err instanceof Error))
    return err + '\n\t' + msg;

  var augmented = new Error(err.message, err.fileName, err.lineNumber);
  if (isBrowser) {
    augmented.message = err.message + '\n\t' + msg;
    augmented.stack = err.stack;
  }
  else {
    // node errors only look correct with the stack modified
    augmented.message = err.message;
    augmented.stack = err.stack + '\n\t' + msg;
  }
  return augmented;
}
// Evaluate JS source with `context` bound to `this`, wrapping any thrown
// error with the debug name of the code being evaluated.
function __eval(source, debugName, context) {
  try {
    var compiled = new Function(source);
    compiled.call(context);
  }
  catch (e) {
    throw addToError(e, 'Evaluating ' + debugName);
  }
}
var baseURI;
// environment baseURI detection
if (typeof document != 'undefined' && document.getElementsByTagName) {
baseURI = document.baseURI;
if (!baseURI) {
// Older browsers: fall back to the first <base> tag, then the page URL.
var bases = document.getElementsByTagName('base');
baseURI = bases[0] && bases[0].href || window.location.href;
}
// sanitize out the hash and querystring
baseURI = baseURI.split('#')[0].split('?')[0];
// strip the filename so baseURI always ends with a trailing slash
baseURI = baseURI.substr(0, baseURI.lastIndexOf('/') + 1);
}
else if (typeof process != 'undefined' && process.cwd) {
// node: derive a file:// URL from the current working directory
baseURI = 'file://' + (isWindows ? '/' : '') + process.cwd() + '/';
if (isWindows)
baseURI = baseURI.replace(/\\/g, '/');
}
else if (typeof location != 'undefined') {
baseURI = __global.location.href;
}
else {
throw new TypeError('No environment baseURI');
}
// Prefer a URL polyfill when one was loaded before this script.
var URL = __global.URLPolyfill || __global.URL;
| ApprecieOpenSource/Apprecie | public/a/node_modules/es6-module-loader/src/wrapper-start.js | JavaScript | mit | 2,575 |
import { Animation } from '../animations/animation';
import { isPresent } from '../util/util';
import { PageTransition } from './page-transition';
const /** @type {?} */ DURATION = 500;
const /** @type {?} */ EASING = 'cubic-bezier(0.36,0.66,0.04,1)';
const /** @type {?} */ OPACITY = 'opacity';
const /** @type {?} */ TRANSFORM = 'transform';
const /** @type {?} */ TRANSLATEX = 'translateX';
const /** @type {?} */ OFF_RIGHT = '99.5%';
const /** @type {?} */ OFF_LEFT = '-33%';
const /** @type {?} */ CENTER = '0%';
const /** @type {?} */ OFF_OPACITY = 0.8;
const /** @type {?} */ SHOW_BACK_BTN_CSS = 'show-back-button';
// iOS-style page transition: the entering page slides in from the right
// (forward) or from one third off the left (back) while navbar titles,
// buttons and the toolbar background are animated in lock-step. The
// animation tree built here is strictly order-dependent: child animations
// must be added before their from/to ranges are configured.
export class IOSTransition extends PageTransition {
    /**
     * Build the complete animation tree for this transition.
     * @return {?}
     */
    init() {
        super.init();
        const /** @type {?} */ plt = this.plt;
        const /** @type {?} */ enteringView = this.enteringView;
        const /** @type {?} */ leavingView = this.leavingView;
        const /** @type {?} */ opts = this.opts;
        // Fall back to the iOS defaults when the caller gave no timing.
        this.duration(isPresent(opts.duration) ? opts.duration : DURATION);
        this.easing(isPresent(opts.easing) ? opts.easing : EASING);
        const /** @type {?} */ backDirection = (opts.direction === 'back');
        const /** @type {?} */ enteringHasNavbar = (enteringView && enteringView.hasNavbar());
        const /** @type {?} */ leavingHasNavbar = (leavingView && leavingView.hasNavbar());
        if (enteringView) {
            // get the native element for the entering page
            const /** @type {?} */ enteringPageEle = enteringView.pageRef().nativeElement;
            // entering content
            const /** @type {?} */ enteringContent = new Animation(plt, enteringView.contentRef());
            enteringContent.element(enteringPageEle.querySelectorAll('ion-header > *:not(ion-navbar),ion-footer > *'));
            this.add(enteringContent);
            if (backDirection) {
                // entering content, back direction
                enteringContent
                    .fromTo(TRANSLATEX, OFF_LEFT, CENTER, true)
                    .fromTo(OPACITY, OFF_OPACITY, 1, true);
            }
            else {
                // entering content, forward direction
                enteringContent
                    .beforeClearStyles([OPACITY])
                    .fromTo(TRANSLATEX, OFF_RIGHT, CENTER, true);
            }
            if (enteringHasNavbar) {
                // entering page has a navbar
                const /** @type {?} */ enteringNavbarEle = enteringPageEle.querySelector('ion-navbar');
                const /** @type {?} */ enteringNavBar = new Animation(plt, enteringNavbarEle);
                this.add(enteringNavBar);
                const /** @type {?} */ enteringTitle = new Animation(plt, enteringNavbarEle.querySelector('ion-title'));
                const /** @type {?} */ enteringNavbarItems = new Animation(plt, enteringNavbarEle.querySelectorAll('ion-buttons,[menuToggle]'));
                const /** @type {?} */ enteringNavbarBg = new Animation(plt, enteringNavbarEle.querySelector('.toolbar-background'));
                const /** @type {?} */ enteringBackButton = new Animation(plt, enteringNavbarEle.querySelector('.back-button'));
                enteringNavBar
                    .add(enteringTitle)
                    .add(enteringNavbarItems)
                    .add(enteringNavbarBg)
                    .add(enteringBackButton);
                enteringTitle.fromTo(OPACITY, 0.01, 1, true);
                enteringNavbarItems.fromTo(OPACITY, 0.01, 1, true);
                // set properties depending on direction
                if (backDirection) {
                    // entering navbar, back direction
                    enteringTitle.fromTo(TRANSLATEX, OFF_LEFT, CENTER, true);
                    if (enteringView.enableBack()) {
                        // back direction, entering page has a back button
                        enteringBackButton
                            .beforeAddClass(SHOW_BACK_BTN_CSS)
                            .fromTo(OPACITY, 0.01, 1, true);
                    }
                }
                else {
                    // entering navbar, forward direction
                    enteringTitle.fromTo(TRANSLATEX, OFF_RIGHT, CENTER, true);
                    enteringNavbarBg
                        .beforeClearStyles([OPACITY])
                        .fromTo(TRANSLATEX, OFF_RIGHT, CENTER, true);
                    if (enteringView.enableBack()) {
                        // forward direction, entering page has a back button:
                        // the button label slides in from the right alongside it
                        enteringBackButton
                            .beforeAddClass(SHOW_BACK_BTN_CSS)
                            .fromTo(OPACITY, 0.01, 1, true);
                        const /** @type {?} */ enteringBackBtnText = new Animation(plt, enteringNavbarEle.querySelector('.back-button-text'));
                        enteringBackBtnText.fromTo(TRANSLATEX, '100px', '0px');
                        enteringNavBar.add(enteringBackBtnText);
                    }
                    else {
                        enteringBackButton.beforeRemoveClass(SHOW_BACK_BTN_CSS);
                    }
                }
            }
        }
        // setup leaving view
        if (leavingView && leavingView.pageRef()) {
            // leaving content
            const /** @type {?} */ leavingPageEle = leavingView.pageRef().nativeElement;
            const /** @type {?} */ leavingContent = new Animation(plt, leavingView.contentRef());
            leavingContent.element(leavingPageEle.querySelectorAll('ion-header > *:not(ion-navbar),ion-footer > *'));
            this.add(leavingContent);
            if (backDirection) {
                // leaving content, back direction: slide fully off to the right
                leavingContent
                    .beforeClearStyles([OPACITY])
                    .fromTo(TRANSLATEX, CENTER, '100%');
            }
            else {
                // leaving content, forward direction: slide partially left and dim
                leavingContent
                    .fromTo(TRANSLATEX, CENTER, OFF_LEFT)
                    .fromTo(OPACITY, 1, OFF_OPACITY)
                    .afterClearStyles([TRANSFORM, OPACITY]);
            }
            if (leavingHasNavbar) {
                // leaving page has a navbar
                const /** @type {?} */ leavingNavbarEle = leavingPageEle.querySelector('ion-navbar');
                const /** @type {?} */ leavingNavBar = new Animation(plt, leavingNavbarEle);
                const /** @type {?} */ leavingTitle = new Animation(plt, leavingNavbarEle.querySelector('ion-title'));
                const /** @type {?} */ leavingNavbarItems = new Animation(plt, leavingNavbarEle.querySelectorAll('ion-buttons,[menuToggle]'));
                const /** @type {?} */ leavingNavbarBg = new Animation(plt, leavingNavbarEle.querySelector('.toolbar-background'));
                const /** @type {?} */ leavingBackButton = new Animation(plt, leavingNavbarEle.querySelector('.back-button'));
                leavingNavBar
                    .add(leavingTitle)
                    .add(leavingNavbarItems)
                    .add(leavingBackButton)
                    .add(leavingNavbarBg);
                this.add(leavingNavBar);
                // fade out leaving navbar items
                leavingBackButton.fromTo(OPACITY, 0.99, 0);
                leavingTitle.fromTo(OPACITY, 0.99, 0);
                leavingNavbarItems.fromTo(OPACITY, 0.99, 0);
                if (backDirection) {
                    // leaving navbar, back direction
                    leavingTitle.fromTo(TRANSLATEX, CENTER, '100%');
                    // leaving navbar, back direction, and there's no entering navbar
                    // should just slide out, no fading out
                    leavingNavbarBg
                        .beforeClearStyles([OPACITY])
                        .fromTo(TRANSLATEX, CENTER, '100%');
                    let /** @type {?} */ leavingBackBtnText = new Animation(plt, leavingNavbarEle.querySelector('.back-button-text'));
                    leavingBackBtnText.fromTo(TRANSLATEX, CENTER, (300) + 'px');
                    leavingNavBar.add(leavingBackBtnText);
                }
                else {
                    // leaving navbar, forward direction
                    leavingTitle
                        .fromTo(TRANSLATEX, CENTER, OFF_LEFT)
                        .afterClearStyles([TRANSFORM]);
                    leavingBackButton.afterClearStyles([OPACITY]);
                    leavingTitle.afterClearStyles([OPACITY]);
                    leavingNavbarItems.afterClearStyles([OPACITY]);
                }
            }
        }
    }
}
//# sourceMappingURL=transition-ios.js.map | ortroyaner/GithubFinder | node_modules/ionic-angular/es2015/transitions/transition-ios.js | JavaScript | mit | 8,705 |
// <copyright file="SparseMatrix.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
//
// Copyright (c) 2009-2013 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using MathNet.Numerics.LinearAlgebra.Storage;
namespace MathNet.Numerics.LinearAlgebra.Double
{
/// <summary>
/// A Matrix with sparse storage, intended for very large matrices where most of the cells are zero.
/// The underlying storage scheme is 3-array compressed-sparse-row (CSR) Format.
/// <a href="http://en.wikipedia.org/wiki/Sparse_matrix#Compressed_sparse_row_.28CSR_or_CRS.29">Wikipedia - CSR</a>.
/// </summary>
[Serializable]
[DebuggerDisplay("SparseMatrix {RowCount}x{ColumnCount}-Double {NonZerosCount}-NonZero")]
public class SparseMatrix : Matrix
{
// Backing CSR storage; shared (not copied) when supplied via the storage ctor.
readonly SparseCompressedRowMatrixStorage<double> _storage;
/// <summary>
/// Gets the number of non zero elements in the matrix.
/// </summary>
/// <value>The number of non zero elements.</value>
public int NonZerosCount
{
get { return _storage.ValueCount; }
}
/// <summary>
/// Create a new sparse matrix straight from an initialized matrix storage instance.
/// The storage is used directly without copying.
/// Intended for advanced scenarios where you're working directly with
/// storage for performance or interop reasons.
/// </summary>
public SparseMatrix(SparseCompressedRowMatrixStorage<double> storage)
: base(storage)
{
_storage = storage;
}
/// <summary>
/// Create a new square sparse matrix with the given number of rows and columns.
/// All cells of the matrix will be initialized to zero.
/// Zero-length matrices are not supported.
/// </summary>
/// <exception cref="ArgumentException">If the order is less than one.</exception>
public SparseMatrix(int order)
: this(order, order)
{
}
/// <summary>
/// Create a new sparse matrix with the given number of rows and columns.
/// All cells of the matrix will be initialized to zero.
/// Zero-length matrices are not supported.
/// </summary>
/// <exception cref="ArgumentException">If the row or column count is less than one.</exception>
public SparseMatrix(int rows, int columns)
: this(new SparseCompressedRowMatrixStorage<double>(rows, columns))
{
}
/// <summary>
/// Create a new sparse matrix as a copy of the given other matrix.
/// This new matrix will be independent from the other matrix.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfMatrix(Matrix<double> matrix)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfMatrix(matrix.Storage));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given two-dimensional array.
/// This new matrix will be independent from the provided array.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfArray(double[,] array)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfArray(array));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given indexed enumerable.
/// Keys must be provided at most once, zero is assumed if a key is omitted.
/// This new matrix will be independent from the enumerable.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfIndexed(int rows, int columns, IEnumerable<Tuple<int, int, double>> enumerable)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfIndexedEnumerable(rows, columns, enumerable));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given enumerable.
/// The enumerable is assumed to be in row-major order (row by row).
/// This new matrix will be independent from the enumerable.
/// A new memory block will be allocated for storing the vector.
/// </summary>
/// <seealso href="http://en.wikipedia.org/wiki/Row-major_order"/>
public static SparseMatrix OfRowMajor(int rows, int columns, IEnumerable<double> rowMajor)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfRowMajorEnumerable(rows, columns, rowMajor));
}
/// <summary>
/// Create a new sparse matrix with the given number of rows and columns as a copy of the given array.
/// The array is assumed to be in column-major order (column by column).
/// This new matrix will be independent from the provided array.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
/// <seealso href="http://en.wikipedia.org/wiki/Row-major_order"/>
public static SparseMatrix OfColumnMajor(int rows, int columns, IList<double> columnMajor)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfColumnMajorList(rows, columns, columnMajor));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given enumerable of enumerable columns.
/// Each enumerable in the master enumerable specifies a column.
/// This new matrix will be independent from the enumerables.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfColumns(IEnumerable<IEnumerable<double>> data)
{
return OfColumnArrays(data.Select(v => v.ToArray()).ToArray());
}
/// <summary>
/// Create a new sparse matrix as a copy of the given enumerable of enumerable columns.
/// Each enumerable in the master enumerable specifies a column.
/// This new matrix will be independent from the enumerables.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfColumns(int rows, int columns, IEnumerable<IEnumerable<double>> data)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfColumnEnumerables(rows, columns, data));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given column arrays.
/// This new matrix will be independent from the arrays.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfColumnArrays(params double[][] columns)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfColumnArrays(columns));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given column arrays.
/// This new matrix will be independent from the arrays.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfColumnArrays(IEnumerable<double[]> columns)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfColumnArrays((columns as double[][]) ?? columns.ToArray()));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given column vectors.
/// This new matrix will be independent from the vectors.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfColumnVectors(params Vector<double>[] columns)
{
    // Consistency: mirror the IEnumerable<Vector<double>> overload by
    // projecting to the underlying storage with LINQ instead of a
    // hand-rolled copy loop.
    return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfColumnVectors(columns.Select(c => c.Storage).ToArray()));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given column vectors.
/// This new matrix will be independent from the vectors.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfColumnVectors(IEnumerable<Vector<double>> columns)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfColumnVectors(columns.Select(c => c.Storage).ToArray()));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given enumerable of enumerable rows.
/// Each enumerable in the master enumerable specifies a row.
/// This new matrix will be independent from the enumerables.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfRows(IEnumerable<IEnumerable<double>> data)
{
return OfRowArrays(data.Select(v => v.ToArray()).ToArray());
}
/// <summary>
/// Create a new sparse matrix as a copy of the given enumerable of enumerable rows.
/// Each enumerable in the master enumerable specifies a row.
/// This new matrix will be independent from the enumerables.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfRows(int rows, int columns, IEnumerable<IEnumerable<double>> data)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfRowEnumerables(rows, columns, data));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given row arrays.
/// This new matrix will be independent from the arrays.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfRowArrays(params double[][] rows)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfRowArrays(rows));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given row arrays.
/// This new matrix will be independent from the arrays.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfRowArrays(IEnumerable<double[]> rows)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfRowArrays((rows as double[][]) ?? rows.ToArray()));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given row vectors.
/// This new matrix will be independent from the vectors.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfRowVectors(params Vector<double>[] rows)
{
    // Consistency: mirror the IEnumerable<Vector<double>> overload by
    // projecting to the underlying storage with LINQ instead of a
    // hand-rolled copy loop.
    return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfRowVectors(rows.Select(r => r.Storage).ToArray()));
}
/// <summary>
/// Create a new sparse matrix as a copy of the given row vectors.
/// This new matrix will be independent from the vectors.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfRowVectors(IEnumerable<Vector<double>> rows)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfRowVectors(rows.Select(r => r.Storage).ToArray()));
}
/// <summary>
/// Create a new sparse matrix with the diagonal as a copy of the given vector.
/// This new matrix will be independent from the vector.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfDiagonalVector(Vector<double> diagonal)
{
var m = new SparseMatrix(diagonal.Count, diagonal.Count);
m.SetDiagonal(diagonal);
return m;
}
/// <summary>
/// Create a new sparse matrix with the diagonal as a copy of the given vector.
/// This new matrix will be independent from the vector.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfDiagonalVector(int rows, int columns, Vector<double> diagonal)
{
var m = new SparseMatrix(rows, columns);
m.SetDiagonal(diagonal);
return m;
}
/// <summary>
/// Create a new sparse matrix with the diagonal as a copy of the given array.
/// This new matrix will be independent from the array.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfDiagonalArray(double[] diagonal)
{
var m = new SparseMatrix(diagonal.Length, diagonal.Length);
m.SetDiagonal(diagonal);
return m;
}
/// <summary>
/// Create a new sparse matrix with the diagonal as a copy of the given array.
/// This new matrix will be independent from the array.
/// A new memory block will be allocated for storing the matrix.
/// </summary>
public static SparseMatrix OfDiagonalArray(int rows, int columns, double[] diagonal)
{
var m = new SparseMatrix(rows, columns);
m.SetDiagonal(diagonal);
return m;
}
/// <summary>
/// Create a new sparse matrix and initialize each value to the same provided value.
/// </summary>
public static SparseMatrix Create(int rows, int columns, double value)
{
if (value == 0d) return new SparseMatrix(rows, columns);
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfValue(rows, columns, value));
}
/// <summary>
/// Create a new sparse matrix and initialize each value using the provided init function.
/// </summary>
public static SparseMatrix Create(int rows, int columns, Func<int, int, double> init)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfInit(rows, columns, init));
}
/// <summary>
/// Create a new diagonal sparse matrix and initialize each diagonal value to the same provided value.
/// </summary>
public static SparseMatrix CreateDiagonal(int rows, int columns, double value)
{
if (value == 0d) return new SparseMatrix(rows, columns);
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfDiagonalInit(rows, columns, i => value));
}
/// <summary>
/// Create a new diagonal sparse matrix and initialize each diagonal value using the provided init function.
/// </summary>
public static SparseMatrix CreateDiagonal(int rows, int columns, Func<int, double> init)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfDiagonalInit(rows, columns, init));
}
/// <summary>
/// Create a new square sparse identity matrix where each diagonal value is set to One.
/// </summary>
public static SparseMatrix CreateIdentity(int order)
{
return new SparseMatrix(SparseCompressedRowMatrixStorage<double>.OfDiagonalInit(order, order, i => One));
}
/// <summary>
/// Returns a new matrix containing the lower triangle of this matrix.
/// </summary>
/// <returns>The lower triangle of this matrix.</returns>
public override Matrix<double> LowerTriangle()
{
var result = Build.SameAs(this);
LowerTriangleImpl(result);
return result;
}
/// <summary>
/// Puts the lower triangle of this matrix into the result matrix.
/// </summary>
/// <param name="result">Where to store the lower triangle.</param>
/// <exception cref="ArgumentNullException">If <paramref name="result"/> is <see langword="null" />.</exception>
/// <exception cref="ArgumentException">If the result matrix's dimensions are not the same as this matrix.</exception>
public override void LowerTriangle(Matrix<double> result)
{
if (result == null)
{
throw new ArgumentNullException("result");
}
if (result.RowCount != RowCount || result.ColumnCount != ColumnCount)
{
throw DimensionsDontMatch<ArgumentException>(this, result, "result");
}
if (ReferenceEquals(this, result))
{
var tmp = Build.SameAs(result);
LowerTriangle(tmp);
tmp.CopyTo(result);
}
else
{
result.Clear();
LowerTriangleImpl(result);
}
}
/// <summary>
/// Puts the lower triangle of this matrix into the result matrix.
/// </summary>
/// <param name="result">Where to store the lower triangle.</param>
private void LowerTriangleImpl(Matrix<double> result)
{
var rowPointers = _storage.RowPointers;
var columnIndices = _storage.ColumnIndices;
var values = _storage.Values;
for (var row = 0; row < result.RowCount; row++)
{
var endIndex = rowPointers[row + 1];
for (var j = rowPointers[row]; j < endIndex; j++)
{
if (row >= columnIndices[j])
{
result.At(row, columnIndices[j], values[j]);
}
}
}
}
/// <summary>
/// Returns a new matrix containing the upper triangle of this matrix.
/// </summary>
/// <returns>The upper triangle of this matrix.</returns>
public override Matrix<double> UpperTriangle()
{
var result = Build.SameAs(this);
UpperTriangleImpl(result);
return result;
}
        /// <summary>
        /// Puts the upper triangle of this matrix into the result matrix.
        /// </summary>
        /// <param name="result">Where to store the upper triangle.</param>
        /// <exception cref="ArgumentNullException">If <paramref name="result"/> is <see langword="null" />.</exception>
        /// <exception cref="ArgumentException">If the result matrix's dimensions are not the same as this matrix.</exception>
        public override void UpperTriangle(Matrix<double> result)
        {
            if (result == null)
            {
                throw new ArgumentNullException("result");
            }
            if (result.RowCount != RowCount || result.ColumnCount != ColumnCount)
            {
                throw DimensionsDontMatch<ArgumentException>(this, result, "result");
            }
            if (ReferenceEquals(this, result))
            {
                // In-place extraction: compute into a temporary so the source
                // values are not clobbered while being read, then copy back.
                var tmp = Build.SameAs(result);
                UpperTriangle(tmp);
                tmp.CopyTo(result);
            }
            else
            {
                result.Clear();
                UpperTriangleImpl(result);
            }
        }
        /// <summary>
        /// Puts the upper triangle of this matrix into the result matrix.
        /// Assumes the result has already been cleared or freshly built.
        /// </summary>
        /// <param name="result">Where to store the upper triangle.</param>
        private void UpperTriangleImpl(Matrix<double> result)
        {
            var rowPointers = _storage.RowPointers;
            var columnIndices = _storage.ColumnIndices;
            var values = _storage.Values;
            for (var row = 0; row < result.RowCount; row++)
            {
                var endIndex = rowPointers[row + 1];
                for (var j = rowPointers[row]; j < endIndex; j++)
                {
                    // Keep only entries at or right of the diagonal.
                    if (row <= columnIndices[j])
                    {
                        result.At(row, columnIndices[j], values[j]);
                    }
                }
            }
        }
/// <summary>
/// Returns a new matrix containing the lower triangle of this matrix. The new matrix
/// does not contain the diagonal elements of this matrix.
/// </summary>
/// <returns>The lower triangle of this matrix.</returns>
public override Matrix<double> StrictlyLowerTriangle()
{
var result = Build.SameAs(this);
StrictlyLowerTriangleImpl(result);
return result;
}
/// <summary>
/// Puts the strictly lower triangle of this matrix into the result matrix.
/// </summary>
/// <param name="result">Where to store the lower triangle.</param>
/// <exception cref="ArgumentNullException">If <paramref name="result"/> is <see langword="null" />.</exception>
/// <exception cref="ArgumentException">If the result matrix's dimensions are not the same as this matrix.</exception>
public override void StrictlyLowerTriangle(Matrix<double> result)
{
if (result == null)
{
throw new ArgumentNullException("result");
}
if (result.RowCount != RowCount || result.ColumnCount != ColumnCount)
{
throw DimensionsDontMatch<ArgumentException>(this, result, "result");
}
if (ReferenceEquals(this, result))
{
var tmp = Build.SameAs(result);
StrictlyLowerTriangle(tmp);
tmp.CopyTo(result);
}
else
{
result.Clear();
StrictlyLowerTriangleImpl(result);
}
}
/// <summary>
/// Puts the strictly lower triangle of this matrix into the result matrix.
/// </summary>
/// <param name="result">Where to store the lower triangle.</param>
private void StrictlyLowerTriangleImpl(Matrix<double> result)
{
var rowPointers = _storage.RowPointers;
var columnIndices = _storage.ColumnIndices;
var values = _storage.Values;
for (var row = 0; row < result.RowCount; row++)
{
var endIndex = rowPointers[row + 1];
for (var j = rowPointers[row]; j < endIndex; j++)
{
if (row > columnIndices[j])
{
result.At(row, columnIndices[j], values[j]);
}
}
}
}
/// <summary>
/// Returns a new matrix containing the upper triangle of this matrix. The new matrix
/// does not contain the diagonal elements of this matrix.
/// </summary>
/// <returns>The upper triangle of this matrix.</returns>
public override Matrix<double> StrictlyUpperTriangle()
{
var result = Build.SameAs(this);
StrictlyUpperTriangleImpl(result);
return result;
}
        /// <summary>
        /// Puts the strictly upper triangle of this matrix into the result matrix.
        /// </summary>
        /// <param name="result">Where to store the strictly upper triangle.</param>
        /// <exception cref="ArgumentNullException">If <paramref name="result"/> is <see langword="null" />.</exception>
        /// <exception cref="ArgumentException">If the result matrix's dimensions are not the same as this matrix.</exception>
        public override void StrictlyUpperTriangle(Matrix<double> result)
        {
            if (result == null)
            {
                throw new ArgumentNullException("result");
            }
            if (result.RowCount != RowCount || result.ColumnCount != ColumnCount)
            {
                throw DimensionsDontMatch<ArgumentException>(this, result, "result");
            }
            if (ReferenceEquals(this, result))
            {
                // In-place extraction: compute into a temporary so the source
                // values are not clobbered while being read, then copy back.
                var tmp = Build.SameAs(result);
                StrictlyUpperTriangle(tmp);
                tmp.CopyTo(result);
            }
            else
            {
                result.Clear();
                StrictlyUpperTriangleImpl(result);
            }
        }
        /// <summary>
        /// Puts the strictly upper triangle of this matrix into the result matrix.
        /// Assumes the result has already been cleared or freshly built.
        /// </summary>
        /// <param name="result">Where to store the strictly upper triangle.</param>
        private void StrictlyUpperTriangleImpl(Matrix<double> result)
        {
            var rowPointers = _storage.RowPointers;
            var columnIndices = _storage.ColumnIndices;
            var values = _storage.Values;
            for (var row = 0; row < result.RowCount; row++)
            {
                var endIndex = rowPointers[row + 1];
                for (var j = rowPointers[row]; j < endIndex; j++)
                {
                    // Keep only entries strictly right of the diagonal.
                    if (row < columnIndices[j])
                    {
                        result.At(row, columnIndices[j], values[j]);
                    }
                }
            }
        }
        /// <summary>
        /// Negate each element of this matrix and place the results into the result matrix.
        /// </summary>
        /// <param name="result">The result of the negation.</param>
        protected override void DoNegate(Matrix<double> result)
        {
            // Negation is implemented as copy-then-scale: copy this matrix into
            // the result, then multiply the result by -1 in place.
            CopyTo(result);
            DoMultiply(-1, result);
        }
/// <summary>Calculates the induced infinity norm of this matrix.</summary>
/// <returns>The maximum absolute row sum of the matrix.</returns>
public override double InfinityNorm()
{
var rowPointers = _storage.RowPointers;
var values = _storage.Values;
var norm = 0d;
for (var i = 0; i < RowCount; i++)
{
var startIndex = rowPointers[i];
var endIndex = rowPointers[i + 1];
if (startIndex == endIndex)
{
// Begin and end are equal. There are no values in the row, Move to the next row
continue;
}
var s = 0d;
for (var j = startIndex; j < endIndex; j++)
{
s += Math.Abs(values[j]);
}
norm = Math.Max(norm, s);
}
return norm;
}
        /// <summary>Calculates the entry-wise Frobenius norm of this matrix.</summary>
        /// <returns>The square root of the sum of the squared values.</returns>
        public override double FrobeniusNorm()
        {
            // The Frobenius norm equals sqrt(trace(A*A^T)): each diagonal entry of
            // A*A^T is the squared 2-norm of the corresponding row of A, so summing
            // the diagonal gives the sum of all squared entries.
            // NOTE(review): forming the full A*A^T product is expensive for large
            // matrices; summing squares of the stored values directly would be
            // cheaper — confirm before changing.
            var aat = (SparseCompressedRowMatrixStorage<double>) (this*Transpose()).Storage;
            var norm = 0d;
            for (var i = 0; i < aat.RowCount; i++)
            {
                var startIndex = aat.RowPointers[i];
                var endIndex = aat.RowPointers[i + 1];
                if (startIndex == endIndex)
                {
                    // Begin and end are equal. There are no values in the row, Move to the next row
                    continue;
                }
                for (var j = startIndex; j < endIndex; j++)
                {
                    // Only diagonal entries of A*A^T contribute to the trace.
                    // (They are non-negative by construction, so Abs is a no-op.)
                    if (i == aat.ColumnIndices[j])
                    {
                        norm += Math.Abs(aat.Values[j]);
                    }
                }
            }
            return Math.Sqrt(norm);
        }
/// <summary>
/// Adds another matrix to this matrix.
/// </summary>
/// <param name="other">The matrix to add to this matrix.</param>
/// <param name="result">The matrix to store the result of the addition.</param>
/// <exception cref="ArgumentNullException">If the other matrix is <see langword="null"/>.</exception>
/// <exception cref="ArgumentOutOfRangeException">If the two matrices don't have the same dimensions.</exception>
protected override void DoAdd(Matrix<double> other, Matrix<double> result)
{
var sparseOther = other as SparseMatrix;
var sparseResult = result as SparseMatrix;
if (sparseOther == null || sparseResult == null)
{
base.DoAdd(other, result);
return;
}
if (ReferenceEquals(this, other))
{
if (!ReferenceEquals(this, result))
{
CopyTo(result);
}
Control.LinearAlgebraProvider.ScaleArray(2.0, _storage.Values, _storage.Values);
return;
}
SparseMatrix left;
if (ReferenceEquals(sparseOther, sparseResult))
{
left = this;
}
else if (ReferenceEquals(this, sparseResult))
{
left = sparseOther;
}
else
{
CopyTo(sparseResult);
left = sparseOther;
}
var leftStorage = left._storage;
for (var i = 0; i < leftStorage.RowCount; i++)
{
var endIndex = leftStorage.RowPointers[i + 1];
for (var j = leftStorage.RowPointers[i]; j < endIndex; j++)
{
var columnIndex = leftStorage.ColumnIndices[j];
var resVal = leftStorage.Values[j] + result.At(i, columnIndex);
result.At(i, columnIndex, resVal);
}
}
}
        /// <summary>
        /// Subtracts another matrix from this matrix.
        /// </summary>
        /// <param name="other">The matrix to subtract to this matrix.</param>
        /// <param name="result">The matrix to store the result of subtraction.</param>
        /// <exception cref="ArgumentNullException">If the other matrix is <see langword="null"/>.</exception>
        /// <exception cref="ArgumentOutOfRangeException">If the two matrices don't have the same dimensions.</exception>
        protected override void DoSubtract(Matrix<double> other, Matrix<double> result)
        {
            // Fall back to the general implementation unless both operands are sparse.
            var sparseOther = other as SparseMatrix;
            var sparseResult = result as SparseMatrix;
            if (sparseOther == null || sparseResult == null)
            {
                base.DoSubtract(other, result);
                return;
            }
            // this - this is identically zero.
            if (ReferenceEquals(this, other))
            {
                result.Clear();
                return;
            }
            var otherStorage = sparseOther._storage;
            if (ReferenceEquals(this, sparseResult))
            {
                // result already holds `this`: subtract other's nonzeros in place.
                for (var i = 0; i < otherStorage.RowCount; i++)
                {
                    var endIndex = otherStorage.RowPointers[i + 1];
                    for (var j = otherStorage.RowPointers[i]; j < endIndex; j++)
                    {
                        var columnIndex = otherStorage.ColumnIndices[j];
                        var resVal = sparseResult.At(i, columnIndex) - otherStorage.Values[j];
                        result.At(i, columnIndex, resVal);
                    }
                }
            }
            else
            {
                // Compute as result = -(other) + this: copy other into result (unless
                // they alias), negate it, then add this matrix's nonzeros.
                if (!ReferenceEquals(sparseOther, sparseResult))
                {
                    sparseOther.CopyTo(sparseResult);
                }
                sparseResult.Negate(sparseResult);
                var rowPointers = _storage.RowPointers;
                var columnIndices = _storage.ColumnIndices;
                var values = _storage.Values;
                for (var i = 0; i < RowCount; i++)
                {
                    var endIndex = rowPointers[i + 1];
                    for (var j = rowPointers[i]; j < endIndex; j++)
                    {
                        var columnIndex = columnIndices[j];
                        var resVal = sparseResult.At(i, columnIndex) + values[j];
                        result.At(i, columnIndex, resVal);
                    }
                }
            }
        }
        /// <summary>
        /// Multiplies each element of the matrix by a scalar and places results into the result matrix.
        /// </summary>
        /// <param name="scalar">The scalar to multiply the matrix with.</param>
        /// <param name="result">The matrix to store the result of the multiplication.</param>
        protected override void DoMultiply(double scalar, Matrix<double> result)
        {
            // Scaling by 1 is a plain copy.
            if (scalar == 1.0)
            {
                CopyTo(result);
                return;
            }
            // Scaling by 0, or scaling a matrix with no stored entries, yields zero.
            if (scalar == 0.0 || NonZerosCount == 0)
            {
                result.Clear();
                return;
            }
            var sparseResult = result as SparseMatrix;
            if (sparseResult == null)
            {
                // Non-sparse result: write each scaled nonzero entry individually.
                result.Clear();
                var rowPointers = _storage.RowPointers;
                var columnIndices = _storage.ColumnIndices;
                var values = _storage.Values;
                for (var row = 0; row < RowCount; row++)
                {
                    var start = rowPointers[row];
                    var end = rowPointers[row + 1];
                    if (start == end)
                    {
                        continue;
                    }
                    for (var index = start; index < end; index++)
                    {
                        var column = columnIndices[index];
                        result.At(row, column, values[index] * scalar);
                    }
                }
            }
            else
            {
                // Sparse result: copy the structure once (unless aliased with this)
                // and scale the value array in bulk via the linear algebra provider.
                if (!ReferenceEquals(this, result))
                {
                    CopyTo(sparseResult);
                }
                Control.LinearAlgebraProvider.ScaleArray(scalar, sparseResult._storage.Values, sparseResult._storage.Values);
            }
        }
        /// <summary>
        /// Multiplies this matrix with another matrix and places the results into the result matrix.
        /// </summary>
        /// <param name="other">The matrix to multiply with.</param>
        /// <param name="result">The result of the multiplication.</param>
        protected override void DoMultiply(Matrix<double> other, Matrix<double> result)
        {
            // Sparse * sparse has a dedicated symbolic/numeric two-pass implementation.
            var sparseOther = other as SparseMatrix;
            var sparseResult = result as SparseMatrix;
            if (sparseOther != null && sparseResult != null)
            {
                DoMultiplySparse(sparseOther, sparseResult);
                return;
            }
            // Sparse * diagonal reduces to scaling each stored entry by the
            // diagonal element of its column.
            var diagonalOther = other.Storage as DiagonalMatrixStorage<double>;
            if (diagonalOther != null && sparseResult != null)
            {
                var diagonal = diagonalOther.Data;
                if (other.ColumnCount == other.RowCount)
                {
                    Storage.MapIndexedTo(result.Storage, (i, j, x) => x*diagonal[j], Zeros.AllowSkip, ExistingData.Clear);
                }
                else
                {
                    // Rectangular diagonal: map through a sub-matrix view sized to the result.
                    result.Storage.Clear();
                    Storage.MapSubMatrixIndexedTo(result.Storage, (i, j, x) => x*diagonal[j], 0, 0, RowCount, 0, 0, ColumnCount, Zeros.AllowSkip, ExistingData.AssumeZeros);
                }
                return;
            }
            // General fallback: row-of-A dot column-of-B, materializing each column
            // of `other` into a dense scratch vector for random access.
            result.Clear();
            var columnVector = new DenseVector(other.RowCount);
            var rowPointers = _storage.RowPointers;
            var columnIndices = _storage.ColumnIndices;
            var values = _storage.Values;
            for (var row = 0; row < RowCount; row++)
            {
                var startIndex = rowPointers[row];
                var endIndex = rowPointers[row + 1];
                if (startIndex == endIndex)
                {
                    // Empty row of A produces an all-zero row of the result.
                    continue;
                }
                for (var column = 0; column < other.ColumnCount; column++)
                {
                    // Multiply row of matrix A on column of matrix B
                    other.Column(column, columnVector);
                    var sum = 0d;
                    for (var index = startIndex; index < endIndex; index++)
                    {
                        sum += values[index] * columnVector[columnIndices[index]];
                    }
                    result.At(row, column, sum);
                }
            }
        }
        /// <summary>
        /// Multiplies two CSR sparse matrices (result = this * other) using the
        /// classic two-pass scheme: a symbolic pass that counts the nonzeros of each
        /// result row, then a numeric pass that fills values. The marker array tracks
        /// which result columns have been touched for the current row.
        /// </summary>
        void DoMultiplySparse(SparseMatrix other, SparseMatrix result)
        {
            result.Clear();
            // a* = this (left factor), b* = other (right factor), c* = result, all CSR.
            var ax = _storage.Values;
            var ap = _storage.RowPointers;
            var ai = _storage.ColumnIndices;
            var bx = other._storage.Values;
            var bp = other._storage.RowPointers;
            var bi = other._storage.ColumnIndices;
            int rows = RowCount;
            int cols = other.ColumnCount;
            int[] cp = result._storage.RowPointers;
            // marker[col] remembers the last row (pass 1) / value slot (pass 2)
            // that touched this result column; -1 means untouched.
            var marker = new int[cols];
            for (int ib = 0; ib < cols; ib++)
            {
                marker[ib] = -1;
            }
            // Pass 1 (symbolic): count distinct result columns per row to size the
            // value/index arrays and fill the result row pointers.
            int count = 0;
            for (int i = 0; i < rows; i++)
            {
                // For each row of A
                for (int j = ap[i]; j < ap[i + 1]; j++)
                {
                    // Row number to be added
                    int a = ai[j];
                    for (int k = bp[a]; k < bp[a + 1]; k++)
                    {
                        int b = bi[k];
                        if (marker[b] != i)
                        {
                            marker[b] = i;
                            count++;
                        }
                    }
                }
                // Record non-zero count.
                cp[i + 1] = count;
            }
            var ci = new int[count];
            var cx = new double[count];
            // Reset markers for the numeric pass.
            for (int ib = 0; ib < cols; ib++)
            {
                marker[ib] = -1;
            }
            // Pass 2 (numeric): accumulate products. marker[b] now stores the slot
            // in ci/cx assigned to column b within the current result row; a value
            // below rowStart belongs to a previous row and means "unassigned here".
            count = 0;
            for (int i = 0; i < rows; i++)
            {
                int rowStart = cp[i];
                for (int j = ap[i]; j < ap[i + 1]; j++)
                {
                    int a = ai[j];
                    double aEntry = ax[j];
                    for (int k = bp[a]; k < bp[a + 1]; k++)
                    {
                        int b = bi[k];
                        double bEntry = bx[k];
                        if (marker[b] < rowStart)
                        {
                            // First contribution to (i, b): claim a new slot.
                            marker[b] = count;
                            ci[marker[b]] = b;
                            cx[marker[b]] = aEntry * bEntry;
                            count++;
                        }
                        else
                        {
                            // Column already present in this row: accumulate.
                            cx[marker[b]] += aEntry * bEntry;
                        }
                    }
                }
            }
            result._storage.Values = cx;
            result._storage.ColumnIndices = ci;
            // Sort column indices within rows and drop explicit zeros.
            result._storage.Normalize();
        }
/// <summary>
/// Multiplies this matrix with a vector and places the results into the result vector.
/// </summary>
/// <param name="rightSide">The vector to multiply with.</param>
/// <param name="result">The result of the multiplication.</param>
protected override void DoMultiply(Vector<double> rightSide, Vector<double> result)
{
var rowPointers = _storage.RowPointers;
var columnIndices = _storage.ColumnIndices;
var values = _storage.Values;
for (var row = 0; row < RowCount; row++)
{
var startIndex = rowPointers[row];
var endIndex = rowPointers[row + 1];
if (startIndex == endIndex)
{
continue;
}
var sum = 0d;
for (var index = startIndex; index < endIndex; index++)
{
sum += values[index] * rightSide[columnIndices[index]];
}
result[row] = sum;
}
}
        /// <summary>
        /// Multiplies this matrix with transpose of another matrix and places the results into the result matrix.
        /// </summary>
        /// <param name="other">The matrix to multiply with.</param>
        /// <param name="result">The result of the multiplication.</param>
        protected override void DoTransposeAndMultiply(Matrix<double> other, Matrix<double> result)
        {
            // Fall back to the general implementation unless both operands are sparse.
            var otherSparse = other as SparseMatrix;
            var resultSparse = result as SparseMatrix;
            if (otherSparse == null || resultSparse == null)
            {
                base.DoTransposeAndMultiply(other, result);
                return;
            }
            resultSparse.Clear();
            var rowPointers = _storage.RowPointers;
            var values = _storage.Values;
            var otherStorage = otherSparse._storage;
            // result(i, j) = dot(row i of this, row j of other), since multiplying
            // by other's transpose turns other's rows into columns.
            for (var j = 0; j < RowCount; j++)
            {
                var startIndexOther = otherStorage.RowPointers[j];
                var endIndexOther = otherStorage.RowPointers[j + 1];
                if (startIndexOther == endIndexOther)
                {
                    continue;
                }
                for (var i = 0; i < RowCount; i++)
                {
                    var startIndexThis = rowPointers[i];
                    var endIndexThis = rowPointers[i + 1];
                    if (startIndexThis == endIndexThis)
                    {
                        continue;
                    }
                    var sum = 0d;
                    for (var index = startIndexOther; index < endIndexOther; index++)
                    {
                        // Look up the matching column in this matrix's row i;
                        // FindItem returns a negative index when absent.
                        var ind = _storage.FindItem(i, otherStorage.ColumnIndices[index]);
                        if (ind >= 0)
                        {
                            sum += otherStorage.Values[index]*values[ind];
                        }
                    }
                    resultSparse._storage.At(i, j, sum + result.At(i, j));
                }
            }
        }
/// <summary>
/// Pointwise multiplies this matrix with another matrix and stores the result into the result matrix.
/// </summary>
/// <param name="other">The matrix to pointwise multiply with this one.</param>
/// <param name="result">The matrix to store the result of the pointwise multiplication.</param>
protected override void DoPointwiseMultiply(Matrix<double> other, Matrix<double> result)
{
result.Clear();
var rowPointers = _storage.RowPointers;
var columnIndices = _storage.ColumnIndices;
var values = _storage.Values;
for (var i = 0; i < RowCount; i++)
{
var endIndex = rowPointers[i + 1];
for (var j = rowPointers[i]; j < endIndex; j++)
{
var resVal = values[j]*other.At(i, columnIndices[j]);
if (resVal != 0d)
{
result.At(i, columnIndices[j], resVal);
}
}
}
}
/// <summary>
/// Pointwise divide this matrix by another matrix and stores the result into the result matrix.
/// </summary>
/// <param name="divisor">The matrix to pointwise divide this one by.</param>
/// <param name="result">The matrix to store the result of the pointwise division.</param>
protected override void DoPointwiseDivide(Matrix<double> divisor, Matrix<double> result)
{
result.Clear();
var rowPointers = _storage.RowPointers;
var columnIndices = _storage.ColumnIndices;
var values = _storage.Values;
for (var i = 0; i < RowCount; i++)
{
var endIndex = rowPointers[i + 1];
for (var j = rowPointers[i]; j < endIndex; j++)
{
if (values[j] != 0d)
{
result.At(i, columnIndices[j], values[j]/divisor.At(i, columnIndices[j]));
}
}
}
}
        /// <summary>
        /// Computes the Kronecker product of this matrix with the given matrix and
        /// stores it in the result matrix: each stored entry a(i,j) contributes the
        /// sub-block a(i,j)*other at block position (i, j).
        /// </summary>
        /// <param name="other">The other matrix.</param>
        /// <param name="result">The Kronecker product of the two matrices; must be
        /// (RowCount*other.RowCount) x (ColumnCount*other.ColumnCount).</param>
        /// <exception cref="ArgumentNullException">If <paramref name="other"/> or <paramref name="result"/> is <see langword="null"/>.</exception>
        /// <exception cref="ArgumentOutOfRangeException">If the result matrix's dimensions do not match the product dimensions.</exception>
        public override void KroneckerProduct(Matrix<double> other, Matrix<double> result)
        {
            if (other == null)
            {
                throw new ArgumentNullException("other");
            }
            if (result == null)
            {
                throw new ArgumentNullException("result");
            }
            if (result.RowCount != (RowCount*other.RowCount) || result.ColumnCount != (ColumnCount*other.ColumnCount))
            {
                throw DimensionsDontMatch<ArgumentOutOfRangeException>(this, other, result);
            }
            var rowPointers = _storage.RowPointers;
            var columnIndices = _storage.ColumnIndices;
            var values = _storage.Values;
            for (var i = 0; i < RowCount; i++)
            {
                var endIndex = rowPointers[i + 1];
                for (var j = rowPointers[i]; j < endIndex; j++)
                {
                    // Zero entries contribute zero blocks; skip them.
                    if (values[j] != 0d)
                    {
                        result.SetSubMatrix(i*other.RowCount, other.RowCount, columnIndices[j]*other.ColumnCount, other.ColumnCount, values[j]*other);
                    }
                }
            }
        }
/// <summary>
/// Computes the canonical modulus, where the result has the sign of the divisor,
/// for the given divisor each element of the matrix.
/// </summary>
/// <param name="divisor">The scalar denominator to use.</param>
/// <param name="result">Matrix to store the results in.</param>
protected override void DoModulus(double divisor, Matrix<double> result)
{
var sparseResult = result as SparseMatrix;
if (sparseResult == null)
{
base.DoModulus(divisor, result);
return;
}
if (!ReferenceEquals(this, result))
{
CopyTo(result);
}
var resultStorage = sparseResult._storage;
for (var index = 0; index < resultStorage.Values.Length; index++)
{
resultStorage.Values[index] = Euclid.Modulus(resultStorage.Values[index], divisor);
}
}
/// <summary>
/// Computes the remainder (% operator), where the result has the sign of the dividend,
/// for the given divisor each element of the matrix.
/// </summary>
/// <param name="divisor">The scalar denominator to use.</param>
/// <param name="result">Matrix to store the results in.</param>
protected override void DoRemainder(double divisor, Matrix<double> result)
{
var sparseResult = result as SparseMatrix;
if (sparseResult == null)
{
base.DoRemainder(divisor, result);
return;
}
if (!ReferenceEquals(this, result))
{
CopyTo(result);
}
var resultStorage = sparseResult._storage;
for (var index = 0; index < resultStorage.Values.Length; index++)
{
resultStorage.Values[index] %= divisor;
}
}
/// <summary>
/// Evaluates whether this matrix is symmetric.
/// </summary>
public override bool IsSymmetric()
{
if (RowCount != ColumnCount)
{
return false;
}
var rowPointers = _storage.RowPointers;
var columnIndices = _storage.ColumnIndices;
var values = _storage.Values;
for (var row = 0; row < RowCount; row++)
{
var start = rowPointers[row];
var end = rowPointers[row + 1];
if (start == end)
{
continue;
}
for (var index = start; index < end; index++)
{
var column = columnIndices[index];
if (!values[index].Equals(At(column, row)))
{
return false;
}
}
}
return true;
}
/// <summary>
/// Adds two matrices together and returns the results.
/// </summary>
/// <remarks>This operator will allocate new memory for the result. It will
/// choose the representation of either <paramref name="leftSide"/> or <paramref name="rightSide"/> depending on which
/// is denser.</remarks>
/// <param name="leftSide">The left matrix to add.</param>
/// <param name="rightSide">The right matrix to add.</param>
/// <returns>The result of the addition.</returns>
/// <exception cref="ArgumentOutOfRangeException">If <paramref name="leftSide"/> and <paramref name="rightSide"/> don't have the same dimensions.</exception>
/// <exception cref="ArgumentNullException">If <paramref name="leftSide"/> or <paramref name="rightSide"/> is <see langword="null" />.</exception>
public static SparseMatrix operator +(SparseMatrix leftSide, SparseMatrix rightSide)
{
if (rightSide == null)
{
throw new ArgumentNullException("rightSide");
}
if (leftSide == null)
{
throw new ArgumentNullException("leftSide");
}
if (leftSide.RowCount != rightSide.RowCount || leftSide.ColumnCount != rightSide.ColumnCount)
{
throw DimensionsDontMatch<ArgumentOutOfRangeException>(leftSide, rightSide);
}
return (SparseMatrix)leftSide.Add(rightSide);
}
/// <summary>
/// Returns a <strong>Matrix</strong> containing the same values of <paramref name="rightSide"/>.
/// </summary>
/// <param name="rightSide">The matrix to get the values from.</param>
/// <returns>A matrix containing a the same values as <paramref name="rightSide"/>.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="rightSide"/> is <see langword="null" />.</exception>
public static SparseMatrix operator +(SparseMatrix rightSide)
{
if (rightSide == null)
{
throw new ArgumentNullException("rightSide");
}
return (SparseMatrix)rightSide.Clone();
}
        /// <summary>
        /// Subtracts two matrices together and returns the results.
        /// </summary>
        /// <remarks>This operator will allocate new memory for the result. It will
        /// choose the representation of either <paramref name="leftSide"/> or <paramref name="rightSide"/> depending on which
        /// is denser.</remarks>
        /// <param name="leftSide">The left matrix to subtract.</param>
        /// <param name="rightSide">The right matrix to subtract.</param>
        /// <returns>The result of the subtraction.</returns>
        /// <exception cref="ArgumentException">If <paramref name="leftSide"/> and <paramref name="rightSide"/> don't have the same dimensions.</exception>
        /// <exception cref="ArgumentNullException">If <paramref name="leftSide"/> or <paramref name="rightSide"/> is <see langword="null" />.</exception>
        public static SparseMatrix operator -(SparseMatrix leftSide, SparseMatrix rightSide)
        {
            if (rightSide == null)
            {
                throw new ArgumentNullException("rightSide");
            }
            if (leftSide == null)
            {
                throw new ArgumentNullException("leftSide");
            }
            if (leftSide.RowCount != rightSide.RowCount || leftSide.ColumnCount != rightSide.ColumnCount)
            {
                throw DimensionsDontMatch<ArgumentException>(leftSide, rightSide);
            }
            return (SparseMatrix)leftSide.Subtract(rightSide);
        }
/// <summary>
/// Negates each element of the matrix.
/// </summary>
/// <param name="rightSide">The matrix to negate.</param>
/// <returns>A matrix containing the negated values.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="rightSide"/> is <see langword="null" />.</exception>
public static SparseMatrix operator -(SparseMatrix rightSide)
{
if (rightSide == null)
{
throw new ArgumentNullException("rightSide");
}
return (SparseMatrix)rightSide.Negate();
}
/// <summary>
/// Multiplies a <strong>Matrix</strong> by a constant and returns the result.
/// </summary>
/// <param name="leftSide">The matrix to multiply.</param>
/// <param name="rightSide">The constant to multiply the matrix by.</param>
/// <returns>The result of the multiplication.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="leftSide"/> is <see langword="null" />.</exception>
public static SparseMatrix operator *(SparseMatrix leftSide, double rightSide)
{
if (leftSide == null)
{
throw new ArgumentNullException("leftSide");
}
return (SparseMatrix)leftSide.Multiply(rightSide);
}
/// <summary>
/// Multiplies a <strong>Matrix</strong> by a constant and returns the result.
/// </summary>
/// <param name="leftSide">The matrix to multiply.</param>
/// <param name="rightSide">The constant to multiply the matrix by.</param>
/// <returns>The result of the multiplication.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="rightSide"/> is <see langword="null" />.</exception>
public static SparseMatrix operator *(double leftSide, SparseMatrix rightSide)
{
if (rightSide == null)
{
throw new ArgumentNullException("rightSide");
}
return (SparseMatrix)rightSide.Multiply(leftSide);
}
/// <summary>
/// Multiplies two matrices.
/// </summary>
/// <remarks>This operator will allocate new memory for the result. It will
/// choose the representation of either <paramref name="leftSide"/> or <paramref name="rightSide"/> depending on which
/// is denser.</remarks>
/// <param name="leftSide">The left matrix to multiply.</param>
/// <param name="rightSide">The right matrix to multiply.</param>
/// <returns>The result of multiplication.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="leftSide"/> or <paramref name="rightSide"/> is <see langword="null" />.</exception>
/// <exception cref="ArgumentException">If the dimensions of <paramref name="leftSide"/> or <paramref name="rightSide"/> don't conform.</exception>
public static SparseMatrix operator *(SparseMatrix leftSide, SparseMatrix rightSide)
{
if (leftSide == null)
{
throw new ArgumentNullException("leftSide");
}
if (rightSide == null)
{
throw new ArgumentNullException("rightSide");
}
if (leftSide.ColumnCount != rightSide.RowCount)
{
throw DimensionsDontMatch<ArgumentException>(leftSide, rightSide);
}
return (SparseMatrix)leftSide.Multiply(rightSide);
}
/// <summary>
/// Multiplies a <strong>Matrix</strong> and a Vector.
/// </summary>
/// <param name="leftSide">The matrix to multiply.</param>
/// <param name="rightSide">The vector to multiply.</param>
/// <returns>The result of multiplication.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="leftSide"/> or <paramref name="rightSide"/> is <see langword="null" />.</exception>
public static SparseVector operator *(SparseMatrix leftSide, SparseVector rightSide)
{
if (leftSide == null)
{
throw new ArgumentNullException("leftSide");
}
return (SparseVector)leftSide.Multiply(rightSide);
}
/// <summary>
/// Multiplies a Vector and a <strong>Matrix</strong>.
/// </summary>
/// <param name="leftSide">The vector to multiply.</param>
/// <param name="rightSide">The matrix to multiply.</param>
/// <returns>The result of multiplication.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="leftSide"/> or <paramref name="rightSide"/> is <see langword="null" />.</exception>
public static SparseVector operator *(SparseVector leftSide, SparseMatrix rightSide)
{
    // Fix: the XML doc promises an ArgumentNullException for a null
    // leftSide as well (mirroring the matrix*vector operator), but only
    // rightSide was previously checked.
    if (leftSide == null)
    {
        throw new ArgumentNullException("leftSide");
    }

    if (rightSide == null)
    {
        throw new ArgumentNullException("rightSide");
    }

    // Vector-times-matrix is implemented as a left-multiplication on the matrix.
    return (SparseVector)rightSide.LeftMultiply(leftSide);
}
/// <summary>
/// Computes the element-wise remainder (% operator) of a <strong>Matrix</strong> by a constant
/// and returns the result.
/// </summary>
/// <param name="leftSide">The matrix whose elements the remainder is computed of.</param>
/// <param name="rightSide">The divisor to use.</param>
/// <returns>The result of the remainder operation.</returns>
/// <exception cref="ArgumentNullException">If <paramref name="leftSide"/> is <see langword="null" />.</exception>
public static SparseMatrix operator %(SparseMatrix leftSide, double rightSide)
{
    if (leftSide == null)
    {
        throw new ArgumentNullException("leftSide");
    }
    return (SparseMatrix)leftSide.Remainder(rightSide);
}
/// <summary>
/// Returns a short description of the matrix type, its dimensions and its
/// fill ratio, e.g. "SparseMatrix 3x4-Double 25.00 % Filled".
/// </summary>
public override string ToTypeString()
{
    // Fraction of cells that hold a stored (non-zero) value; the double
    // cast forces floating-point division.
    var fillRatio = NonZerosCount / (RowCount * (double)ColumnCount);
    return string.Format("SparseMatrix {0}x{1}-Double {2:P2} Filled", RowCount, ColumnCount, fillRatio);
}
}
}
| grovesNL/mathnet-numerics | src/Numerics/LinearAlgebra/Double/SparseMatrix.cs | C# | mit | 60,299 |
using System;
using System.Reflection;
class Program {
    // Writes the value of every field in 'fields', read from 'obj', one per
    // line. Field names are deliberately not printed because automatic
    // property backing field names differ between .NET and JSIL.
    static void PrintFieldValues (FieldInfo[] fields, object obj) {
        foreach (var field in fields) {
            Console.WriteLine(field.GetValue(obj));
        }
    }

    // Runs 'action' and reports whether it threw; used to confirm that
    // invalid FieldInfo.GetValue calls fail as expected.
    static void AssertThrows (Action action) {
        var threw = false;
        try {
            action();
        } catch (Exception) {
            threw = true;
        }
        Console.WriteLine(threw ? "OK: exception was thrown" : "Not OK: exception was not thrown");
    }

    public static void Main () {
        var everyField = BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static;
        var staticOnly = everyField ^ BindingFlags.Instance;

        // Instance fields of a struct, a class, and a subclass viewed
        // through its base type; static fields of an enum and a class.
        PrintFieldValues(typeof(MyStruct).GetFields(everyField), new MyStruct(1, 2, "3"));
        PrintFieldValues(typeof(MyEnum).GetFields(staticOnly), null);
        PrintFieldValues(typeof(MyClass).GetFields(everyField), new MyClass());
        PrintFieldValues(typeof(MyClass).GetFields(everyField), new MySubclass());
        PrintFieldValues(typeof(MyClass).GetFields(staticOnly), null);

        // Error cases: instance field with no instance, and an instance of
        // the wrong type.
        AssertThrows(() => PrintFieldValues(typeof(MyClass).GetFields(everyField), null));
        AssertThrows(() => PrintFieldValues(typeof(MyStruct).GetFields(everyField), new MyClass()));
    }
}
// Plain struct with fields of several widths plus a reference type;
// its instance fields are read via reflection in Program.
struct MyStruct {
    public int Field1;
    public long Field2;
    public string Field3;
    // Parameter types are deliberately narrower than the fields they fill
    // (byte -> int, int -> long), so assignment uses implicit widening.
    public MyStruct (byte field1, int field2, string field3) {
        Field1 = field1;
        Field2 = field2;
        Field3 = field3;
    }
}
// Enum with explicit, non-contiguous values; its static literal fields
// (A, B, C) are read via FieldInfo.GetValue with a null instance.
enum MyEnum {
    A = 3,
    B = 5,
    C = 7
}
// Class mixing instance fields, static fields and an automatic property
// (whose compiler-generated backing field is also visible to reflection).
class MyClass {
    public int Field1 = 4;
    public long Field2 = 8;
    public string Field3 = "15";
    public static uint StaticField1 = 16;
    public static ulong StaticField2 = 23;
    public static string AutomaticProperty1 { get; set; }
    // Initialized in the static constructor (rather than inline) so the
    // backing field's value is set before any static field is read.
    static MyClass() {
        AutomaticProperty1 = "42";
    }
}
// Empty subclass: verifies that GetFields on the base type works when the
// supplied instance is of a derived type.
class MySubclass : MyClass {
}
| TukekeSoft/JSIL | Tests/ReflectionTestCases/FieldGetValue.cs | C# | mit | 2,066 |