text stringlengths 2 1.04M | meta dict |
|---|---|
/*
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public
License as published by the Free Software Foundation; either
version 2 of the license, or (at your option) any later version.
*/
package org.gjt.jclasslib.structures.elementvalues;
import org.gjt.jclasslib.structures.InvalidByteCodeException;
import java.io.*;
/**
* Describes an <tt>ArrayElementValue</tt> attribute structure.
*
* @author <a href="mailto:vitor.carreira@gmail.com">Vitor Carreira</a>
* @version $Revision: 1.1 $ $Date: 2005/11/01 13:18:24 $
*/
public class ArrayElementValue extends ElementValue {

    /** Name of this entry, used when presenting the structure. */
    public final static String ENTRY_NAME = "ArrayElement";

    /** Fixed part of the serialized length: the 2-byte num_values field. */
    private static final int INITIAL_LENGTH = 2;

    private ElementValue[] elementValueEntries;

    protected ArrayElementValue() {
        super(ARRAY_TAG);
    }

    /**
     * Get the list of element value associations of this array
     * element value entry.
     *
     * @return the array
     */
    public ElementValue[] getElementValueEntries() {
        return this.elementValueEntries;
    }

    /**
     * Set the list of element value associations of this array
     * element value entry.
     *
     * @param elementValueEntries the array
     */
    public void setElementValueEntries(ElementValue[] elementValueEntries) {
        this.elementValueEntries = elementValueEntries;
    }

    /**
     * Set the list of element value associations of this array
     * element value entry.
     *
     * @param elementValueEntries the array
     * @deprecated the name is a copy-paste leftover from another element value
     *             class and does not describe what the method does; use
     *             {@link #setElementValueEntries(ElementValue[])} instead.
     */
    @Deprecated
    public void setConstValueIndex(ElementValue[] elementValueEntries) {
        setElementValueEntries(elementValueEntries);
    }

    /**
     * Length of this structure beyond the part accounted for by the
     * superclass: the num_values field plus each contained element value.
     */
    protected int getSpecificLength() {
        int length = INITIAL_LENGTH;
        for (int i = 0; i < elementValueEntries.length; i++) {
            length += elementValueEntries[i].getLength();
        }
        return length;
    }

    /**
     * Read this structure from the given input stream: a 2-byte entry count
     * followed by that many element values.
     */
    public void read(DataInput in) throws InvalidByteCodeException, IOException {
        super.read(in);
        int elementValueEntriesLength = in.readUnsignedShort();
        elementValueEntries = new ElementValue[elementValueEntriesLength];
        for (int i = 0; i < elementValueEntries.length; i++) {
            elementValueEntries[i] = ElementValue.create(in, classFile);
        }
        if (debug) debug("read ");
    }

    /**
     * Write this structure to the given output stream in the same layout
     * produced by {@link #read(DataInput)}.
     */
    public void write(DataOutput out) throws InvalidByteCodeException, IOException {
        super.write(out);
        int elementValueEntriesLength = getLength(elementValueEntries);
        out.writeShort(elementValueEntriesLength);
        for (int i = 0; i < elementValueEntriesLength; i++) {
            elementValueEntries[i].write(out);
        }
        if (debug) debug("wrote ");
    }

    protected void debug(String message) {
        super.debug(message +
            "ArrayElementValue with " +
            getLength(elementValueEntries) + " entries");
    }

    public String getEntryName() {
        return ENTRY_NAME;
    }
}
| {
"content_hash": "61275c9a8b1fa375291d40b7a2910663",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 84,
"avg_line_length": 30.340425531914892,
"alnum_prop": 0.6339410939691444,
"repo_name": "seanzwx/tmp",
"id": "c21c3ee4fbea03b7c9b59e9c16046d4a17a13677",
"size": "2852",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "bytecode_editor/src/main/java/org/gjt/jclasslib/structures/elementvalues/ArrayElementValue.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "461"
},
{
"name": "CSS",
"bytes": "47056"
},
{
"name": "Java",
"bytes": "5879803"
},
{
"name": "JavaScript",
"bytes": "1717562"
},
{
"name": "Shell",
"bytes": "1563"
}
],
"symlink_target": ""
} |
// Create the map centred on London. The $(...) tokens are template
// placeholders substituted by the examples build before this script runs —
// do not quote or evaluate them here.
var map = new maptalks.Map('map', {
center: [-0.113049,51.498568],
zoom: 14,
baseLayer: new maptalks.TileLayer('base', {
urlTemplate: '$(urlTemplate)',
subdomains: $(subdomains),
attribution: '$(attribution)'
})
});
// A plain draggable text panel anchored to the top-right corner.
var textPanel = new maptalks.control.Panel({
'position' : 'top-right',
'draggable' : true,
'custom' : false,
'content' : 'A draggable text panel.',
'closeButton' : true
});
map.addControl(textPanel);
// A custom-HTML panel; 'custom': true makes maptalks render the content
// markup verbatim instead of wrapping it in the default panel chrome.
var customPanel = new maptalks.control.Panel({
'position' : 'bottom-right',
'draggable' : true,
'custom' : true,
'content' : '<div class="content">' +
'A custom panel.<br>' +
'<input type="text" height=10 value="a text input"/><br>' +
'<br><a href="javascript:;" onclick="hide()">close</a>' +
'</div>'
});
map.addControl(customPanel);
// Invoked by the inline onclick handler in the custom panel's markup,
// so it must stay a global function.
function hide() {
customPanel.hide();
}
| {
"content_hash": "e3f3a20cc6142ea541c1779ae8c36f68",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 63,
"avg_line_length": 26.352941176470587,
"alnum_prop": 0.5870535714285714,
"repo_name": "maptalks/examples",
"id": "9cc6eea39382b149af8573ae70fb020163b50fd8",
"size": "896",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/ui-control/control-panel/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "63274"
},
{
"name": "Go",
"bytes": "1792"
},
{
"name": "HTML",
"bytes": "15354"
},
{
"name": "Handlebars",
"bytes": "7339"
},
{
"name": "JavaScript",
"bytes": "288366"
}
],
"symlink_target": ""
} |
import java.awt.Color;
import static org.junit.Assert.*;
import org.junit.Test;
/*
* This testing framework provides basic level tests for
* each of the methods, however additional testing will be
* required, along with extensive testing of ALL helper methods
* that you write.
*/
public class PictureECTest_ChromaKey {
/*
* Validate that chromaKey works and does not modify the
* original Picture object.
*/
@Test
public void testChromaKey_Logos()
{
    // Load the foreground image and the background it should be keyed onto.
    Picture original = Picture.loadPicture("Logos.bmp");
    Picture background = Picture.loadPicture("Creek.bmp");
    // Keep a snapshot so we can verify chromaKey leaves its receiver untouched.
    Picture snapshot = new Picture(original);
    Picture expected = Picture.loadPicture("Logos_chromaKeyCreek.bmp");
    Picture actual = original.chromaKey(118, 54, background, 30);
    assertTrue(original.equals(snapshot));
    assertTrue(expected.equals(actual));
}
/*
* Validate that chromaKey works and does not modify the
* original Picture object.
*/
@Test
public void testChromaKey_Maria()
{
    // Load the foreground image and the background it should be keyed onto.
    Picture original = Picture.loadPicture("Maria1.bmp");
    Picture background = Picture.loadPicture("HMC.bmp");
    // Keep a snapshot so we can verify chromaKey leaves its receiver untouched.
    Picture snapshot = new Picture(original);
    Picture expected = Picture.loadPicture("Maria1_ChromaKeyHMC.bmp");
    Picture actual = original.chromaKey(118, 54, background, 30);
    assertTrue(original.equals(snapshot));
    assertTrue(expected.equals(actual));
}
} | {
"content_hash": "9ea0ebf7bf272833e3111181f88d1c89",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 74,
"avg_line_length": 31.068181818181817,
"alnum_prop": 0.6891002194586686,
"repo_name": "CSGreater-Developers/HMC-Grader",
"id": "8ac19a72129ea50495997705e8662571a1cb0e30",
"size": "1367",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "TestFiles/CS60Tests/hw7/PictureECTest_ChromaKey.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "99385"
},
{
"name": "HTML",
"bytes": "680842"
},
{
"name": "Java",
"bytes": "680013"
},
{
"name": "JavaScript",
"bytes": "163008"
},
{
"name": "Makefile",
"bytes": "4561"
},
{
"name": "Prolog",
"bytes": "23824"
},
{
"name": "Python",
"bytes": "624020"
},
{
"name": "Racket",
"bytes": "42680"
},
{
"name": "Shell",
"bytes": "1029"
}
],
"symlink_target": ""
} |
/* generated by x-do-view */
// Lazily-loaded dashboard page component.
const DashboardIndex = () => import('src/pages/dashboard/Index');

// Child routes for the dashboard section, declared as a single literal.
const routerList = [
  {
    path: 'index',
    component: DashboardIndex,
    meta: {
      title: '',
    },
    name: 'dashboardIndex',
    hidden: false,
  },
];
export default routerList; | {
"content_hash": "c85a282e1b96488e178eca017d02a798",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 65,
"avg_line_length": 17.9375,
"alnum_prop": 0.6480836236933798,
"repo_name": "huixisheng/x-scaffold-vue-webpack",
"id": "bd504ece01ea006a182fda537dbb88d9a406dbe9",
"size": "287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "template/src/routers/children/dashboard.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "52"
},
{
"name": "HTML",
"bytes": "1219"
},
{
"name": "JavaScript",
"bytes": "26952"
},
{
"name": "Vue",
"bytes": "4881"
}
],
"symlink_target": ""
} |
// Show the final score and ask whether to restart; navigate to the game
// page on confirm, back to the index page otherwise.
function confirmGameOver(score){
    var playAgain = confirm("Game Over.\nYour score: " + score + ". Play again?");
    location.href = playAgain ? "game.html" : "index.html";
}
enchant();
// Playfield size derived from the physical screen (+15px fudge, presumably
// to cover a scrollbar — TODO confirm).
var width = screen.width + 15;
var height = screen.height;
var game = new Core(width, height);
game.preload('images/player.png', 'images/attack.png', 'images/enemy.png', 'images/ally.png');
game.fps = 20;
// NOTE(review): `paused`, `score`, `timeS` and `timetoOver` are read/written
// below but never declared here — presumably globals defined by the embedding
// page; confirm before refactoring.
game.onload = function(){
// Player sprite; its x position tracks the touch handlers below.
var Player = enchant.Class.create(enchant.Sprite, {
initialize: function(){
enchant.Sprite.call(this, 50, 61);
this.image = game.assets['images/player.png'];
this.frame = 5;
game.rootScene.addChild(this);
}
});
// Projectile spawned at the player's position; travels down the timeline
// by `height` pixels over 30 frames.
var Attack = enchant.Class.create(enchant.Sprite, {
initialize: function(){
enchant.Sprite.call(this, 8, 8);
this.image = game.assets['images/attack.png'];
this.moveTo(player.x + 18, 60);
this.tl.moveBy(0, height, 30);
this.frame = 15;
game.rootScene.addChild(this);
}
});
// Enemy: spawns at a random x at the bottom edge and scrolls upward at a
// randomized speed. Hitting one with an Attack scores a point.
var Enemy = enchant.Class.create(enchant.Sprite, {
initialize: function(){
enchant.Sprite.call(this, 35, 30);
this.image = game.assets['images/enemy.png'];
this.moveTo(Math.floor(Math.random() * width), height);
this.scaleX = -1;
this.tl.moveBy(0, -height-height/8, height * ((Math.random()+50) % 0.9));
game.rootScene.addChild(this);
}
});
// Ally: same movement as Enemy, but hitting one ends the game.
var Ally = enchant.Class.create(enchant.Sprite, {
initialize: function(){
enchant.Sprite.call(this, 24, 29);
this.image = game.assets['images/ally.png'];
this.moveTo(Math.floor(Math.random() * width), height);
this.scaleX = -1;
this.tl.moveBy(0, -height-height/8, height * ((Math.random()+50) % 0.9));
game.rootScene.addChild(this);
}
});
var player = new Player();
// Spawner loops: each iteration creates one sprite, then honours the
// external `paused` flag by pausing the whole game.
game.rootScene.tl.then(function() {
var enemy = new Enemy();
if (paused)
game.pause();
}).delay(Math.random()).loop();
game.rootScene.tl.then(function() {
var ally = new Ally();
if (paused){
game.pause();
}
}).delay(Math.random()*22).loop();
// Touch controls: move the player to the touch point; touching also fires.
game.rootScene.on('touchstart', function(evt){
player.x = evt.localX;
var attack = new Attack();
});
game.rootScene.on('touchmove', function(evt){
player.x = evt.localX;
});
// Collision pass 1: Attack vs Enemy — remove both and bump the score display.
game.rootScene.on('enterframe', function(){
var hits = Attack.intersect(Enemy);
for(var i = 0, len = hits.length; i < len; i++){
game.rootScene.removeChild(hits[i][0]);
game.rootScene.removeChild(hits[i][1]);
score++;
document.getElementById('score').innerHTML = "Score: " + score;
}
});
// Collision pass 2: Attack vs Ally — friendly fire ends the game.
game.rootScene.on('enterframe', function(){
var hitsAlly = Attack.intersect(Ally);
for(var i = 0, len = hitsAlly.length; i < len; i++){
game.rootScene.removeChild(hitsAlly[i][0]);
game.rootScene.removeChild(hitsAlly[i][1]);
confirmGameOver(score);
game.stop();
}
});
// Countdown ticker: once per second, decrement the remaining-time display
// unless paused; while paused, also cancel the game-over timeout.
setInterval(function(){
if(paused == false){
if(timeS > 0){
timeS--;
document.getElementById('time').innerHTML = timeS + "s left";
}
}
else{
clearTimeout(timetoOver);
}
}, 1000);
// Hard time limit: end the game after 60 seconds of play.
timetoOver = setTimeout(function(){
confirmGameOver(score);
game.stop();
}, 60000);
};
game.start(); | {
"content_hash": "044999437ea74dcae228977a400a83a7",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 94,
"avg_line_length": 25.669354838709676,
"alnum_prop": 0.6226830034558593,
"repo_name": "bigmarcolino/azureus",
"id": "6479a8bd86c4dc53d86f76535369e471a20fa933",
"size": "3183",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "js/game.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "4048"
},
{
"name": "HTML",
"bytes": "2364"
},
{
"name": "JavaScript",
"bytes": "210530"
}
],
"symlink_target": ""
} |
package fr.xebia.demo.amazon.aws;
import java.io.IOException;
import java.io.PrintWriter;
import java.math.BigInteger;
import java.security.InvalidKeyException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.SecureRandom;
import java.security.Security;
import java.security.SignatureException;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.Date;
import javax.security.auth.x500.X500Principal;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.openssl.PEMWriter;
import org.bouncycastle.x509.X509V1CertificateGenerator;
import com.google.common.base.Strings;
/**
* Demo of a generation of a X509 Self Signed Certificate using <a
* href="http://www.bouncycastle.org/">Bouncy Castle</a> library.
*
* @author <a href="mailto:cyrille@cyrilleleclerc.com">Cyrille Le Clerc</a>
*/
public class SelfSignedX509CertificateGeneratorDemo {

    static {
        // adds the Bouncy castle provider to java security
        Security.addProvider(new BouncyCastleProvider());
    }

    /**
     * <p>
     * Generate a self signed X509 certificate .
     * </p>
     * <p>
     * The certificate is valid from yesterday until two years from now, and is
     * dumped together with its private key in PEM form on stdout.
     * </p>
     * <p>
     * TODO : do the same with
     * {@link org.bouncycastle.cert.X509v1CertificateBuilder} instead of the
     * deprecated {@link org.bouncycastle.x509.X509V1CertificateGenerator}.
     * </p>
     */
    @SuppressWarnings("deprecation")
    static void generateSelfSignedX509Certificate() throws NoSuchAlgorithmException, NoSuchProviderException, CertificateEncodingException,
        SignatureException, InvalidKeyException, IOException {

        // yesterday
        Date validityBeginDate = new Date(System.currentTimeMillis() - 24L * 60 * 60 * 1000);
        // in 2 years. The multiplication must be carried out in long arithmetic:
        // 2 * 365 * 24 * 60 * 60 * 1000 = 63,072,000,000 overflows int and used to
        // yield a NEGATIVE offset, i.e. an already-expired certificate.
        Date validityEndDate = new Date(System.currentTimeMillis() + 2L * 365 * 24 * 60 * 60 * 1000);

        // GENERATE THE PUBLIC/PRIVATE RSA KEY PAIR
        KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA", "BC");
        keyPairGenerator.initialize(1024, new SecureRandom());
        KeyPair keyPair = keyPairGenerator.generateKeyPair();

        // GENERATE THE X509 CERTIFICATE
        X509V1CertificateGenerator certGen = new X509V1CertificateGenerator();
        X500Principal dnName = new X500Principal("CN=John Doe");
        certGen.setSerialNumber(BigInteger.valueOf(System.currentTimeMillis()));
        certGen.setSubjectDN(dnName);
        certGen.setIssuerDN(dnName); // use the same: self-signed
        certGen.setNotBefore(validityBeginDate);
        certGen.setNotAfter(validityEndDate);
        certGen.setPublicKey(keyPair.getPublic());
        certGen.setSignatureAlgorithm("SHA256WithRSAEncryption");
        X509Certificate cert = certGen.generate(keyPair.getPrivate(), "BC");

        // DUMP CERTIFICATE AND KEY PAIR
        System.out.println(Strings.repeat("=", 80));
        System.out.println("CERTIFICATE TO_STRING");
        System.out.println(Strings.repeat("=", 80));
        System.out.println();
        System.out.println(cert);
        System.out.println();
        System.out.println(Strings.repeat("=", 80));
        System.out.println("CERTIFICATE PEM (to store in a cert-johndoe.pem file)");
        System.out.println(Strings.repeat("=", 80));
        System.out.println();
        PEMWriter pemWriter = new PEMWriter(new PrintWriter(System.out));
        pemWriter.writeObject(cert);
        pemWriter.flush();
        System.out.println();
        System.out.println(Strings.repeat("=", 80));
        System.out.println("PRIVATE KEY PEM (to store in a priv-johndoe.pem file)");
        System.out.println(Strings.repeat("=", 80));
        System.out.println();
        pemWriter.writeObject(keyPair.getPrivate());
        pemWriter.flush();
        System.out.println();
    }

    /** Demo entry point; errors are printed rather than propagated. */
    public static void main(String[] args) {
        try {
            generateSelfSignedX509Certificate();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
| {
"content_hash": "c1a646547949933ca04ff87a26447e6e",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 139,
"avg_line_length": 36.50442477876106,
"alnum_prop": 0.6918787878787879,
"repo_name": "xebia-france/xebia-cloudcomputing-extras",
"id": "ef19fb370f5cb939c11c6c67805387035d20d3d4",
"size": "4755",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/fr/xebia/demo/amazon/aws/SelfSignedX509CertificateGeneratorDemo.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "408032"
},
{
"name": "Python",
"bytes": "9011"
},
{
"name": "Shell",
"bytes": "2008"
}
],
"symlink_target": ""
} |
package texteditor;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.util.ArrayList;
import javax.swing.*;
import javax.swing.text.StyleConstants;
/**
 * Top-level Swing UI of the text processor. Builds the main frame, the
 * menu bar, the wrapping button toolbar and the tabbed editor area, and
 * wires every menu item / toolbar button to its operation object.
 */
public class GUI {
private Frame mainframe;
private JMenuBar menuBar;
private JPanel mainPanel, topPanel, centerPanel;
private Tabs tabs;
private ArrayList<Button> buttons;
private OpenedFilesFromLastSession openedFilesFromLastSession;
private ClosedFiles closedFiles;
// Shared list of operations; both the menus and the toolbar are built from it.
private ArrayList<MenuItem> operationHolder;
//Bugs...
//The Exit button in the File menu does not call windowClosing(...) in the Frame window listener...
//The Style Changers do not toggle like... if what you select is already bold it does not unbold it...
//Does not save the formatting of the text
//Bugs fixed:
//make the button wrap around off of screen Found code online see WrapLayout.java
//The Exit button in the File menu does not call windowClosing(...) in the Frame window listener...
// No duplication now, calls a onWindowClosing(...)
//Duplicated code in:
//ColoredButton in actionPerformed(...) AND ChangeSelectedText in actionPerformed(...)
//Add:
//change selected font style, font size,
//Testing:
//caret listener on text area
// Constructor: assembles the panel hierarchy, restores the previous
// session's tabs, then builds menus and toolbar from operationHolder.
// NOTE(review): createOperations() must run before addMenuItems() and
// addButtons(), which both iterate operationHolder.
public GUI() {
mainPanel = new JPanel(new BorderLayout());
menuBar = new JMenuBar();
topPanel = new JPanel(new WrapLayout(FlowLayout.LEFT));//new FlowLayout(FlowLayout.LEFT));
centerPanel = new JPanel(new BorderLayout());
closedFiles = new ClosedFiles();
tabs = new Tabs(closedFiles);
centerPanel.add(tabs.getTabbedPane(), BorderLayout.CENTER);
openedFilesFromLastSession = new OpenedFilesFromLastSession(tabs);
buttons = new ArrayList<>();
operationHolder = new ArrayList<>();
mainframe = new Frame("Text Processor", 1360, 600, false, mainPanel, openedFilesFromLastSession, tabs);
createOperations();
addMenuItems();
mainframe.setJMenuBar(menuBar);
addButtons();
//mainframe.setgl
mainPanel.add(topPanel, BorderLayout.NORTH);
mainPanel.add(centerPanel, BorderLayout.CENTER);
}
// Populates operationHolder with every file/edit operation. The boolean
// argument of MenuItem marks the end of a visual menu group (separator);
// the order here also drives the File/Edit split in addMenuItems().
private void createOperations() {
String fileChooserStartDirectory = ".";
operationHolder.add(new MenuItem("Open", KeyEvent.VK_O, true, new OpenFileOperation(tabs, fileChooserStartDirectory, mainframe)));
operationHolder.add(new MenuItem("Save", KeyEvent.VK_S, false, new SaveOperation(tabs, fileChooserStartDirectory, mainframe)));
operationHolder.add(new MenuItem("Save As", KeyEvent.VK_A, true, new SaveAsOperation(tabs, fileChooserStartDirectory, mainframe)));
operationHolder.add(new MenuItem("Rename", KeyEvent.VK_R, true, new RenameFileOperation(tabs)));
operationHolder.add(new MenuItem("New Tab", KeyEvent.VK_T, false, new AddNewTabOperation(tabs)));
operationHolder.add(new MenuItem("Close Tab", KeyEvent.VK_C, false, new CloseCurrentTabOperation(tabs)));
operationHolder.add(new MenuItem("Close All Tabs", KeyEvent.VK_A, true, new CloseAllTabsOperation(tabs)));
operationHolder.add(new MenuItem("Open Last File", KeyEvent.VK_L, true, new OpenLastFileClosed(tabs, closedFiles)));
//operationHolder.add(new MenuItem("Exit", KeyEvent.VK_E, false, new ExitOperation(mainframe)));
operationHolder.add(new MenuItem("<html><b>B</b></html>", KeyEvent.VK_B, false, new ChangeSelectedText(tabs, new TextToBold())));
operationHolder.add(new MenuItem("<html><i>I</i></html>", KeyEvent.VK_I, false, new ChangeSelectedText(tabs, new TextToItalics())));
operationHolder.add(new MenuItem("<html><u>U</u></html>", KeyEvent.VK_U, false, new ChangeSelectedText(tabs, new TextToUnderline())));
operationHolder.add(new MenuItem("<html><strike>S</strike></html>", KeyEvent.VK_S, true, new ChangeSelectedText(tabs, new TextToStrikeThrough())));
//strike is deprecated, should use del but it does not work in java
operationHolder.add(new MenuItem("Align Left", KeyEvent.VK_L, false, new ChangeTextAlignmentOperation(tabs, StyleConstants.ALIGN_LEFT)));
operationHolder.add(new MenuItem("Align Center", KeyEvent.VK_C, false, new ChangeTextAlignmentOperation(tabs, StyleConstants.ALIGN_CENTER)));
operationHolder.add(new MenuItem("Align Right", KeyEvent.VK_R, true, new ChangeTextAlignmentOperation(tabs, StyleConstants.ALIGN_RIGHT)));
//operationHolders.add(new MenuItem("Foreground Color", false, new ChangeTextColorOperation(new ColoredButton(topPanel, Color.BLACK, tabs, new ChangeForegroundColor()))));
//operationHolders.add(new MenuItem("Background Color", false, new ChangeTextColorOperation(new ColoredButton(topPanel, Color.WHITE, tabs, new ChangeForegroundColor()))));
}
// Creates one toolbar button per operation, plus the two color-picker
// buttons that have no menu counterpart.
private void addButtons() {
for (MenuItem item : operationHolder) {
buttons.add(new Button(item.getName(), topPanel, item.getActionEventOperation()));
}
buttons.add(new Button("FG Color", topPanel, new ChangeTextColorOperation(new ColoredButton(topPanel, Color.BLACK, tabs, new ChangeForegroundColor()))));
buttons.add(new Button("BG Color", topPanel, new ChangeTextColorOperation(new ColoredButton(topPanel, Color.WHITE, tabs, new ChangeBackgroundColor()))));
}
// Splits operationHolder into a File menu (first NEXT_MENU_START entries
// plus Exit) and an Edit menu (the rest, i.e. the style/alignment actions).
private void addMenuItems() {
//going to need cut, copy, paste, rename... etc.
ArrayList<MenuItem> menuItems = new ArrayList<>();
final int NEXT_MENU_START = 7; //which is starting at the B (bold action)
int i;
for (i = 0; i < operationHolder.size() - NEXT_MENU_START; i++) {
menuItems.add(operationHolder.get(i));
}
menuItems.add(new MenuItem("Exit", KeyEvent.VK_E, false, new ExitOperation(mainframe)));
new Menu("File", KeyEvent.VK_F, menuItems, menuBar);
menuItems.clear();
for ( ; i < operationHolder.size(); i++) {
menuItems.add(operationHolder.get(i));
}
new Menu("Edit", KeyEvent.VK_E, menuItems, menuBar);
menuItems.clear();
}
}
//public class GUI {
//
// private JPanel gui;
// private JTextArea textArea;
// private JButton open, save;
//
// public static void main(String[] args) {
// new GUI();
// }
//
// public GUI() {
// JFrame guiFrame = new JFrame();
// init(guiFrame, "Text Editor", new Dimension(400, 300));
// addComponentsTo(guiFrame);
//
// }
//
// public void init(JFrame frame, String title, Dimension dim) {
// frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
// frame.setTitle(title);
// frame.setSize(dim);
// frame.setLocationRelativeTo(null);
// frame.setVisible(true);
// }
//
// public void addComponentsTo(JFrame frame) {
// textArea = new JTextArea();
// open = createButton("Load");
// open.addActionListener(new OpenFile());
// save = createButton("Save");
// save.addActionListener(new SaveFile());
// gui = new JPanel(new BorderLayout());
// frame.add(gui);
//
//
//// JTabbedPane tabs = new JTabbedPane();
//// tabs.addTab("Tab1", null);
//// tabs.addTab("Tab2", null);
//// gui.add(tabs, BorderLayout.SOUTH);
//
//
// JPanel topPanel = new JPanel();
// gui.add(topPanel, BorderLayout.NORTH);
// topPanel.add(open);
// topPanel.add(save);
//
// JScrollPane scrollBar = new JScrollPane(textArea, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
// JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
// gui.add(scrollBar, BorderLayout.CENTER);
//
//
// }
//
// public JLabel createLabel(String name) {
// return new JLabel(name);
// }
//
// private JButton createButton(String name) {
// return new JButton(name);
// }
//
// private void addComp(JPanel thePanel, JComponent comp, int xPos, int yPos, int compWidth, int compHeight, int place, int stretch){
//
// GridBagConstraints gridConstraints = new GridBagConstraints();
//
// gridConstraints.gridx = xPos;
// gridConstraints.gridy = yPos;
// gridConstraints.gridwidth = compWidth;
// gridConstraints.gridheight = compHeight;
// gridConstraints.weightx = 100;
// gridConstraints.weighty = 100;
// gridConstraints.insets = new Insets(5,5,5,5);
// gridConstraints.anchor = place;
// gridConstraints.fill = stretch;
//
// thePanel.add(comp, gridConstraints);
//
// }
//}
//
//class OpenFile implements ActionListener {
// private final JFileChooser fc = new JFileChooser();
// @Override
// public void actionPerformed(ActionEvent e) {
// int returnVal = fc.showOpenDialog();
// //if (e.getSource() == openButton) {
// int returnVal = fc.showOpenDialog(FileChooserDemo.this);
//
// if (returnVal == JFileChooser.APPROVE_OPTION) {
// File file = fc.getSelectedFile();
// //This is where a real application would open the file.
// //log.append("Opening: " + file.getName() + "." + newline);
// } else {
// //log.append("Open command cancelled by user." + newline);
// }
// //}
// }
//
//}
//
//class SaveFile implements ActionListener {
//
// @Override
// public void actionPerformed(ActionEvent e) {
//
// }
//
//}
//JPanel gui = new JPanel(new GridBagLayout());
//frame.add(gui);
//
//addComp(gui, new JLabel("File: "), 0, 0, 1, 1, GridBagConstraints.EAST, GridBagConstraints.NONE);
//
//JComboBox files = new JComboBox();
//files.setEditable(true);
//files.setPreferredSize(new Dimension(300, files.getPreferredSize().height));
//addComp(gui, files, 1, 0, 2, 1, GridBagConstraints.WEST, GridBagConstraints.NONE);
//JPanel GUI = new JPanel();
//JPanel mainPanel = new JPanel(new GridBagLayout());
//GridBagConstraints constraints = new GridBagConstraints();
//GUI.add(mainPanel);
//frame.setContentPane(GUI);
//
//constraints.gridx = 0;
//constraints.gridy = 0;
//constraints.fill = GridBagConstraints.NONE;
//mainPanel.add(createLabel("File: "), constraints);
//constraints.gridx = 1;
////constraints.gridwidth = 3;
//JComboBox input = new JComboBox();
//input.setEditable(true);
//mainPanel.add(input);
//JPanel panel = new JPanel(new BorderLayout());
//frame.add(panel);
//JPanel topPanel = new JPanel();
//JLabel labels[] = new JLabel[1];
//for (JLabel label : labels) {
// label = new JLabel("File: ");
// topPanel.add(label, BorderLayout.WEST);
//}
//
//JComboBox usedFiles = new JComboBox();
//usedFiles.setEditable(true);
////usedFiles.setSize(d);
//topPanel.add(usedFiles, BorderLayout.EAST);
//panel.add(topPanel, BorderLayout.WEST);
//
//JPanel bottomPanel = new JPanel();
//JButton convertButton = new JButton("Convert");
//convertButton.addActionListener(new ActionListener() {
//
// @Override
// public void actionPerformed(ActionEvent e) {
// System.out.println(e);
// //get the string in usedFiles...
// //convert the file
// }
//
//});
//bottomPanel.add(convertButton);
//panel.add(bottomPanel, BorderLayout.SOUTH);
//JPanel panel = new JPanel(new GridBagLayout());
//GridBagConstraints gbc = new GridBagConstraints();
//frame.add(panel);
//gbc.anchor = gbc.NORTHEAST;
//JPanel topPanel = new JPanel();
//JLabel labels[] = new JLabel[1];
//for (JLabel label : labels) {
// label = new JLabel("File: ");
// topPanel.add(label);
//}
////gbc.fill = gbc.HORIZONTAL;
//JComboBox usedFiles = new JComboBox();
//usedFiles.setEditable(true);
//topPanel.add(usedFiles);
//panel.add(topPanel, gbc);
//
//
////JPanel bottomPanel = new JPanel();
////JButton convertButton = new JButton("Convert");
////bottomPanel.add(convertButton);
////panel.add(bottomPanel, gbc); | {
"content_hash": "4053db054c7c7acff13d0667732e38d1",
"timestamp": "",
"source": "github",
"line_count": 329,
"max_line_length": 179,
"avg_line_length": 36.98176291793313,
"alnum_prop": 0.6455987507191584,
"repo_name": "chrisstopher/TextEditor",
"id": "00df724bbcb31a4556eeaf0c32e3fac8c9b89bda",
"size": "12167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/texteditor/GUI.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "56190"
}
],
"symlink_target": ""
} |
package com.twitter.finagle.thrift
/**
* OutputBuffers are convenient ways of getting at TProtocols for
* output to byte arrays
*/
import org.apache.thrift.protocol.TBinaryProtocol
import org.apache.thrift.transport.TMemoryBuffer
import org.apache.thrift.TBase
private[thrift] object OutputBuffer {
  private[thrift] val protocolFactory = new TBinaryProtocol.Factory()

  /** Serializes the given thrift message into a fresh byte array. */
  def messageToArray(message: TBase[_, _]) = {
    val out = new OutputBuffer
    message.write(out())
    out.toArray
  }
}
private[thrift] class OutputBuffer {
  import OutputBuffer._

  // In-memory transport backing the protocol; grows as needed from 512 bytes.
  private[this] val transport = new TMemoryBuffer(512)
  private[this] val protocol = protocolFactory.getProtocol(transport)

  /** The TProtocol to write thrift structures into. */
  def apply() = protocol

  /** Flushes the protocol and returns exactly the bytes written so far. */
  def toArray = {
    protocol.getTransport().flush()
    java.util.Arrays.copyOfRange(
      transport.getArray(), 0, transport.length())
  }
}
| {
"content_hash": "2bf099d149db226b990ec5ec1460235e",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 69,
"avg_line_length": 24.27777777777778,
"alnum_prop": 0.7379862700228833,
"repo_name": "enachb/finagle_2.9_durgh",
"id": "a654e6f114187d92e39868e11c99c9d7ddad00cf",
"size": "874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "finagle-thrift/src/main/scala/com/twitter/finagle/thrift/OutputBuffer.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "496384"
},
{
"name": "Ruby",
"bytes": "8081"
},
{
"name": "Scala",
"bytes": "839869"
}
],
"symlink_target": ""
} |
<?php
// include configurations
include "config.inc.php";

/**
 * Send the JSON response and stop the request.
 *
 * @param array $json payload to encode
 */
function output_json($json)
{
    header('Content-type: application/json');
    echo json_encode($json);
    exit;
}

/* Get data */
if (isset($_GET['token'])) {
    $token = $_GET['token'];
} else {
    output_json(array("status" => 0, "msg" => "Error, token required!"));
}
if (isset($_GET['post'])) {
    $post = $_GET['post'];
} else {
    output_json(array("status" => 0, "msg" => "Error, post required!"));
}

/* Identifying user */
// SECURITY FIX: the token came straight from $_GET and was interpolated into
// the SQL string, allowing SQL injection. Bind it as a parameter instead.
// NOTE(review): get_result() requires the mysqlnd driver — confirm it is
// available in the deployment environment.
$stmt = $mysqli->prepare("SELECT password FROM users WHERE password = ?");
if ($stmt) {
    $stmt->bind_param("s", $token);
    $stmt->execute();
    $res = $stmt->get_result();
    if ($res->num_rows == 0) { // check that the token is valid
        output_json(array("status" => 0, "msg" => "token wrong"));
    }
    $stmt->close();
} else { // problem with database
    output_json(array("status" => 0, "msg" => "Error checking token!", "details" => $mysqli->error));
}

/* Fetch the comments of the requested post (id bound as an integer) */
$stmt = $mysqli->prepare("SELECT * FROM comments WHERE post_id = ?");
if ($stmt) {
    $stmt->bind_param("i", $post);
    $stmt->execute();
    $res = $stmt->get_result();
    if ($res->num_rows > 0) {
        $rows = array();
        while ($row = $res->fetch_assoc()) {
            $rows[] = $row;
        }
        $json = array("status" => 1, "msg" => "comments found !", "comments" => $rows);
    } else {
        $json = array("status" => 1, "msg" => "no comments to be found!", "comments" => array());
    }
    $stmt->close();
} else {
    $json = array("status" => 0, "msg" => "Error getting comments!", "details" => ($mysqli->error));
}

/* Output */
output_json($json);
| {
"content_hash": "ee128a4a301bd46bfb09c4efb277b91d",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 100,
"avg_line_length": 24.67241379310345,
"alnum_prop": 0.5422781271837875,
"repo_name": "BOUAZIZamrou/mabinetnech-ws",
"id": "da82ea41f809226801f107da47a253e9d1f5ed0a",
"size": "1431",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "getpostcomments.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "89"
},
{
"name": "PHP",
"bytes": "9382"
}
],
"symlink_target": ""
} |
# Generated resource class representing a MerchantCenterLink: a link between
# a Google Ads customer and a Merchant Center account.
package Google::Ads::GoogleAds::V11::Resources::MerchantCenterLink;
use strict;
use warnings;
use base qw(Google::Ads::GoogleAds::BaseEntity);
use Google::Ads::GoogleAds::Utils::GoogleAdsHelper;
# Constructor: copies the recognised fields (id, merchantCenterAccountName,
# resourceName, status) out of the argument hashref; any field the caller
# did not supply is stripped so it is omitted from the JSON payload.
sub new {
my ($class, $args) = @_;
my $self = {
id => $args->{id},
merchantCenterAccountName => $args->{merchantCenterAccountName},
resourceName => $args->{resourceName},
status => $args->{status}};
# Delete the unassigned fields in this object for a more concise JSON payload
remove_unassigned_fields($self, $args);
bless $self, $class;
return $self;
}
1;
| {
"content_hash": "63f80624ed8bba160fbbf9eb2a3447b9",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 79,
"avg_line_length": 26.708333333333332,
"alnum_prop": 0.62402496099844,
"repo_name": "googleads/google-ads-perl",
"id": "a0fbf2c5b3f2e141aef7aaa25a9718a6c3dffcc6",
"size": "1217",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "lib/Google/Ads/GoogleAds/V11/Resources/MerchantCenterLink.pm",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "73"
},
{
"name": "Perl",
"bytes": "5866064"
}
],
"symlink_target": ""
} |
/**
* @addtogroup math
*/
/**
* @file value.h
* @author John Wiegley
*
* @ingroup math
*
* @brief Abstract dynamic type representing various numeric types
*
* A value_t object can be one of many types, and changes its type
* dynamically based on how it is used. For example, if you assign
* the number 10 to a value object, it's internal type will be
* INTEGER.
*/
#ifndef _VALUE_H
#define _VALUE_H
#include "balance.h" // includes amount.h
#include "mask.h"
namespace ledger {
DECLARE_EXCEPTION(value_error, std::runtime_error);
class scope_t;
/**
* @class value_t
*
* @brief Dynamic type representing various numeric types.
*
* The following type is a polymorphous value type used solely for
* performance reasons. The alternative is to compute value
* expressions (valexpr.cc) in terms of the largest data type,
* balance_t. This was found to be prohibitively expensive, especially
* when large logic chains were involved, since many temporary
* allocations would occur for every operator. With value_t, and the
* fact that logic chains only need boolean values to continue, no
* memory allocations need to take place at all.
*/
class value_t
: public ordered_field_operators<value_t,
equality_comparable<value_t, balance_t,
additive<value_t, balance_t,
multiplicative<value_t, balance_t,
ordered_field_operators<value_t, amount_t,
ordered_field_operators<value_t, double,
ordered_field_operators<value_t, unsigned long,
ordered_field_operators<value_t, long> > > > > > > >
{
public:
/**
* The sequence_t member type abstracts the type used to represent a
* resizable "array" of value_t objects.
*/
typedef ptr_deque<value_t> sequence_t;
typedef sequence_t::iterator iterator;
typedef sequence_t::const_iterator const_iterator;
typedef sequence_t::difference_type difference_type;
/**
* type_t gives the type of the data contained or referenced by a
* value_t object. Use the type() method to get a value of type
* type_t.
*/
enum type_t {
VOID, // a null value (i.e., uninitialized)
BOOLEAN, // a boolean
DATETIME, // a date and time (Boost posix_time)
DATE, // a date (Boost gregorian::date)
INTEGER, // a signed integer value
AMOUNT, // a ledger::amount_t
BALANCE, // a ledger::balance_t
STRING, // a string object
MASK, // a regular expression mask
SEQUENCE, // a vector of value_t objects
SCOPE, // a pointer to a scope
ANY // a pointer to an arbitrary object
};
class storage_t
{
friend class value_t;
/**
* The `data' member holds the actual bytes relating to whatever
* has been stuffed into this storage object. There is a set of
* asserts in value.cc to guarantee that the sizeof expression
* used here is indeed at least as big as the largest object that
* will ever be copied into `data'.
*
* The `type' member holds the value_t::type_t value representing
* the type of the object stored.
*/
variant<bool, // BOOLEAN
datetime_t, // DATETIME
date_t, // DATE
long, // INTEGER
amount_t, // AMOUNT
balance_t *, // BALANCE
string, // STRING
mask_t, // MASK
sequence_t *, // SEQUENCE
scope_t *, // SCOPE
boost::any // ANY
> data;
type_t type;
/**
* `refc' holds the current reference count for each storage_t
* object.
*/
mutable int refc;
/**
* Constructor. Since all storage object are assigned to after
* construction, the only constructors allowed are explicit, and
* copy (see below). The default starting type is VOID, which
* should rarely ever be seen in practice, since the first thing
* that value_t typically does is to assign a valid value.
*/
explicit storage_t() : type(VOID), refc(0) {
TRACE_CTOR(value_t::storage_t, "");
}
public: // so `checked_delete' can access it
/**
* Destructor. Must only be called when the reference count has
* reached zero. The `destroy' method is used to do the actual
* cleanup of the data, since it's quite possible for `destroy' to
* be called while the object is still active -- to clear the
* stored data for subsequent reuse of the storage_t object.
*/
~storage_t() {
TRACE_DTOR(value_t::storage_t);
VERIFY(refc == 0);
destroy();
}
private:
/**
* Assignment and copy operators. These are called when making a
* new copy of a storage object in order to modify the copy.
*/
explicit storage_t(const storage_t& rhs)
: type(rhs.type), refc(0) {
*this = rhs;
TRACE_CTOR(value_t::storage_t, "copy");
}
storage_t& operator=(const storage_t& rhs);
/**
* Reference counting methods. The intrusive_ptr_* methods are
* used by boost::intrusive_ptr to manage the calls to acquire and
* release.
*/
void acquire() const {
DEBUG("value.storage.refcount",
"Acquiring " << this << ", refc now " << refc + 1);
VERIFY(refc >= 0);
refc++;
}
void release() const {
DEBUG("value.storage.refcount",
"Releasing " << this << ", refc now " << refc - 1);
VERIFY(refc > 0);
if (--refc == 0)
checked_delete(this);
}
friend inline void intrusive_ptr_add_ref(value_t::storage_t * storage_ptr) {
storage_ptr->acquire();
}
friend inline void intrusive_ptr_release(value_t::storage_t * storage_ptr) {
storage_ptr->release();
}
// Release any heap data owned by this storage cell and reset it to
// VOID so the cell can be reused.  Only BALANCE and SEQUENCE keep
// owning raw pointers inside the variant and need an explicit delete;
// every other payload is destroyed by the `data = false' assignment,
// which replaces the variant's contents.
void destroy() {
DEBUG("value.storage.refcount", "Destroying " << this);
switch (type) {
case VOID:
// Nothing was ever stored; leave the cell untouched.
return;
case BALANCE:
checked_delete(boost::get<balance_t *>(data));
break;
case SEQUENCE:
checked_delete(boost::get<sequence_t *>(data));
break;
default:
break;
}
data = false;
type = VOID;
}
};
private:
/**
* The actual data for each value_t is kept in reference counted storage.
* Data is modified using a copy-on-write policy.
*/
intrusive_ptr<storage_t> storage;
/**
* Make a private copy of the current value (if necessary) so it can
* subsequently be modified.
*/
// Copy-on-write helper: if this value's storage is shared with at least
// one other value_t (refcount > 1), clone the storage object so that a
// subsequent mutation does not affect the other holders.  No-op when
// storage is absent or exclusively owned.
void _dup() {
if (storage && storage->refc > 1)
storage = new storage_t(*storage.get());
}
/**
* Because boolean "true" and "false" are so common, a pair of static
* references are kept to prevent the creation of throwaway storage_t
* objects just to represent these two common values.
*/
static intrusive_ptr<storage_t> true_value;
static intrusive_ptr<storage_t> false_value;
public:
static void initialize();
static void shutdown();
public:
/**
* Constructors. value_t objects may be constructed from almost any
* value type that they can contain, including variations on those
* types (such as long, unsigned long, etc). The ordering of the
* methods here reflects the ordering of the constants in type_t
* above.
*
* One constructor of special note is that taking a string or
* character pointer as an argument. Because value_t("$100") is
* interpreted as a commoditized amount, the form value_t("$100",
* true) is required to represent the literal string "$100", and not
* the amount "one hundred dollars".
*/
value_t() {
TRACE_CTOR(value_t, "");
}
value_t(const bool val) {
set_boolean(val);
TRACE_CTOR(value_t, "const bool");
}
value_t(const datetime_t& val) {
set_datetime(val);
TRACE_CTOR(value_t, "const datetime_t&");
}
value_t(const date_t& val) {
set_date(val);
TRACE_CTOR(value_t, "const date_t&");
}
value_t(const long val) {
set_long(val);
TRACE_CTOR(value_t, "const long");
}
value_t(const unsigned long val) {
set_amount(val);
TRACE_CTOR(value_t, "const unsigned long");
}
value_t(const double val) {
set_amount(val);
TRACE_CTOR(value_t, "const double");
}
value_t(const amount_t& val) {
set_amount(val);
TRACE_CTOR(value_t, "const amount_t&");
}
value_t(const balance_t& val) {
set_balance(val);
TRACE_CTOR(value_t, "const balance_t&");
}
value_t(const mask_t& val) {
set_mask(val);
TRACE_CTOR(value_t, "const mask_t&");
}
explicit value_t(const string& val, bool literal = false) {
if (literal)
set_string(val);
else
set_amount(amount_t(val));
TRACE_CTOR(value_t, "const string&, bool");
}
explicit value_t(const char * val, bool literal = false) {
if (literal)
set_string(val);
else
set_amount(amount_t(val));
TRACE_CTOR(value_t, "const char *");
}
value_t(const sequence_t& val) {
set_sequence(val);
TRACE_CTOR(value_t, "const sequence_t&");
}
explicit value_t(scope_t * item) {
set_scope(item);
TRACE_CTOR(value_t, "scope_t *");
}
#if 0
template <typename T>
explicit value_t(T& item) {
set_any(item);
TRACE_CTOR(value_t, "T&");
}
#endif
/**
* Destructor. This does not do anything, because the intrusive_ptr
* that refers to our storage object will decrease its reference
* count itself upon destruction.
*/
~value_t() {
TRACE_DTOR(value_t);
}
/**
* Assignment and copy operators. Values are cheaply copied by
* simply creating another reference to the other value's storage
* object. A true copy is only ever made prior to modification.
*/
value_t(const value_t& val) {
*this = val;
TRACE_CTOR(value_t, "copy");
}
// Cheap assignment: simply share the other value's storage (the
// intrusive_ptr adjusts reference counts).  A true copy of the data is
// only made later, on modification (see _dup).  Self-assignment and
// already-shared storage are detected and skipped.
value_t& operator=(const value_t& val) {
if (! (this == &val || storage == val.storage))
storage = val.storage;
return *this;
}
/**
* Comparison operators. Values can be compared to other values
*/
bool is_equal_to(const value_t& val) const;
bool is_less_than(const value_t& val) const;
bool is_greater_than(const value_t& val) const;
template <typename T>
bool operator==(const T& amt) const {
return is_equal_to(amt);
}
template <typename T>
bool operator<(const T& amt) const {
return is_less_than(amt);
}
template <typename T>
bool operator>(const T& amt) const {
return is_greater_than(amt);
}
/**
* Binary arithmetic operators.
*
* add(amount_t, optional<amount_t>) allows for the possibility of
* adding both an amount and its cost in a single operation.
* Otherwise, there is no way to separately represent the "cost"
* part of an amount addition statement.
*/
value_t& operator+=(const value_t& val);
value_t& operator-=(const value_t& val);
value_t& operator*=(const value_t& val);
value_t& operator/=(const value_t& val);
/**
* Unary arithmetic operators.
*/
value_t negated() const {
value_t temp = *this;
temp.in_place_negate();
return temp;
}
void in_place_negate(); // exists for efficiency's sake
void in_place_not(); // exists for efficiency's sake
value_t operator-() const {
return negated();
}
value_t abs() const;
value_t rounded() const {
value_t temp(*this);
temp.in_place_round();
return temp;
}
void in_place_round();
value_t roundto(int places) const {
value_t temp(*this);
temp.in_place_roundto(places);
return temp;
}
void in_place_roundto(int places);
value_t truncated() const {
value_t temp(*this);
temp.in_place_truncate();
return temp;
}
void in_place_truncate();
value_t floored() const {
value_t temp(*this);
temp.in_place_floor();
return temp;
}
void in_place_floor();
value_t ceilinged() const {
value_t temp(*this);
temp.in_place_ceiling();
return temp;
}
void in_place_ceiling();
value_t unrounded() const {
value_t temp(*this);
temp.in_place_unround();
return temp;
}
void in_place_unround();
value_t reduced() const {
value_t temp(*this);
temp.in_place_reduce();
return temp;
}
void in_place_reduce(); // exists for efficiency's sake
value_t unreduced() const {
value_t temp(*this);
temp.in_place_unreduce();
return temp;
}
void in_place_unreduce(); // exists for efficiency's sake
// Return the "market value" of a given value at a specific time.
value_t value(const datetime_t& moment = datetime_t(),
const commodity_t * in_terms_of = NULL) const;
value_t exchange_commodities(const std::string& commodities,
const bool add_prices = false,
const datetime_t& moment = datetime_t());
/**
* Truth tests.
*/
operator bool() const;
bool is_nonzero() const {
return ! is_zero();
}
bool is_realzero() const;
bool is_zero() const;
/**
 * Report whether this value holds no storage at all.  The VERIFY calls
 * assert the invariant that live storage is never of type VOID, while a
 * missing storage object always reports type VOID (see type()).
 */
bool is_null() const {
  if (storage) {
    VERIFY(! is_type(VOID));
    return false;
  }
  VERIFY(is_type(VOID));
  return true;
}
type_t type() const {
return storage ? storage->type : VOID;
}
bool is_type(type_t _type) const {
return type() == _type;
}
private:
void set_type(type_t new_type);
public:
/**
* Data manipulation methods. A value object may be truth tested for the
* existence of every type it can contain:
*
* is_boolean()
* is_long()
* is_datetime()
* is_date()
* is_amount()
* is_balance()
* is_string()
* is_mask()
* is_sequence()
* is_any()
*
* There are corresponding as_*() methods that represent a value as a
* reference to its underlying type. For example, as_long() returns a
* reference to a "const long".
*
* There are also as_*_lval() methods, which represent the underlying data
* as a reference to a non-const type. The difference here is that an
* _lval() call causes the underlying data to be fully copied before the
* resulting reference is returned.
*
* Lastly, there are corresponding set_*(data) methods for directly
* assigning data of a particular type, rather than using the regular
* assignment operator (whose implementation simply calls the various set_
* methods).
*/
bool is_boolean() const {
return is_type(BOOLEAN);
}
bool& as_boolean_lval() {
VERIFY(is_boolean());
_dup();
return boost::get<bool>(storage->data);
}
const bool& as_boolean() const {
VERIFY(is_boolean());
return boost::get<bool>(storage->data);
}
// BOOLEAN values share two static storage objects (true_value and
// false_value) rather than allocating fresh storage per boolean.
// NOTE(review): set_type(BOOLEAN) prepares storage that is immediately
// replaced by the shared object on the next line -- confirm set_type
// has no other side effect this relies on.
void set_boolean(const bool val) {
set_type(BOOLEAN);
storage = val ? true_value : false_value;
}
bool is_datetime() const {
return is_type(DATETIME);
}
datetime_t& as_datetime_lval() {
VERIFY(is_datetime());
_dup();
return boost::get<datetime_t>(storage->data);
}
const datetime_t& as_datetime() const {
VERIFY(is_datetime());
return boost::get<datetime_t>(storage->data);
}
void set_datetime(const datetime_t& val) {
set_type(DATETIME);
storage->data = val;
}
bool is_date() const {
return is_type(DATE);
}
date_t& as_date_lval() {
VERIFY(is_date());
_dup();
return boost::get<date_t>(storage->data);
}
const date_t& as_date() const {
VERIFY(is_date());
return boost::get<date_t>(storage->data);
}
void set_date(const date_t& val) {
set_type(DATE);
storage->data = val;
}
bool is_long() const {
return is_type(INTEGER);
}
long& as_long_lval() {
VERIFY(is_long());
_dup();
return boost::get<long>(storage->data);
}
const long& as_long() const {
VERIFY(is_long());
return boost::get<long>(storage->data);
}
void set_long(const long val) {
set_type(INTEGER);
storage->data = val;
}
bool is_amount() const {
return is_type(AMOUNT);
}
amount_t& as_amount_lval() {
VERIFY(is_amount());
_dup();
return boost::get<amount_t>(storage->data);
}
const amount_t& as_amount() const {
VERIFY(is_amount());
return boost::get<amount_t>(storage->data);
}
void set_amount(const amount_t& val) {
VERIFY(val.valid());
set_type(AMOUNT);
storage->data = val;
}
bool is_balance() const {
return is_type(BALANCE);
}
balance_t& as_balance_lval() {
VERIFY(is_balance());
_dup();
return *boost::get<balance_t *>(storage->data);
}
const balance_t& as_balance() const {
VERIFY(is_balance());
return *boost::get<balance_t *>(storage->data);
}
void set_balance(const balance_t& val) {
VERIFY(val.valid());
set_type(BALANCE);
storage->data = new balance_t(val);
}
bool is_string() const {
return is_type(STRING);
}
string& as_string_lval() {
VERIFY(is_string());
_dup();
return boost::get<string>(storage->data);
}
const string& as_string() const {
VERIFY(is_string());
return boost::get<string>(storage->data);
}
void set_string(const string& val = "") {
set_type(STRING);
storage->data = val;
VERIFY(boost::get<string>(storage->data) == val);
}
void set_string(const char * val = "") {
set_type(STRING);
storage->data = string(val);
VERIFY(boost::get<string>(storage->data) == val);
}
bool is_mask() const {
return is_type(MASK);
}
mask_t& as_mask_lval() {
VERIFY(is_mask());
_dup();
VERIFY(boost::get<mask_t>(storage->data).valid());
return boost::get<mask_t>(storage->data);
}
const mask_t& as_mask() const {
VERIFY(is_mask());
VERIFY(boost::get<mask_t>(storage->data).valid());
return boost::get<mask_t>(storage->data);
}
void set_mask(const string& val) {
set_type(MASK);
storage->data = mask_t(val);
}
void set_mask(const mask_t& val) {
set_type(MASK);
storage->data = val;
}
bool is_sequence() const {
return is_type(SEQUENCE);
}
sequence_t& as_sequence_lval() {
VERIFY(is_sequence());
_dup();
return *boost::get<sequence_t *>(storage->data);
}
const sequence_t& as_sequence() const {
VERIFY(is_sequence());
return *boost::get<sequence_t *>(storage->data);
}
void set_sequence(const sequence_t& val) {
set_type(SEQUENCE);
storage->data = new sequence_t(val);
}
/**
* Dealing with scope pointers.
*/
bool is_scope() const {
return is_type(SCOPE);
}
scope_t * as_scope() const {
VERIFY(is_scope());
return boost::get<scope_t *>(storage->data);
}
void set_scope(scope_t * val) {
set_type(SCOPE);
storage->data = val;
}
/**
* Dealing with any type at all is bit involved because we actually
* deal with typed object. For example, if you call as_any it returns
* a boost::any object, but if you use as_any<type_t>, then it returns
* a type_t by value.
*/
bool is_any() const {
return is_type(ANY);
}
template <typename T>
bool is_any() const {
return (is_type(ANY) &&
boost::get<boost::any>(storage->data).type() == typeid(T));
}
boost::any& as_any_lval() {
VERIFY(is_any());
_dup();
return boost::get<boost::any>(storage->data);
}
template <typename T>
T& as_any_lval() {
return any_cast<T&>(as_any_lval());
}
const boost::any& as_any() const {
VERIFY(is_any());
return boost::get<boost::any>(storage->data);
}
template <typename T>
const T& as_any() const {
return any_cast<const T&>(as_any());
}
void set_any(const boost::any& val) {
set_type(ANY);
storage->data = val;
}
template <typename T>
void set_any(T& val) {
set_type(ANY);
storage->data = boost::any(val);
}
/**
* Data conversion methods. These methods convert a value object to
* its underlying type, where possible. If not possible, an
* exception is thrown.
*/
bool to_boolean() const;
int to_int() const;
long to_long() const;
std::size_t to_size_t() const { return static_cast<std::size_t>(to_long()); }
datetime_t to_datetime() const;
date_t to_date() const;
amount_t to_amount() const;
balance_t to_balance() const;
string to_string() const;
mask_t to_mask() const;
sequence_t to_sequence() const;
/**
* Dynamic typing conversion methods.
*
* `cast(type_t)' returns a new value whose type has been cast to
* the given type, but whose value is based on the original value.
* For example, the uncommoditized AMOUNT "100.00" could be cast to
* an INTEGER value. If a cast would lose information or is not
* meaningful, an exception is thrown.
*
* `simplify()' is an automatic cast to the simplest type that can
* still represent the original value.
*
* There are also "in-place" versions of these two methods:
* in_place_cast
* in_place_simplify
*/
value_t casted(type_t cast_type) const {
value_t temp(*this);
temp.in_place_cast(cast_type);
return temp;
}
void in_place_cast(type_t cast_type);
value_t simplified() const {
value_t temp = *this;
temp.in_place_simplify();
return temp;
}
void in_place_simplify();
value_t number() const;
/**
* Annotated commodity methods.
*/
void annotate(const annotation_t& details);
bool has_annotation() const;
annotation_t& annotation();
const annotation_t& annotation() const {
return const_cast<value_t&>(*this).annotation();
}
value_t strip_annotations(const keep_details_t& what_to_keep) const;
/**
* Collection-style access methods for SEQUENCE values.
*/
// Index into a SEQUENCE value; for any non-sequence value only index 0
// is meaningful and yields the value itself.  An out-of-contract index
// trips the assert in debug builds; in release builds a shared static
// null value is returned instead -- callers must not depend on that
// fallback (and must not mutate it, since it is shared).
value_t& operator[](const std::size_t index) {
VERIFY(! is_null());
if (is_sequence())
return as_sequence_lval()[index];
else if (index == 0)
return *this;
assert(false);
static value_t null;
return null;
}
// Const counterpart of the subscript operator above; same fallback
// caveats apply.
const value_t& operator[](const std::size_t index) const {
VERIFY(! is_null());
if (is_sequence())
return as_sequence()[index];
else if (index == 0)
return *this;
assert(false);
static value_t null;
return null;
}
void push_front(const value_t& val) {
if (is_null())
*this = sequence_t();
if (! is_sequence())
in_place_cast(SEQUENCE);
as_sequence_lval().push_front(new value_t(val));
}
void push_back(const value_t& val) {
if (is_null())
*this = sequence_t();
if (! is_sequence())
in_place_cast(SEQUENCE);
as_sequence_lval().push_back(new value_t(val));
}
// Remove the "last element" of this value.  For a scalar (non-sequence)
// value this nulls the value entirely by dropping its storage.  For a
// sequence it pops the final element, then collapses: an emptied
// sequence becomes null, and a one-element sequence is replaced by that
// lone element (so size() never reports a sequence of length <= 1 after
// this call).
void pop_back() {
VERIFY(! is_null());
if (! is_sequence()) {
#if BOOST_VERSION >= 103700
storage.reset();
#else
storage = intrusive_ptr<storage_t>();
#endif
} else {
as_sequence_lval().pop_back();
const sequence_t& seq(as_sequence());
std::size_t new_size = seq.size();
if (new_size == 0) {
#if BOOST_VERSION >= 103700
storage.reset();
#else
storage = intrusive_ptr<storage_t>();
#endif
}
else if (new_size == 1) {
// Collapse a singleton sequence into its only element.
*this = seq.front();
}
}
}
sequence_t::iterator begin() {
VERIFY(is_sequence());
return as_sequence_lval().begin();
}
sequence_t::iterator end() {
VERIFY(is_sequence());
return as_sequence_lval().end();
}
sequence_t::const_iterator begin() const {
VERIFY(is_sequence());
return as_sequence().begin();
}
sequence_t::const_iterator end() const {
VERIFY(is_sequence());
return as_sequence().end();
}
/**
 * Element count: 0 for a null value, the sequence length for a
 * SEQUENCE, and 1 for every other (scalar) type.
 */
std::size_t size() const {
  if (is_null())
    return 0;
  return is_sequence() ? as_sequence().size() : 1;
}
bool empty() const {
return size() == 0;
}
/**
* Informational methods.
*/
string label(optional<type_t> the_type = none) const;
/**
* Printing methods.
*/
void print(std::ostream& out,
const int first_width = -1,
const int latter_width = -1,
const uint_least8_t flags = AMOUNT_PRINT_NO_FLAGS) const;
void dump(std::ostream& out, const bool relaxed = true) const;
/**
* Debugging methods.
*/
bool valid() const;
};
#define NULL_VALUE (value_t())
/**
 * Build a value_t holding the literal string `str'.  The second
 * constructor argument suppresses interpretation of the text as a
 * commoditized amount (e.g. "$100").
 */
inline value_t string_value(const string& str = "") {
  return value_t(str, /* literal= */ true);
}
#define VALUE_OR_ZERO(val) ((val).is_null() ? value_t(0L) : (val))
#define SIMPLIFIED_VALUE_OR_ZERO(val) \
((val).is_null() ? value_t(0L) : (val).simplified())
/**
 * Build a MASK-typed value from a regular-expression string.
 */
inline value_t mask_value(const string& str) {
  mask_t mask(str);
  return value_t(mask);
}
// Stream insertion: delegate to value_t::print with its default widths
// and flags.
inline std::ostream& operator<<(std::ostream& out, const value_t& val) {
val.print(out);
return out;
}
// Render `val' into a fixed-width (20/20) string, typically used to
// provide context in error messages.
// NOTE(review): print()'s fourth parameter is a uint_least8_t flag
// word, but `true' is passed here -- the implicit bool conversion
// yields flag value 1; confirm that 1 is the intended AMOUNT_PRINT
// flag rather than AMOUNT_PRINT_NO_FLAGS.
inline string value_context(const value_t& val) {
std::ostringstream buf;
val.print(buf, 20, 20, true);
return buf.str();
}
/**
 * Wrap a scope pointer in a SCOPE-typed value (the value_t(scope_t *)
 * constructor is explicit, hence this helper).
 */
inline value_t scope_value(scope_t * val) {
  value_t result(val);
  return result;
}
/**
 * Accumulate `rhs' into `lhs': a null lhs is simply overwritten with
 * rhs, otherwise rhs is added in place.  Returns lhs for chaining.
 */
template <typename T>
inline value_t& add_or_set_value(value_t& lhs, const T& rhs) {
  if (! lhs.is_null())
    lhs += rhs;
  else
    lhs = rhs;
  return lhs;
}
// Pairs a sort key with its direction: when `inverted' is true, the
// comparison for this key is reversed (used by sort_value_is_less_than
// below).
struct sort_value_t
{
bool inverted;
value_t value;
sort_value_t() : inverted(false) {}
};
bool sort_value_is_less_than(const std::list<sort_value_t>& left_values,
const std::list<sort_value_t>& right_values);
void put_value(property_tree::ptree& pt, const value_t& value);
} // namespace ledger
#endif // _VALUE_H
| {
"content_hash": "10b0b71824921a4b418a089c3702f60a",
"timestamp": "",
"source": "github",
"line_count": 980,
"max_line_length": 80,
"avg_line_length": 26.220408163265308,
"alnum_prop": 0.5994707347447074,
"repo_name": "jwakely/ledger",
"id": "810d34f990ccb83c6b8ac07e02f87839cba42587",
"size": "27273",
"binary": false,
"copies": "1",
"ref": "refs/heads/next",
"path": "src/value.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "111"
},
{
"name": "C#",
"bytes": "11080"
},
{
"name": "C++",
"bytes": "1458826"
},
{
"name": "CMake",
"bytes": "18983"
},
{
"name": "CSS",
"bytes": "1344"
},
{
"name": "Emacs Lisp",
"bytes": "215073"
},
{
"name": "JavaScript",
"bytes": "7969"
},
{
"name": "Makefile",
"bytes": "1786"
},
{
"name": "Nix",
"bytes": "1162"
},
{
"name": "Perl",
"bytes": "69176"
},
{
"name": "Python",
"bytes": "207083"
},
{
"name": "Shell",
"bytes": "13684"
},
{
"name": "XSLT",
"bytes": "4092"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8" ?>
<registrationreport format="activity" regid="myreg27" instanceid="0">
<activity id="articulate_rise">
<title>ACE 19 - Case Finances: What Arbitrators Need to Know</title>
<attempts>1</attempts>
<complete>incomplete</complete>
<success>passed</success>
<timeacrossattempts>0000:15:41.98</timeacrossattempts>
<time></time>
<score>43</score>
<children>
<activity id="i1">
<title>ACE 19 - Case Finances: What Arbitrators Need to Know</title>
<attempts>2</attempts>
<complete>complete</complete>
<success>passed</success>
<timeacrossattempts>0000:15:41.98</timeacrossattempts>
<time>0000:15:41.98</time>
<score>43</score>
<children></children>
</activity>
</children>
</activity>
</registrationreport>
| {
"content_hash": "7a28200e2dc6ecf9666c44b93703bdcc",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 76,
"avg_line_length": 35.541666666666664,
"alnum_prop": 0.6459554513481829,
"repo_name": "blueskybroadcast/scorm_cloud_client",
"id": "bc589aa54a273293aa6b04db7b22666171b398b5",
"size": "853",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/support/fixtures/registration/results/activity.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "26188"
},
{
"name": "Shell",
"bytes": "115"
}
],
"symlink_target": ""
} |
module Gifts::Grit
class Tag
# Monkey-patch: keep Grit's original #message accessor under an alias
# and pass its result through Gifts::GritExt.encode! so the returned
# tag message has been re-encoded.
alias_method :old_message, :message
def message
Gifts::GritExt.encode! old_message
end
end
end
| {
"content_hash": "65700344669488011148181708435526",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 40,
"avg_line_length": 14.8,
"alnum_prop": 0.6621621621621622,
"repo_name": "hiroponz/gifts",
"id": "805dae8ddc156ed820296a33449a01eedda876fa",
"size": "148",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "lib/gifts/grit_ext/tag.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "228132"
}
],
"symlink_target": ""
} |
import { Component, ChangeDetectionStrategy } from '@angular/core';
/**
 * Static host element for ngx notifications: renders a single
 * `.notification-container` div (presumably the mount point into which
 * notification messages are attached elsewhere -- confirm against the
 * notification service).  OnPush change detection is safe because the
 * component has no inputs or internal state.
 */
@Component({
exportAs: 'ngxNotificationContainer',
selector: 'ngx-notification-container',
template: ' <div class="notification-container"></div> ',
host: { class: 'ngx-notification-container' },
changeDetection: ChangeDetectionStrategy.OnPush
})
export class NotificationContainerComponent {}
| {
"content_hash": "cdecb0b0ef9ae44a9ddff925eb9dcb3d",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 67,
"avg_line_length": 37.3,
"alnum_prop": 0.7613941018766756,
"repo_name": "swimlane/ngx-ui",
"id": "5a902af67403961c33203f5d023b8fb2855cef97",
"size": "373",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "projects/swimlane/ngx-ui/src/lib/components/notification/notification-container.component.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "45171"
},
{
"name": "HTML",
"bytes": "754706"
},
{
"name": "JavaScript",
"bytes": "13785"
},
{
"name": "SCSS",
"bytes": "242028"
},
{
"name": "Shell",
"bytes": "136"
},
{
"name": "TypeScript",
"bytes": "1087353"
}
],
"symlink_target": ""
} |
#include <string.h>
#include "hclib.h"
#include "hclib-rt.h"
#include "hclib-task.h"
#include "hclib-async-struct.h"
#include "hclib-finish.h"
#include "hclib-module.h"
#include "hclib-fptr-list.h"
#ifdef __cplusplus
extern "C" {
#endif
// A list of allocation functions registered for specific locale types
static hclib_fptr_list_t *alloc_registrations = NULL;
static hclib_fptr_list_t *realloc_registrations = NULL;
static hclib_fptr_list_t *free_registrations = NULL;
static hclib_fptr_list_t *memset_registrations = NULL;
static hclib_fptr_list_t *copy_registrations = NULL;
// Register a particular allocation function for a particular locale type.
// All registrations below delegate to hclib_register_func; alloc,
// realloc, free and memset always register at MAY_USE priority, while
// copy registration lets the module choose (MAY_USE vs MUST_USE, used
// later to arbitrate between the source and destination locale's copy
// implementations in hclib_async_copy).
void hclib_register_alloc_func(int locale_id,
hclib_module_alloc_impl_func_type func) {
hclib_register_func(&alloc_registrations, locale_id, func, MAY_USE);
}
// Register a particular reallocation function for a particular locale type
void hclib_register_realloc_func(int locale_id,
hclib_module_realloc_impl_func_type func) {
hclib_register_func(&realloc_registrations, locale_id, func, MAY_USE);
}
// Register a particular free-ing function for a particular locale type
void hclib_register_free_func(int locale_id,
hclib_module_free_impl_func_type func) {
hclib_register_func(&free_registrations, locale_id, func, MAY_USE);
}
// Register a particular memset function for a particular locale type
void hclib_register_memset_func(int locale_id,
hclib_module_memset_impl_func_type func) {
hclib_register_func(&memset_registrations, locale_id, func, MAY_USE);
}
// Register a particular copy function for a particular locale type, at
// the module-chosen priority.
void hclib_register_copy_func(int locale_id,
hclib_module_copy_impl_func_type func, int priority) {
hclib_register_func(&copy_registrations, locale_id, func, priority);
}
// Closure for an asynchronous allocation: the request size, the target
// locale, the promise satisfied with the resulting pointer, and the
// locale-type-specific allocator callback.  Heap-allocated by
// hclib_allocate_at and freed by allocate_kernel.
typedef struct _malloc_struct {
size_t nbytes;
hclib_locale_t *locale;
hclib_promise_t *promise;
hclib_module_alloc_impl_func_type cb;
} malloc_struct;
/*
 * Task body for hclib_allocate_at: run the locale-specific allocator,
 * publish the resulting pointer through the promise, then release the
 * heap-allocated argument struct.
 */
static void allocate_kernel(void *arg) {
    malloc_struct *req = (malloc_struct *)arg;
    void *result = (req->cb)(req->nbytes, req->locale);
    hclib_promise_put(req->promise, result);
    free(req);
}
/*
 * Asynchronously allocate nbytes at the given locale.  The work runs as
 * a task spawned at that locale using the allocator registered for the
 * locale's type; the returned future is satisfied with the pointer.
 *
 * The promise is captured in a local before spawning: once hclib_async
 * runs, the kernel may free the request struct at any time.
 */
hclib_future_t *hclib_allocate_at(size_t nbytes, hclib_locale_t *locale) {
    assert(hclib_has_func_for(alloc_registrations, locale->type));

    hclib_promise_t *promise = hclib_promise_create();
    malloc_struct *req = (malloc_struct *)malloc(sizeof(malloc_struct));
    req->cb = hclib_get_func_for(alloc_registrations, locale->type);
    req->locale = locale;
    req->nbytes = nbytes;
    req->promise = promise;

    hclib_async(allocate_kernel, req, NULL, 0, locale);
    return hclib_get_future_for_promise(promise);
}
// Closure for an asynchronous reallocation: the existing pointer, the
// new size, the target locale, the promise satisfied with the resulting
// pointer, and the locale-type-specific realloc callback.
typedef struct _realloc_struct {
void *ptr;
size_t nbytes;
hclib_locale_t *locale;
hclib_promise_t *promise;
hclib_module_realloc_impl_func_type cb;
} realloc_struct;
/*
 * Task body for hclib_reallocate_at: invoke the locale-specific realloc
 * routine, publish the (possibly moved) pointer through the promise,
 * then release the argument struct.
 */
static void reallocate_kernel(void *arg) {
    realloc_struct *req = (realloc_struct *)arg;
    void *result = (req->cb)(req->ptr, req->nbytes, req->locale);
    hclib_promise_put(req->promise, result);
    free(req);
}
/*
 * Asynchronously resize an allocation at the given locale, using the
 * realloc routine registered for the locale's type.  The returned
 * future is satisfied with the new pointer.
 *
 * As in hclib_allocate_at, the promise is held in a local: after
 * hclib_async the kernel may free the request struct concurrently.
 */
hclib_future_t *hclib_reallocate_at(void *ptr, size_t new_nbytes,
        hclib_locale_t *locale) {
    assert(hclib_has_func_for(realloc_registrations, locale->type));

    hclib_promise_t *promise = hclib_promise_create();
    realloc_struct *req = (realloc_struct *)malloc(sizeof(realloc_struct));
    req->cb = hclib_get_func_for(realloc_registrations, locale->type);
    req->locale = locale;
    req->nbytes = new_nbytes;
    req->promise = promise;
    req->ptr = ptr;

    hclib_async(reallocate_kernel, req, NULL, 0, locale);
    return hclib_get_future_for_promise(promise);
}
// Closure for an asynchronous memset: target pointer, byte count, the
// fill pattern, the locale, a completion promise (satisfied with NULL),
// and the locale-type-specific memset callback.
typedef struct _memset_struct {
void *ptr;
size_t nbytes;
int pattern;
hclib_locale_t *locale;
hclib_promise_t *promise;
hclib_module_memset_impl_func_type cb;
} memset_struct;
/*
 * Task body for hclib_memset_at: run the locale-specific memset, then
 * signal completion (NULL payload) and release the argument struct.
 */
static void memset_kernel(void *arg) {
    memset_struct *req = (memset_struct *)arg;
    (req->cb)(req->ptr, req->pattern, req->nbytes, req->locale);
    hclib_promise_put(req->promise, NULL);
    free(req);
}
/*
 * Asynchronously fill nbytes at ptr with the given byte pattern, using
 * the memset routine registered for the locale's type.  The returned
 * future is satisfied (with NULL) when the fill completes.
 */
hclib_future_t *hclib_memset_at(void *ptr, int pattern, size_t nbytes,
        hclib_locale_t *locale) {
    assert(hclib_has_func_for(memset_registrations, locale->type));

    hclib_promise_t *promise = hclib_promise_create();
    memset_struct *req = (memset_struct *)malloc(sizeof(memset_struct));
    req->cb = hclib_get_func_for(memset_registrations, locale->type);
    req->locale = locale;
    req->nbytes = nbytes;
    req->pattern = pattern;
    req->promise = promise;
    req->ptr = ptr;

    hclib_async(memset_kernel, req, NULL, 0, locale);
    return hclib_get_future_for_promise(promise);
}
// TODO at some point it may be useful to have this as a future too
/*
 * Synchronously free ptr at the given locale using the free routine
 * registered for the locale's type.
 * TODO at some point it may be useful to expose this as a
 * future-returning asynchronous operation too.
 */
void hclib_free_at(void *ptr, hclib_locale_t *locale) {
    assert(hclib_has_func_for(free_registrations, locale->type));
    hclib_module_free_impl_func_type release =
            (hclib_module_free_impl_func_type)hclib_get_func_for(
                free_registrations, locale->type);
    release(ptr, locale);
}
// Closure for an asynchronous copy between locales.  Exactly one of
// `src' / `src_fut' is used (see copy_kernel); `cb' is the copy
// callback chosen by hclib_async_copy's priority arbitration.
typedef struct _copy_struct {
hclib_locale_t *dst_locale;
void *dst;
hclib_locale_t *src_locale;
/*
* The source of a copy can be specified as either a raw pointer or a future
* that returns a raw pointer. We assume that if it is the latter, this
* future must be satisfied when the copy starts (e.g. by placing it in the
* list of futures the async copy is dependent on).
*/
void *src;
hclib_future_t *src_fut;
size_t nbytes;
hclib_promise_t *promise;
hclib_module_copy_impl_func_type cb;
} copy_struct;
/*
 * Task body for hclib_async_copy: resolve the copy source (either a raw
 * pointer or the payload of an already-satisfied future -- exactly one
 * of the two must be set), invoke the chosen copy callback, then signal
 * completion and release the argument struct.
 */
static void copy_kernel(void *arg) {
    copy_struct *cs = (copy_struct *)arg;

    // Exactly one of the two source forms must be in use.
    assert((cs->src != NULL) != (cs->src_fut != NULL));

    void *src_ptr = cs->src ? cs->src : hclib_future_get(cs->src_fut);
    (cs->cb)(cs->dst_locale, cs->dst, cs->src_locale, src_ptr, cs->nbytes);

    hclib_promise_put(cs->promise, NULL);
    free(cs);
}
// Asynchronously copy nbytes from src (at src_locale) to dst (at
// dst_locale), after the given futures are satisfied.  The copy
// callback is chosen by arbitrating between the callbacks registered
// for the two locale types:
//   - if only one side has a callback, use it;
//   - if both sides share the same callback, use it;
//   - otherwise a MUST_USE registration wins (both being MUST_USE is a
//     contract violation), defaulting to the destination's callback.
// Passing HCLIB_ASYNC_COPY_USE_FUTURE_AS_SRC as `src' means the single
// dependency future's payload is used as the source pointer instead.
hclib_future_t *hclib_async_copy(hclib_locale_t *dst_locale, void *dst,
hclib_locale_t *src_locale, void *src, size_t nbytes,
hclib_future_t **futures, const int nfutures) {
hclib_promise_t *promise = hclib_promise_create();
hclib_module_copy_impl_func_type dst_cb = hclib_get_func_for(
copy_registrations, dst_locale->type);
hclib_module_copy_impl_func_type src_cb = hclib_get_func_for(
copy_registrations, src_locale->type);
int dst_priority = hclib_get_priority_for(copy_registrations,
dst_locale->type);
int src_priority = hclib_get_priority_for(copy_registrations,
src_locale->type);
hclib_module_copy_impl_func_type copy_cb;
// At least one side must provide a copy implementation.
assert(dst_cb != NULL || src_cb != NULL);
if (dst_cb == NULL) {
copy_cb = src_cb;
} else if (src_cb == NULL) {
copy_cb = dst_cb;
} else {
if (dst_cb == src_cb) {
copy_cb = dst_cb;
} else {
// Must not both be MUST_USE
assert(!(dst_priority == MUST_USE && src_priority == MUST_USE));
if (src_priority == MUST_USE) copy_cb = src_cb;
else copy_cb = dst_cb;
}
}
copy_struct *cs = (copy_struct *)malloc(sizeof(copy_struct));
cs->dst_locale = dst_locale;
cs->dst = dst;
cs->src_locale = src_locale;
if (src == HCLIB_ASYNC_COPY_USE_FUTURE_AS_SRC) {
// Source comes from the lone dependency future's payload; the
// future itself gates the copy task's start.
assert(nfutures == 1);
cs->src = NULL;
cs->src_fut = futures[0];
} else {
cs->src = src;
cs->src_fut = NULL;
}
cs->nbytes = nbytes;
cs->promise = promise;
cs->cb = copy_cb;
// The copy task is spawned at the destination locale.
hclib_async(copy_kernel, cs, futures, nfutures, dst_locale);
return hclib_get_future_for_promise(promise);
}
| {
"content_hash": "017ed95ed0f3f1a27a0c3bf2c70be6fb",
"timestamp": "",
"source": "github",
"line_count": 241,
"max_line_length": 80,
"avg_line_length": 32.80082987551867,
"alnum_prop": 0.6538899430740038,
"repo_name": "habanero-rice/hcpp",
"id": "8f68aeece7d5db5f24c54e7a7d9d0097a8bf32cb",
"size": "7905",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/hclib-mem.c",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "26226"
},
{
"name": "C",
"bytes": "330261"
},
{
"name": "C++",
"bytes": "255831"
},
{
"name": "Cuda",
"bytes": "10347"
},
{
"name": "Makefile",
"bytes": "7838"
},
{
"name": "Perl",
"bytes": "1748"
},
{
"name": "Shell",
"bytes": "16630"
}
],
"symlink_target": ""
} |
title: The Binomial Theorem Formulas
localeTitle: Формулы биномиальной теоремы
---
## Формулы биномиальной теоремы
Это заглушка. [Помогите нашему сообществу расширить его](https://github.com/freecodecamp/guides/tree/master/src/pages/mathematics/the-binomial-theorem-formulas/index.md) .
[Это краткое руководство по стилю поможет вам добиться принятия вашего запроса на включение изменений (pull request)](https://github.com/freecodecamp/guides/blob/master/README.md).
#### Дополнительная информация: | {
"content_hash": "dac9a8369e7e488a62a8df93491ab26e",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 171,
"avg_line_length": 46,
"alnum_prop": 0.8043478260869565,
"repo_name": "otavioarc/freeCodeCamp",
"id": "b6ba1a8bee57f2ec1f2a3bd082ad6e7276e8293b",
"size": "648",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "guide/russian/mathematics/the-binomial-theorem-formulas/index.md",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "35491"
},
{
"name": "HTML",
"bytes": "17600"
},
{
"name": "JavaScript",
"bytes": "777274"
}
],
"symlink_target": ""
} |
"""Utility to package and upload the USB gadget framework.
"""
import argparse
import hashlib
import io
import os
import zipfile
try:
from urllib.request import Request, urlopen
except ImportError: # For Py2 compatibility
from urllib2 import Request, urlopen
def MakeZip(directory=None, files=None):
    """Build a PyZip archive in memory.

    Args:
        directory: if given, recursively add compiled Python sources from it.
        files: if given, add each file at the archive root (basename only).

    Returns:
        A (bytes, str) tuple of the zip file contents and their MD5 hex digest.
    """
    buffer = io.BytesIO()
    archive = zipfile.PyZipFile(buffer, 'w')
    if directory is not None:
        archive.writepy(directory)
    for path in files if files is not None else ():
        archive.write(path, os.path.basename(path))
    archive.close()

    payload = buffer.getvalue()
    buffer.close()
    return payload, hashlib.md5(payload).hexdigest()
def EncodeBody(filename, buf):
    """Wrap a payload in a single-part multipart/form-data body.

    Args:
        filename: bytes filename reported in the Content-Disposition header.
        buf: bytes payload for the "file" form field.

    Returns:
        The CRLF-delimited body using the fixed boundary "foo".
    """
    disposition = (
        b'Content-Disposition: form-data; name="file"; filename="%s"'
        % filename)
    parts = [
        b'--foo',
        disposition,
        b'Content-Type: application/octet-stream',
        b'',
        buf,
        b'--foo--',
        b'',
    ]
    return b'\r\n'.join(parts)
def UploadZip(content, md5, host):
    """POST the zip package to the target device's /update endpoint.

    Args:
        content: bytes of the zip archive.
        md5: hex digest string used to name the uploaded file.
        host: 'HOST' or 'HOST:PORT' of the target system.
    """
    filename = b'usb_gadget-%s.zip' % md5.encode('utf-8')
    req = Request(url='http://{}/update'.format(host),
                  data=EncodeBody(filename, content))
    req.add_header('Content-Type', 'multipart/form-data; boundary=foo')
    # Close the response explicitly; the original leaked the socket by
    # discarding the object returned by urlopen.
    response = urlopen(req)
    response.close()
def main():
    """Command-line entry point: build the package, then write and/or upload it."""
    parser = argparse.ArgumentParser(
        description='Package (and upload) the USB gadget framework.')
    parser.add_argument(
        '--dir', type=str, metavar='DIR',
        help='package all Python files from DIR')
    parser.add_argument(
        '--zip-file', type=str, metavar='FILE',
        help='save package as FILE')
    parser.add_argument(
        '--hash-file', type=str, metavar='FILE',
        help='save package hash as FILE')
    parser.add_argument(
        '--upload', type=str, metavar='HOST[:PORT]',
        help='upload package to target system')
    parser.add_argument(
        'files', metavar='FILE', type=str, nargs='*',
        help='source files')
    args = parser.parse_args()

    content, md5 = MakeZip(directory=args.dir, files=args.files)
    if args.zip_file:
        with open(args.zip_file, 'wb') as zip_file:
            zip_file.write(content)
    if args.hash_file:
        # The hash is written as text so the target can compare it cheaply.
        with open(args.hash_file, 'w') as hash_file:
            hash_file.write(md5)
    if args.upload:
        UploadZip(content, md5, args.upload)


if __name__ == '__main__':
    main()
| {
"content_hash": "f689a25cae010e34bb627b6dcb1b9c57",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 69,
"avg_line_length": 25.821052631578947,
"alnum_prop": 0.6498165511618427,
"repo_name": "chromium/chromium",
"id": "cfb6efeb16048705e4a7d699fc2a96386d88b65b",
"size": "2616",
"binary": false,
"copies": "7",
"ref": "refs/heads/main",
"path": "tools/usb_gadget/package.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
namespace MVCHoldem.Services.Contracts
{
    using System;
    using System.Collections.Generic;

    using MVCHoldem.Data.Models;

    /// <summary>
    /// Service contract for querying and mutating <see cref="Post"/> entities.
    /// The soft-delete model is visible in the API: <see cref="Delete"/> vs
    /// <see cref="HardDelete"/>, and <see cref="AllWithoutDeleted"/> vs
    /// <see cref="AllIncludingDeleted"/>.
    /// </summary>
    public interface IPostService
    {
        /// <summary>Returns the most recent posts (ordering and count are implementation-defined).</summary>
        IEnumerable<Post> GetMostRecent();

        /// <summary>Returns the post with the given id (presumably null when missing — confirm with implementation).</summary>
        Post GetById(Guid id);

        /// <summary>Returns non-deleted posts, optionally filtered by a search pattern.</summary>
        IEnumerable<Post> AllWithoutDeleted(string searchPattern = "");

        /// <summary>Returns all posts, including soft-deleted ones.</summary>
        IEnumerable<Post> AllIncludingDeleted();

        /// <summary>Creates and persists a new post, returning the created entity.</summary>
        Post AddNewPost(string title, string description, string content, string author);

        /// <summary>Persists changes to an existing post.</summary>
        void Update(Post post);

        /// <summary>Soft-deletes the post (kept in storage, excluded from AllWithoutDeleted).</summary>
        void Delete(Post post);

        /// <summary>Permanently removes the post from storage.</summary>
        void HardDelete(Post post);
    }
}
"content_hash": "734769d1674be50bc8754a73cb98c4dc",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 89,
"avg_line_length": 22.84,
"alnum_prop": 0.6672504378283712,
"repo_name": "gchankov/MVCHoldem",
"id": "6651f67ce401891bfb93dc2ce3c8bbe3887545ac",
"size": "573",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MVCHoldem/MVCHoldem.Services/Contracts/IPostService.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "104"
},
{
"name": "C#",
"bytes": "158396"
},
{
"name": "CSS",
"bytes": "704"
},
{
"name": "JavaScript",
"bytes": "357511"
}
],
"symlink_target": ""
} |
from os.path import join
from gtts import gTTS
from newspaper import Article
def make_an_audio(url, filename, lang=None):
    """Fetch an article and synthesize its text to an MP3 under audio/.

    Args:
        url: article URL to download and parse.
        filename: output file name, created inside the 'audio' directory.
        lang: gTTS language code; defaults to 'en' when None.
    """
    if lang is None:
        lang = 'en'
    article = Article(url)
    article.download()
    article.parse()
    tts = gTTS(text=article.text, lang=lang)
    # with-statement guarantees the file handle is closed even if
    # write_to_fp raises; the original leaked the handle on error.
    with open(join('audio', filename), 'wb') as out:
        tts.write_to_fp(out)
| {
"content_hash": "4527edc1d6b12e6a63108d3aa7a0c060",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 44,
"avg_line_length": 21.352941176470587,
"alnum_prop": 0.6391184573002755,
"repo_name": "Fillll/pockebot",
"id": "4657e4f5fcf9bfa948958fbc98911fb96cc19472",
"size": "380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "audio_actions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1443"
},
{
"name": "Python",
"bytes": "61682"
}
],
"symlink_target": ""
} |
UI bootcamp
| {
"content_hash": "80947de3b69a3345974c829ad37dc94a",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 11,
"avg_line_length": 12,
"alnum_prop": 0.8333333333333334,
"repo_name": "uibootcamp94040/bootcamp1",
"id": "d19ad116e10b4b48115e271475fa92257cf7e42e",
"size": "24",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7465"
},
{
"name": "HTML",
"bytes": "46561"
},
{
"name": "Handlebars",
"bytes": "6126"
},
{
"name": "JavaScript",
"bytes": "641918"
}
],
"symlink_target": ""
} |
require 'spec_helper'
# Feature spec for the guest checkout flow, exercising the CPF (Brazilian
# taxpayer id) fields this extension adds to billing/shipping addresses.
describe "Checkout" do
  # Minimal store fixtures required for a product to be purchasable.
  let!(:country) { create(:country, :states_required => true) }
  let!(:state) { create(:state, :country => country) }
  let!(:shipping_method) { create(:shipping_method) }
  let!(:stock_location) { create(:stock_location) }
  let!(:mug) { create(:product, :name => "RoR Mug") }
  let!(:payment_method) { create(:payment_method) }
  let!(:zone) { create(:zone) }

  context "visitor makes checkout as guest without registration" do
    before(:each) do
      # Ensure the product is in stock so it can be added to the cart.
      stock_location.stock_items.update_all(count_on_hand: 1)
    end

    describe 'errors messages', js: true do
      before(:each) do
        Spree::Config[:ship_address_has_cpf] = true
      end

      it 'has a valid error message regarding CPF field' do
        add_mug_and_checkout
        fill_in_address type: 'bill', cpf: 'invalid cpf', fill_cpf: true
        click_button "Save and Continue"
        page.should have_content 'Bill address CPF is invalid'
        page.should have_content 'Ship address CPF is invalid'
      end
    end

    context "full checkout", js: true do
      before do
        shipping_method.calculator.preferred_amount = 10
        mug.shipping_category = shipping_method.shipping_categories.first
        mug.save!
      end

      # Shared tail of the flow: each example below leaves the session on the
      # address step; this block drives it through delivery, payment and the
      # order-confirmation page.
      after do
        click_button "Save and Continue"
        page.should have_content("DELIVERY")
        click_button "Save and Continue"
        page.should have_content("PAYMENT INFORMATION")
        click_button "Save and Continue"
        page.should have_content("Your order has been processed successfully")
      end

      describe 'ship address cpf' do
        describe 'has cpf' do
          before do
            Spree::Config[:ship_address_has_cpf] = true
          end

          it 'fill in ship address cpf, bill address cpf and checkout' do
            add_mug_and_checkout
            fill_in_address type: 'bill', fill_cpf: true
            uncheck 'Use Billing Address'
            fill_in_address type: 'ship', fill_cpf: true
          end
        end

        describe 'does not have cpf' do
          before do
            Spree::Config[:ship_address_has_cpf] = false
          end

          it 'fill in bill address cpf and checkout' do
            add_mug_and_checkout
            fill_in_address type: 'bill', fill_cpf: true
            uncheck 'Use Billing Address'
            fill_in_address type: 'ship', fill_cpf: false
          end
        end
      end
    end
  end

  # Fills one of the checkout address forms.
  # opts:
  #   :type     - 'bill' (default) or 'ship'; selects the form field prefix.
  #   :cpf      - CPF value to enter; defaults to a valid number.
  #   :fill_cpf - when truthy, the CPF field is filled in at all.
  def fill_in_address(opts = {})
    cpf = opts[:cpf] || "036.142.049-87"
    type = opts[:type] || 'bill'
    # NOTE(review): `||=` on an otherwise-unset local behaves like plain
    # assignment here.
    fill_cpf ||= opts[:fill_cpf]
    address = "order_#{type}_address_attributes"
    fill_in "#{address}_firstname", :with => "Ryan"
    fill_in "#{address}_lastname", :with => "Bigg"
    fill_in "#{address}_address1", :with => "143 Swan Street"
    fill_in "#{address}_city", :with => "Richmond"
    fill_in "#{address}_cpf", :with => cpf if fill_cpf
    select "United States of America", :from => "#{address}_country_id"
    select "Alabama", :from => "#{address}_state_id"
    fill_in "#{address}_zipcode", :with => "12345"
    fill_in "#{address}_phone", :with => "(555) 555-5555"
  end

  # Adds the product to the cart, starts checkout and supplies a guest email.
  def add_mug_and_checkout
    add_mug_to_cart
    click_button "Checkout"
    fill_in "order_email", :with => "ryan@spreecommerce.com"
  end

  # Navigates to the product page and puts the mug in the cart.
  def add_mug_to_cart
    visit spree.root_path
    click_link mug.name
    click_button "add-to-cart-button"
  end
end
"content_hash": "62a82593d855bca9fea16faeb9b29f9e",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 78,
"avg_line_length": 32.12149532710281,
"alnum_prop": 0.6060517893511783,
"repo_name": "tiagoamaro/spree_cpf",
"id": "b214ff407d1f47cfe84729c0a401a81aaaf2a225",
"size": "3437",
"binary": false,
"copies": "1",
"ref": "refs/heads/2-1-stable",
"path": "spec/features/frontend/checkout_spec.rb",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "77"
},
{
"name": "JavaScript",
"bytes": "65"
},
{
"name": "Ruby",
"bytes": "14619"
}
],
"symlink_target": ""
} |
"""Accesses the google.cloud.language.v1beta2 LanguageService API."""
import collections
import json
import os
import pkg_resources
import platform
from google.gax import api_callable
from google.gax import config
from google.gax import path_template
import google.gax
from google.cloud.gapic.language.v1beta2 import enums
from google.cloud.proto.language.v1beta2 import language_service_pb2
# NOTE: this is a GAPIC-generated client; the structure below (settings
# loading, one pre-configured callable per RPC) follows the generator's
# conventions and should not be hand-restructured.
class LanguageServiceClient(object):
    """
    Provides text analysis operations such as sentiment analysis and entity
    recognition.
    """

    SERVICE_ADDRESS = 'language.googleapis.com'
    """The default address of the service."""

    DEFAULT_SERVICE_PORT = 443
    """The default port of the service."""

    # The scopes needed to make gRPC calls to all of the methods defined in
    # this service
    _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', )

    def __init__(self,
                 service_path=SERVICE_ADDRESS,
                 port=DEFAULT_SERVICE_PORT,
                 channel=None,
                 credentials=None,
                 ssl_credentials=None,
                 scopes=None,
                 client_config=None,
                 app_name=None,
                 app_version='',
                 lib_name=None,
                 lib_version='',
                 metrics_headers=()):
        """Constructor.

        Args:
            service_path (string): The domain name of the API remote host.
            port (int): The port on which to connect to the remote host.
            channel (:class:`grpc.Channel`): A ``Channel`` instance through
                which to make calls.
            credentials (object): The authorization credentials to attach to
                requests. These credentials identify this application to the
                service.
            ssl_credentials (:class:`grpc.ChannelCredentials`): A
                ``ChannelCredentials`` instance for use with an SSL-enabled
                channel.
            scopes (list[string]): A list of OAuth2 scopes to attach to requests.
            client_config (dict):
                A dictionary for call options for each method. See
                :func:`google.gax.construct_settings` for the structure of
                this data. Falls back to the default config if not specified
                or the specified config is missing data points.
            app_name (string): The name of the application calling
                the service. Recommended for analytics purposes.
            app_version (string): The version of the application calling
                the service. Recommended for analytics purposes.
            lib_name (string): The API library software used for calling
                the service. (Unless you are writing an API client itself,
                leave this as default.)
            lib_version (string): The API library software version used
                for calling the service. (Unless you are writing an API client
                itself, leave this as default.)
            metrics_headers (dict): A dictionary of values for tracking
                client library metrics. Ultimately serializes to a string
                (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be
                considered private.

        Returns:
            A LanguageServiceClient object.
        """
        # NOTE(review): app_name/app_version are accepted but not read in this
        # body — presumably kept for backward compatibility with older
        # generated clients; confirm against the generator before removing.

        # Unless the calling application specifically requested
        # OAuth scopes, request everything.
        if scopes is None:
            scopes = self._ALL_SCOPES

        # Initialize an empty client config, if none is set.
        if client_config is None:
            client_config = {}

        # Initialize metrics_headers as an ordered dictionary
        # (cuts down on cardinality of the resulting string slightly).
        metrics_headers = collections.OrderedDict(metrics_headers)
        metrics_headers['gl-python'] = platform.python_version()

        # The library may or may not be set, depending on what is
        # calling this client. Newer client libraries set the library name
        # and version.
        if lib_name:
            metrics_headers[lib_name] = lib_version

        # Finally, track the GAPIC package version.
        metrics_headers['gapic'] = pkg_resources.get_distribution(
            'google-cloud-language', ).version

        # Load the configuration defaults.
        default_client_config = json.loads(
            pkg_resources.resource_string(
                __name__, 'language_service_client_config.json').decode())
        defaults = api_callable.construct_settings(
            'google.cloud.language.v1beta2.LanguageService',
            default_client_config,
            client_config,
            config.STATUS_CODE_NAMES,
            metrics_headers=metrics_headers, )
        self.language_service_stub = config.create_stub(
            language_service_pb2.LanguageServiceStub,
            channel=channel,
            service_path=service_path,
            service_port=port,
            credentials=credentials,
            scopes=scopes,
            ssl_credentials=ssl_credentials)

        # One pre-configured callable per RPC; each carries the retry/timeout
        # settings loaded from the client config above.
        self._analyze_sentiment = api_callable.create_api_call(
            self.language_service_stub.AnalyzeSentiment,
            settings=defaults['analyze_sentiment'])
        self._analyze_entities = api_callable.create_api_call(
            self.language_service_stub.AnalyzeEntities,
            settings=defaults['analyze_entities'])
        self._analyze_entity_sentiment = api_callable.create_api_call(
            self.language_service_stub.AnalyzeEntitySentiment,
            settings=defaults['analyze_entity_sentiment'])
        self._analyze_syntax = api_callable.create_api_call(
            self.language_service_stub.AnalyzeSyntax,
            settings=defaults['analyze_syntax'])
        self._annotate_text = api_callable.create_api_call(
            self.language_service_stub.AnnotateText,
            settings=defaults['annotate_text'])

    # Service calls
    def analyze_sentiment(self, document, encoding_type=None, options=None):
        """
        Analyzes the sentiment of the provided text.

        Example:
            >>> from google.cloud.gapic.language.v1beta2 import language_service_client
            >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
            >>> client = language_service_client.LanguageServiceClient()
            >>> document = language_service_pb2.Document()
            >>> response = client.analyze_sentiment(document)

        Args:
            document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
            encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate sentence offsets for the
                sentence sentiment.
            options (:class:`google.gax.CallOptions`): Overrides the default
                settings for this call, e.g, timeout, retries etc.

        Returns:
            A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeSentimentResponse` instance.

        Raises:
            :exc:`google.gax.errors.GaxError` if the RPC is aborted.
            :exc:`ValueError` if the parameters are invalid.
        """
        request = language_service_pb2.AnalyzeSentimentRequest(
            document=document, encoding_type=encoding_type)
        return self._analyze_sentiment(request, options)

    def analyze_entities(self, document, encoding_type=None, options=None):
        """
        Finds named entities (currently proper names and common nouns) in the text
        along with entity types, salience, mentions for each entity, and
        other properties.

        Example:
            >>> from google.cloud.gapic.language.v1beta2 import language_service_client
            >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
            >>> client = language_service_client.LanguageServiceClient()
            >>> document = language_service_pb2.Document()
            >>> response = client.analyze_entities(document)

        Args:
            document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
            encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
            options (:class:`google.gax.CallOptions`): Overrides the default
                settings for this call, e.g, timeout, retries etc.

        Returns:
            A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeEntitiesResponse` instance.

        Raises:
            :exc:`google.gax.errors.GaxError` if the RPC is aborted.
            :exc:`ValueError` if the parameters are invalid.
        """
        request = language_service_pb2.AnalyzeEntitiesRequest(
            document=document, encoding_type=encoding_type)
        return self._analyze_entities(request, options)

    def analyze_entity_sentiment(self,
                                 document,
                                 encoding_type=None,
                                 options=None):
        """
        Finds entities, similar to ``AnalyzeEntities`` in the text and analyzes
        sentiment associated with each entity and its mentions.

        Example:
            >>> from google.cloud.gapic.language.v1beta2 import language_service_client
            >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
            >>> client = language_service_client.LanguageServiceClient()
            >>> document = language_service_pb2.Document()
            >>> response = client.analyze_entity_sentiment(document)

        Args:
            document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
            encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
            options (:class:`google.gax.CallOptions`): Overrides the default
                settings for this call, e.g, timeout, retries etc.

        Returns:
            A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeEntitySentimentResponse` instance.

        Raises:
            :exc:`google.gax.errors.GaxError` if the RPC is aborted.
            :exc:`ValueError` if the parameters are invalid.
        """
        request = language_service_pb2.AnalyzeEntitySentimentRequest(
            document=document, encoding_type=encoding_type)
        return self._analyze_entity_sentiment(request, options)

    def analyze_syntax(self, document, encoding_type=None, options=None):
        """
        Analyzes the syntax of the text and provides sentence boundaries and
        tokenization along with part of speech tags, dependency trees, and other
        properties.

        Example:
            >>> from google.cloud.gapic.language.v1beta2 import language_service_client
            >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
            >>> client = language_service_client.LanguageServiceClient()
            >>> document = language_service_pb2.Document()
            >>> response = client.analyze_syntax(document)

        Args:
            document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
            encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
            options (:class:`google.gax.CallOptions`): Overrides the default
                settings for this call, e.g, timeout, retries etc.

        Returns:
            A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeSyntaxResponse` instance.

        Raises:
            :exc:`google.gax.errors.GaxError` if the RPC is aborted.
            :exc:`ValueError` if the parameters are invalid.
        """
        request = language_service_pb2.AnalyzeSyntaxRequest(
            document=document, encoding_type=encoding_type)
        return self._analyze_syntax(request, options)

    def annotate_text(self,
                      document,
                      features,
                      encoding_type=None,
                      options=None):
        """
        A convenience method that provides all syntax, sentiment, and entity
        features in one call.

        Example:
            >>> from google.cloud.gapic.language.v1beta2 import language_service_client
            >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
            >>> client = language_service_client.LanguageServiceClient()
            >>> document = language_service_pb2.Document()
            >>> features = language_service_pb2.AnnotateTextRequest.Features()
            >>> response = client.annotate_text(document, features)

        Args:
            document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
            features (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnnotateTextRequest.Features`): The enabled features.
            encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
            options (:class:`google.gax.CallOptions`): Overrides the default
                settings for this call, e.g, timeout, retries etc.

        Returns:
            A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnnotateTextResponse` instance.

        Raises:
            :exc:`google.gax.errors.GaxError` if the RPC is aborted.
            :exc:`ValueError` if the parameters are invalid.
        """
        request = language_service_pb2.AnnotateTextRequest(
            document=document, features=features, encoding_type=encoding_type)
        return self._annotate_text(request, options)
| {
"content_hash": "aa934338fc89dc39c893d7b6d12cf22a",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 167,
"avg_line_length": 46.31208053691275,
"alnum_prop": 0.6477066879211651,
"repo_name": "calpeyser/google-cloud-python",
"id": "0150ca4f4b8378c81bec5efd9ad6acb8a72ac90c",
"size": "14866",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "language/google/cloud/gapic/language/v1beta2/language_service_client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "62906"
},
{
"name": "Python",
"bytes": "4584603"
},
{
"name": "Shell",
"bytes": "4147"
}
],
"symlink_target": ""
} |
HetimaTracker
=================
| {
"content_hash": "ac7579be72d9ba2631ee810112efd5cd",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 17,
"avg_line_length": 8.5,
"alnum_prop": 0.38235294117647056,
"repo_name": "kyorohiro/dart_hetimatorrent",
"id": "fcc2af40d216ef6827d4a2b4394e7b46ff024b24",
"size": "34",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/HetimaTorrentTracker/README.md",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "165425"
},
{
"name": "Dart",
"bytes": "453550"
},
{
"name": "HTML",
"bytes": "19043"
},
{
"name": "JavaScript",
"bytes": "2312"
}
],
"symlink_target": ""
} |
# Build the example Docker image; requires the runsvinit binary to be present.
.PHONY: docker
docker: runsvinit
	docker build -t runsvinit-example .

# Unpack the runsvinit binary from the downloaded release tarball.
runsvinit: runsvinit-linux-amd64.tgz
	tar zxf $<

# Fetch the pinned v2.0.0 release tarball from GitHub.
runsvinit-linux-amd64.tgz:
	wget --quiet https://github.com/peterbourgon/runsvinit/releases/download/v2.0.0/runsvinit-linux-amd64.tgz
| {
"content_hash": "97024568b6a07467b9045a7bdc2b4535",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 106,
"avg_line_length": 25.6,
"alnum_prop": 0.77734375,
"repo_name": "alban/scope",
"id": "5629007c6ec90f2b1dd78afcc1726007e774d1a9",
"size": "256",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "vendor/runsvinit/example/Makefile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2224"
},
{
"name": "CSS",
"bytes": "41280"
},
{
"name": "Go",
"bytes": "1020510"
},
{
"name": "HTML",
"bytes": "642"
},
{
"name": "JavaScript",
"bytes": "510586"
},
{
"name": "Makefile",
"bytes": "12333"
},
{
"name": "Python",
"bytes": "6689"
},
{
"name": "Shell",
"bytes": "68953"
}
],
"symlink_target": ""
} |
package org.apache.wink.providers.json;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.nio.charset.Charset;
import javax.ws.rs.Consumes;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import javax.ws.rs.ext.MessageBodyReader;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import org.apache.wink.common.RestConstants;
import org.apache.wink.common.internal.i18n.Messages;
import org.apache.wink.common.internal.utils.MediaTypeUtils;
import org.apache.wink.common.utils.ProviderUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONTokener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Entity provider that reads and writes {@code org.json.JSONArray} payloads
 * for the JSON and JavaScript media types. On the write side, if the request
 * URI carries the {@code RestConstants.REST_PARAM_JSON_CALLBACK} query
 * parameter, the JSON output is wrapped in a JSONP callback invocation.
 */
@Provider
@Consumes( {MediaType.APPLICATION_JSON, MediaTypeUtils.JAVASCRIPT})
@Produces( {MediaType.APPLICATION_JSON, MediaTypeUtils.JAVASCRIPT})
public class JsonArrayProvider implements MessageBodyWriter<JSONArray>,
    MessageBodyReader<JSONArray> {

    // Bug fix: this logger was previously created with JsonProvider.class,
    // attributing this provider's log output to the wrong logger category.
    private static final Logger logger = LoggerFactory.getLogger(JsonArrayProvider.class);

    @Context
    private UriInfo uriInfo;

    /** Returns -1 so the JAX-RS runtime computes the Content-Length itself. */
    public long getSize(JSONArray t,
                        Class<?> type,
                        Type genericType,
                        Annotation[] annotations,
                        MediaType mediaType) {
        return -1;
    }

    /** Claims exactly {@code JSONArray}; subclasses are intentionally not handled. */
    public boolean isWriteable(Class<?> type,
                               Type genericType,
                               Annotation[] annotations,
                               MediaType mediaType) {
        // TODO: use isAssignableFrom instead of == ?
        return type == JSONArray.class;
    }

    /**
     * Serializes the array (pretty-printed, indent 2) to the entity stream,
     * wrapping it in a JSONP callback when the request URI contains the
     * callback query parameter. The writer is flushed but deliberately not
     * closed, since the entity stream is owned by the runtime.
     *
     * @throws WebApplicationException with status 500 if JSON serialization fails
     */
    public void writeTo(JSONArray t,
                        Class<?> type,
                        Type genericType,
                        Annotation[] annotations,
                        MediaType mediaType,
                        MultivaluedMap<String, Object> httpHeaders,
                        OutputStream entityStream) throws IOException, WebApplicationException {
        mediaType = MediaTypeUtils.setDefaultCharsetOnMediaTypeHeader(httpHeaders, mediaType);

        String jsonString = null;
        try {
            jsonString = t.toString(2);
        } catch (JSONException e) {
            logger.error(Messages.getMessage("jsonFailWriteJSONArray"), e); //$NON-NLS-1$
            throw new WebApplicationException(e, Response.Status.INTERNAL_SERVER_ERROR);
        }

        // Best-effort lookup: a missing/unavailable callback param just means
        // plain JSON output.
        String callbackParam = null;
        try {
            callbackParam =
                uriInfo.getQueryParameters().getFirst(RestConstants.REST_PARAM_JSON_CALLBACK);
        } catch (Exception e) {
            logger.trace("Could not get the URI callback param", e); //$NON-NLS-1$
        }

        Charset charset = Charset.forName(ProviderUtils.getCharset(mediaType));
        OutputStreamWriter writer = new OutputStreamWriter(entityStream, charset);
        if (callbackParam != null) {
            writer.write(callbackParam);
            writer.write("("); //$NON-NLS-1$
        }
        writer.write(jsonString);
        if (callbackParam != null) {
            writer.write(")"); //$NON-NLS-1$
        }
        writer.flush();
    }

    /** Readable only for exactly {@code JSONArray}, mirroring isWriteable. */
    public boolean isReadable(Class<?> type,
                              Type genericType,
                              Annotation[] annotations,
                              MediaType mediaType) {
        return type == JSONArray.class;
    }

    /**
     * Parses the entity stream into a {@code JSONArray} using the charset of
     * the request media type.
     *
     * @throws WebApplicationException with status 400 if the payload is not valid JSON
     */
    public JSONArray readFrom(Class<JSONArray> type,
                              Type genericType,
                              Annotation[] annotations,
                              MediaType mediaType,
                              MultivaluedMap<String, String> httpHeaders,
                              InputStream entityStream) throws IOException, WebApplicationException {
        try {
            return new JSONArray(new JSONTokener(ProviderUtils
                .createReader(entityStream, mediaType)));
        } catch (JSONException e) {
            logger.error(Messages.getMessage("jsonFailReadJSONArray"), e); //$NON-NLS-1$
            throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
        }
    }
}
| {
"content_hash": "a446a944e8489b15302c41de654d8347",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 101,
"avg_line_length": 37.14049586776859,
"alnum_prop": 0.6186025812194037,
"repo_name": "os890/wink_patches",
"id": "fe1f1e8f19895565cd07060b1d358b82da157845",
"size": "5472",
"binary": false,
"copies": "4",
"ref": "refs/heads/trunk",
"path": "wink-providers/wink-json-provider/src/main/java/org/apache/wink/providers/json/JsonArrayProvider.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "8117189"
},
{
"name": "JavaScript",
"bytes": "3114"
}
],
"symlink_target": ""
} |
'use strict';
const tape = require('tape');
const sinon = require('sinon');
const db = require('../services/db');
const TABLES = [
'points'
];
let currentSandbox;
// Create a fresh sinon sandbox before each test so stubs/spies created
// during the test can be restored wholesale afterwards.
function beforeEach() {
  currentSandbox = sinon.sandbox.create();
}
// Restore all stubs and wipe the database tables; returns a promise that
// resolves when the truncate query completes.
function afterEach() {
  currentSandbox.restore();

  // Very naive 'wipe the database' query.
  // Should be expanded to reset sequences, be more efficient, &etc.
  const query = TABLES
    .map(table => `truncate table ${table} cascade;`)
    .join('\n');

  return db.raw(query);
}
// Tear down the DB connection pool once the whole tape run completes,
// so the process can exit cleanly.
tape.onFinish(() => {
  db.destroy();
});
// Run a test in a 'fresh' environment; clear DB and any stubs.
// The test callback MUST return a promise; the returned promise drives
// cleanup and test completion.
function freshTest(description, fn) {
  tape(description, (t) => {
    const end = t.end;
    // Disable tape's own completion hooks so the promise chain below is the
    // only path that can finish the test.
    t.plan = null; // eslint-disable-line no-param-reassign
    t.end = null; // eslint-disable-line no-param-reassign

    beforeEach();

    // For now, all tests must return promises. Can reevaluate this, but it
    // provides a nice safety net for async code.
    const result = fn(t);

    if (!result || !result.then) {
      const err = Error(`
        All tests must return promises.
        Try \`const ok = Promise.resolve()\` at the top of your test suite,
        and returning \`ok\` at the end of each synchronous test.
      `);
      t.fail(err);
      // Bug fix: previously execution fell through to result.then(...) below,
      // which threw a TypeError on a non-thenable and masked the failure.
      end();
      return;
    }

    result.then(afterEach)
      .then(() => end())
      .catch((err) => {
        t.fail(err);
        end();
      });
  });
}
module.exports = {
sandbox() { return currentSandbox; },
test: freshTest
};
| {
"content_hash": "8a2cd6f726f8ae346422d0e034e09302",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 75,
"avg_line_length": 21.225352112676056,
"alnum_prop": 0.6078301260783012,
"repo_name": "dylanpyle/node-api-boilerplate",
"id": "322bd03eea31b92fadbe2a477f83968e5c6af0e2",
"size": "1507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test-helpers/fresh.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "12339"
},
{
"name": "Makefile",
"bytes": "525"
},
{
"name": "Shell",
"bytes": "273"
}
],
"symlink_target": ""
} |
# Disassembled (smali) form of com.android.server.accounts.AccountAuthenticatorCache:
# a RegisteredServicesCache specialised for account authenticators, keyed by
# AuthenticatorDescription.
.class Lcom/android/server/accounts/AccountAuthenticatorCache;
.super Landroid/content/pm/RegisteredServicesCache;
.source "AccountAuthenticatorCache.java"

# interfaces
.implements Lcom/android/server/accounts/IAccountAuthenticatorCache;

# annotations
.annotation system Ldalvik/annotation/MemberClasses;
    value = {
        Lcom/android/server/accounts/AccountAuthenticatorCache$MySerializer;
    }
.end annotation

# Generic signature: RegisteredServicesCache<AuthenticatorDescription>
# implementing IAccountAuthenticatorCache.
.annotation system Ldalvik/annotation/Signature;
    value = {
        "Landroid/content/pm/RegisteredServicesCache",
        "<",
        "Landroid/accounts/AuthenticatorDescription;",
        ">;",
        "Lcom/android/server/accounts/IAccountAuthenticatorCache;"
    }
.end annotation

# static fields
# Log tag used by this cache.
.field private static final TAG:Ljava/lang/String; = "Account"

# Shared XML serializer/parser for AuthenticatorDescription entries.
.field private static final sSerializer:Lcom/android/server/accounts/AccountAuthenticatorCache$MySerializer;
# direct methods
# Static initializer: constructs the shared MySerializer (the null argument is
# the synthetic outer-instance placeholder) and stores it in sSerializer.
.method static constructor <clinit>()V
    .locals 2

    .prologue
    .line 48
    new-instance v0, Lcom/android/server/accounts/AccountAuthenticatorCache$MySerializer;

    const/4 v1, 0x0

    invoke-direct {v0, v1}, Lcom/android/server/accounts/AccountAuthenticatorCache$MySerializer;-><init>(Lcom/android/server/accounts/AccountAuthenticatorCache$MySerializer;)V

    sput-object v0, Lcom/android/server/accounts/AccountAuthenticatorCache;->sSerializer:Lcom/android/server/accounts/AccountAuthenticatorCache$MySerializer;

    .line 44
    return-void
.end method
# Constructor: delegates to RegisteredServicesCache with the authenticator
# interface name, meta-data name, settings-file attribute tag, and the shared
# serializer.
.method public constructor <init>(Landroid/content/Context;)V
    .locals 6
    .param p1, "context"    # Landroid/content/Context;

    .prologue
    .line 51
    # v2 = interface name, v3 = meta-data name (same action string for both)
    const-string/jumbo v2, "android.accounts.AccountAuthenticator"

    .line 52
    const-string/jumbo v3, "android.accounts.AccountAuthenticator"

    .line 53
    # v4 = attribute tag used in the persisted services file
    const-string/jumbo v4, "account-authenticator"

    sget-object v5, Lcom/android/server/accounts/AccountAuthenticatorCache;->sSerializer:Lcom/android/server/accounts/AccountAuthenticatorCache$MySerializer;

    move-object v0, p0

    move-object v1, p1

    .line 51
    invoke-direct/range {v0 .. v5}, Landroid/content/pm/RegisteredServicesCache;-><init>(Landroid/content/Context;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Landroid/content/pm/XmlSerializerAndParser;)V

    .line 50
    return-void
.end method
# virtual methods
# Compiler-generated bridge: forwards the AuthenticatorDescription-typed call
# to the type-erased superclass implementation.
.method public bridge synthetic getServiceInfo(Landroid/accounts/AuthenticatorDescription;I)Landroid/content/pm/RegisteredServicesCache$ServiceInfo;
    .locals 1
    .param p1, "type"    # Landroid/accounts/AuthenticatorDescription;
    .param p2, "userId"    # I

    .prologue
    .line 318
    invoke-virtual {p0, p1, p2}, Landroid/content/pm/RegisteredServicesCache;->getServiceInfo(Ljava/lang/Object;I)Landroid/content/pm/RegisteredServicesCache$ServiceInfo;

    move-result-object v0

    return-object v0
.end method
# Parses an account-authenticator meta-data XML element into an
# AuthenticatorDescription. Returns null when the accountType attribute is
# empty; always recycles the TypedArray, including on the exception path.
.method public parseServiceAttributes(Landroid/content/res/Resources;Ljava/lang/String;Landroid/util/AttributeSet;)Landroid/accounts/AuthenticatorDescription;
    .locals 9
    .param p1, "res"    # Landroid/content/res/Resources;
    .param p2, "packageName"    # Ljava/lang/String;
    .param p3, "attrs"    # Landroid/util/AttributeSet;

    .prologue
    .line 59
    sget-object v0, Lcom/android/internal/R$styleable;->AccountAuthenticator:[I

    .line 58
    invoke-virtual {p1, p3, v0}, Landroid/content/res/Resources;->obtainAttributes(Landroid/util/AttributeSet;[I)Landroid/content/res/TypedArray;

    move-result-object v8

    .line 62
    .local v8, "sa":Landroid/content/res/TypedArray;
    # Styleable index 2 = accountType (string)
    const/4 v0, 0x2

    :try_start_0
    invoke-virtual {v8, v0}, Landroid/content/res/TypedArray;->getString(I)Ljava/lang/String;

    move-result-object v1

    .line 64
    .local v1, "accountType":Ljava/lang/String;
    # Index 0 = label resource id (default 0)
    const/4 v0, 0x0

    const/4 v2, 0x0

    .line 63
    invoke-virtual {v8, v0, v2}, Landroid/content/res/TypedArray;->getResourceId(II)I

    move-result v3

    .line 66
    .local v3, "labelId":I
    # Index 1 = icon resource id (default 0)
    const/4 v0, 0x1

    const/4 v2, 0x0

    .line 65
    invoke-virtual {v8, v0, v2}, Landroid/content/res/TypedArray;->getResourceId(II)I

    move-result v4

    .line 68
    .local v4, "iconId":I
    # Index 3 = small icon resource id (default 0)
    const/4 v0, 0x3

    const/4 v2, 0x0

    .line 67
    invoke-virtual {v8, v0, v2}, Landroid/content/res/TypedArray;->getResourceId(II)I

    move-result v5

    .line 70
    .local v5, "smallIconId":I
    # Index 4 = preferences resource id (default 0)
    const/4 v0, 0x4

    const/4 v2, 0x0

    .line 69
    invoke-virtual {v8, v0, v2}, Landroid/content/res/TypedArray;->getResourceId(II)I

    move-result v6

    .line 72
    .local v6, "prefId":I
    # Index 5 = customTokens boolean (default false)
    const/4 v0, 0x5

    const/4 v2, 0x0

    .line 71
    invoke-virtual {v8, v0, v2}, Landroid/content/res/TypedArray;->getBoolean(IZ)Z

    move-result v7

    .line 73
    .local v7, "customTokens":Z
    invoke-static {v1}, Landroid/text/TextUtils;->isEmpty(Ljava/lang/CharSequence;)Z
    :try_end_0
    .catchall {:try_start_0 .. :try_end_0} :catchall_0

    move-result v0

    if-eqz v0, :cond_0

    .line 74
    # Empty accountType: recycle the TypedArray and return null.
    const/4 v0, 0x0

    .line 79
    invoke-virtual {v8}, Landroid/content/res/TypedArray;->recycle()V

    .line 74
    return-object v0

    .line 76
    # Build the description from the parsed attributes (v2 = packageName).
    :cond_0
    :try_start_1
    new-instance v0, Landroid/accounts/AuthenticatorDescription;

    move-object v2, p2

    invoke-direct/range {v0 .. v7}, Landroid/accounts/AuthenticatorDescription;-><init>(Ljava/lang/String;Ljava/lang/String;IIIIZ)V
    :try_end_1
    .catchall {:try_start_1 .. :try_end_1} :catchall_0

    .line 79
    invoke-virtual {v8}, Landroid/content/res/TypedArray;->recycle()V

    .line 76
    return-object v0

    .line 78
    # Exception path: recycle the TypedArray and rethrow.
    .end local v1    # "accountType":Ljava/lang/String;
    .end local v3    # "labelId":I
    .end local v4    # "iconId":I
    .end local v5    # "smallIconId":I
    .end local v6    # "prefId":I
    .end local v7    # "customTokens":Z
    :catchall_0
    move-exception v0

    .line 79
    invoke-virtual {v8}, Landroid/content/res/TypedArray;->recycle()V

    .line 78
    throw v0
.end method
# Compiler-generated bridge: adapts the type-erased Object-returning signature
# to the AuthenticatorDescription-returning parseServiceAttributes above.
.method public bridge synthetic parseServiceAttributes(Landroid/content/res/Resources;Ljava/lang/String;Landroid/util/AttributeSet;)Ljava/lang/Object;
    .locals 1
    .param p1, "res"    # Landroid/content/res/Resources;
    .param p2, "packageName"    # Ljava/lang/String;
    .param p3, "attrs"    # Landroid/util/AttributeSet;

    .prologue
    .line 56
    invoke-virtual {p0, p1, p2, p3}, Lcom/android/server/accounts/AccountAuthenticatorCache;->parseServiceAttributes(Landroid/content/res/Resources;Ljava/lang/String;Landroid/util/AttributeSet;)Landroid/accounts/AuthenticatorDescription;

    move-result-object v0

    return-object v0
.end method
| {
"content_hash": "673af0a9238c4f0e7f0da7e47e94aba5",
"timestamp": "",
"source": "github",
"line_count": 239,
"max_line_length": 237,
"avg_line_length": 28.05439330543933,
"alnum_prop": 0.7064876957494407,
"repo_name": "libnijunior/patchrom_angler",
"id": "8f5bba7f79eb98ae87b87f9a363967f050688ab2",
"size": "6705",
"binary": false,
"copies": "2",
"ref": "refs/heads/mtc20l",
"path": "services.jar.out/smali/com/android/server/accounts/AccountAuthenticatorCache.smali",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "3339"
},
{
"name": "Makefile",
"bytes": "937"
},
{
"name": "Roff",
"bytes": "8687"
},
{
"name": "Shell",
"bytes": "10110"
},
{
"name": "Smali",
"bytes": "172302074"
}
],
"symlink_target": ""
} |
title: halo
order: 5
thumbnail: /img/site-specific-installations/halo/halo_03.jpg
description:
- Site Specific Installation
- York Art Gallery 2015/16
- Fine Gold Plated Wire
videos:
- title: Susie MacMurray - responding to the Lycett Green Collection
link: https://www.youtube.com/watch?v=OSXRhViPPt4
images:
- url: /halo/halo_01.jpg
- url: /halo/halo_02.jpg
- url: /halo/halo_03.jpg
- url: /halo/halo_04.jpg
- url: /halo/halo_05.jpg
- url: /halo/halo_06.jpg
---
| {
"content_hash": "73dfe6128f08a135b49514d734c41dee",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 70,
"avg_line_length": 27.11111111111111,
"alnum_prop": 0.6967213114754098,
"repo_name": "andrewMacmurray/susie-macmurray",
"id": "6ae1fe749ddf2c4aeca4e8b4b4f91403f17e21fc",
"size": "492",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "collections/_site-specific-installations/halo.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "9615"
},
{
"name": "HTML",
"bytes": "42742"
},
{
"name": "JavaScript",
"bytes": "5758"
},
{
"name": "Ruby",
"bytes": "2561"
}
],
"symlink_target": ""
} |
// DX6 fixed-function fallback implementation of SDK_VertexLitGeneric.
DEFINE_FALLBACK_SHADER( SDK_VertexLitGeneric, SDK_VertexLitGeneric_DX6 )

BEGIN_SHADER( SDK_VertexLitGeneric_DX6,
			  "Help for SDK_VertexLitGeneric_DX6" )

	// Material parameters understood by this shader (beyond the standard ones).
	BEGIN_SHADER_PARAMS
		SHADER_PARAM( DETAIL, SHADER_PARAM_TYPE_TEXTURE, "shadertest/detail", "detail texture" )
		SHADER_PARAM( DETAILSCALE, SHADER_PARAM_TYPE_FLOAT, "4", "scale of the detail texture" )
		SHADER_PARAM( SELFILLUMTINT, SHADER_PARAM_TYPE_COLOR, "[1 1 1]", "Self-illumination tint" )
		SHADER_PARAM( ENVMAP, SHADER_PARAM_TYPE_TEXTURE, "shadertest/shadertest_env", "envmap" )
		SHADER_PARAM( ENVMAPFRAME, SHADER_PARAM_TYPE_INTEGER, "", "" )
		SHADER_PARAM( ENVMAPMASK, SHADER_PARAM_TYPE_TEXTURE, "shadertest/shadertest_envmask", "envmap mask" )
		SHADER_PARAM( ENVMAPMASKFRAME, SHADER_PARAM_TYPE_INTEGER, "", "" )
		SHADER_PARAM( ENVMAPMASKSCALE, SHADER_PARAM_TYPE_FLOAT, "1", "envmap mask scale" )
		SHADER_PARAM( ENVMAPTINT, SHADER_PARAM_TYPE_COLOR, "[1 1 1]", "envmap tint" )
		SHADER_PARAM( ENVMAPOPTIONAL, SHADER_PARAM_TYPE_BOOL, "0", "Make the envmap only apply to dx9 and higher hardware" )
	END_SHADER_PARAMS
	// Normalizes material flags and fills in parameter defaults before any
	// texture loading happens. The flag order matters: ENVMAPMODE is cleared
	// for the no-envmap/no-multipass case and then forced on for vertex color.
	SHADER_INIT_PARAMS()
	{
		// default to 'MODEL' mode...
		if (!IS_FLAG_DEFINED( MATERIAL_VAR_MODEL ))
			SET_FLAGS( MATERIAL_VAR_MODEL );

		// Defaults for unset tint/scale parameters.
		if( !params[ENVMAPMASKSCALE]->IsDefined() )
			params[ENVMAPMASKSCALE]->SetFloatValue( 1.0f );

		if( !params[ENVMAPTINT]->IsDefined() )
			params[ENVMAPTINT]->SetVecValue( 1.0f, 1.0f, 1.0f );

		if( !params[SELFILLUMTINT]->IsDefined() )
			params[SELFILLUMTINT]->SetVecValue( 1.0f, 1.0f, 1.0f );

		if( !params[DETAILSCALE]->IsDefined() )
			params[DETAILSCALE]->SetFloatValue( 4.0f );

		// No envmap uses mode 0, it's one less pass
		// Also, if multipass = 0, then go to mode 0 also
		if ( ( !params[ENVMAP]->IsDefined() ) ||
			( !IS_FLAG_SET(MATERIAL_VAR_MULTIPASS) ) )
		{
			CLEAR_FLAGS( MATERIAL_VAR_ENVMAPMODE );
		}

		// Vertex color requires mode 1
		if ( IS_FLAG_SET(MATERIAL_VAR_VERTEXCOLOR) )
		{
			SET_FLAGS( MATERIAL_VAR_ENVMAPMODE );
		}

		// No texture means no self-illum or env mask in base alpha
		if ( !params[BASETEXTURE]->IsDefined() )
		{
			CLEAR_FLAGS( MATERIAL_VAR_SELFILLUM );
			CLEAR_FLAGS( MATERIAL_VAR_BASEALPHAENVMAPMASK );
		}

		// If in decal mode, no debug override...
		if ( IS_FLAG_SET(MATERIAL_VAR_DECAL) )
		{
			SET_FLAGS( MATERIAL_VAR_NO_DEBUG_OVERRIDE );
		}

		// This fixed-function path is vertex-lit and needs software lighting.
		SET_FLAGS2( MATERIAL_VAR2_LIGHTING_VERTEX_LIT );
		SET_FLAGS2( MATERIAL_VAR2_NEEDS_SOFTWARE_LIGHTING );

		// Get rid of the envmap if it's optional for this dx level.
		if( params[ENVMAPOPTIONAL]->IsDefined() && params[ENVMAPOPTIONAL]->GetIntValue() )
		{
			params[ENVMAP]->SetUndefined();
		}

		// If mat_specular 0, then get rid of envmap
		if( !g_pConfig->UseSpecular() && params[ENVMAP]->IsDefined() && params[BASETEXTURE]->IsDefined() )
		{
			params[ENVMAP]->SetUndefined();
		}
	}
	// Lowest rung of the fallback chain: nothing below DX6 to fall back to.
	SHADER_FALLBACK
	{
		return 0;
	}
	// Loads the textures referenced by the material and adjusts flags that
	// depend on the loaded texture data (translucency, cube map support).
	SHADER_INIT
	{
		if (params[BASETEXTURE]->IsDefined())
		{
			LoadTexture( BASETEXTURE );

			// Opaque base texture: its alpha can't carry self-illum or an envmap mask.
			if (!params[BASETEXTURE]->GetTextureValue()->IsTranslucent())
			{
				CLEAR_FLAGS( MATERIAL_VAR_SELFILLUM );
				CLEAR_FLAGS( MATERIAL_VAR_BASEALPHAENVMAPMASK );
			}
		}

		if (params[DETAIL]->IsDefined())
		{
			LoadTexture( DETAIL );
		}

		// Don't alpha test if the alpha channel is used for other purposes
		if (IS_FLAG_SET(MATERIAL_VAR_SELFILLUM) || IS_FLAG_SET(MATERIAL_VAR_BASEALPHAENVMAPMASK) )
			CLEAR_FLAGS( MATERIAL_VAR_ALPHATEST );

		if (params[ENVMAP]->IsDefined())
		{
			// Cube map unless sphere mapping was requested explicitly.
			if( !IS_FLAG_SET(MATERIAL_VAR_ENVMAPSPHERE) )
				LoadCubeMap( ENVMAP );
			else
				LoadTexture( ENVMAP );

			// Hardware without cube map support falls back to sphere mapping.
			if( !g_pHardwareConfig->SupportsCubeMaps() )
			{
				SET_FLAGS( MATERIAL_VAR_ENVMAPSPHERE );
			}

			if (params[ENVMAPMASK]->IsDefined())
				LoadTexture( ENVMAPMASK );
		}
	}
int GetDrawFlagsPass1(IMaterialVar** params)
{
int flags = SHADER_DRAW_POSITION | SHADER_DRAW_COLOR;
if (params[BASETEXTURE]->IsTexture())
flags |= SHADER_DRAW_TEXCOORD0;
return flags;
}
	// Single pass drawing just the per-vertex lighting color (no base texture).
	void DrawVertexLightingOnly( IMaterialVar** params, IShaderDynamicAPI *pShaderAPI, IShaderShadow* pShaderShadow )
	{
		SHADOW_STATE
		{
			pShaderShadow->EnableTexture( SHADER_TEXTURE_STAGE0, false );
			SetModulationShadowState();
			SetDefaultBlendingShadowState( );
			pShaderShadow->DrawFlags( GetDrawFlagsPass1( params ) );
			DefaultFog();
		}
		DYNAMIC_STATE
		{
			SetModulationDynamicState();
		}
		Draw();
	}
	// Modulation pass: multiplies the framebuffer by the vertex lighting,
	// blended toward white by the material's constant alpha.
	void MultiplyByVertexLighting( IMaterialVar** params, IShaderDynamicAPI *pShaderAPI, IShaderShadow* pShaderShadow )
	{
		SHADOW_STATE
		{
			// FIXME: How to deal with texture alpha??

			pShaderShadow->EnableTexGen( SHADER_TEXTURE_STAGE0, false );
			pShaderShadow->EnableTexGen( SHADER_TEXTURE_STAGE1, false );
			pShaderShadow->EnableTexture( SHADER_TEXTURE_STAGE0, false );
			pShaderShadow->EnableTexture( SHADER_TEXTURE_STAGE1, false );

			// NOTE: We're not doing lightmapping here, but we want to use the
			// same blend mode as we used for lightmapping
			pShaderShadow->EnableBlending( true );
			SingleTextureLightmapBlendMode();

			pShaderShadow->EnableCustomPixelPipe( true );
			pShaderShadow->CustomTextureStages( 1 );

			// This here will perform color = vertex light * (cc alpha) + 1 * (1 - cc alpha)
			pShaderShadow->CustomTextureOperation( SHADER_TEXTURE_STAGE0,
				SHADER_TEXCHANNEL_COLOR, SHADER_TEXOP_BLEND_CONSTANTALPHA,
				SHADER_TEXARG_VERTEXCOLOR, SHADER_TEXARG_CONSTANTCOLOR );

			// Alpha isn't used, it doesn't matter what we set it to.
			pShaderShadow->CustomTextureOperation( SHADER_TEXTURE_STAGE0,
				SHADER_TEXCHANNEL_ALPHA, SHADER_TEXOP_SELECTARG1,
				SHADER_TEXARG_NONE, SHADER_TEXARG_NONE );

			pShaderShadow->DrawFlags( SHADER_DRAW_POSITION | SHADER_DRAW_COLOR );
			FogToOOOverbright();
		}
		DYNAMIC_STATE
		{
			// Put the alpha in the color channel to modulate the color down....
			float alpha = GetAlpha();
			pShaderAPI->Color4f( OO_OVERBRIGHT, OO_OVERBRIGHT, OO_OVERBRIGHT, alpha );
		}
		Draw();

		SHADOW_STATE
		{
			pShaderShadow->EnableCustomPixelPipe( false );
		}
	}
//-----------------------------------------------------------------------------
// Used by mode 1
//-----------------------------------------------------------------------------
	// Single pass: base texture modulated by vertex lighting (no vertex color).
	void DrawBaseTimesVertexLighting( IMaterialVar** params, IShaderDynamicAPI *pShaderAPI, IShaderShadow* pShaderShadow )
	{
		// Base times vertex lighting, no vertex color
		SHADOW_STATE
		{
			// alpha test
 			pShaderShadow->EnableAlphaTest( IS_FLAG_SET(MATERIAL_VAR_ALPHATEST) );

			// base
			pShaderShadow->EnableTexture( SHADER_TEXTURE_STAGE0, true );
			pShaderShadow->OverbrightValue( SHADER_TEXTURE_STAGE0, OVERBRIGHT );

			// Independently configure alpha and color
			// Color = Color mod * Vertex Light * Tex (x2)
			// Alpha = Constant Alpha * Tex Alpha (no tex alpha if self illum == 1)
			// Can't have color modulation here
			pShaderShadow->EnableConstantColor( IsColorModulating() );

			// Independently configure alpha and color
			pShaderShadow->EnableAlphaPipe( true );
			pShaderShadow->EnableConstantAlpha( IsAlphaModulating() );
			pShaderShadow->EnableVertexAlpha( IS_FLAG_SET(MATERIAL_VAR_VERTEXALPHA) );

			if (!IS_FLAG_SET(MATERIAL_VAR_SELFILLUM) && !IS_FLAG_SET(MATERIAL_VAR_BASEALPHAENVMAPMASK))
				pShaderShadow->EnableTextureAlpha( SHADER_TEXTURE_STAGE0, true );

			SetDefaultBlendingShadowState( BASETEXTURE, true );
			pShaderShadow->DrawFlags( GetDrawFlagsPass1( params ) );
			DefaultFog();
		}
		DYNAMIC_STATE
		{
			SetFixedFunctionTextureTransform( MATERIAL_TEXTURE0, BASETEXTURETRANSFORM );
			BindTexture( SHADER_TEXTURE_STAGE0, BASETEXTURE, FRAME );
			SetModulationDynamicState();
		}
		Draw();

		SHADOW_STATE
		{
			pShaderShadow->EnableAlphaPipe( false );
		}
	}
//-----------------------------------------------------------------------------
// Envmap times vertex lighting, no vertex color
//-----------------------------------------------------------------------------
	// Single pass: (masked) environment map modulated by vertex lighting,
	// no vertex color. Used by mode 1 when no tint/texture forces extra passes.
	void DrawEnvmapTimesVertexLighting( IMaterialVar** params, IShaderDynamicAPI *pShaderAPI, IShaderShadow* pShaderShadow )
	{
		SHADOW_STATE
		{
			int materialVarFlags = params[FLAGS]->GetIntValue();

			// alpha test
 			pShaderShadow->EnableAlphaTest( false );

			int flags = SetShadowEnvMappingState( ENVMAPMASK ) | SHADER_DRAW_COLOR;
			bool hasEnvMapMask = params[ENVMAPMASK]->IsTexture();

			// Overbright goes on the last enabled stage.
			pShaderShadow->OverbrightValue( hasEnvMapMask ?
				SHADER_TEXTURE_STAGE1 : SHADER_TEXTURE_STAGE0, OVERBRIGHT );

			// Independently configure alpha and color
			// Color = Env map * Vertex Light * Envmapmask (x2)
			// Alpha = Constant Alpha * Vertex light alpha * Env Map mask Alpha
			pShaderShadow->EnableConstantColor( IsColorModulating() );

			pShaderShadow->EnableAlphaPipe( true );
			pShaderShadow->EnableConstantAlpha( IsAlphaModulating() );
			pShaderShadow->EnableVertexAlpha( (materialVarFlags & MATERIAL_VAR_VERTEXALPHA) != 0 );
			if (hasEnvMapMask)
				pShaderShadow->EnableTextureAlpha( SHADER_TEXTURE_STAGE1, true );

			SetDefaultBlendingShadowState( BASETEXTURE, true );
			pShaderShadow->DrawFlags( flags );
			DefaultFog();
		}
		DYNAMIC_STATE
		{
			SetDynamicEnvMappingState( ENVMAP, ENVMAPMASK, BASETEXTURE,
				ENVMAPFRAME, ENVMAPMASKFRAME, FRAME,
				BASETEXTURETRANSFORM, ENVMAPMASKSCALE );
		}
		Draw();

		SHADOW_STATE
		{
			pShaderShadow->EnableCustomPixelPipe( false );
			pShaderShadow->EnableAlphaPipe( false );
		}
	}
	// Mode 1: ( base + masked envmap ) * vertex lighting — up to three passes.
	// Pass order is significant; see comments below.
	void DrawMode1( IMaterialVar** params, IShaderDynamicAPI *pShaderAPI, IShaderShadow* pShaderShadow )
	{
		bool texDefined = params[BASETEXTURE]->IsTexture();
		bool envDefined = params[ENVMAP]->IsTexture();
	//	bool maskDefined = params[ENVMAPMASK]->IsTexture();

		// Pass 1 : Base + env

		// FIXME: Could make it 1 pass for base + env, if it wasn't
		// for the envmap tint. So this is 3 passes for now....

		// If it's base + mask * env, gotta do that in 2 passes
		// Gotta do funky stuff to fade out self-illuminated stuff
		bool hasEnvMapTint = !IsWhite(ENVMAPTINT);

		// Special case, can do in one pass
		if (!hasEnvMapTint && !texDefined && !IS_FLAG_SET(MATERIAL_VAR_VERTEXCOLOR) &&
			!IsColorModulating() )
		{
			DrawEnvmapTimesVertexLighting( params, pShaderAPI, pShaderShadow );
			return;
		}

		// First pass: either base*detail or the masked envmap, whichever exists.
		if (texDefined)
		{
			FixedFunctionBaseTimesDetailPass(
				BASETEXTURE, FRAME, BASETEXTURETRANSFORM, DETAIL, DETAILSCALE );
		}
		else
		{
			FixedFunctionMaskedEnvmapPass(
				ENVMAP, ENVMAPMASK, BASETEXTURE,
				ENVMAPFRAME, ENVMAPMASKFRAME, FRAME,
				BASETEXTURETRANSFORM, ENVMAPMASKSCALE, ENVMAPTINT );
		}

		// We can get here if multipass isn't set if we specify a vertex color
		if ( IS_FLAG_SET(MATERIAL_VAR_MULTIPASS) )
		{
			// Additive envmap on top of the base pass.
			if ( texDefined && envDefined )
			{
				FixedFunctionAdditiveMaskedEnvmapPass(
					ENVMAP, ENVMAPMASK, BASETEXTURE,
					ENVMAPFRAME, ENVMAPMASKFRAME, FRAME,
					BASETEXTURETRANSFORM, ENVMAPMASKSCALE, ENVMAPTINT );
			}
		}

		// Pass 2 : * vertex lighting
		MultiplyByVertexLighting( params, pShaderAPI, pShaderShadow );

		// FIXME: We could add it to the lightmap
		// Draw the selfillum pass (blows away envmap at self-illum points)
		if ( IS_FLAG_SET(MATERIAL_VAR_SELFILLUM) )
		{
			FixedFunctionSelfIlluminationPass(
				SHADER_TEXTURE_STAGE0, BASETEXTURE, FRAME, BASETEXTURETRANSFORM, SELFILLUMTINT );
		}
	}
	// Mode 0: base * vertex lighting (+ detail, self-illum), then an optional
	// additive masked envmap pass when multipass is enabled.
	void DrawMode0( IMaterialVar** params, IShaderDynamicAPI *pShaderAPI, IShaderShadow* pShaderShadow )
	{
		// Pass 1 : Base * lightmap or just lightmap
		if ( params[BASETEXTURE]->IsTexture() )
		{
			DrawBaseTimesVertexLighting( params, pShaderAPI, pShaderShadow );

			// Detail map
			FixedFunctionMultiplyByDetailPass(
				BASETEXTURE, FRAME, BASETEXTURETRANSFORM, DETAIL, DETAILSCALE );

			// Draw the selfillum pass
			if ( IS_FLAG_SET(MATERIAL_VAR_SELFILLUM) )
			{
				FixedFunctionSelfIlluminationPass(
					SHADER_TEXTURE_STAGE0, BASETEXTURE, FRAME, BASETEXTURETRANSFORM, SELFILLUMTINT );
			}
		}
		else
		{
			DrawVertexLightingOnly( params, pShaderAPI, pShaderShadow );

			// Detail map
			FixedFunctionMultiplyByDetailPass(
				BASETEXTURE, FRAME, BASETEXTURETRANSFORM, DETAIL, DETAILSCALE );
		}

		// Pass 2 : Masked environment map
		if ( params[ENVMAP]->IsTexture() &&
			(IS_FLAG_SET(MATERIAL_VAR_MULTIPASS)) )
		{
			FixedFunctionAdditiveMaskedEnvmapPass(
				ENVMAP, ENVMAPMASK, BASETEXTURE,
				ENVMAPFRAME, ENVMAPMASKFRAME, FRAME,
				BASETEXTURETRANSFORM, ENVMAPMASKSCALE, ENVMAPTINT );
		}
	}
SHADER_DRAW
{
bool useMode1 = IS_FLAG_SET(MATERIAL_VAR_ENVMAPMODE);
if (!useMode1)
{
// Base * Vertex Lighting + env
DrawMode0( params, pShaderAPI, pShaderShadow );
}
else
{
// ( Base + env ) * Vertex Lighting
DrawMode1( params, pShaderAPI, pShaderShadow );
}
}
END_SHADER
| {
"content_hash": "5a542d29f6b3cf8926854bd16ea543a9",
"timestamp": "",
"source": "github",
"line_count": 410,
"max_line_length": 121,
"avg_line_length": 30.98048780487805,
"alnum_prop": 0.6891040780979373,
"repo_name": "sswires/ham-and-jam",
"id": "8c0ec39fd342a3cc87e7aae3bca25b741495a764",
"size": "13047",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sdkshaders/advanced/sdk_vertexlitgeneric_dx6.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "590722"
},
{
"name": "C++",
"bytes": "32419935"
},
{
"name": "Makefile",
"bytes": "14652"
},
{
"name": "Objective-C",
"bytes": "33408"
},
{
"name": "Perl",
"bytes": "70416"
},
{
"name": "Shell",
"bytes": "10593"
}
],
"symlink_target": ""
} |
<?php
/**
* @see Zend_Db_Table_Row_Abstract
*/
require_once 'Zend/Db/Table/Row/Abstract.php';
/**
 * Mock row used by the unit tests: each relationship finder records its
 * arguments on public properties instead of querying the database.
 *
 * @category   Zend
 * @package    Zend_Db
 * @subpackage UnitTests
 * @copyright  Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class My_ZendDbTable_Row_TestMockRow extends Zend_Db_Table_Row_Abstract
{
    // Arguments captured by the finder methods below.
    public $parentTable = null;
    public $dependentTable = null;
    public $ruleKey = null;
    public $matchTable = null;
    public $intersectionTable = null;
    public $callerRefRuleKey = null;
    public $matchRefRuleKey = null;

    /**
     * Records the dependent table and rule key instead of fetching rows.
     */
    public function findDependentRowset($dependentTable, $ruleKey = null, Zend_Db_Table_Select $select = null)
    {
        $this->ruleKey = $ruleKey;
        $this->dependentTable = $dependentTable;
    }

    /**
     * Records the parent table and rule key instead of fetching a row.
     */
    public function findParentRow($parentTable, $ruleKey = null, Zend_Db_Table_Select $select = null)
    {
        $this->ruleKey = $ruleKey;
        $this->parentTable = $parentTable;
    }

    /**
     * Records all many-to-many lookup arguments instead of fetching rows.
     */
    public function findManyToManyRowset($matchTable, $intersectionTable, $callerRefRule = null,
                                         $matchRefRule = null, Zend_Db_Table_Select $select = null)
    {
        $this->matchRefRuleKey = $matchRefRule;
        $this->callerRefRuleKey = $callerRefRule;
        $this->intersectionTable = $intersectionTable;
        $this->matchTable = $matchTable;
    }

    /**
     * Converts camelCase column names ('columnFoo') to snake_case ('column_foo').
     */
    protected function _transformColumn($columnName)
    {
        return strtolower(preg_replace('/([A-Z])/', '_$1', $columnName));
    }
}
| {
"content_hash": "7614fc6bdc277caaf9837b232b3d94df",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 110,
"avg_line_length": 29.25,
"alnum_prop": 0.6131054131054131,
"repo_name": "gencer/zf1",
"id": "38cf4ee40829e2f085ffa6a109eb765e3fbe6438",
"size": "2521",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "tests/Zend/Db/Table/_files/My/ZendDbTable/Row/TestMockRow.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
# Tool manifest: each entry names a package, the PATH fragment it installs to
# (or $null when none), and its installer type ('Chocolatey' or 'RubyGem').
$script:Packages = @(
    @{Name = 'Git.Install'; Path = $null; Type = 'Chocolatey'},
    @{Name = 'Git'; Path = "${Env:ProgramFiles(x86)}\git\bin"; Type = 'Chocolatey'},
    @{Name = 'Ruby'; Path = 'C:\Tools\ruby215\bin'; Type = 'Chocolatey'},
    @{Name = 'githug'; Path = $null; Type = 'RubyGem'}
)

# Caches of installed packages, refreshed by the Update-Installed* helpers.
$script:InstalledChocoPackages = @()
$script:InstalledRubyGems = @()
#region Helpers
# Throws unless running on WMF 5 (Feb 2015 preview or later) in an elevated session.
function Assert-Prerequisite
{
    [CmdletBinding()]
    param()

    Write-Verbose 'Checking if correct version of PowerShell is available'
    if ($PSVersionTable.PSVersion -lt '5.0.10018.0')
    {
        throw 'WMF Feb 2015 or higher is required for this module to work'
    }

    Write-Verbose 'Testing elevation status'
    $identity  = [System.Security.Principal.WindowsIdentity]::GetCurrent()
    $principal = New-Object System.Security.Principal.WindowsPrincipal($identity)
    $adminRole = [System.Security.Principal.WindowsBuiltInRole]::Administrator

    if (-not $principal.IsInRole($adminRole))
    {
        throw 'This module performs installations and needs to be run elevated'
    }
}
# Refreshes $script:InstalledChocoPackages from `choco list -localonly`.
# The cache is left empty when chocolatey is missing or reports no packages.
function Update-InstalledChocoPackage
{
    $script:InstalledChocoPackages = @()

    if (-not (Test-Chocolatey)) {return}

    $Packages = (choco list -localonly) -split "`r`n"

    if ($Packages[0] -match 'No packages found.') {return}

    # Each output line looks like '<name> <version>'; split on the space.
    $Packages | % {
        $Property = $_.Split(' ')
        $script:InstalledChocoPackages += @{Name=($Property[0].Trim()); Version = ($Property[1].Trim())}
    }
}
# Refreshes $script:InstalledRubyGems from `gem list --local`; the cache stays
# empty when the ruby chocolatey package is not installed.
# NOTE(review): `gem list` emits '<name> (<version>)' — the naive space split
# stores the version token with its surrounding parentheses; confirm whether
# any caller relies on the Version value before fixing.
function Update-InstalledRubyGem
{
    $script:InstalledRubyGems = @()

    if (!(Test-ChocoPackage -Name 'ruby')) {return}

    $Packages = (gem list --local) -split "`r`n"

    $Packages | % {
        $Property = $_.Split(' ')
        $script:InstalledRubyGems += @{Name=($Property[0].Trim()); Version = ($Property[1].Trim())}
    }
}
# Returns $true when the named chocolatey package appears in the cached
# installed-package list (case-insensitive), refreshing the cache on first use.
function Test-ChocoPackage
{
    [CmdletBinding()]
    param
    (
        [string]
        $Name
    )

    if ($script:InstalledChocoPackages.Count -eq 0)
    {
        Update-InstalledChocoPackage
    }

    # Fix: dropped the unused $found flag the original assigned but never read.
    foreach($Package in $script:InstalledChocoPackages)
    {
        if ($Package.Name -ieq $Name) {return $true}
    }
    return $false
}
# Returns $true when the named gem appears in the cached `gem list` output
# (case-insensitive), refreshing the cache on first use.
function Test-RubyGem
{
    [CmdletBinding()]
    param
    (
        [string]
        $Name
    )

    if ($script:InstalledRubyGems.Count -eq 0)
    {
        Update-InstalledRubyGem
    }

    foreach ($InstalledGem in $script:InstalledRubyGems)
    {
        if ($InstalledGem.Name -ieq $Name)
        {
            return $true
        }
    }

    return $false
}
# Installs a chocolatey package; -Force reinstalls even when already present.
function Install-ChocoPackage
{
    [CmdletBinding()]
    param
    (
        [string]
        $Name,

        [switch]
        $Force
    )

    $Template = if ($Force) { 'install {0} -force' } else { 'install {0}' }
    $Command = $Template -f $Name
    & choco ("$Command" -split ' ')
}
# Installs a ruby gem; -Force reinstalls even when already present.
function Install-RubyGem
{
    [CmdletBinding()]
    param
    (
        [string]
        $Name,

        [switch]
        $Force
    )

    $Template = if ($Force) { 'install {0} --force' } else { 'install {0}' }
    $Command = $Template -f $Name
    & gem ("$Command" -split ' ')
}
# Appends $PathFragment to PATH in the given persisted scope and in the current
# process, skipping the update when the fragment is already present.
function Add-Path
{
    [CmdletBinding()]
    param(
        [string]
        $PathFragment,

        [ValidateSet('Machine', 'User')]
        [string]
        $Scope = 'Machine'
    )

    if ($env:Path.IndexOf($PathFragment) -ne -1)
    {
        Write-Verbose "$PathFragment exists in `$env:Path, returning"
        return
    }

    # Compute the new value once; the process PATH is unchanged between the
    # two SetEnvironmentVariable calls below.
    $UpdatedPath = "$($env:Path);$PathFragment"

    Write-Verbose "Adding $PathFragment to `$env:Path in $Scope scope"
    [System.Environment]::SetEnvironmentVariable('PATH', $UpdatedPath, $Scope)

    Write-Verbose "Adding $PathFragment to `$env:Path in process scope"
    [System.Environment]::SetEnvironmentVariable('PATH', $UpdatedPath, [System.EnvironmentVariableTarget]::Process)
}
# Removes every occurrence of $PathFragment from PATH, both in the given
# persisted scope and in the current process.
function Remove-Path
{
    [CmdletBinding()]
    param(
        [string]
        $PathFragment,

        [ValidateSet('Machine', 'User')]
        [string]
        $Scope = 'Machine'
    )

    $NewPath = $env:Path

    # Strip repeated occurrences one at a time until none remain.
    $Pos = $NewPath.IndexOf($PathFragment)
    while($Pos -ne -1)
    {
        $NewPath = $NewPath.Remove($pos, $PathFragment.Length)
        $Pos = $NewPath.IndexOf($PathFragment)
    }

    # NOTE(review): any ';' separators around the removed fragment are left behind.
    Write-Verbose "Removing $PathFragment from `$env:Path in $Scope scope"
    [System.Environment]::SetEnvironmentVariable('PATH', "$NewPath", $Scope)

    Write-Verbose "Removing $PathFragment from `$env:Path in process scope"
    [System.Environment]::SetEnvironmentVariable('PATH', "$NewPath", [environmentvariabletarget]::Process)
}
# Uninstalls the named chocolatey package.
function Uninstall-ChocoPackage
{
    [CmdletBinding()]
    param
    (
        [string]
        $Name
    )

    $Command = 'uninstall {0}' -f $Name
    & choco ("$Command" -split ' ')
}
# Uninstalls the named gem (-x also removes its executables).
function Uninstall-RubyGem
{
    [CmdletBinding()]
    param
    (
        [string]
        $Name
    )

    $Command = 'uninstall {0} -x' -f $Name
    & gem ("$Command" -split ' ')
}
# Returns $true when the `choco` command can be invoked without error.
function Test-Chocolatey
{
    Write-Verbose 'Checking if Chocolatey is present'
    Invoke-Command {choco} -ErrorVariable e 2> $null
    return ($e.Count -eq 0)
}
# Applies package-specific post-install patches for entries in the manifest.
function Install-RequiredPatch
{
    $script:Packages | % {
        switch ($_.Name)
        {
            # The chocolatey ruby package needs its bundled rubygems patched.
            'Ruby' {Install-RubyPatch; break}
        }
    }
}
# Downloads rubygems-update 2.2.3 and updates the rubygems bundled with the
# chocolatey ruby 2.1.5 install, then removes the updater gem.
function Install-RubyPatch
{
    [CmdletBinding()]
    Param()

    $LocalGemFile = 'C:\tools\ruby215\rubygems-update-2.2.3.gem'
    $GemUri = 'https://github.com/rubygems/rubygems/releases/download/v2.2.3/rubygems-update-2.2.3.gem'

    # NOTE(review): the WebClient is never disposed and the download has no
    # error handling; a failed download surfaces later in `gem install`.
    $WebClient = New-Object System.Net.WebClient
    $WebClient.DownloadFile($GemUri, $LocalGemFile)

    gem install --local "$LocalGemFile"
    update_rubygems --no-ri --no-rdoc
    # NOTE(review): 'rubygems-update-x' looks like a typo — probably intended
    # as `gem uninstall rubygems-update -x`; confirm before changing.
    gem uninstall rubygems-update-x
}
#endregion Helpers
#region Exports
<#
.SYNOPSIS
Installs chocolatey
.DESCRIPTION
Installs chocolatey using 'https://chocolatey.org/install.ps1'
.LINK
https://github.com/nanalakshmanan/DevSetup
#>
# Installs chocolatey via its official bootstrap script; no-op when present.
function Install-Chocolatey
{
    [CmdletBinding()]
    param()

    if (Test-Chocolatey)
    {
        Write-Verbose 'Chocolatey already present, returning'
        return
    }

    Write-Verbose 'Installing Chocolatey'
    # Official bootstrap: download chocolatey's install script and execute it.
    Invoke-Expression ((new-object net.webclient).DownloadString('https://chocolatey.org/install.ps1'))
}
<#
.SYNOPSIS
Uninstalls chocolatey
.DESCRIPTION
Uninstalls chocolatey by removing folder and environment variable
(as prescribed in chocolatey guidelines)
.LINK
https://github.com/nanalakshmanan/DevSetup
#>
# Removes chocolatey: deletes its install folder and clears the
# ChocolateyInstall variable from the process and user environments.
function Uninstall-Chocolatey
{
    [CmdletBinding()]
    param()

    if (! (Test-Chocolatey))
    {
        Write-Verbose 'Chocolatey not present, returning'
        return
    }

    Write-Verbose 'Uninstalling Chocolatey'
    Remove-Item -Recurse -Force "$env:ProgramData\Chocolatey"
    # Remove-Item on the Env: drive only affects the current process...
    Remove-Item Env:\ChocolateyInstall -Force
    # ...so also clear the persisted user-scope variable.
    [System.Environment]::SetEnvironmentVariable('ChocolateyInstall', $Null, [environmentvariabletarget]::User)
}
<#
.SYNOPSIS
Install developer tools for building resources/configurations
.DESCRIPTION
Installs a predetermined set of tools for building resources
and configurations
.NOTES
Currently supports chocolatey packages and ruby gems. Will
also install Chocolatey if not already present
.LINK
https://github.com/nanalakshmanan/DevSetup
.EXAMPLE
Install-DevTool -Verbose
.EXAMPLE
Install-DevTool -Force -Verbose
This command Force installs the tools even if already present
.PARAMETER Force
Force installs the tools even if already present
#>
# Installs every package in $script:Packages (bootstrapping chocolatey first),
# adds declared PATH fragments, and applies post-install patches.
function Install-DevTool
{
    [CmdletBinding()]
    param(
        [switch]
        $Force
    )

    Assert-Prerequisite

    # Bootstrap chocolatey itself before installing any packages.
    if (-not (Test-Chocolatey))
    {
        Install-Chocolatey
    }

    Update-InstalledChocoPackage

    $script:Packages | % {
        $Package = New-Object PSObject -Property $_

        # Skip packages that are already installed unless -Force was given.
        if (-not $Force)
        {
            if ($Package.Type -ieq 'Chocolatey')
            {
                $InstallNeeded = !(Test-ChocoPackage -Name $Package.Name )
            }
            else
            {
                $InstallNeeded = !(Test-RubyGem -Name $Package.Name)
            }
        }
        else
        {
            $InstallNeeded = $true
        }

        if ($InstallNeeded)
        {
            if ($Package.Type -ieq 'Chocolatey')
            {
                Install-ChocoPackage -Name $Package.Name
            }
            else
            {
                Install-RubyGem -Name $Package.Name
            }
        }

        # Add the package's bin directory to PATH when it declares one.
        if ($Package.Path -ne $null)
        {
            Add-Path -PathFragment $Package.Path
        }
    }

    # Package-specific post-install patches (e.g. rubygems update for ruby).
    Install-RequiredPatch
}
<#
.SYNOPSIS
Uninstall developer tools for building resources/configurations
.DESCRIPTION
Uninstalls all tools from the predetermined set of tools for building resources
and configurations
.NOTES
Currently supports chocolatey packages and ruby gems. Does not uninstall
chocolatey. Call Uninstall-Chocolatey command for the same
.LINK
https://github.com/nanalakshmanan/DevSetup
.EXAMPLE
Uninstall-DevTool -Verbose
#>
# Uninstalls every package in $script:Packages: ruby gems first (while ruby is
# still present), then chocolatey packages and their PATH fragments.
function Uninstall-DevTool
{
    [CmdletBinding()]
    param()

    Assert-Prerequisite

    # Fix: dropped the unused $ChocoInstalled / $RubyInstalled assignments the
    # original computed and never read.
    Update-InstalledChocoPackage
    Update-InstalledRubyGem

    # first process all ruby gems
    $script:Packages | % {
        if ($_.Type -ieq 'RubyGem')
        {
            if (Test-RubyGem -Name $_.Name)
            {
                Uninstall-RubyGem -Name $_.Name
            }
        }
    }

    # now process all choco packages
    $script:Packages | % {
        if ($_.Type -ieq 'Chocolatey')
        {
            if (Test-ChocoPackage -Name $_.Name)
            {
                Uninstall-ChocoPackage -Name $_.Name
            }

            # Remove the package's PATH fragment whether or not it was installed.
            if ($_.Path -ne $null)
            {
                Remove-Path -PathFragment $_.Path
            }
        }
    }
}
#endregion Exports
Export-ModuleMember Install-Chocolatey, Uninstall-Chocolatey, Install-DevTool, Uninstall-DevTool | {
"content_hash": "10f11893be166da586aa96b6881a5934",
"timestamp": "",
"source": "github",
"line_count": 490,
"max_line_length": 124,
"avg_line_length": 21.273469387755103,
"alnum_prop": 0.5867229470452802,
"repo_name": "nanalakshmanan/DevSetup",
"id": "c89a6297111600f44178567455087aed197ebf95",
"size": "10426",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DevSetup.psm1",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "17076"
}
],
"symlink_target": ""
} |
require File.dirname(__FILE__) + '/../abstract_unit'
require File.dirname(__FILE__) + '/../../lib/action_view/helpers/url_helper'
require File.dirname(__FILE__) + '/../../lib/action_view/helpers/asset_tag_helper'
require File.dirname(__FILE__) + '/../../lib/action_view/helpers/tag_helper'
# Minimal request stand-in exposing only #request_uri; used by the disabled
# xtest_link_unless_current case below.
RequestMock = Struct.new("Request", :request_uri)
# Unit tests for ActionView's UrlHelper methods (url_for, button_to, link_to,
# link_to_unless/if and mail_to), driven by a stub controller whose url_for
# returns a canned URL.
class UrlHelperTest < Test::Unit::TestCase
  include ActionView::Helpers::AssetTagHelper
  include ActionView::Helpers::UrlHelper
  include ActionView::Helpers::TagHelper

  # Builds an anonymous controller stub whose url_for ignores its arguments
  # and returns whatever @controller.url was set to.
  def setup
    @controller = Class.new do
      attr_accessor :url
      def url_for(options, *parameters_for_method_reference)
        url
      end
    end
    @controller = @controller.new
    @controller.url = "http://www.example.com"
  end

  # The :escape option makes no visible difference here because the stub
  # url_for hands back the raw URL unchanged.
  def test_url_for_escapes_urls
    @controller.url = "http://www.example.com?a=b&c=d"
    assert_equal "http://www.example.com?a=b&c=d", url_for(:a => 'b', :c => 'd')
    assert_equal "http://www.example.com?a=b&c=d", url_for(:a => 'b', :c => 'd', :escape => true)
    assert_equal "http://www.example.com?a=b&c=d", url_for(:a => 'b', :c => 'd', :escape => false)
  end

  # todo: missing test cases

  # button_to renders a single-button POST form.
  def test_button_to_with_straight_url
    assert_dom_equal "<form method=\"post\" action=\"http://www.example.com\" class=\"button-to\"><div><input type=\"submit\" value=\"Hello\" /></div></form>", button_to("Hello", "http://www.example.com")
  end

  def test_button_to_with_query
    assert_dom_equal "<form method=\"post\" action=\"http://www.example.com/q1=v1&q2=v2\" class=\"button-to\"><div><input type=\"submit\" value=\"Hello\" /></div></form>", button_to("Hello", "http://www.example.com/q1=v1&q2=v2")
  end

  # With no name given, the URL itself becomes the button label.
  def test_button_to_with_query_and_no_name
    assert_dom_equal "<form method=\"post\" action=\"http://www.example.com?q1=v1&q2=v2\" class=\"button-to\"><div><input type=\"submit\" value=\"http://www.example.com?q1=v1&q2=v2\" /></div></form>", button_to(nil, "http://www.example.com?q1=v1&q2=v2")
  end

  # :confirm wraps submission in a JavaScript confirm() dialog.
  def test_button_to_with_javascript_confirm
    assert_dom_equal(
      "<form method=\"post\" action=\"http://www.example.com\" class=\"button-to\"><div><input onclick=\"return confirm('Are you sure?');\" type=\"submit\" value=\"Hello\" /></div></form>",
      button_to("Hello", "http://www.example.com", :confirm => "Are you sure?")
    )
  end

  # :disabled => true adds the disabled attribute; false omits it entirely.
  def test_button_to_enabled_disabled
    assert_dom_equal(
      "<form method=\"post\" action=\"http://www.example.com\" class=\"button-to\"><div><input type=\"submit\" value=\"Hello\" /></div></form>",
      button_to("Hello", "http://www.example.com", :disabled => false)
    )
    assert_dom_equal(
      "<form method=\"post\" action=\"http://www.example.com\" class=\"button-to\"><div><input disabled=\"disabled\" type=\"submit\" value=\"Hello\" /></div></form>",
      button_to("Hello", "http://www.example.com", :disabled => true)
    )
  end

  def test_link_tag_with_straight_url
    assert_dom_equal "<a href=\"http://www.example.com\">Hello</a>", link_to("Hello", "http://www.example.com")
  end

  def test_link_tag_with_query
    assert_dom_equal "<a href=\"http://www.example.com?q1=v1&q2=v2\">Hello</a>", link_to("Hello", "http://www.example.com?q1=v1&q2=v2")
  end

  # With a nil name, the URL doubles as the link text.
  def test_link_tag_with_query_and_no_name
    assert_dom_equal "<a href=\"http://www.example.com?q1=v1&q2=v2\">http://www.example.com?q1=v1&q2=v2</a>", link_to(nil, "http://www.example.com?q1=v1&q2=v2")
  end

  # The name is emitted verbatim, so it may contain markup such as an image.
  def test_link_tag_with_img
    assert_dom_equal "<a href=\"http://www.example.com\"><img src='/favicon.jpg' /></a>", link_to("<img src='/favicon.jpg' />", "http://www.example.com")
  end

  def test_link_with_nil_html_options
    assert_dom_equal "<a href=\"http://www.example.com\">Hello</a>", link_to("Hello", {:action => 'myaction'}, nil)
  end

  def test_link_tag_with_custom_onclick
    assert_dom_equal "<a href=\"http://www.example.com\" onclick=\"alert('yay!')\">Hello</a>", link_to("Hello", "http://www.example.com", :onclick => "alert('yay!')")
  end

  # Single quotes and newlines inside the :confirm message must be escaped
  # for the generated JavaScript.
  def test_link_tag_with_javascript_confirm
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"return confirm('Are you sure?');\">Hello</a>",
      link_to("Hello", "http://www.example.com", :confirm => "Are you sure?")
    )
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"return confirm('You can\\'t possibly be sure, can you?');\">Hello</a>",
      link_to("Hello", "http://www.example.com", :confirm => "You can't possibly be sure, can you?")
    )
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"return confirm('You can\\'t possibly be sure,\\n can you?');\">Hello</a>",
      link_to("Hello", "http://www.example.com", :confirm => "You can't possibly be sure,\n can you?")
    )
  end

  # :popup may be true (plain window.open) or [name, features].
  def test_link_tag_with_popup
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"window.open(this.href);return false;\">Hello</a>",
      link_to("Hello", "http://www.example.com", :popup => true)
    )
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"window.open(this.href);return false;\">Hello</a>",
      link_to("Hello", "http://www.example.com", :popup => 'true')
    )
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"window.open(this.href,'window_name','width=300,height=300');return false;\">Hello</a>",
      link_to("Hello", "http://www.example.com", :popup => ['window_name', 'width=300,height=300'])
    )
  end

  # Combining :popup and :confirm nests window.open inside the confirm check.
  def test_link_tag_with_popup_and_javascript_confirm
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"if (confirm('Fo\\' sho\\'?')) { window.open(this.href); };return false;\">Hello</a>",
      link_to("Hello", "http://www.example.com", { :popup => true, :confirm => "Fo' sho'?" })
    )
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"if (confirm('Are you serious?')) { window.open(this.href,'window_name','width=300,height=300'); };return false;\">Hello</a>",
      link_to("Hello", "http://www.example.com", { :popup => ['window_name', 'width=300,height=300'], :confirm => "Are you serious?" })
    )
  end

  # :post => true generates a dynamic form so the link submits via POST.
  def test_link_tag_using_post_javascript
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"var f = document.createElement('form'); this.parentNode.appendChild(f); f.method = 'POST'; f.action = this.href; f.submit();return false;\">Hello</a>",
      link_to("Hello", "http://www.example.com", :post => true)
    )
  end

  def test_link_tag_using_post_javascript_and_confirm
    assert_dom_equal(
      "<a href=\"http://www.example.com\" onclick=\"if (confirm('Are you serious?')) { var f = document.createElement('form'); this.parentNode.appendChild(f); f.method = 'POST'; f.action = this.href; f.submit(); };return false;\">Hello</a>",
      link_to("Hello", "http://www.example.com", :post => true, :confirm => "Are you serious?")
    )
  end

  # :post and :popup are mutually exclusive and must raise.
  def test_link_tag_using_post_javascript_and_popup
    assert_raises(ActionView::ActionViewError) { link_to("Hello", "http://www.example.com", :popup => true, :post => true, :confirm => "Are you serious?") }
  end

  # link_to_unless: renders the plain name when the condition is true; an
  # optional block can transform the name instead.
  def test_link_to_unless
    assert_equal "Showing", link_to_unless(true, "Showing", :action => "show", :controller => "weblog")
    assert_dom_equal "<a href=\"http://www.example.com\">Listing</a>", link_to_unless(false, "Listing", :action => "list", :controller => "weblog")
    assert_equal "Showing", link_to_unless(true, "Showing", :action => "show", :controller => "weblog", :id => 1)
    assert_equal "<strong>Showing</strong>", link_to_unless(true, "Showing", :action => "show", :controller => "weblog", :id => 1) { |name, options, html_options, *parameters_for_method_reference|
      "<strong>#{name}</strong>"
    }
    assert_equal "<strong>Showing</strong>", link_to_unless(true, "Showing", :action => "show", :controller => "weblog", :id => 1) { |name|
      "<strong>#{name}</strong>"
    }
    assert_equal "test", link_to_unless(true, "Showing", :action => "show", :controller => "weblog", :id => 1) {
      "test"
    }
  end

  def test_link_to_if
    assert_equal "Showing", link_to_if(false, "Showing", :action => "show", :controller => "weblog")
    assert_dom_equal "<a href=\"http://www.example.com\">Listing</a>", link_to_if(true, "Listing", :action => "list", :controller => "weblog")
    assert_equal "Showing", link_to_if(false, "Showing", :action => "show", :controller => "weblog", :id => 1)
  end

  # Disabled (the "x" prefix keeps Test::Unit from running it).
  # NOTE(review): the middle check uses `assert` with two arguments, which only
  # tests truthiness of the first — presumably assert_equal was intended;
  # confirm before re-enabling.
  def xtest_link_unless_current
    @request = RequestMock.new("http://www.example.com")
    assert_equal "Showing", link_to_unless_current("Showing", :action => "show", :controller => "weblog")
    @request = RequestMock.new("http://www.example.org")
    assert "<a href=\"http://www.example.com\">Listing</a>", link_to_unless_current("Listing", :action => "list", :controller => "weblog")
    @request = RequestMock.new("http://www.example.com")
    assert_equal "Showing", link_to_unless_current("Showing", :action => "show", :controller => "weblog", :id => 1)
  end

  # mail_to: string and symbol html_options keys must be equivalent.
  def test_mail_to
    assert_dom_equal "<a href=\"mailto:david@loudthinking.com\">david@loudthinking.com</a>", mail_to("david@loudthinking.com")
    assert_dom_equal "<a href=\"mailto:david@loudthinking.com\">David Heinemeier Hansson</a>", mail_to("david@loudthinking.com", "David Heinemeier Hansson")
    assert_dom_equal(
      "<a class=\"admin\" href=\"mailto:david@loudthinking.com\">David Heinemeier Hansson</a>",
      mail_to("david@loudthinking.com", "David Heinemeier Hansson", "class" => "admin")
    )
    assert_equal mail_to("david@loudthinking.com", "David Heinemeier Hansson", "class" => "admin"),
      mail_to("david@loudthinking.com", "David Heinemeier Hansson", :class => "admin")
  end

  # :encode => "javascript" emits the whole anchor via escaped document.write.
  def test_mail_to_with_javascript
    assert_dom_equal "<script type=\"text/javascript\">eval(unescape('%64%6f%63%75%6d%65%6e%74%2e%77%72%69%74%65%28%27%3c%61%20%68%72%65%66%3d%22%6d%61%69%6c%74%6f%3a%6d%65%40%64%6f%6d%61%69%6e%2e%63%6f%6d%22%3e%4d%79%20%65%6d%61%69%6c%3c%2f%61%3e%27%29%3b'))</script>", mail_to("me@domain.com", "My email", :encode => "javascript")
  end

  # cc/bcc/subject/body become percent-encoded query parameters of the mailto URL.
  def test_mail_with_options
    assert_dom_equal(
      %(<a href="mailto:me@example.com?cc=ccaddress%40example.com&bcc=bccaddress%40example.com&body=This%20is%20the%20body%20of%20the%20message.&subject=This%20is%20an%20example%20email">My email</a>),
      mail_to("me@example.com", "My email", :cc => "ccaddress@example.com", :bcc => "bccaddress@example.com", :subject => "This is an example email", :body => "This is the body of the message.")
    )
  end

  def test_mail_to_with_img
    assert_dom_equal %(<a href="mailto:feedback@example.com"><img src="/feedback.png" /></a>), mail_to('feedback@example.com', '<img src="/feedback.png" />')
  end

  # :encode => "hex" obfuscates the address in the href.
  def test_mail_to_with_hex
    assert_dom_equal "<a href=\"mailto:%6d%65@%64%6f%6d%61%69%6e.%63%6f%6d\">My email</a>", mail_to("me@domain.com", "My email", :encode => "hex")
  end

  # :replace_at / :replace_dot only affect the visible text, never the href;
  # an explicit name suppresses them entirely.
  def test_mail_to_with_replace_options
    assert_dom_equal "<a href=\"mailto:wolfgang@stufenlos.net\">wolfgang(at)stufenlos(dot)net</a>", mail_to("wolfgang@stufenlos.net", nil, :replace_at => "(at)", :replace_dot => "(dot)")
    assert_dom_equal "<a href=\"mailto:%6d%65@%64%6f%6d%61%69%6e.%63%6f%6d\">me(at)domain.com</a>", mail_to("me@domain.com", nil, :encode => "hex", :replace_at => "(at)")
    assert_dom_equal "<a href=\"mailto:%6d%65@%64%6f%6d%61%69%6e.%63%6f%6d\">My email</a>", mail_to("me@domain.com", "My email", :encode => "hex", :replace_at => "(at)")
    assert_dom_equal "<a href=\"mailto:%6d%65@%64%6f%6d%61%69%6e.%63%6f%6d\">me(at)domain(dot)com</a>", mail_to("me@domain.com", nil, :encode => "hex", :replace_at => "(at)", :replace_dot => "(dot)")
    assert_dom_equal "<script type=\"text/javascript\">eval(unescape('%64%6f%63%75%6d%65%6e%74%2e%77%72%69%74%65%28%27%3c%61%20%68%72%65%66%3d%22%6d%61%69%6c%74%6f%3a%6d%65%40%64%6f%6d%61%69%6e%2e%63%6f%6d%22%3e%4d%79%20%65%6d%61%69%6c%3c%2f%61%3e%27%29%3b'))</script>", mail_to("me@domain.com", "My email", :encode => "javascript", :replace_at => "(at)", :replace_dot => "(dot)")
  end
end
| {
"content_hash": "772178427446fdfb05ef0f93fb6f661d",
"timestamp": "",
"source": "github",
"line_count": 214,
"max_line_length": 380,
"avg_line_length": 57.00934579439252,
"alnum_prop": 0.629672131147541,
"repo_name": "moviepilot-de/omdb",
"id": "231e34751eecb1f2c6dfef0ee437d6f851affea3",
"size": "12200",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vendor/rails/actionpack/test/template/url_helper_test.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "251086"
},
{
"name": "Python",
"bytes": "70858"
},
{
"name": "Ruby",
"bytes": "1018537"
},
{
"name": "Shell",
"bytes": "3083"
}
],
"symlink_target": ""
} |
package org.apache.giraph.comm;
import java.util.Collection;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.giraph.graph.BasicVertex;
import org.apache.giraph.graph.VertexMutations;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
/**
 * Anything that the server stores while receiving data from other workers:
 * incoming vertices, incoming messages and incoming vertex mutations.
 * The maps themselves are concurrent, but callers must synchronize on the
 * map <i>values</i> when reading or modifying them (see per-field notes).
 *
 * @param <I> Vertex id
 * @param <V> Vertex data
 * @param <E> Edge data
 * @param <M> Message data
 */
@SuppressWarnings("rawtypes")
public class ServerData<I extends WritableComparable,
        V extends Writable, E extends Writable, M extends Writable> {
    /**
     * Map of partition ids to incoming vertices from other workers.
     * (Synchronized on values)
     */
    private final ConcurrentHashMap<Integer, Collection<BasicVertex<I, V, E, M>>>
            inPartitionVertexMap =
            new ConcurrentHashMap<Integer, Collection<BasicVertex<I, V, E, M>>>();

    /**
     * Map of inbound messages, mapping from vertex index to list of messages.
     * Transferred to inMessages at beginning of a superstep. This
     * intermediary step exists so that the combiner will run not only at the
     * client, but also at the server. Also, allows the sending of large
     * message lists during the superstep computation. (Synchronized on values)
     */
    private final ConcurrentHashMap<I, Collection<M>> transientMessages =
            new ConcurrentHashMap<I, Collection<M>>();

    /**
     * Map of vertex ids to incoming vertex mutations from other workers.
     * (Synchronized access to values)
     */
    private final ConcurrentHashMap<I, VertexMutations<I, V, E, M>>
            vertexMutations = new ConcurrentHashMap<I, VertexMutations<I, V, E, M>>();

    /**
     * Get the partition vertices (synchronize on the values)
     *
     * @return Partition vertices
     */
    public ConcurrentHashMap<Integer, Collection<BasicVertex<I, V, E, M>>>
    getPartitionVertexMap() {
        return inPartitionVertexMap;
    }

    /**
     * Get the vertex messages (synchronize on the values)
     *
     * @return Vertex messages
     */
    public ConcurrentHashMap<I, Collection<M>> getTransientMessages() {
        return transientMessages;
    }

    /**
     * Get the vertex mutations (synchronize on the values)
     *
     * @return Vertex mutations
     */
    public ConcurrentHashMap<I, VertexMutations<I, V, E, M>>
    getVertexMutations() {
        return vertexMutations;
    }
}
| {
"content_hash": "f9e8b1cfb698cf270648a3acb5f68b0f",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 79,
"avg_line_length": 31.026666666666667,
"alnum_prop": 0.7150837988826816,
"repo_name": "LiuJianan/giraphpp-1",
"id": "e3872aebf6ba529a7efd921f0afcf6e7b1b974e4",
"size": "3132",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "target/munged/main/org/apache/giraph/comm/ServerData.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "966"
},
{
"name": "Java",
"bytes": "2732564"
}
],
"symlink_target": ""
} |
package org.apache.struts2.interceptor;
import java.io.Serializable;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionInvocation;
/**
 * Background thread to be executed by the ExecuteAndWaitInterceptor.
 * <p>
 * The constructor spawns a worker thread that runs the action invocation;
 * the request thread then polls {@link #isDone()} and reads the outcome via
 * {@link #getResult()} / {@link #getException()}.
 */
public class BackgroundProcess implements Serializable {

    private static final long serialVersionUID = 3884464776311686443L;

    protected Object action;
    protected ActionInvocation invocation;

    // These three fields are written by the background thread and read by the
    // request thread polling isDone(). They must be volatile so the writes are
    // guaranteed visible across threads; without it the poller could observe a
    // stale 'done' flag or a half-published result/exception.
    protected volatile String result;
    protected volatile Exception exception;
    protected volatile boolean done;

    /**
     * Constructs a background process and immediately starts the worker thread.
     *
     * @param threadName     The thread name
     * @param invocation     The action invocation to execute in the background
     * @param threadPriority The thread priority
     */
    public BackgroundProcess(String threadName, final ActionInvocation invocation, int threadPriority) {
        this.invocation = invocation;
        this.action = invocation.getAction();
        try {
            final Thread t = new Thread(new Runnable() {
                public void run() {
                    try {
                        beforeInvocation();
                        result = invocation.invokeActionOnly();
                        afterInvocation();
                    } catch (Exception e) {
                        exception = e;
                    }
                    // Assigned last so result/exception are published before
                    // the poller can observe done == true.
                    done = true;
                }
            });
            t.setName(threadName);
            t.setPriority(threadPriority);
            t.start();
        } catch (Exception e) {
            // Thread creation/start failure is surfaced through getException().
            exception = e;
        }
    }

    /**
     * Called before the background thread determines the result code
     * from the ActionInvocation.
     *
     * @throws Exception any exception thrown will be thrown, in turn, by the ExecuteAndWaitInterceptor
     */
    protected void beforeInvocation() throws Exception {
        // Bind the invocation's context to this (background) thread.
        ActionContext.setContext(invocation.getInvocationContext());
    }

    /**
     * Called after the background thread determines the result code
     * from the ActionInvocation, but before the background thread is
     * marked as done.
     *
     * @throws Exception any exception thrown will be thrown, in turn, by the ExecuteAndWaitInterceptor
     */
    protected void afterInvocation() throws Exception {
        // Detach the ActionContext from this thread again.
        ActionContext.setContext(null);
    }

    /**
     * Retrieves the action.
     *
     * @return the action.
     */
    public Object getAction() {
        return action;
    }

    /**
     * Retrieves the action invocation.
     *
     * @return the action invocation
     */
    public ActionInvocation getInvocation() {
        return invocation;
    }

    /**
     * Gets the result of the background process.
     *
     * @return the result; <tt>null</tt> if not done.
     */
    public String getResult() {
        return result;
    }

    /**
     * Gets the exception if any was thrown during the execution of the background process.
     *
     * @return the exception or <tt>null</tt> if no exception was thrown.
     */
    public Exception getException() {
        return exception;
    }

    /**
     * Returns the status of the background process.
     *
     * @return <tt>true</tt> if finished, <tt>false</tt> otherwise
     */
    public boolean isDone() {
        return done;
    }
}
| {
"content_hash": "30715323005b24be35f27845eae24f60",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 104,
"avg_line_length": 27.24793388429752,
"alnum_prop": 0.6017591750075827,
"repo_name": "txazo/struts2",
"id": "c8061a304b23d248cc184a3306c0458b717f84b5",
"size": "4114",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/java/org/apache/struts2/interceptor/BackgroundProcess.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5295"
},
{
"name": "FreeMarker",
"bytes": "168228"
},
{
"name": "HTML",
"bytes": "18991"
},
{
"name": "Java",
"bytes": "4490044"
},
{
"name": "JavaScript",
"bytes": "28734"
},
{
"name": "XSLT",
"bytes": "258"
}
],
"symlink_target": ""
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc.reader;
import com.facebook.presto.common.Subfield;
import com.facebook.presto.common.block.Block;
import com.facebook.presto.common.block.BlockLease;
import com.facebook.presto.common.block.ClosingBlockLease;
import com.facebook.presto.common.block.RunLengthEncodedBlock;
import com.facebook.presto.common.type.MapType;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.orc.OrcAggregatedMemoryContext;
import com.facebook.presto.orc.OrcLocalMemoryContext;
import com.facebook.presto.orc.StreamDescriptor;
import com.facebook.presto.orc.TupleDomainFilter;
import com.facebook.presto.orc.TupleDomainFilter.BigintRange;
import com.facebook.presto.orc.TupleDomainFilter.BytesRange;
import com.facebook.presto.orc.TupleDomainFilter.BytesValues;
import com.facebook.presto.orc.metadata.ColumnEncoding;
import com.facebook.presto.orc.metadata.OrcType;
import com.facebook.presto.orc.stream.BooleanInputStream;
import com.facebook.presto.orc.stream.InputStreamSource;
import com.facebook.presto.orc.stream.InputStreamSources;
import com.facebook.presto.orc.stream.LongInputStream;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import org.joda.time.DateTimeZone;
import org.openjdk.jol.info.ClassLayout;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static com.facebook.presto.orc.TupleDomainFilter.IS_NOT_NULL;
import static com.facebook.presto.orc.TupleDomainFilter.IS_NULL;
import static com.facebook.presto.orc.TupleDomainFilterUtils.toBigintValues;
import static com.facebook.presto.orc.array.Arrays.ensureCapacity;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.LENGTH;
import static com.facebook.presto.orc.metadata.Stream.StreamKind.PRESENT;
import static com.facebook.presto.orc.reader.SelectiveStreamReaders.initializeOutputPositions;
import static com.facebook.presto.orc.stream.MissingInputStreamSource.missingStreamSource;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.airlift.slice.SizeOf.sizeOf;
import static java.lang.Math.toIntExact;
import static java.util.Objects.requireNonNull;
public class MapDirectSelectiveStreamReader
implements SelectiveStreamReader
{
private static final int INSTANCE_SIZE = ClassLayout.parseClass(MapDirectSelectiveStreamReader.class).instanceSize();

private final StreamDescriptor streamDescriptor;
private final boolean legacyMapSubscript;
// Derived from the optional top-level IS NULL / IS NOT NULL filter.
private final boolean nullsAllowed;
private final boolean nonNullsAllowed;
// True when the caller supplied an output type, i.e. getBlock() will be used.
private final boolean outputRequired;
@Nullable
private final MapType outputType;
// Readers for the map's key and value streams; null when no output is required.
private final SelectiveStreamReader keyReader;
private final SelectiveStreamReader valueReader;
private final OrcLocalMemoryContext systemMemoryContext;

// Top-level rows already consumed from the current row group.
private int readOffset;
// Nested (key/value) rows already consumed from the current row group.
private int nestedReadOffset;

private InputStreamSource<BooleanInputStream> presentStreamSource = missingStreamSource(BooleanInputStream.class);
@Nullable
private BooleanInputStream presentStream;
private InputStreamSource<LongInputStream> lengthStreamSource = missingStreamSource(LongInputStream.class);
@Nullable
private LongInputStream lengthStream;
private boolean rowGroupOpen;

// offsets[i]..offsets[i + 1] delimit the entries of the i-th output map.
@Nullable
private int[] offsets;
private boolean[] nulls;
// Positions (among those passed to read()) that passed the filters.
private int[] outputPositions;
private int outputPositionCount;
private boolean allNulls;

// Per-row entry counts and flattened nested-row positions for the key/value readers.
private int[] nestedLengths;
private int[] nestedOffsets;
private int[] nestedPositions;
private int[] nestedOutputPositions;
private int nestedOutputPositionCount;
// True while a BlockLease obtained from this reader is still open.
private boolean valuesInUse;
public MapDirectSelectiveStreamReader(
        StreamDescriptor streamDescriptor,
        Map<Subfield, TupleDomainFilter> filters,
        List<Subfield> requiredSubfields,
        Optional<Type> outputType,
        DateTimeZone hiveStorageTimeZone,
        boolean legacyMapSubscript,
        OrcAggregatedMemoryContext systemMemoryContext)
{
    // Only whole-map (top-level) filters are supported; subfield paths must be empty.
    checkArgument(filters.keySet().stream().map(Subfield::getPath).allMatch(List::isEmpty), "filters on nested columns are not supported yet");

    this.streamDescriptor = requireNonNull(streamDescriptor, "streamDescriptor is null");
    this.legacyMapSubscript = legacyMapSubscript;
    this.systemMemoryContext = requireNonNull(systemMemoryContext, "systemMemoryContext is null").newOrcLocalMemoryContext(MapDirectSelectiveStreamReader.class.getSimpleName());
    this.outputRequired = requireNonNull(outputType, "outputType is null").isPresent();
    this.outputType = outputType.map(MapType.class::cast).orElse(null);

    // The only allowed top-level filter is IS NULL / IS NOT NULL; translate it
    // into which of {null, non-null} rows may pass.
    TupleDomainFilter filter = getTopLevelFilter(filters).orElse(null);
    this.nullsAllowed = filter == null || filter.testNull();
    this.nonNullsAllowed = filter == null || filter.testNonNull();

    List<StreamDescriptor> nestedStreams = streamDescriptor.getNestedStreams();

    Optional<Type> keyOutputType = outputType.map(MapType.class::cast).map(MapType::getKeyType);
    Optional<Type> valueOutputType = outputType.map(MapType.class::cast).map(MapType::getValueType);

    if (outputRequired) {
        // Prune keys: read only the keys named in requiredSubfields (or all
        // non-null keys when pruning is not possible).
        Map<Subfield, TupleDomainFilter> keyFilter = ImmutableMap.of(new Subfield("c"), makeKeyFilter(nestedStreams.get(0).getOrcTypeKind(), requiredSubfields));

        // Propagate deeper subfield paths to the value reader, but only when
        // every required subfield actually has a deeper path.
        List<Subfield> elementRequiredSubfields = ImmutableList.of();
        if (requiredSubfields.stream().map(Subfield::getPath).allMatch(path -> path.size() > 1)) {
            elementRequiredSubfields = requiredSubfields.stream()
                    .map(subfield -> subfield.tail(subfield.getRootName()))
                    .distinct()
                    .collect(toImmutableList());
        }

        this.keyReader = SelectiveStreamReaders.createStreamReader(nestedStreams.get(0), keyFilter, keyOutputType, ImmutableList.of(), hiveStorageTimeZone, legacyMapSubscript, systemMemoryContext.newOrcAggregatedMemoryContext());
        this.valueReader = SelectiveStreamReaders.createStreamReader(nestedStreams.get(1), ImmutableMap.of(), valueOutputType, elementRequiredSubfields, hiveStorageTimeZone, legacyMapSubscript, systemMemoryContext.newOrcAggregatedMemoryContext());
    }
    else {
        this.keyReader = null;
        this.valueReader = null;
    }
}
/**
 * Builds a filter for the map's key stream from the required subfields:
 * reads only the explicitly requested keys (integer or string subscripts),
 * falling back to IS_NOT_NULL (all keys) when pruning is not possible.
 */
private static TupleDomainFilter makeKeyFilter(OrcType.OrcTypeKind orcType, List<Subfield> requiredSubfields)
{
    // Map entries with a null key are skipped in the Hive ORC reader, so skip them here also
    if (requiredSubfields.isEmpty()) {
        return IS_NOT_NULL;
    }

    // A wildcard subscript (map[*]) anywhere means all keys are needed.
    if (requiredSubfields.stream()
            .map(Subfield::getPath)
            .map(path -> path.get(0))
            .anyMatch(Subfield.AllSubscripts.class::isInstance)) {
        return IS_NOT_NULL;
    }

    switch (orcType) {
        case BYTE:
        case SHORT:
        case INT:
        case LONG: {
            long[] requiredIndices = requiredSubfields.stream()
                    .map(Subfield::getPath)
                    .map(path -> path.get(0))
                    .map(Subfield.LongSubscript.class::cast)
                    .mapToLong(Subfield.LongSubscript::getIndex)
                    .distinct()
                    .toArray();
            if (requiredIndices.length == 0) {
                return IS_NOT_NULL;
            }
            if (requiredIndices.length == 1) {
                // Single key: an equality range is cheaper than a value set.
                return BigintRange.of(requiredIndices[0], requiredIndices[0], false);
            }
            return toBigintValues(requiredIndices, false);
        }
        case STRING:
        case CHAR:
        case VARCHAR: {
            byte[][] requiredIndices = requiredSubfields.stream()
                    .map(Subfield::getPath)
                    .map(path -> path.get(0))
                    .map(Subfield.StringSubscript.class::cast)
                    .map(Subfield.StringSubscript::getIndex)
                    .map(String::getBytes)
                    .toArray(byte[][]::new);
            if (requiredIndices.length == 0) {
                return IS_NOT_NULL;
            }
            if (requiredIndices.length == 1) {
                return BytesRange.of(requiredIndices[0], false, requiredIndices[0], false, false);
            }
            return BytesValues.of(requiredIndices, false);
        }
        default:
            // Other key types are not pruned; read all non-null keys.
            return IS_NOT_NULL;
    }
}
/**
 * Extracts the single filter with an empty subfield path, if any. At most one
 * such filter is allowed, and it must be IS NULL or IS NOT NULL.
 */
private static Optional<TupleDomainFilter> getTopLevelFilter(Map<Subfield, TupleDomainFilter> filters)
{
    List<TupleDomainFilter> topLevelFilters = filters.entrySet().stream()
            .filter(entry -> entry.getKey().getPath().isEmpty())
            .map(Map.Entry::getValue)
            .collect(toImmutableList());

    if (topLevelFilters.isEmpty()) {
        return Optional.empty();
    }

    checkArgument(topLevelFilters.size() == 1, "MAP column may have at most one top-level range filter");
    TupleDomainFilter onlyFilter = topLevelFilters.get(0);
    checkArgument(onlyFilter == IS_NULL || onlyFilter == IS_NOT_NULL, "Top-level range filter on MAP column must be IS NULL or IS NOT NULL");
    return Optional.of(onlyFilter);
}
@Override
public int read(int offset, int[] positions, int positionCount)
        throws IOException
{
    // A previously handed-out BlockLease must be closed before the next read.
    checkState(!valuesInUse, "BlockLease hasn't been closed yet");

    if (!rowGroupOpen) {
        openRowGroup();
    }

    allNulls = false;

    outputPositions = initializeOutputPositions(outputPositions, positions, positionCount);

    // offsets[i]..offsets[i + 1] will delimit map i's entries in the output.
    offsets = ensureCapacity(offsets, positionCount + 1);
    offsets[0] = 0;

    nestedLengths = ensureCapacity(nestedLengths, positionCount);
    nestedOffsets = ensureCapacity(nestedOffsets, positionCount + 1);

    systemMemoryContext.setBytes(getRetainedSizeInBytes());

    // A missing LENGTH stream means every value in this row group is null;
    // a missing PRESENT stream means there are no nulls at all.
    if (lengthStream == null) {
        readAllNulls(positions, positionCount);
    }
    else if (presentStream == null) {
        readNoNulls(offset, positions, positionCount);
    }
    else {
        readWithNulls(offset, positions, positionCount);
    }

    return outputPositionCount;
}
/**
 * Handles the all-nulls row group: every requested position is null, so all
 * of them pass iff nulls are allowed by the top-level filter.
 */
private int readAllNulls(int[] positions, int positionCount)
{
    allNulls = true;
    outputPositionCount = nullsAllowed ? positionCount : 0;

    // Next unread stream offset is one past the last requested position.
    return positions[positionCount - 1] + 1;
}
private void readNoNulls(int offset, int[] positions, int positionCount)
        throws IOException
{
    // A non-null row can never pass an IS NULL filter; nothing to read.
    if (!nonNullsAllowed) {
        outputPositionCount = 0;
        return;
    }

    // Skip rows between the last read position and the requested offset,
    // accumulating how many nested rows their lengths cover.
    if (readOffset < offset) {
        nestedReadOffset += lengthStream.sum(offset - readOffset);
    }

    int streamPosition = 0;
    int nestedOffset = 0;
    int nestedPositionCount = 0;

    for (int i = 0; i < positionCount; i++) {
        int position = positions[i];
        if (position > streamPosition) {
            // Skip unrequested rows; their lengths still advance the nested offset.
            nestedOffset += lengthStream.sum(position - streamPosition);
            streamPosition = position;
        }
        streamPosition++;

        // Record where this map's entries live in the nested streams.
        int length = toIntExact(lengthStream.next());
        offsets[i + 1] = offsets[i] + length;
        nestedLengths[i] = length;
        nestedOffsets[i] = nestedOffset;
        nestedOffset += length;
        nestedPositionCount += length;
    }

    // Without nulls and without a filterable condition, every position passes.
    outputPositionCount = positionCount;
    readOffset = offset + streamPosition;

    if (outputRequired) {
        nestedOffsets[positionCount] = nestedOffset;
        populateNestedPositions(positionCount, nestedPositionCount);
        readKeyValueStreams(nestedPositionCount);
    }

    nestedReadOffset += nestedOffset;
}
private void readWithNulls(int offset, int[] positions, int positionCount)
        throws IOException
{
    // Skip to the requested offset; only non-null rows carry a length entry.
    if (readOffset < offset) {
        int dataToSkip = presentStream.countBitsSet(offset - readOffset);
        nestedReadOffset += lengthStream.sum(dataToSkip);
    }

    if (outputRequired) {
        nulls = ensureCapacity(nulls, positionCount);
    }

    outputPositionCount = 0;
    int streamPosition = 0;
    int nonNullPositionCount = 0;
    int nestedOffset = 0;
    int nestedPositionCount = 0;

    for (int i = 0; i < positionCount; i++) {
        int position = positions[i];
        if (position > streamPosition) {
            // Skip unrequested rows, advancing the nested offset past the
            // lengths of the non-null ones.
            int dataToSkip = presentStream.countBitsSet(position - streamPosition);
            nestedOffset += lengthStream.sum(dataToSkip);
            streamPosition = position;
        }
        streamPosition++;

        if (presentStream.nextBit()) {
            // not null
            int length = toIntExact(lengthStream.next());
            if (nonNullsAllowed) {
                if (outputRequired) {
                    nulls[outputPositionCount] = false;
                    offsets[outputPositionCount + 1] = offsets[outputPositionCount] + length;
                    nestedLengths[nonNullPositionCount] = length;
                    nestedOffsets[nonNullPositionCount] = nestedOffset;
                    nonNullPositionCount++;
                    nestedPositionCount += length;
                }
                outputPositions[outputPositionCount] = position;
                outputPositionCount++;
            }
            // Length is consumed even when the row is filtered out.
            nestedOffset += length;
        }
        else {
            // null
            if (nullsAllowed) {
                if (outputRequired) {
                    nulls[outputPositionCount] = true;
                    // A null map contributes no entries.
                    offsets[outputPositionCount + 1] = offsets[outputPositionCount];
                }
                outputPositions[outputPositionCount] = position;
                outputPositionCount++;
            }
        }
    }

    // NOTE: nonNullPositionCount is only tracked when output is required,
    // so allNulls is meaningful only for the output path (getBlock).
    if (nonNullPositionCount == 0) {
        allNulls = true;
    }
    else if (outputRequired) {
        nestedOffsets[nonNullPositionCount] = nestedOffset;
        populateNestedPositions(nonNullPositionCount, nestedPositionCount);
        readKeyValueStreams(nestedPositionCount);
    }

    readOffset = offset + streamPosition;
    nestedReadOffset += nestedOffset;
}
/**
 * Expands each surviving top-level map into the flat positions of its
 * key/value rows, producing the position list handed to the nested readers.
 */
private void populateNestedPositions(int positionCount, int nestedPositionCount)
{
    nestedPositions = ensureCapacity(nestedPositions, nestedPositionCount);

    int index = 0;
    for (int i = 0; i < positionCount; i++) {
        int start = nestedOffsets[i];
        int end = start + nestedLengths[i];
        for (int position = start; position < end; position++) {
            nestedPositions[index] = position;
            index++;
        }
    }
}
private void readKeyValueStreams(int positionCount)
        throws IOException
{
    if (positionCount == 0) {
        nestedOutputPositionCount = 0;
        return;
    }

    // The key reader applies the key filter (e.g. required subscripts), so it
    // may return fewer positions than requested.
    int readCount = keyReader.read(nestedReadOffset, nestedPositions, positionCount);
    int[] readPositions = keyReader.getReadPositions();
    if (readCount == 0) {
        // No key survived: every output map becomes empty.
        nestedOutputPositionCount = 0;
        for (int i = 0; i <= outputPositionCount; i++) {
            offsets[i] = 0;
        }
        return;
    }

    if (readCount < positionCount) {
        // Some keys were filtered out; shrink each map's offset range so it
        // counts only the surviving entries.
        int positionIndex = 0;
        int nextPosition = readPositions[positionIndex];
        int offset = 0;
        int previousOffset = 0;
        for (int i = 0; i < outputPositionCount; i++) {
            // Count surviving nested positions that belong to output row i.
            int length = 0;
            for (int j = previousOffset; j < offsets[i + 1]; j++) {
                if (nestedPositions[j] == nextPosition) {
                    length++;
                    positionIndex++;
                    if (positionIndex >= readCount) {
                        break;
                    }
                    nextPosition = readPositions[positionIndex];
                }
            }
            offset += length;
            previousOffset = offsets[i + 1];
            offsets[i + 1] = offset;
            if (positionIndex >= readCount) {
                // All surviving positions consumed; remaining maps are empty.
                for (int j = i + 1; j < outputPositionCount; j++) {
                    offsets[j + 1] = offset;
                }
                break;
            }
        }
    }

    // Values are read at exactly the positions where a key survived.
    int valueReadCount = valueReader.read(nestedReadOffset, readPositions, readCount);
    assert valueReadCount == readCount;

    nestedOutputPositions = ensureCapacity(nestedOutputPositions, readCount);
    System.arraycopy(readPositions, 0, nestedOutputPositions, 0, readCount);
    nestedOutputPositionCount = readCount;
}
private void openRowGroup()
throws IOException
{
presentStream = presentStreamSource.openStream();
lengthStream = lengthStreamSource.openStream();
rowGroupOpen = true;
}
    @Override
    public int[] getReadPositions()
    {
        // Positions from the last read() that produced output; only the first
        // outputPositionCount entries are valid.
        return outputPositions;
    }
    @Override
    public Block getBlock(int[] positions, int positionCount)
    {
        // Builds a map block for the requested subset of previously-read positions;
        // `positions` must be an in-order subset of outputPositions.
        checkArgument(outputPositionCount > 0, "outputPositionCount must be greater than zero");
        checkState(outputRequired, "This stream reader doesn't produce output");
        checkState(positionCount <= outputPositionCount, "Not enough values");
        checkState(!valuesInUse, "BlockLease hasn't been closed yet");
        if (allNulls) {
            return createNullBlock(outputType, positionCount);
        }
        boolean includeNulls = nullsAllowed && presentStream != null;
        if (outputPositionCount == positionCount) {
            // Fast path: the caller wants every read position. Hand off the internal
            // nulls/offsets buffers directly (ownership transfers, so clear them).
            Block keyBlock;
            Block valueBlock;
            if (nestedOutputPositionCount == 0) {
                keyBlock = createEmptyBlock(outputType.getKeyType());
                valueBlock = createEmptyBlock(outputType.getValueType());
            }
            else {
                keyBlock = keyReader.getBlock(nestedOutputPositions, nestedOutputPositionCount);
                valueBlock = valueReader.getBlock(nestedOutputPositions, nestedOutputPositionCount);
            }
            Block block = outputType.createBlockFromKeyValue(positionCount, Optional.ofNullable(includeNulls ? nulls : null), offsets, keyBlock, valueBlock);
            nulls = null;
            offsets = null;
            return block;
        }
        // Slow path: copy offsets/nulls for just the requested positions and compact
        // nestedOutputPositions in place by shifting surviving entries left over the
        // entries that belong to skipped rows.
        int[] offsetsCopy = new int[positionCount + 1];
        boolean[] nullsCopy = null;
        if (includeNulls) {
            nullsCopy = new boolean[positionCount];
        }
        int positionIndex = 0;
        int nextPosition = positions[positionIndex];
        int nestedSkipped = 0; // nested positions belonging to skipped rows so far
        nestedOutputPositionCount = 0;
        for (int i = 0; i < outputPositionCount; i++) {
            if (outputPositions[i] < nextPosition) {
                // Row not requested: skip over its nested values.
                nestedSkipped += offsets[i + 1] - offsets[i];
                continue;
            }
            assert outputPositions[i] == nextPosition;
            offsetsCopy[positionIndex + 1] = offsets[i + 1] - nestedSkipped;
            for (int j = offsetsCopy[positionIndex]; j < offsetsCopy[positionIndex + 1]; j++) {
                // In-place left shift of the surviving nested positions.
                nestedOutputPositions[nestedOutputPositionCount] = nestedOutputPositions[nestedOutputPositionCount + nestedSkipped];
                nestedOutputPositionCount++;
            }
            if (nullsCopy != null) {
                nullsCopy[positionIndex] = this.nulls[i];
            }
            positionIndex++;
            if (positionIndex >= positionCount) {
                break;
            }
            nextPosition = positions[positionIndex];
        }
        Block keyBlock;
        Block valueBlock;
        if (nestedOutputPositionCount == 0) {
            keyBlock = createEmptyBlock(outputType.getKeyType());
            valueBlock = createEmptyBlock(outputType.getValueType());
        }
        else {
            keyBlock = keyReader.getBlock(nestedOutputPositions, nestedOutputPositionCount);
            valueBlock = valueReader.getBlock(nestedOutputPositions, nestedOutputPositionCount);
        }
        return outputType.createBlockFromKeyValue(positionCount, Optional.ofNullable(includeNulls ? nullsCopy : null), offsetsCopy, keyBlock, valueBlock);
    }
private static RunLengthEncodedBlock createNullBlock(Type type, int positionCount)
{
return new RunLengthEncodedBlock(type.createBlockBuilder(null, 1).appendNull().build(), positionCount);
}
private static Block createEmptyBlock(Type type)
{
return type.createBlockBuilder(null, 0).build();
}
    @Override
    public BlockLease getBlockView(int[] positions, int positionCount)
    {
        // Same result as getBlock(), but exposes the internal buffers through a
        // lease instead of transferring ownership; `valuesInUse` guards the buffers
        // until the returned lease is closed.
        checkArgument(outputPositionCount > 0, "outputPositionCount must be greater than zero");
        checkState(outputRequired, "This stream reader doesn't produce output");
        checkState(positionCount <= outputPositionCount, "Not enough values");
        checkState(!valuesInUse, "BlockLease hasn't been closed yet");
        if (allNulls) {
            return newLease(createNullBlock(outputType, positionCount));
        }
        boolean includeNulls = nullsAllowed && presentStream != null;
        if (positionCount != outputPositionCount) {
            // Mutates internal state to drop the positions the caller did not request.
            compactValues(positions, positionCount, includeNulls);
        }
        if (nestedOutputPositionCount == 0) {
            return newLease(outputType.createBlockFromKeyValue(positionCount, Optional.ofNullable(includeNulls ? nulls : null), offsets, createEmptyBlock(outputType.getKeyType()), createEmptyBlock(outputType.getValueType())));
        }
        BlockLease keyBlockLease = keyReader.getBlockView(nestedOutputPositions, nestedOutputPositionCount);
        BlockLease valueBlockLease = valueReader.getBlockView(nestedOutputPositions, nestedOutputPositionCount);
        return newLease(outputType.createBlockFromKeyValue(positionCount, Optional.ofNullable(includeNulls ? nulls : null), offsets, keyBlockLease.get(), valueBlockLease.get()), keyBlockLease, valueBlockLease);
    }
    private void compactValues(int[] positions, int positionCount, boolean compactNulls)
    {
        // In-place variant of the subsetting done in getBlock(): keeps only the
        // requested positions in outputPositions/offsets/nulls and compacts
        // nestedOutputPositions by shifting surviving entries left.
        int positionIndex = 0;
        int nextPosition = positions[positionIndex];
        int nestedSkipped = 0; // nested positions belonging to skipped rows so far
        nestedOutputPositionCount = 0;
        for (int i = 0; i < outputPositionCount; i++) {
            if (outputPositions[i] < nextPosition) {
                // Row not requested: its nested values are skipped over.
                nestedSkipped += offsets[i + 1] - offsets[i];
                continue;
            }
            assert outputPositions[i] == nextPosition;
            offsets[positionIndex + 1] = offsets[i + 1] - nestedSkipped;
            for (int j = offsets[positionIndex]; j < offsets[positionIndex + 1]; j++) {
                nestedOutputPositions[nestedOutputPositionCount] = nestedOutputPositions[nestedOutputPositionCount + nestedSkipped];
                nestedOutputPositionCount++;
            }
            if (compactNulls) {
                nulls[positionIndex] = nulls[i];
            }
            outputPositions[positionIndex] = nextPosition;
            positionIndex++;
            if (positionIndex >= positionCount) {
                break;
            }
            nextPosition = positions[positionIndex];
        }
        outputPositionCount = positionCount;
    }
    private BlockLease newLease(Block block, BlockLease... fieldBlockLeases)
    {
        // Marks the reader's buffers as in use; closing the returned lease releases
        // the nested key/value leases and clears the flag.
        valuesInUse = true;
        return ClosingBlockLease.newLease(block, () -> {
            for (BlockLease lease : fieldBlockLeases) {
                lease.close();
            }
            valuesInUse = false;
        });
    }
    @Override
    public void throwAnyError(int[] positions, int positionCount)
    {
        // Intentionally a no-op: this reader does not defer any errors.
    }
    @Override
    public String toString()
    {
        // Identify the reader by its stream descriptor for logging/debugging.
        return toStringHelper(this)
                .addValue(streamDescriptor)
                .toString();
    }
@Override
public void close()
{
if (keyReader != null) {
keyReader.close();
}
if (valueReader != null) {
valueReader.close();
}
nestedOffsets = null;
offsets = null;
nulls = null;
outputPositions = null;
nestedLengths = null;
nestedPositions = null;
nestedOutputPositions = null;
lengthStream = null;
lengthStreamSource = null;
presentStream = null;
lengthStreamSource = null;
systemMemoryContext.close();
}
@Override
public void startStripe(InputStreamSources dictionaryStreamSources, Map<Integer, ColumnEncoding> encoding)
throws IOException
{
presentStreamSource = missingStreamSource(BooleanInputStream.class);
lengthStreamSource = missingStreamSource(LongInputStream.class);
readOffset = 0;
nestedReadOffset = 0;
presentStream = null;
lengthStream = null;
rowGroupOpen = false;
if (outputRequired) {
keyReader.startStripe(dictionaryStreamSources, encoding);
valueReader.startStripe(dictionaryStreamSources, encoding);
}
}
@Override
public void startRowGroup(InputStreamSources dataStreamSources)
throws IOException
{
presentStreamSource = dataStreamSources.getInputStreamSource(streamDescriptor, PRESENT, BooleanInputStream.class);
lengthStreamSource = dataStreamSources.getInputStreamSource(streamDescriptor, LENGTH, LongInputStream.class);
readOffset = 0;
nestedReadOffset = 0;
presentStream = null;
lengthStream = null;
rowGroupOpen = false;
if (outputRequired) {
keyReader.startRowGroup(dataStreamSources);
valueReader.startRowGroup(dataStreamSources);
}
}
@Override
public long getRetainedSizeInBytes()
{
return INSTANCE_SIZE +
sizeOf(outputPositions) +
sizeOf(offsets) +
sizeOf(nulls) +
sizeOf(nestedLengths) +
sizeOf(nestedOffsets) +
sizeOf(nestedPositions) +
sizeOf(nestedOutputPositions) +
(keyReader != null ? keyReader.getRetainedSizeInBytes() : 0) +
(valueReader != null ? valueReader.getRetainedSizeInBytes() : 0);
}
}
| {
"content_hash": "c3f2231083e2944d270f4a33b3bf305c",
"timestamp": "",
"source": "github",
"line_count": 740,
"max_line_length": 251,
"avg_line_length": 37.50945945945946,
"alnum_prop": 0.6258241164390964,
"repo_name": "twitter-forks/presto",
"id": "c9b7b9acf0ce90cb4b2f39fea46b413c20ed0420",
"size": "27757",
"binary": false,
"copies": "1",
"ref": "refs/heads/twitter-master",
"path": "presto-orc/src/main/java/com/facebook/presto/orc/reader/MapDirectSelectiveStreamReader.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "26944"
},
{
"name": "CSS",
"bytes": "12676"
},
{
"name": "HTML",
"bytes": "50340"
},
{
"name": "Java",
"bytes": "30663750"
},
{
"name": "JavaScript",
"bytes": "203196"
},
{
"name": "Makefile",
"bytes": "6822"
},
{
"name": "PLSQL",
"bytes": "3891"
},
{
"name": "Python",
"bytes": "7552"
},
{
"name": "SQLPL",
"bytes": "926"
},
{
"name": "Shell",
"bytes": "28316"
},
{
"name": "TSQL",
"bytes": "161763"
},
{
"name": "Thrift",
"bytes": "16916"
}
],
"symlink_target": ""
} |
/*
* This software is distributed under BSD 3-clause license (see LICENSE file).
*
* Authors: Soeren Sonnenburg, Fernando Iglesias, Sergey Lisitsyn,
* Heiko Strathmann, Saurabh Goyal
*/
#ifndef _PERCEPTRON_H___
#define _PERCEPTRON_H___
#include <shogun/lib/config.h>
#include <shogun/features/DotFeatures.h>
#include <shogun/lib/common.h>
#include <shogun/machine/IterativeMachine.h>
namespace shogun
{
/** @brief Class Perceptron implements the standard linear (online) perceptron.
*
* Given a maximum number of iterations (the standard perceptron algorithm is
 * not guaranteed to converge) and a fixed learning rate, the result is a linear
* classifier.
*
* \sa LinearMachine
* \sa http://en.wikipedia.org/wiki/Perceptron
*/
class Perceptron : public IterativeMachine<LinearMachine>
{
	public:
		/** problem type */
		MACHINE_PROBLEM_TYPE(PT_BINARY);

		/** default constructor */
		Perceptron();

		/** destructor */
		~Perceptron() override;

		/** get classifier type
		 *
		 * @return classifier type PERCEPTRON
		 */
		EMachineType get_classifier_type() override { return CT_PERCEPTRON; }

		/// set learn rate of gradient descent training algorithm
		/// @param r step size applied to each weight update
		inline void set_learn_rate(float64_t r)
		{
			learn_rate=r;
		}

		/// set if the hyperplane should be initialized
		/// @param initialize_hyperplane true to let training initialize w and bias
		void set_initialize_hyperplane(bool initialize_hyperplane);

		/// get if the hyperplane should be initialized
		bool get_initialize_hyperplane();

		/** @return object name */
		const char* get_name() const override { return "Perceptron"; }

	protected:
		/** prepare the model before the first training iteration
		 * @param data training features */
		void init_model(std::shared_ptr<Features> data) override;

		/** perform one perceptron update pass over the data */
		void iteration() override;

	protected:
		/** learning rate */
		float64_t learn_rate;

	private:
		/** Flag that determines whether hyper-plane is initialised by the
		 * algorithm, or not.
		 * The latter allows users to initialize the algorithm by
		 * manually setting weights and bias before training.
		 */
		bool m_initialize_hyperplane;
};
}
#endif
| {
"content_hash": "49f23080db3ab0ed508568acf4742397",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 79,
"avg_line_length": 25.102564102564102,
"alnum_prop": 0.7145045965270684,
"repo_name": "shogun-toolbox/shogun",
"id": "7c74984d9391175d94c45376d55461d17f25420e",
"size": "1958",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "src/shogun/classifier/Perceptron.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "64"
},
{
"name": "Batchfile",
"bytes": "615"
},
{
"name": "C",
"bytes": "12178"
},
{
"name": "C++",
"bytes": "10278013"
},
{
"name": "CMake",
"bytes": "196539"
},
{
"name": "Dockerfile",
"bytes": "2046"
},
{
"name": "GDB",
"bytes": "89"
},
{
"name": "HTML",
"bytes": "2060"
},
{
"name": "MATLAB",
"bytes": "8755"
},
{
"name": "Makefile",
"bytes": "244"
},
{
"name": "Python",
"bytes": "286749"
},
{
"name": "SWIG",
"bytes": "386485"
},
{
"name": "Shell",
"bytes": "7267"
}
],
"symlink_target": ""
} |
import os
import sys
import string
import xml.etree.ElementTree as etree
from xml.etree.ElementTree import SubElement
from utils import _make_path_relative
from utils import xml_indent
fs_encoding = sys.getfilesystemencoding()
def _get_filetype(fn):
if fn.rfind('.cpp') != -1 or fn.rfind('.cxx') != -1:
return 8
if fn.rfind('.c') != -1 or fn.rfind('.C') != -1:
return 1
# assemble file type
if fn.rfind('.s') != -1 or fn.rfind('.S') != -1:
return 2
# header type
if fn.rfind('.h') != -1:
return 5
if fn.rfind('.lib') != -1:
return 4
if fn.rfind('.o') != -1:
return 3
# other filetype
return 5
def MDK4AddGroupForFN(ProjectFiles, parent, name, filename, project_path):
    """Create a <Group> element under ``parent`` containing the single file
    ``filename`` and return the new group element.

    ``ProjectFiles`` records object-file names already present in the project;
    duplicates get the directory basename prefixed to disambiguate.
    """
    group = SubElement(parent, 'Group')
    group_name = SubElement(group, 'GroupName')
    group_name.text = name

    name = os.path.basename(filename)
    path = os.path.dirname(filename)

    basename = os.path.basename(path)
    path = _make_path_relative(project_path, path)
    path = os.path.join(path, name)

    files = SubElement(group, 'Files')
    file = SubElement(files, 'File')
    file_name = SubElement(file, 'FileName')
    name = os.path.basename(path)

    # Derive the object-file name from the extension. (The old replace() chain
    # never rewrote '.S' files because it replaced lower-case '.s' instead.)
    root, ext = os.path.splitext(name)
    if ext in ('.cpp', '.c', '.s', '.S'):
        obj_name = root + '.o'
    else:
        obj_name = name

    if ProjectFiles.count(obj_name):
        name = basename + '_' + name
    ProjectFiles.append(obj_name)

    # Python 2 byte strings need decoding; Python 3 str has no decode()
    # (the bare call previously crashed under Python 3).
    try:
        file_name.text = name.decode(fs_encoding)
    except (AttributeError, UnicodeDecodeError):
        file_name.text = name

    file_type = SubElement(file, 'FileType')
    file_type.text = '%d' % _get_filetype(name)
    file_path = SubElement(file, 'FilePath')
    try:
        file_path.text = path.decode(fs_encoding)
    except (AttributeError, UnicodeDecodeError):
        file_path.text = path

    return group
def MDK4AddLibToGroup(ProjectFiles, group, name, filename, project_path):
    """Append the library file ``filename`` to an existing <Group> element and
    return the group.

    Mirrors :func:`MDK4AddGroupForFN`, but reuses the given group element.
    """
    name = os.path.basename(filename)
    path = os.path.dirname(filename)

    basename = os.path.basename(path)
    path = _make_path_relative(project_path, path)
    path = os.path.join(path, name)

    files = SubElement(group, 'Files')
    file = SubElement(files, 'File')
    file_name = SubElement(file, 'FileName')
    name = os.path.basename(path)

    # Derive the object-file name from the extension. (The old replace() chain
    # never rewrote '.S' files because it replaced lower-case '.s' instead.)
    root, ext = os.path.splitext(name)
    if ext in ('.cpp', '.c', '.s', '.S'):
        obj_name = root + '.o'
    else:
        obj_name = name

    if ProjectFiles.count(obj_name):
        name = basename + '_' + name
    ProjectFiles.append(obj_name)

    # Python 2 byte strings need decoding; Python 3 str has no decode().
    try:
        file_name.text = name.decode(fs_encoding)
    except (AttributeError, UnicodeDecodeError):
        file_name.text = name

    file_type = SubElement(file, 'FileType')
    file_type.text = '%d' % _get_filetype(name)
    file_path = SubElement(file, 'FilePath')
    try:
        file_path.text = path.decode(fs_encoding)
    except (AttributeError, UnicodeDecodeError):
        file_path.text = path

    return group
def MDK4AddGroup(ProjectFiles, parent, name, files, project_path, group_scons):
    """Create a <Group> element for a scons source group.

    Adds one <File> per source in ``files`` and, when the group defines
    LOCAL_CFLAGS/LOCAL_CXXFLAGS/LOCAL_CCFLAGS/LOCAL_CPPPATH/LOCAL_CPPDEFINES,
    emits per-file compiler options. Returns the group element, or None when
    the group has no sources.
    """
    # don't add an empty group
    if len(files) == 0:
        return

    group = SubElement(parent, 'Group')
    group_name = SubElement(group, 'GroupName')
    group_name.text = name

    for f in files:
        fn = f.rfile()
        name = fn.name
        path = os.path.dirname(fn.abspath)

        basename = os.path.basename(path)
        path = _make_path_relative(project_path, path)
        path = os.path.join(path, name)

        # renamed from `files` to avoid shadowing the parameter being iterated
        files_node = SubElement(group, 'Files')
        file = SubElement(files_node, 'File')
        file_name = SubElement(file, 'FileName')
        name = os.path.basename(path)

        # Derive the object-file name from the extension. The previous elif
        # chain had no fallback, so any file that was not .c/.cpp/.s/.S left
        # obj_name undefined and raised NameError.
        root, ext = os.path.splitext(name)
        if ext in ('.cpp', '.c', '.s', '.S'):
            obj_name = root + '.o'
        else:
            obj_name = name

        if ProjectFiles.count(obj_name):
            name = basename + '_' + name
        ProjectFiles.append(obj_name)
        file_name.text = name

        file_type = SubElement(file, 'FileType')
        file_type.text = '%d' % _get_filetype(name)
        file_path = SubElement(file, 'FilePath')
        file_path.text = path

        # per-file LOCAL_CFLAGS/LOCAL_CXXFLAGS/LOCAL_CCFLAGS/LOCAL_CPPPATH/LOCAL_CPPDEFINES
        MiscControls_text = ' '
        if file_type.text == '1' and 'LOCAL_CFLAGS' in group_scons:
            MiscControls_text = MiscControls_text + group_scons['LOCAL_CFLAGS']
        elif file_type.text == '8' and 'LOCAL_CXXFLAGS' in group_scons:
            MiscControls_text = MiscControls_text + group_scons['LOCAL_CXXFLAGS']
        if 'LOCAL_CCFLAGS' in group_scons:
            MiscControls_text = MiscControls_text + group_scons['LOCAL_CCFLAGS']

        # only emit a FileOption element when some local flag is present
        if MiscControls_text != ' ':
            FileOption = SubElement(file, 'FileOption')
            FileArmAds = SubElement(FileOption, 'FileArmAds')
            Cads = SubElement(FileArmAds, 'Cads')
            VariousControls = SubElement(Cads, 'VariousControls')
            MiscControls = SubElement(VariousControls, 'MiscControls')
            MiscControls.text = MiscControls_text
            Define = SubElement(VariousControls, 'Define')
            if 'LOCAL_CPPDEFINES' in group_scons:
                Define.text = ', '.join(set(group_scons['LOCAL_CPPDEFINES']))
            else:
                Define.text = ' '
            Undefine = SubElement(VariousControls, 'Undefine')
            Undefine.text = ' '
            IncludePath = SubElement(VariousControls, 'IncludePath')
            if 'LOCAL_CPPPATH' in group_scons:
                IncludePath.text = ';'.join([_make_path_relative(project_path, os.path.normpath(i)) for i in group_scons['LOCAL_CPPPATH']])
            else:
                IncludePath.text = ' '

    return group
# The common part of making MDK4/5 project
def MDK45Project(tree, target, script):
    """Common part of generating an MDK4/MDK5 (uVision) project.

    Rebuilds the Groups section of the template ``tree`` from the scons
    ``script`` groups, fills in include paths, defines and link flags, then
    writes the result to ``target``.
    """
    project_path = os.path.dirname(os.path.abspath(target))
    root = tree.getroot()

    CPPPATH = []
    CPPDEFINES = []
    LINKFLAGS = ''
    ProjectFiles = []

    # add group
    groups = tree.find('Targets/Target/Groups')
    if groups is None:
        groups = SubElement(tree.find('Targets/Target'), 'Groups')
    groups.clear()  # clean old groups

    for group in script:
        group_tree = MDK4AddGroup(ProjectFiles, groups, group['name'], group['src'], project_path, group)

        # accumulate include paths (the old if/else had identical branches)
        if 'CPPPATH' in group and group['CPPPATH']:
            CPPPATH += group['CPPPATH']

        # accumulate preprocessor definitions; always extend the local list so
        # the group's own list is never aliased and mutated on later iterations
        if 'CPPDEFINES' in group and group['CPPDEFINES']:
            CPPDEFINES += group['CPPDEFINES']

        # accumulate link flags, space separated
        if 'LINKFLAGS' in group and group['LINKFLAGS']:
            if LINKFLAGS:
                LINKFLAGS += ' ' + group['LINKFLAGS']
            else:
                LINKFLAGS += group['LINKFLAGS']

        if 'LIBS' in group and group['LIBS']:
            for item in group['LIBS']:
                lib_path = ''
                for path_item in group['LIBPATH']:
                    full_path = os.path.join(path_item, item + '.lib')
                    if os.path.isfile(full_path):  # has this library
                        lib_path = full_path
                        break
                if lib_path != '':
                    if group_tree is not None:
                        MDK4AddLibToGroup(ProjectFiles, group_tree, group['name'], lib_path, project_path)
                    else:
                        group_tree = MDK4AddGroupForFN(ProjectFiles, groups, group['name'], lib_path, project_path)

    # write include path, definitions and link flags
    IncludePath = tree.find('Targets/Target/TargetOption/TargetArmAds/Cads/VariousControls/IncludePath')
    IncludePath.text = ';'.join([_make_path_relative(project_path, os.path.normpath(i)) for i in set(CPPPATH)])

    Define = tree.find('Targets/Target/TargetOption/TargetArmAds/Cads/VariousControls/Define')
    Define.text = ', '.join(set(CPPDEFINES))

    Misc = tree.find('Targets/Target/TargetOption/TargetArmAds/LDads/Misc')
    Misc.text = LINKFLAGS

    xml_indent(root)

    out = open(target, 'w')
    out.write('<?xml version="1.0" encoding="UTF-8" standalone="no" ?>\n')
    out.write(etree.tostring(root, encoding='utf-8').decode())
    out.close()
def MDK4Project(target, script):
    """Generate an MDK4 (.uvproj) project for ``script`` from template.uvproj."""
    if not os.path.isfile('template.uvproj'):
        print('Warning: The template project file [template.uvproj] not found!')
        return

    template_tree = etree.parse('template.uvproj')
    MDK45Project(template_tree, target, script)

    # drop the stale uVision options file paired with this project
    project_uvopt = os.path.abspath(target).replace('uvproj', 'uvopt')
    if os.path.isfile(project_uvopt):
        os.unlink(project_uvopt)

    # seed the options file from the template when one is available
    if os.path.exists('template.uvopt'):
        import shutil
        shutil.copy2('template.uvopt', 'project.uvopt')
def MDK5Project(target, script):
    """Generate an MDK5 (.uvprojx) project for ``script`` from template.uvprojx."""
    if not os.path.isfile('template.uvprojx'):
        print('Warning: The template project file [template.uvprojx] not found!')
        return

    template_tree = etree.parse('template.uvprojx')
    MDK45Project(template_tree, target, script)

    # drop the stale uVision options file paired with this project
    project_uvopt = os.path.abspath(target).replace('uvprojx', 'uvoptx')
    if os.path.isfile(project_uvopt):
        os.unlink(project_uvopt)

    # seed the options file from the template when one is available
    if os.path.exists('template.uvoptx'):
        import shutil
        shutil.copy2('template.uvoptx', 'project.uvoptx')
def MDK2Project(target, script):
    """Generate a Keil MDK2 (.Uv2) project for ``script`` from template.Uv2."""
    template = open('template.Uv2', "r")
    lines = template.readlines()
    template.close()  # the template handle was previously never closed

    project = open(target, "w")
    project_path = os.path.dirname(os.path.abspath(target))

    line_index = 5
    # write group
    for group in script:
        lines.insert(line_index, 'Group (%s)\r\n' % group['name'])
        line_index += 1

    lines.insert(line_index, '\r\n')
    line_index += 1

    # write file
    ProjectFiles = []
    CPPPATH = []
    CPPDEFINES = []
    LINKFLAGS = ''

    # number of groups
    group_index = 1
    for group in script:
        # accumulate include paths (the old if/else had identical branches)
        if 'CPPPATH' in group and group['CPPPATH']:
            CPPPATH += group['CPPPATH']

        # accumulate preprocessor definitions; always extend the local list so
        # the group's own list is never aliased and mutated on later iterations
        if 'CPPDEFINES' in group and group['CPPDEFINES']:
            CPPDEFINES += group['CPPDEFINES']

        # accumulate link flags, space separated
        if 'LINKFLAGS' in group and group['LINKFLAGS']:
            if LINKFLAGS:
                LINKFLAGS += ' ' + group['LINKFLAGS']
            else:
                LINKFLAGS += group['LINKFLAGS']

        # generate file items
        for node in group['src']:
            fn = node.rfile()
            name = fn.name
            path = os.path.dirname(fn.abspath)
            basename = os.path.basename(path)
            path = _make_path_relative(project_path, path)
            path = os.path.join(path, name)
            if ProjectFiles.count(name):
                name = basename + '_' + name
            ProjectFiles.append(name)
            lines.insert(line_index, 'File %d,%d,<%s><%s>\r\n'
                % (group_index, _get_filetype(name), path, name))
            line_index += 1

        group_index = group_index + 1

    lines.insert(line_index, '\r\n')
    line_index += 1

    # remove repeat path
    paths = set()
    for path in CPPPATH:
        inc = _make_path_relative(project_path, os.path.normpath(path))
        paths.add(inc)  # .replace('\\', '/')

    paths = [i for i in paths]
    # str.join works on both Python 2 and 3; string.join() was removed in Python 3
    CPPPATH = ';'.join(paths)

    definitions = [i for i in set(CPPDEFINES)]
    CPPDEFINES = ', '.join(definitions)

    while line_index < len(lines):
        if lines[line_index].startswith(' ADSCINCD '):
            lines[line_index] = ' ADSCINCD (' + CPPPATH + ')\r\n'

        if lines[line_index].startswith(' ADSLDMC ('):
            lines[line_index] = ' ADSLDMC (' + LINKFLAGS + ')\r\n'

        if lines[line_index].startswith(' ADSCDEFN ('):
            lines[line_index] = ' ADSCDEFN (' + CPPDEFINES + ')\r\n'

        line_index += 1

    # write project
    for line in lines:
        project.write(line)

    project.close()
def ARMCC_Version():
    """Return the ARM compiler version string, or "0.0" when the toolchain
    executable cannot be located.

    The version is assembled from the Product/Component/Tool lines of the
    compiler's banner output.
    """
    import rtconfig
    import subprocess
    import re

    path = rtconfig.EXEC_PATH
    if rtconfig.PLATFORM == 'armcc':
        path = os.path.join(path, 'armcc.exe')
    elif rtconfig.PLATFORM == 'armclang':
        path = os.path.join(path, 'armlink.exe')

    if os.path.exists(path):
        cmd = path
    else:
        print('Error: get armcc version failed. Please update the KEIL MDK installation path in rtconfig.py!')
        return "0.0"

    child = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    stdout, stderr = child.communicate()

    # On Python 3, communicate() returns bytes; decode before regex matching
    # (re.search with a str pattern raises TypeError on bytes).
    if isinstance(stdout, bytes):
        stdout = stdout.decode(errors='replace')

    '''
    example stdout:
    Product: MDK Plus 5.24
    Component: ARM Compiler 5.06 update 5 (build 528)
    Tool: armcc [4d3621]

    return version: MDK Plus 5.24/ARM Compiler 5.06 update 5 (build 528)/armcc [4d3621]
    '''
    # each matched line ends with '\r', hence the trailing-character strip
    version_Product = re.search(r'Product: (.+)', stdout).group(1)
    version_Product = version_Product[:-1]
    version_Component = re.search(r'Component: (.*)', stdout).group(1)
    version_Component = version_Component[:-1]
    version_Tool = re.search(r'Tool: (.*)', stdout).group(1)
    version_Tool = version_Tool[:-1]
    version_str_format = '%s/%s/%s'
    version_str = version_str_format % (version_Product, version_Component, version_Tool)
    return version_str
| {
"content_hash": "b5138e33af993a247d3a4d62f28ed1c0",
"timestamp": "",
"source": "github",
"line_count": 435,
"max_line_length": 139,
"avg_line_length": 33.08735632183908,
"alnum_prop": 0.5827138192176753,
"repo_name": "RT-Thread/rt-thread",
"id": "0405eb0b1b540105741e4cb74ba96bede11017ce",
"size": "15340",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/keil.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "20211623"
},
{
"name": "Batchfile",
"bytes": "77561"
},
{
"name": "C",
"bytes": "1056417995"
},
{
"name": "C++",
"bytes": "945403"
},
{
"name": "CMake",
"bytes": "250858"
},
{
"name": "CSS",
"bytes": "138218"
},
{
"name": "GDB",
"bytes": "11796"
},
{
"name": "HTML",
"bytes": "4763477"
},
{
"name": "JavaScript",
"bytes": "637"
},
{
"name": "LLVM",
"bytes": "10344"
},
{
"name": "Lex",
"bytes": "7026"
},
{
"name": "Logos",
"bytes": "7238"
},
{
"name": "Lua",
"bytes": "922"
},
{
"name": "M4",
"bytes": "17515"
},
{
"name": "Makefile",
"bytes": "485713"
},
{
"name": "Pawn",
"bytes": "1250"
},
{
"name": "Perl",
"bytes": "16728"
},
{
"name": "Python",
"bytes": "3175087"
},
{
"name": "RPC",
"bytes": "14162"
},
{
"name": "Shell",
"bytes": "422027"
},
{
"name": "Tcl",
"bytes": "179"
},
{
"name": "Yacc",
"bytes": "30555"
}
],
"symlink_target": ""
} |
//
// Copyright (c) Microsoft. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.Common;
using Microsoft.WindowsAzure.Common.Internals;
namespace Microsoft.WindowsAzure
{
/// <summary>
/// Class for token based credentials associated with a particular subscription.
/// </summary>
public class TokenCloudCredentials : SubscriptionCloudCredentials
{
// The Windows Azure Subscription ID.
private readonly string _subscriptionId = null;
/// <summary>
/// Gets subscription ID which uniquely identifies Windows Azure
/// subscription. The subscription ID forms part of the URI for
/// every call that you make to the Service Management API.
/// </summary>
public override string SubscriptionId
{
get { return _subscriptionId; }
}
/// <summary>
/// Gets or sets secure token used to authenticate against Windows Azure API.
/// No anonymous requests are allowed.
/// </summary>
public string Token { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="TokenCloudCredentials"/>
/// class.
/// </summary>
/// <param name="subscriptionId">The Subscription ID.</param>
/// <param name="token">Valid JSON Web Token (JWT).</param>
public TokenCloudCredentials(string subscriptionId, string token)
{
if (subscriptionId == null)
{
throw new ArgumentNullException("subscriptionId");
}
else if (subscriptionId.Length == 0)
{
throw CloudExtensions.CreateArgumentEmptyException("subscriptionId");
}
else if (token == null)
{
throw new ArgumentNullException("token");
}
else if (token.Length == 0)
{
throw CloudExtensions.CreateArgumentEmptyException("token");
}
_subscriptionId = subscriptionId;
Token = token;
}
/// <summary>
/// Attempt to create token credentials from a collection of
/// settings.
/// </summary>
/// <param name="settings">The settings to use.</param>
/// <returns>
/// TokenCloudCredentials is created, null otherwise.
/// </returns>
public static TokenCloudCredentials Create(IDictionary<string, object> settings)
{
if (settings == null)
{
throw new ArgumentNullException("settings");
}
string subscriptionId = ConfigurationHelper.GetString(settings, "SubscriptionId", false);
string token = ConfigurationHelper.GetString(settings, "Token", false);
if (subscriptionId != null && token != null)
{
return new TokenCloudCredentials(subscriptionId, token);
}
return null;
}
/// <summary>
/// Apply the credentials to the HTTP request.
/// </summary>
/// <param name="request">The HTTP request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>
/// Task that will complete when processing has completed.
/// </returns>
public override Task ProcessHttpRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
if (request == null)
{
throw new ArgumentNullException("request");
}
request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", Token);
return base.ProcessHttpRequestAsync(request, cancellationToken);
}
}
}
| {
"content_hash": "8370b64b190bcb0329c42997f26987e2",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 117,
"avg_line_length": 35.936,
"alnum_prop": 0.6084149599287623,
"repo_name": "huangpf/azure-sdk-for-net",
"id": "3a885791ab5b14c9472a9f4ec6cab710f8c66041",
"size": "4494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Common/Credentials/TokenCloudCredentials.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "6827625"
}
],
"symlink_target": ""
} |
/**
* Session Configuration
* (sails.config.session)
*
* Sails session integration leans heavily on the great work already done by
* Express, but also unifies Socket.io with the Connect session store. It uses
* Connect's cookie parser to normalize configuration differences between Express
* and Socket.io and hooks into Sails' middleware interpreter to allow you to access
* and auto-save to `req.session` with Socket.io the same way you would with Express.
*
* For more information on configuring the session, check out:
* http://sailsjs.org/#/documentation/reference/sails.config/sails.config.session.html
*/
module.exports.session = {

  /***************************************************************************
  * *
  * Secret used to sign session cookies. It was auto-generated when the app *
  * was created. Replace at your own risk in production -- changing it *
  * invalidates every existing cookie, forcing all users to log in again. *
  * Consider moving it to config/local.js so it stays out of source control. *
  * *
  ***************************************************************************/
  secret: '80f8e7022a06a0605675b9310605df91',

  /***************************************************************************
  * *
  * Session cookie lifetime. `maxAge` is expressed in milliseconds; the *
  * example below keeps sessions alive for 24 hours. *
  * *
  ***************************************************************************/

  // cookie: {
  // maxAge: 24 * 60 * 60 * 1000
  // },

  /***************************************************************************
  * *
  * In production, uncomment the following lines to set up a shared redis *
  * session store that can be shared across multiple Sails.js servers. *
  ***************************************************************************/

  // adapter: 'redis',

  /***************************************************************************
  * *
  * The following values are optional; if no options are set, a redis *
  * instance running on localhost is expected. Read more about options at: *
  * https://github.com/visionmedia/connect-redis *
  * *
  * *
  ***************************************************************************/

  // host: 'localhost',
  // port: 6379,
  // ttl: <redis session TTL in seconds>,
  // db: 0,
  // pass: <redis auth password>,
  // prefix: 'sess:',

  /***************************************************************************
  * *
  * Uncomment the following lines to use your Mongo adapter as a session *
  * store instead *
  * *
  ***************************************************************************/

  // adapter: 'mongo',
  // host: 'localhost',
  // port: 27017,
  // db: 'sails',
  // collection: 'sessions',

  /***************************************************************************
  * *
  * Optional Mongo values: *
  * *
  * # Note: url will override other connection settings url: *
  * 'mongodb://user:pass@host:port/database/collection', *
  * *
  ***************************************************************************/

  // username: '',
  // password: '',
  // auto_reconnect: false,
  // ssl: false,
  // stringify: true

};
| {
"content_hash": "54b76dd3805c9c4b5cd4e712a6d9c531",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 86,
"avg_line_length": 47.505494505494504,
"alnum_prop": 0.3296321998612075,
"repo_name": "1upon0/rfid-auth-system",
"id": "48dfc38edd54a4185a4d0818bbdac58d413b91b2",
"size": "4323",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backend/config/session.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "110445"
},
{
"name": "C",
"bytes": "848319"
},
{
"name": "C++",
"bytes": "257556"
},
{
"name": "CSS",
"bytes": "142637"
},
{
"name": "Gnuplot",
"bytes": "1125"
},
{
"name": "HTML",
"bytes": "921766"
},
{
"name": "JavaScript",
"bytes": "400339"
},
{
"name": "Makefile",
"bytes": "119274"
},
{
"name": "Python",
"bytes": "2308656"
},
{
"name": "QMake",
"bytes": "987"
},
{
"name": "Shell",
"bytes": "6209"
}
],
"symlink_target": ""
} |
package android.print.mockservice;
import android.os.CancellationSignal;
import android.print.PrinterId;
import android.printservice.CustomPrinterIconCallback;
import java.util.List;
/**
 * Abstract callback holder a {@code StubbablePrinterDiscoverySession} delegates
 * its lifecycle events to. Tests subclass this (package name suggests a mock
 * print service) and stub the abstract hooks below.
 */
public abstract class PrinterDiscoverySessionCallbacks {
    /** Session this callback object is attached to; set via {@link #setSession}. */
    private StubbablePrinterDiscoverySession mSession;
    /** Associates this callback object with its discovery session. */
    public void setSession(StubbablePrinterDiscoverySession session) {
        mSession = session;
    }
    /** Returns the session set via {@link #setSession}, or null if none yet. */
    public StubbablePrinterDiscoverySession getSession() {
        return mSession;
    }
    /** Called when printer discovery starts for the given priority printers. */
    public abstract void onStartPrinterDiscovery(List<PrinterId> priorityList);
    /** Called when printer discovery stops. */
    public abstract void onStopPrinterDiscovery();
    /** Called to validate whether the given printers still exist. */
    public abstract void onValidatePrinters(List<PrinterId> printerIds);
    /** Called to begin tracking state changes of a specific printer. */
    public abstract void onStartPrinterStateTracking(PrinterId printerId);
    /** Called to supply a custom icon for a printer; may be cancelled. */
    public abstract void onRequestCustomPrinterIcon(PrinterId printerId,
            CancellationSignal cancellationSignal, CustomPrinterIconCallback callback);
    /** Called to stop tracking state changes of a specific printer. */
    public abstract void onStopPrinterStateTracking(PrinterId printerId);
    /** Called when the discovery session is destroyed. */
    public abstract void onDestroy();
}
| {
"content_hash": "5de34378b7518c89a6de1e1fc4ad84d9",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 87,
"avg_line_length": 28.62162162162162,
"alnum_prop": 0.7932011331444759,
"repo_name": "xorware/android_frameworks_base",
"id": "be002e29ff687c4455aad6916914fc7549a34e92",
"size": "1678",
"binary": false,
"copies": "1",
"ref": "refs/heads/n",
"path": "core/tests/coretests/src/android/print/mockservice/PrinterDiscoverySessionCallbacks.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "167132"
},
{
"name": "C++",
"bytes": "7092455"
},
{
"name": "GLSL",
"bytes": "20654"
},
{
"name": "HTML",
"bytes": "224185"
},
{
"name": "Java",
"bytes": "78926513"
},
{
"name": "Makefile",
"bytes": "420231"
},
{
"name": "Python",
"bytes": "42309"
},
{
"name": "RenderScript",
"bytes": "153826"
},
{
"name": "Shell",
"bytes": "21079"
}
],
"symlink_target": ""
} |
import warnings
import pandas as pd
import pyspark
import pyspark as ps
from pyspark.sql.column import Column
import ibis.common.exceptions as com
import ibis.expr.schema as sch
import ibis.expr.types as types
from ibis.backends.base.sql import BaseSQLBackend
from ibis.backends.base.sql.ddl import (
CreateDatabase,
DropTable,
TruncateTable,
is_fully_qualified,
)
from ibis.expr.scope import Scope
from ibis.expr.timecontext import canonicalize_context, localize_context
from . import ddl
from .client import PySparkTable, spark_dataframe_schema
from .compiler import PySparkDatabaseTable, PySparkExprTranslator
from .datatypes import spark_dtype
# Default options handed to ``pyspark.sql.DataFrameReader.csv`` by the CSV
# helpers below: first line is a header, quoted fields may span lines, parsing
# fails fast on malformed rows, and '"' is the escape character.
_read_csv_defaults = {
    'header': True,
    'multiLine': True,
    'mode': 'FAILFAST',
    'escape': '"',
}
class _PySparkCursor:
"""Spark cursor.
This allows the Spark client to reuse machinery in
:file:`ibis/backends/base/sql/client.py`.
"""
def __init__(self, query):
"""
Construct a SparkCursor with query `query`.
Parameters
----------
query : pyspark.sql.DataFrame
Contains result of query.
"""
self.query = query
def fetchall(self):
"""Fetch all rows."""
result = self.query.collect() # blocks until finished
return result
@property
def columns(self):
"""Return the columns of the result set."""
return self.query.columns
@property
def description(self):
"""Get the fields of the result set's schema."""
return self.query.schema
def __enter__(self):
# For compatibility when constructed from Query.execute()
"""No-op for compatibility."""
return self
def __exit__(self, exc_type, exc_value, traceback):
"""No-op for compatibility."""
class Backend(BaseSQLBackend):
    """PySpark backend: translates ibis expressions to PySpark DataFrames
    and manages tables/views through a live ``SparkSession``."""

    name = 'pyspark'
    # Expression-tree node and table-expression wrapper used by ``table()``.
    table_class = PySparkDatabaseTable
    table_expr_class = PySparkTable
    def connect(self, session):
        """
        Create a pyspark `Backend` for use with Ibis.
        Pipes `**kwargs` into Backend, which pipes them into SparkContext.
        See documentation for SparkContext:
        https://spark.apache.org/docs/latest/api/python/_modules/pyspark/context.html#SparkContext
        """
        # Note: returns a NEW backend instance bound to `session`; `self` is
        # left untouched.
        new_backend = self.__class__()
        new_backend._context = session.sparkContext
        new_backend._session = session
        new_backend._catalog = session.catalog
        # Spark internally stores timestamps as UTC values, and timestamp data
        # that is brought in without a specified time zone is converted as
        # local time to UTC with microsecond resolution.
        # https://spark.apache.org/docs/latest/sql-pyspark-pandas-with-arrow.html#timestamp-with-time-zone-semantics
        new_backend._session.conf.set('spark.sql.session.timeZone', 'UTC')
        return new_backend
    @property
    def version(self):
        # Version of the underlying pyspark package, not of ibis itself.
        return pyspark.__version__
    def set_database(self, name):
        # Deprecated: mutates the catalog's current database in place.
        warnings.warn(
            '`set_database` is deprecated and will be removed in a future '
            'version of Ibis. Create a new connection to the desired database '
            'instead',
            FutureWarning,
        )
        self._catalog.setCurrentDatabase(name)
    @property
    def current_database(self):
        # Name of the database the Spark catalog currently points at.
        return self._catalog.currentDatabase()
    def list_databases(self, like=None):
        # Database names known to the catalog, optionally filtered by the
        # `like` pattern (filtering delegated to BaseSQLBackend).
        databases = [db.name for db in self._catalog.listDatabases()]
        return self._filter_with_like(databases, like)
    def list_tables(self, like=None, database=None):
        # Table names in `database` (default: current database), optionally
        # filtered by the `like` pattern.
        tables = [
            t.name
            for t in self._catalog.listTables(
                dbName=database or self.current_database
            )
        ]
        return self._filter_with_like(tables, like)
    def compile(self, expr, timecontext=None, params=None, *args, **kwargs):
        """Compile an ibis expression to a PySpark DataFrame object"""
        if timecontext is not None:
            session_timezone = self._session.conf.get(
                'spark.sql.session.timeZone'
            )
            # Since spark use session timezone for tz-naive timestamps
            # we localize tz-naive context here to match that behavior
            timecontext = localize_context(
                canonicalize_context(timecontext), session_timezone
            )
        # Insert params in scope
        if params is None:
            scope = Scope()
        else:
            scope = Scope(
                {param.op(): raw_value for param, raw_value in params.items()},
                timecontext,
            )
        return PySparkExprTranslator().translate(
            expr, scope=scope, timecontext=timecontext
        )
    def execute(
        self, expr, timecontext=None, params=None, limit='default', **kwargs
    ):
        # Compile `expr` and materialize the result as pandas objects:
        # table -> DataFrame, column -> Series, scalar -> python value.
        # NOTE(review): `limit` is accepted but not used in this method.
        if isinstance(expr, types.TableExpr):
            return self.compile(expr, timecontext, params, **kwargs).toPandas()
        elif isinstance(expr, types.ColumnExpr):
            # expression must be named for the projection
            expr = expr.name('tmp')
            return self.compile(
                expr.to_projection(), timecontext, params, **kwargs
            ).toPandas()['tmp']
        elif isinstance(expr, types.ScalarExpr):
            compiled = self.compile(expr, timecontext, params, **kwargs)
            if isinstance(compiled, Column):
                # attach result column to a fake DataFrame and
                # select the result
                compiled = self._session.range(0, 1).select(compiled)
            return compiled.toPandas().iloc[0, 0]
        else:
            raise com.IbisError(
                f"Cannot execute expression of type: {type(expr)}"
            )
    @staticmethod
    def _fully_qualified_name(name, database):
        # Qualify `name` as ``database.`name``` unless it is already
        # fully qualified.
        if is_fully_qualified(name):
            return name
        if database:
            return f'{database}.`{name}`'
        return name
    def close(self):
        """
        Close Spark connection and drop any temporary objects
        """
        self._context.stop()
    def fetch_from_cursor(self, cursor, schema):
        # Convert the cursor's Spark DataFrame to pandas, then coerce the
        # result to the expected ibis `schema`.
        df = cursor.query.toPandas()  # blocks until finished
        return schema.apply_to(df)
    def raw_sql(self, stmt):
        # Run a raw SQL string; the resulting (lazy) DataFrame is wrapped in
        # a cursor for the generic client machinery.
        query = self._session.sql(stmt)
        return _PySparkCursor(query)
    def _get_schema_using_query(self, query):
        # Infer an ibis schema from the DataFrame produced by `query`.
        cur = self.raw_sql(query)
        return spark_dataframe_schema(cur.query)
    def _get_jtable(self, name, database=None):
        # Look up the JVM-side table object via the catalog's private
        # `_jcatalog`, translating Spark analysis errors into ibis errors.
        try:
            jtable = self._catalog._jcatalog.getTable(
                self._fully_qualified_name(name, database)
            )
        except ps.sql.utils.AnalysisException as e:
            raise com.IbisInputError(str(e)) from e
        return jtable
    def table(self, name, database=None):
        """
        Create a table expression that references a particular table or view
        in the database.
        Parameters
        ----------
        name : string
        database : string, optional
        Returns
        -------
        table : TableExpr
        """
        # Resolve via the JVM catalog first so `name`/`database` reflect the
        # canonical names Spark knows the table by.
        jtable = self._get_jtable(name, database)
        name, database = jtable.name(), jtable.database()
        qualified_name = self._fully_qualified_name(name, database)
        schema = self.get_schema(qualified_name)
        node = self.table_class(qualified_name, schema, self)
        return self.table_expr_class(node)
    def create_database(self, name, path=None, force=False):
        """
        Create a new Spark database
        Parameters
        ----------
        name : string
          Database name
        path : string, default None
          Path where to store the database data; otherwise uses Spark
          default
        """
        # `force=True` maps to CREATE DATABASE IF NOT EXISTS.
        statement = CreateDatabase(name, path=path, can_exist=force)
        return self.raw_sql(statement.compile())
    def drop_database(self, name, force=False):
        """Drop a Spark database.
        Parameters
        ----------
        name : string
          Database name
        force : bool, default False
          If False, Spark throws exception if database is not empty or
          database does not exist
        """
        statement = ddl.DropDatabase(name, must_exist=not force, cascade=force)
        return self.raw_sql(statement.compile())
    def get_schema(self, table_name, database=None):
        """
        Return a Schema object for the indicated table and database
        Parameters
        ----------
        table_name : string
          May be fully qualified
        database : string
          Spark does not have a database argument for its table() method,
          so this must be None
        Returns
        -------
        schema : ibis Schema
        """
        if database is not None:
            raise com.UnsupportedArgumentError(
                'Spark does not support database param for table'
            )
        df = self._session.table(table_name)
        return sch.infer(df)
    def _schema_from_csv(self, path, **kwargs):
        """
        Return a Schema object for the indicated csv file. Spark goes through
        the file once to determine the schema. See documentation for
        `pyspark.sql.DataFrameReader` for kwargs.
        Parameters
        ----------
        path : string
        Returns
        -------
        schema : ibis Schema
        """
        options = _read_csv_defaults.copy()
        options.update(kwargs)
        # Force schema inference regardless of caller-supplied options.
        options['inferSchema'] = True
        df = self._session.read.csv(path, **options)
        return spark_dataframe_schema(df)
    def _create_table_or_temp_view_from_csv(
        self,
        name,
        path,
        schema=None,
        database=None,
        force=False,
        temp_view=False,
        format='parquet',
        **kwargs,
    ):
        # Read a CSV into a DataFrame and register it either as a session
        # temp view (`temp_view=True`) or as a persisted table.
        options = _read_csv_defaults.copy()
        options.update(kwargs)
        if schema:
            # Explicit schema and inferSchema are mutually exclusive.
            assert ('inferSchema', True) not in options.items()
            schema = spark_dtype(schema)
            options['schema'] = schema
        else:
            options['inferSchema'] = True
        df = self._session.read.csv(path, **options)
        if temp_view:
            if force:
                df.createOrReplaceTempView(name)
            else:
                df.createTempView(name)
        else:
            qualified_name = self._fully_qualified_name(
                name, database or self.current_database
            )
            # `force` selects Spark's overwrite save mode; otherwise error
            # out if the table already exists.
            mode = 'error'
            if force:
                mode = 'overwrite'
            df.write.saveAsTable(qualified_name, format=format, mode=mode)
    def create_table(
        self,
        table_name,
        obj=None,
        schema=None,
        database=None,
        force=False,
        # HDFS options
        format='parquet',
    ):
        """
        Create a new table in Spark using an Ibis table expression.
        Parameters
        ----------
        table_name : string
        obj : TableExpr or pandas.DataFrame, optional
          If passed, creates table from select statement results
        schema : ibis.Schema, optional
          Mutually exclusive with obj, creates an empty table with a
          particular schema
        database : string, default None (optional)
        force : boolean, default False
          If true, create table if table with indicated name already exists
        format : {'parquet'}
        Examples
        --------
        >>> con.create_table('new_table_name', table_expr)  # doctest: +SKIP
        """
        if obj is not None:
            if isinstance(obj, pd.DataFrame):
                # pandas input is written directly via the Spark writer;
                # NOTE(review): `database` is not applied in this branch.
                spark_df = self._session.createDataFrame(obj)
                mode = 'error'
                if force:
                    mode = 'overwrite'
                spark_df.write.saveAsTable(
                    table_name, format=format, mode=mode
                )
                return
            # ibis expression input becomes a CREATE TABLE AS SELECT.
            ast = self.compiler.to_ast(obj)
            select = ast.queries[0]
            statement = ddl.CTAS(
                table_name,
                select,
                database=database,
                can_exist=force,
                format=format,
            )
        elif schema is not None:
            statement = ddl.CreateTableWithSchema(
                table_name,
                schema,
                database=database,
                format=format,
                can_exist=force,
            )
        else:
            raise com.IbisError('Must pass expr or schema')
        return self.raw_sql(statement.compile())
    def create_view(
        self, name, expr, database=None, can_exist=False, temporary=False
    ):
        """
        Create a Spark view from a table expression
        Parameters
        ----------
        name : string
        expr : ibis TableExpr
        database : string, default None
        can_exist : boolean, default False
          Replace an existing view of the same name if it exists
        temporary : boolean, default False
        """
        ast = self.compiler.to_ast(expr)
        select = ast.queries[0]
        statement = ddl.CreateView(
            name,
            select,
            database=database,
            can_exist=can_exist,
            temporary=temporary,
        )
        return self.raw_sql(statement.compile())
    def drop_table(self, name, database=None, force=False):
        # Thin alias: Spark uses the same DROP statement for tables and views.
        self.drop_table_or_view(name, database, force)
    def drop_view(self, name, database=None, force=False):
        # Thin alias: Spark uses the same DROP statement for tables and views.
        self.drop_table_or_view(name, database, force)
    def drop_table_or_view(self, name, database=None, force=False):
        """
        Drop a Spark table or view
        Parameters
        ----------
        name : string
        database : string, default None (optional)
        force : boolean, default False
          Database may throw exception if table does not exist
        Examples
        --------
        >>> table = 'my_table'
        >>> db = 'operations'
        >>> con.drop_table_or_view(table, db, force=True)  # doctest: +SKIP
        """
        statement = DropTable(name, database=database, must_exist=not force)
        self.raw_sql(statement.compile())
    def truncate_table(self, table_name, database=None):
        """
        Delete all rows from, but do not drop, an existing table
        Parameters
        ----------
        table_name : string
        database : string, default None (optional)
        """
        statement = TruncateTable(table_name, database=database)
        self.raw_sql(statement.compile())
    def insert(
        self,
        table_name,
        obj=None,
        database=None,
        overwrite=False,
        values=None,
        validate=True,
    ):
        """
        Insert into existing table.
        See SparkTable.insert for other parameters.
        Parameters
        ----------
        table_name : string
        database : string, default None
        Examples
        --------
        >>> table = 'my_table'
        >>> con.insert(table, table_expr)  # doctest: +SKIP
        # Completely overwrite contents
        >>> con.insert(table, table_expr, overwrite=True)  # doctest: +SKIP
        """
        # Delegates to the table expression's own insert implementation.
        table = self.table(table_name, database=database)
        return table.insert(
            obj=obj, overwrite=overwrite, values=values, validate=validate
        )
    def compute_stats(self, name, database=None, noscan=False):
        """
        Issue COMPUTE STATISTICS command for a given table
        Parameters
        ----------
        name : string
          Can be fully qualified (with database name)
        database : string, optional
        noscan : boolean, default False
          If True, collect only basic statistics for the table (number of
          rows, size in bytes).
        """
        maybe_noscan = ' NOSCAN' if noscan else ''
        stmt = 'ANALYZE TABLE {} COMPUTE STATISTICS{}'.format(
            self._fully_qualified_name(name, database), maybe_noscan
        )
        return self.raw_sql(stmt)
| {
"content_hash": "f48f808fc022d8f5e7581282fdfd5c50",
"timestamp": "",
"source": "github",
"line_count": 530,
"max_line_length": 116,
"avg_line_length": 30.39245283018868,
"alnum_prop": 0.5724484728085424,
"repo_name": "cloudera/ibis",
"id": "86b1f125661bf95b48ce63a9b48e679ba2b5d658",
"size": "16108",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ibis/backends/pyspark/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "44943"
},
{
"name": "CMake",
"bytes": "4383"
},
{
"name": "Python",
"bytes": "2570944"
},
{
"name": "Shell",
"bytes": "1989"
}
],
"symlink_target": ""
} |
namespace Microsoft.Azure.Management.Marketplace.Models
{
    using Newtonsoft.Json;
    using System.Collections;
    using System.Collections.Generic;
    using System.Linq;
    /// <summary>
    /// Request approvals details
    /// </summary>
    // NOTE(review): follows the AutoRest generated-model pattern (partial
    // class + CustomInit hook); presumably auto-generated -- prefer
    // regenerating over hand-editing. Confirm against the code generator.
    public partial class RequestApprovalsDetails
    {
        /// <summary>
        /// Initializes a new instance of the RequestApprovalsDetails class.
        /// </summary>
        public RequestApprovalsDetails()
        {
            CustomInit();
        }
        /// <summary>
        /// Initializes a new instance of the RequestApprovalsDetails class.
        /// </summary>
        /// <param name="offerId">Gets offer id</param>
        /// <param name="displayName">Gets offer display name</param>
        /// <param name="publisherId">Gets or sets publisher id</param>
        /// <param name="messageCode">Gets or sets the notification message
        /// id</param>
        /// <param name="icon">Gets or sets the icon url</param>
        /// <param name="plans">Gets or sets removed plans
        /// notifications</param>
        public RequestApprovalsDetails(string offerId = default(string), string displayName = default(string), string publisherId = default(string), long? messageCode = default(long?), string icon = default(string), IList<PlanNotificationDetails> plans = default(IList<PlanNotificationDetails>))
        {
            OfferId = offerId;
            DisplayName = displayName;
            PublisherId = publisherId;
            MessageCode = messageCode;
            Icon = icon;
            // CustomInit runs last so user code can adjust the assigned values.
            CustomInit();
        }
        /// <summary>
        /// An initialization method that performs custom operations like setting defaults
        /// </summary>
        partial void CustomInit();
        /// <summary>
        /// Gets offer id
        /// </summary>
        [JsonProperty(PropertyName = "offerId")]
        public string OfferId { get; set; }
        /// <summary>
        /// Gets offer display name
        /// </summary>
        [JsonProperty(PropertyName = "displayName")]
        public string DisplayName { get; set; }
        /// <summary>
        /// Gets or sets publisher id
        /// </summary>
        [JsonProperty(PropertyName = "publisherId")]
        public string PublisherId { get; set; }
        /// <summary>
        /// Gets or sets the notification message id
        /// </summary>
        [JsonProperty(PropertyName = "messageCode")]
        public long? MessageCode { get; set; }
        /// <summary>
        /// Gets or sets the icon url
        /// </summary>
        [JsonProperty(PropertyName = "icon")]
        public string Icon { get; set; }
        /// <summary>
        /// Gets or sets removed plans notifications
        /// </summary>
        [JsonProperty(PropertyName = "plans")]
        public IList<PlanNotificationDetails> Plans { get; set; }
    }
}
| {
"content_hash": "f3be31478d5197e25f02e1f9aeea999e",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 295,
"avg_line_length": 34.44705882352941,
"alnum_prop": 0.582308743169399,
"repo_name": "Azure/azure-sdk-for-net",
"id": "f91ead2f2de4809e60fd4f4723ddf96900bf1353",
"size": "3281",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/marketplace/Microsoft.Azure.Management.Marketplace/src/Generated/Models/RequestApprovalsDetails.cs",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace LinqToDB.SqlQuery
{
    /// <summary>
    /// SQL statement node representing a SELECT; wraps a
    /// <c>SelectQuery</c> supplied at construction (or assigned later when
    /// using the parameterless constructor).
    /// </summary>
    public class SqlSelectStatement : SqlStatementWithQueryBase
    {
        public SqlSelectStatement(SelectQuery selectQuery) : base(selectQuery)
        {
        }
        public SqlSelectStatement() : base(null)
        {
        }
        public override QueryType QueryType => QueryType.Select;
        public override QueryElementType ElementType => QueryElementType.SelectStatement;
        /// <summary>Renders the wrapped query's SQL text into <paramref name="sb"/>.</summary>
        public override StringBuilder ToString(StringBuilder sb, Dictionary<IQueryElement, IQueryElement> dic)
        {
            return SelectQuery.ToString(sb, dic);
        }
        /// <summary>
        /// Applies <paramref name="func"/> over the WITH clause (if any) and the
        /// query tree; swaps in the walked query when a new instance is produced.
        /// Always returns null: the statement itself is never replaced.
        /// </summary>
        public override ISqlExpression Walk(bool skipColumns, Func<ISqlExpression, ISqlExpression> func)
        {
            With?.Walk(skipColumns, func);
            var newQuery = SelectQuery.Walk(skipColumns, func);
            if (!ReferenceEquals(newQuery, SelectQuery))
                SelectQuery = (SelectQuery)newQuery;
            return null;
        }
        /// <summary>
        /// Deep-clones this statement (query and parameters).
        /// NOTE(review): the clone is registered in <paramref name="objectTree"/>
        /// only after its children are cloned -- presumably safe because children
        /// do not reference the statement back; confirm before reordering.
        /// </summary>
        public override ICloneableElement Clone(Dictionary<ICloneableElement, ICloneableElement> objectTree, Predicate<ICloneableElement> doClone)
        {
            var clone = new SqlSelectStatement();
            if (SelectQuery != null)
                clone.SelectQuery = (SelectQuery)SelectQuery.Clone(objectTree, doClone);
            clone.Parameters.AddRange(Parameters.Select(p => (SqlParameter)p.Clone(objectTree, doClone)));
            objectTree.Add(this, clone);
            return clone;
        }
    }
}
| {
"content_hash": "2a7d3be56cd17484ee7466cbdbbfa1fa",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 140,
"avg_line_length": 28.163265306122447,
"alnum_prop": 0.7536231884057971,
"repo_name": "genusP/linq2db",
"id": "690a966794c0f696fe549498d92cf39ef0cd445f",
"size": "1382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/LinqToDB/SqlQuery/SqlSelectStatement.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3021"
},
{
"name": "C#",
"bytes": "5974769"
},
{
"name": "CoffeeScript",
"bytes": "20"
},
{
"name": "F#",
"bytes": "9977"
},
{
"name": "PLSQL",
"bytes": "22385"
},
{
"name": "PLpgSQL",
"bytes": "28536"
},
{
"name": "PowerShell",
"bytes": "14271"
},
{
"name": "SQLPL",
"bytes": "8076"
},
{
"name": "Shell",
"bytes": "2935"
},
{
"name": "Smalltalk",
"bytes": "11"
},
{
"name": "Visual Basic",
"bytes": "1660"
}
],
"symlink_target": ""
} |
package fi.vtt.nubomedia.jsonrpcwsandroid;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
} | {
"content_hash": "55251c13340791c683bca1d65ee53049",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 78,
"avg_line_length": 20.733333333333334,
"alnum_prop": 0.7331189710610932,
"repo_name": "nubomedia-vtt/jsonrpc-ws-android",
"id": "2b684d2a2dfd869adf7f9f76b483f61cf6a84670",
"size": "311",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jsonrpc-ws-android/src/test/java/fi/vtt/nubomedia/jsonrpcwsandroid/ExampleUnitTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "13448"
}
],
"symlink_target": ""
} |
using namespace mx::core;
// Round-trip test for the WorkNumber element:
// 1. a default-constructed element serializes as an empty tag;
// 2. a value-constructed element serializes with its value and the
//    requested indentation depth;
// 3. copying the value across instances produces identical streamContents
//    output, and the element reports itself as one-line-only.
TEST( Test01, WorkNumber )
{
    std::string indentString( INDENT );
    XsString value1{ "ABC" };
    XsString value2{ "XYZ" };
    WorkNumber object1;
    WorkNumber object2( value2 );
    // Default-constructed element at indent level 0.
    std::stringstream default_constructed;
    object1.toStream( default_constructed, 0 );
    // Value-constructed element at indent level 2.
    std::stringstream object2_stream;
    object2.toStream( object2_stream, 2 );
    std::string expected = R"(<work-number></work-number>)";
    std::string actual = default_constructed.str();
    CHECK_EQUAL( expected, actual )
    // Two leading indent units correspond to indent level 2 above.
    expected = indentString+indentString+R"(<work-number>XYZ</work-number>)";
    actual = object2_stream.str();
    CHECK_EQUAL( expected, actual )
    // Copy the value from object2 into object1 and compare raw contents.
    value1 = object2.getValue();
    object1.setValue( value1 );
    std::stringstream o1; std::stringstream o2; bool isOneLineOnly = false;
    object1.streamContents( o1, 0, isOneLineOnly );
    object2.streamContents( o2, 0, isOneLineOnly );
    CHECK( isOneLineOnly )
    CHECK_EQUAL( o1.str(), o2.str() )
}
#endif
| {
"content_hash": "b3b503eeb69c34bb1a8d1603808d6708",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 74,
"avg_line_length": 32.3448275862069,
"alnum_prop": 0.7249466950959488,
"repo_name": "Webern/MusicXML-Class-Library",
"id": "9eea20f0ee8c41bb7c72e638b41a97a3e16be797",
"size": "1200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Sourcecode/private/mxtest/core/WorkNumberTest.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1796"
},
{
"name": "C++",
"bytes": "8167393"
},
{
"name": "CMake",
"bytes": "2762"
},
{
"name": "HTML",
"bytes": "8450"
},
{
"name": "Objective-C",
"bytes": "1428"
},
{
"name": "Ruby",
"bytes": "141276"
},
{
"name": "Shell",
"bytes": "1997"
}
],
"symlink_target": ""
} |
using System.Collections;
using System.Collections.Specialized;
using System.Web.UI;
using System.ComponentModel;
using System.Security.Permissions;
using System.Reflection;
namespace System.Web.UI.WebControls {
    [AspNetHostingPermissionAttribute (SecurityAction.LinkDemand, Level = AspNetHostingPermissionLevel.Minimal)]
    [AspNetHostingPermissionAttribute (SecurityAction.InheritanceDemand, Level = AspNetHostingPermissionLevel.Minimal)]
    /// <summary>
    /// Data control field that renders a HyperLink per row, with text and
    /// navigate-url optionally bound to data fields.
    /// </summary>
    public class HyperLinkField : DataControlField
    {
        // Cached property descriptors for the bound text / URL fields,
        // resolved lazily on first data-bind (see SetupProperties).
        PropertyDescriptor textProperty;
        PropertyDescriptor[] urlProperties;
        // Shared empty array returned when DataNavigateUrlFields is unset.
        static string[] emptyFields;
        public override bool Initialize (bool sortingEnabled, Control control)
        {
            return base.Initialize (sortingEnabled, control);
        }
        /// <summary>Names of the data fields used to build the navigate URL.</summary>
        [EditorAttribute ("System.Web.UI.Design.WebControls.DataFieldEditor, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.Drawing.Design.UITypeEditor, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
        [TypeConverterAttribute (typeof(StringArrayConverter))]
        [WebCategoryAttribute ("Data")]
        [DefaultValueAttribute ("")]
        public virtual string[] DataNavigateUrlFields {
            get {
                object ob = ViewState ["DataNavigateUrlFields"];
                if (ob != null) return (string[]) ob;
                if (emptyFields == null) emptyFields = new string[0];
                return emptyFields;
            }
            set {
                ViewState ["DataNavigateUrlFields"] = value;
                OnFieldChanged ();
            }
        }
        /// <summary>Format string applied to the DataNavigateUrlFields values.</summary>
        [DefaultValueAttribute ("")]
        [WebCategoryAttribute ("Data")]
        public virtual string DataNavigateUrlFormatString {
            get {
                object ob = ViewState ["DataNavigateUrlFormatString"];
                if (ob != null) return (string) ob;
                return "";
            }
            set {
                ViewState ["DataNavigateUrlFormatString"] = value;
                OnFieldChanged ();
            }
        }
        /// <summary>Name of the data field bound to the link text.</summary>
        [WebCategoryAttribute ("Data")]
        [DefaultValueAttribute ("")]
        [TypeConverterAttribute ("System.Web.UI.Design.DataSourceViewSchemaConverter, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
        public virtual string DataTextField {
            get {
                object ob = ViewState ["DataTextField"];
                if (ob != null) return (string) ob;
                return "";
            }
            set {
                ViewState ["DataTextField"] = value;
                OnFieldChanged ();
            }
        }
        /// <summary>Format string applied to the DataTextField value.</summary>
        [DefaultValueAttribute ("")]
        [WebCategoryAttribute ("Data")]
        public virtual string DataTextFormatString {
            get {
                object ob = ViewState ["DataTextFormatString"];
                if (ob != null) return (string) ob;
                return "";
            }
            set {
                ViewState ["DataTextFormatString"] = value;
                OnFieldChanged ();
            }
        }
        /// <summary>Static navigate URL used when no URL fields are bound.</summary>
        [DefaultValueAttribute ("")]
        [EditorAttribute ("System.Web.UI.Design.ImageUrlEditor, System.Design, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a", "System.Drawing.Design.UITypeEditor, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")]
        [UrlPropertyAttribute]
        [WebCategoryAttribute ("Behavior")]
        public virtual string NavigateUrl {
            get {
                object ob = ViewState ["NavigateUrl"];
                if (ob != null) return (string) ob;
                return "";
            }
            set {
                ViewState ["NavigateUrl"] = value;
                OnFieldChanged ();
            }
        }
        /// <summary>Target window or frame for the rendered link.</summary>
        [DefaultValueAttribute ("")]
        [WebCategoryAttribute ("Behavior")]
        [TypeConverterAttribute (typeof(TargetConverter))]
        public virtual string Target {
            get {
                object ob = ViewState ["Target"];
                if (ob != null) return (string) ob;
                return "";
            }
            set {
                ViewState ["Target"] = value;
                OnFieldChanged ();
            }
        }
        /// <summary>Static link text used when DataTextField is not set.</summary>
        [LocalizableAttribute (true)]
        [DefaultValueAttribute ("")]
        [WebCategoryAttribute ("Appearance")]
        public virtual string Text {
            get {
                object ob = ViewState ["Text"];
                if (ob != null) return (string) ob;
                return "";
            }
            set {
                ViewState ["Text"] = value;
                OnFieldChanged ();
            }
        }
        /// <summary>
        /// Adds a HyperLink control to the cell and, when any data field is
        /// bound, hooks the cell's DataBinding event to fill it in.
        /// </summary>
        public override void InitializeCell (DataControlFieldCell cell,
            DataControlCellType cellType, DataControlRowState rowState, int rowIndex)
        {
            base.InitializeCell (cell, cellType, rowState, rowIndex);
            HyperLink link = new HyperLink ();
            bool bind = false;
            if (Target.Length > 0)
                link.Target = Target;
            if (DataTextField.Length > 0)
                bind = true;
            else
                link.Text = Text;
            string[] fields = DataNavigateUrlFields;
            if (fields.Length > 0)
                bind = true;
            else
                link.NavigateUrl = NavigateUrl;
            // Only real data cells (not headers/footers or insert rows) bind.
            if (bind && cellType == DataControlCellType.DataCell && (rowState & DataControlRowState.Insert) == 0)
                cell.DataBinding += new EventHandler (OnDataBindField);
            cell.Controls.Add (link);
        }
        /// <summary>
        /// Formats the bound URL values: applies DataNavigateUrlFormatString
        /// if set, otherwise uses the first value's string form.
        /// </summary>
        protected virtual string FormatDataNavigateUrlValue (object[] dataUrlValues)
        {
            if (dataUrlValues == null || dataUrlValues.Length == 0)
                return "";
            else if (DataNavigateUrlFormatString.Length > 0)
                return string.Format (DataNavigateUrlFormatString, dataUrlValues);
            else
                return dataUrlValues[0].ToString ();
        }
        /// <summary>
        /// Formats the bound text value, applying DataTextFormatString if set.
        /// </summary>
        protected virtual string FormatDataTextValue (object dataTextValue)
        {
            if (DataTextFormatString.Length > 0)
                return string.Format (DataTextFormatString, dataTextValue);
            else if (dataTextValue == null)
                return "";
            else
                return dataTextValue.ToString ();
        }
        // DataBinding handler: resolves the current data item and pushes the
        // formatted text/URL into the HyperLink created in InitializeCell.
        void OnDataBindField (object sender, EventArgs e)
        {
            DataControlFieldCell cell = (DataControlFieldCell) sender;
            HyperLink link = (HyperLink) cell.Controls [0];
            object controlContainer = cell.BindingContainer;
            object item = DataBinder.GetDataItem (controlContainer);
            if (DataTextField.Length > 0) {
                if (textProperty == null) SetupProperties (controlContainer);
                link.Text = FormatDataTextValue (textProperty.GetValue (item));
            }
            string[] urlFields = DataNavigateUrlFields;
            if (urlFields.Length > 0) {
                if (urlProperties == null) SetupProperties (controlContainer);
                object[] dataUrlValues = new object [urlFields.Length];
                for (int n=0; n<dataUrlValues.Length; n++)
                    dataUrlValues [n] = urlProperties [n].GetValue (item);
                link.NavigateUrl = FormatDataNavigateUrlValue (dataUrlValues);
            }
        }
        // Resolves and caches the property descriptors for the bound fields.
        void SetupProperties (object controlContainer)
        {
            object item = DataBinder.GetDataItem (controlContainer);
            PropertyDescriptorCollection props = TypeDescriptor.GetProperties (item);
            if (DataTextField.Length > 0) {
                textProperty = props [DataTextField];
                // FIX: the exception was previously constructed but never
                // thrown, silently ignoring a missing property and leading to
                // a NullReferenceException later in OnDataBindField.
                if (textProperty == null)
                    throw new InvalidOperationException ("Property '" + DataTextField + "' not found in object of type " + item.GetType());
            }
            string[] urlFields = DataNavigateUrlFields;
            if (urlFields.Length > 0) {
                urlProperties = new PropertyDescriptor [urlFields.Length];
                for (int n=0; n<urlFields.Length; n++) {
                    PropertyDescriptor prop = props [urlFields [n]];
                    // FIX: same missing `throw` as above.
                    if (prop == null)
                        throw new InvalidOperationException ("Property '" + urlFields [n] + "' not found in object of type " + item.GetType());
                    urlProperties [n] = prop;
                }
            }
        }
        protected override DataControlField CreateField ()
        {
            return new HyperLinkField ();
        }
        // Copies all field-specific state onto a freshly created field.
        protected override void CopyProperties (DataControlField newField)
        {
            base.CopyProperties (newField);
            HyperLinkField field = (HyperLinkField) newField;
            field.DataNavigateUrlFields = DataNavigateUrlFields;
            field.DataNavigateUrlFormatString = DataNavigateUrlFormatString;
            field.DataTextField = DataTextField;
            field.DataTextFormatString = DataTextFormatString;
            field.NavigateUrl = NavigateUrl;
            field.Target = Target;
            field.Text = Text;
        }
        // Intentionally empty: this field type supports callbacks.
        public override void ValidateSupportsCallback ()
        {
        }
    }
}
#endif
| {
"content_hash": "bd9af329bdac0b7e3889f25475d35ae1",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 278,
"avg_line_length": 31.188524590163933,
"alnum_prop": 0.695137976346912,
"repo_name": "jjenki11/blaze-chem-rendering",
"id": "c2513e9cf2e9a54e72d25cf9435157fe45f4ebc1",
"size": "8867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qca_designer/lib/ml-pnet-0.8.1/mcs-sources/class/System.Web/System.Web.UI.WebControls/HyperLinkField.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "2476"
}
],
"symlink_target": ""
} |
<?php
namespace Chromabits\Illuminated\Conference\Views;
/**
 * Class Panel.
 *
 * Placeholder view class in the Conference\Views namespace; the class body
 * is currently empty.
 *
 * @todo Implement the panel (no behavior defined yet).
 * @author Eduardo Trujillo <ed@chromabits.com>
 * @package Chromabits\Illuminated\Conference\Views
 */
class Panel
{
    //
}
| {
"content_hash": "3ae6812c31308940b6bfa34b4a0554cb",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 51,
"avg_line_length": 12.941176470588236,
"alnum_prop": 0.6863636363636364,
"repo_name": "chromabits/illuminated",
"id": "3f5a32f9a7039d8f78956c1c7f9d771c2a45f75c",
"size": "464",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/Chromabits/Illuminated/Conference/Views/Panel.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "39"
},
{
"name": "HTML",
"bytes": "92"
},
{
"name": "PHP",
"bytes": "572769"
},
{
"name": "Shell",
"bytes": "484"
}
],
"symlink_target": ""
} |
# Package identity; the version is read from the VERSION file.
PACKAGE_NAME = gauche-rdf
PACKAGE_VERSION = $(shell cat VERSION)
PACKAGE_FULL_NAME = $(PACKAGE_NAME)-$(PACKAGE_VERSION)
# Files produced by autoconf/configure; removed by distclean.
CONFIG_GENERATED = config.log config.status autom4te*.cache
.PHONY: all clean test check install distclean realclean dist
# Nothing to build at the top level; just point the user at `make install`.
all: ; @ echo "% make install"
# Remove core dumps and editor backups, and clean the test directory.
clean:
	rm -f core *~
	cd test; $(MAKE) clean
check:
	cd test; $(MAKE) check
install: all
	cd src; $(MAKE) install
# Also drop configure output and any previously built distribution files.
distclean: clean
	rm -rf $(CONFIG_GENERATED)
	rm -f $(PACKAGE_FULL_NAME)*
# Build a release tarball from a pristine copy of the working tree.
dist:
	rm -f $(PACKAGE_FULL_NAME).tar.gz
	mkdir $(PACKAGE_FULL_NAME)
	cp -pRH $(filter-out $(PACKAGE_FULL_NAME), $(wildcard *)) $(PACKAGE_FULL_NAME)/
	tar zvcf $(PACKAGE_FULL_NAME).tar.gz $(PACKAGE_FULL_NAME)
	rm -rf $(PACKAGE_FULL_NAME)
| {
"content_hash": "dfbda8d0f75832e96abecdcc2bce03cf",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 80,
"avg_line_length": 24.620689655172413,
"alnum_prop": 0.6876750700280112,
"repo_name": "keiji0/gauche-rdf",
"id": "24c72dc6bb68245b10c736d9a3a6a23d425af3b6",
"size": "787",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Makefile",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Scheme",
"bytes": "28472"
}
],
"symlink_target": ""
} |
'use strict';
var aws = require('aws-sdk');
var s3 = new aws.S3();
const fs = require('fs');
const util = require('util');
const exec = require('child_process').execSync;
var sync = require('child_process').spawnSync;
exports.handler = (event, context, callback) => {
process.env['PATH'] = process.env['PATH'] + ':' + process.env['LAMBDA_TASK_ROOT'];
var bucketName = event.bucketName;
var imagefileName = event.imagefileName;
var params = {Bucket: bucketName, Key: imagefileName};
var ocrText = 'ne docekuva';
s3.getObject(params).promise().then( data => {
fs.writeFileSync("/tmp/"+imagefileName, data.Body, 'utf8');
var ghs = sync('LD_LIBRARY_PATH=./lib TESSDATA_PREFIX=./ ./tesseract', ['/tmp/'+imagefileName, 'stdout', '-l', 'eng'], { shell : true });
console.log(ghs.stdout.toString('utf8'));
callback(null, JSON.stringify(ghs.stdout.toString('utf8')));
});
};
| {
"content_hash": "497382319c8044aea4b818a1ce843867",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 148,
"avg_line_length": 35.111111111111114,
"alnum_prop": 0.6255274261603375,
"repo_name": "tgajdoski/pronovos",
"id": "6f9bb195e2c179d41aee6868800811014be06764",
"size": "948",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lambdaFiles/tesseractLambda.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "21318"
},
{
"name": "HTML",
"bytes": "3449643"
},
{
"name": "JavaScript",
"bytes": "143797"
},
{
"name": "Shell",
"bytes": "1578"
},
{
"name": "TypeScript",
"bytes": "19409"
}
],
"symlink_target": ""
} |
package net.opengis.gml.impl;
import net.opengis.gml.CoordinateSystemAxisBaseType;
import net.opengis.gml.GmlPackage;
import org.eclipse.emf.ecore.EClass;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Coordinate System Axis Base Type</b></em>'.
 * <p>
 * This class is EMF-generated; code outside the user-doc regions may be
 * overwritten on model regeneration.
 * </p>
 * <!-- end-user-doc -->
 * <p>
 * </p>
 *
 * @generated
 */
public abstract class CoordinateSystemAxisBaseTypeImpl extends DefinitionTypeImpl implements CoordinateSystemAxisBaseType {
    /**
     * <!-- begin-user-doc -->
     * Protected: instances are created through the generated EMF factory,
     * not directly by clients.
     * <!-- end-user-doc -->
     * @generated
     */
    protected CoordinateSystemAxisBaseTypeImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * Returns the static Ecore metaclass backing this implementation.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return GmlPackage.eINSTANCE.getCoordinateSystemAxisBaseType();
    }

} //CoordinateSystemAxisBaseTypeImpl
| {
"content_hash": "6cd43bf69f4febaeca6d235aa589ea21",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 123,
"avg_line_length": 22.13157894736842,
"alnum_prop": 0.6825208085612366,
"repo_name": "markus1978/citygml4emf",
"id": "3c4ae39f6e4d756e95102b1da1c8c09bcd0d20a2",
"size": "890",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "de.hub.citygml.emf.ecore/src/net/opengis/gml/impl/CoordinateSystemAxisBaseTypeImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "34817194"
}
],
"symlink_target": ""
} |
namespace base {
class FilePath;
}
namespace gfx {
class ImageSkia;
class Size;
}
#if defined(TOOLKIT_VIEWS)
namespace views {
class View;
}
#endif
namespace app_list {
class AppListModel;
class AppListViewDelegateObserver;
class SearchResult;
class SpeechUIModel;
// Delegate interface through which the app list view queries and drives its
// embedder: model access, search, speech recognition, profile switching and
// the various "open X" actions. All members are pure virtual except the
// observer hooks, which default to no-ops.
class APP_LIST_EXPORT AppListViewDelegate {
 public:
  // A user of the app list.
  struct APP_LIST_EXPORT User {
    User();
    ~User();

    // Whether or not this user is the current user of the app list.
    bool active;

    // The name of this user.
    base::string16 name;

    // The email address of this user.
    base::string16 email;

    // The path to this user's profile directory.
    base::FilePath profile_path;
  };
  typedef std::vector<User> Users;

  virtual ~AppListViewDelegate() {}

  // Whether to force the use of a native desktop widget when the app list
  // window is first created.
  virtual bool ForceNativeDesktop() const = 0;

  // Sets the delegate to use the profile at |profile_path|. This is currently
  // only used by non-Ash Windows.
  virtual void SetProfileByPath(const base::FilePath& profile_path) = 0;

  // Gets the model associated with the view delegate. The model may be owned
  // by the delegate, or owned elsewhere (e.g. a profile keyed service).
  virtual AppListModel* GetModel() = 0;

  // Gets the SpeechUIModel for the app list. Owned by the AppListViewDelegate.
  virtual SpeechUIModel* GetSpeechUI() = 0;

  // Gets a path to a shortcut for the given app. Returns asynchronously as the
  // shortcut may not exist yet.
  virtual void GetShortcutPathForApp(
      const std::string& app_id,
      const base::Callback<void(const base::FilePath&)>& callback) = 0;

  // Invoked to start a new search. Delegate collects query input from
  // SearchBoxModel and populates SearchResults. Both models are sub models
  // of AppListModel.
  virtual void StartSearch() = 0;

  // Invoked to stop the current search.
  virtual void StopSearch() = 0;

  // Invoked to open the search result.
  virtual void OpenSearchResult(SearchResult* result,
                                bool auto_launch,
                                int event_flags) = 0;

  // Called to invoke a custom action on |result|. |action_index| corresponds
  // to the index of an icon in |result.action_icons()|.
  virtual void InvokeSearchResultAction(SearchResult* result,
                                        int action_index,
                                        int event_flags) = 0;

  // Gets the timeout for auto-launching the first search result, or 0 if the
  // auto-launch should not happen for the current search session.
  virtual base::TimeDelta GetAutoLaunchTimeout() = 0;

  // Invoked when the auto-launch is canceled by the user action.
  virtual void AutoLaunchCanceled() = 0;

  // Invoked when the app list UI is created.
  virtual void ViewInitialized() = 0;

  // Invoked to dismiss app list. This may leave the view open but hidden from
  // the user.
  virtual void Dismiss() = 0;

  // Invoked when the app list is closing.
  virtual void ViewClosing() = 0;

  // Returns the icon to be displayed in the window and taskbar.
  virtual gfx::ImageSkia GetWindowIcon() = 0;

  // Open the settings UI.
  virtual void OpenSettings() = 0;

  // Open the help UI.
  virtual void OpenHelp() = 0;

  // Open the feedback UI.
  virtual void OpenFeedback() = 0;

  // Invoked to toggle the status of speech recognition.
  virtual void StartSpeechRecognition() = 0;
  virtual void StopSpeechRecognition() = 0;

  // Shows the app list for the profile specified by |profile_path|.
  virtual void ShowForProfileByPath(const base::FilePath& profile_path) = 0;

#if defined(TOOLKIT_VIEWS)
  // Creates the web view for the start page. The caller takes the ownership of
  // the returned view.
  virtual views::View* CreateStartPageWebView(const gfx::Size& size) = 0;

  // Creates the web views for the user-specified custom pages. The caller takes
  // ownership of the returned views.
  virtual std::vector<views::View*> CreateCustomPageWebViews(
      const gfx::Size& size) = 0;

  // Invoked when the custom launcher page's animation changes.
  virtual void CustomLauncherPageAnimationChanged(double progress) = 0;

  // Invoked when the custom launcher page's subpage should be popped.
  virtual void CustomLauncherPagePopSubpage() = 0;
#endif

  // Returns true if the delegate supports speech recognition.
  virtual bool IsSpeechRecognitionEnabled() = 0;

  // Returns the list of users (for AppListMenu).
  virtual const Users& GetUsers() const = 0;

  // Returns true if the app list should be centered and in landscape mode.
  virtual bool ShouldCenterWindow() const = 0;

  // Adds/removes an observer for profile changes. Default no-op so delegates
  // without observer support need not override these.
  virtual void AddObserver(AppListViewDelegateObserver* observer) {}
  virtual void RemoveObserver(AppListViewDelegateObserver* observer) {}
};
} // namespace app_list
#endif // UI_APP_LIST_APP_LIST_VIEW_DELEGATE_H_
| {
"content_hash": "834341a55810480b0bfe09a6f4d7732e",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 80,
"avg_line_length": 32.05806451612903,
"alnum_prop": 0.7017508553028778,
"repo_name": "hujiajie/chromium-crosswalk",
"id": "349b976affbfdb8f4c3e8be7074043f02a758884",
"size": "5443",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "ui/app_list/app_list_view_delegate.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
package org.apache.lucene.index;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Collections;
import java.util.Iterator;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene3x.Lucene3xCodec;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.FieldInfo.DocValuesType;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
public class TestIndexableField extends LuceneTestCase {

  // A hand-rolled IndexableField whose storage/indexing/term-vector behavior
  // is derived deterministically from a per-field counter, so the
  // verification loop below can recompute exactly what each field must
  // contain.
  private class MyField implements IndexableField {

    private final int counter;

    // Anonymous field type: every flag is a pure function of `counter`.
    private final IndexableFieldType fieldType = new IndexableFieldType() {
      @Override
      public boolean indexed() {
        return (counter % 10) != 3;
      }

      @Override
      public boolean stored() {
        return (counter & 1) == 0 || (counter % 10) == 3;
      }

      @Override
      public boolean tokenized() {
        return true;
      }

      @Override
      public boolean storeTermVectors() {
        return indexed() && counter % 2 == 1 && counter % 10 != 9;
      }

      @Override
      public boolean storeTermVectorOffsets() {
        return storeTermVectors() && counter % 10 != 9;
      }

      @Override
      public boolean storeTermVectorPositions() {
        return storeTermVectors() && counter % 10 != 9;
      }

      @Override
      public boolean storeTermVectorPayloads() {
        if (Codec.getDefault() instanceof Lucene3xCodec) {
          return false; // 3.x doesnt support
        } else {
          return storeTermVectors() && counter % 10 != 9;
        }
      }

      @Override
      public boolean omitNorms() {
        return false;
      }

      @Override
      public FieldInfo.IndexOptions indexOptions() {
        return FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
      }

      @Override
      public DocValuesType docValueType() {
        return null;
      }
    };

    public MyField(int counter) {
      this.counter = counter;
    }

    @Override
    public String name() {
      return "f" + counter;
    }

    @Override
    public float boost() {
      return 1.0f + random().nextFloat();
    }

    // Fields with counter % 10 == 3 are binary: ten bytes derived from the
    // counter.
    @Override
    public BytesRef binaryValue() {
      if ((counter%10) == 3) {
        final byte[] bytes = new byte[10];
        for(int idx=0;idx<bytes.length;idx++) {
          bytes[idx] = (byte) (counter+idx);
        }
        return new BytesRef(bytes, 0, bytes.length);
      } else {
        return null;
      }
    }

    // counter % 10 == 3 is binary-only and counter % 10 == 7 is reader-only,
    // so neither exposes a string value.
    @Override
    public String stringValue() {
      final int fieldID = counter%10;
      if (fieldID != 3 && fieldID != 7) {
        return "text " + counter;
      } else {
        return null;
      }
    }

    @Override
    public Reader readerValue() {
      if (counter%10 == 7) {
        return new StringReader("text " + counter);
      } else {
        return null;
      }
    }

    @Override
    public Number numericValue() {
      return null;
    }

    @Override
    public IndexableFieldType fieldType() {
      return fieldType;
    }

    @Override
    public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) throws IOException {
      return readerValue() != null ? analyzer.tokenStream(name(), readerValue()) :
        analyzer.tokenStream(name(), new StringReader(stringValue()));
    }
  }

  // Silly test showing how to index documents w/o using Lucene's core
  // Document nor Field class
  public void testArbitraryFields() throws Exception {

    final Directory dir = newDirectory();
    final RandomIndexWriter w = new RandomIndexWriter(random(), dir);

    final int NUM_DOCS = atLeast(27);
    if (VERBOSE) {
      System.out.println("TEST: " + NUM_DOCS + " docs");
    }
    final int[] fieldsPerDoc = new int[NUM_DOCS];
    int baseCount = 0;

    for(int docCount=0;docCount<NUM_DOCS;docCount++) {
      // Each doc gets a leading "id" field plus fieldCount-1 MyFields whose
      // counters continue from the previous doc (baseCount).
      final int fieldCount = TestUtil.nextInt(random(), 1, 17);
      fieldsPerDoc[docCount] = fieldCount-1;

      final int finalDocCount = docCount;
      if (VERBOSE) {
        System.out.println("TEST: " + fieldCount + " fields in doc " + docCount);
      }

      final int finalBaseCount = baseCount;
      baseCount += fieldCount-1;

      // Feed the writer a bare Iterable<IndexableField> instead of a Document.
      w.addDocument(new Iterable<IndexableField>() {
        @Override
        public Iterator<IndexableField> iterator() {
          return new Iterator<IndexableField>() {
            int fieldUpto;

            @Override
            public boolean hasNext() {
              return fieldUpto < fieldCount;
            }

            @Override
            public IndexableField next() {
              assert fieldUpto < fieldCount;
              if (fieldUpto == 0) {
                fieldUpto = 1;
                return newStringField("id", ""+finalDocCount, Field.Store.YES);
              } else {
                return new MyField(finalBaseCount + (fieldUpto++-1));
              }
            }

            @Override
            public void remove() {
              throw new UnsupportedOperationException();
            }
          };
        }
      });
    }

    final IndexReader r = w.getReader();
    w.close();

    final IndexSearcher s = newSearcher(r);
    int counter = 0;
    for(int id=0;id<NUM_DOCS;id++) {
      if (VERBOSE) {
        System.out.println("TEST: verify doc id=" + id + " (" + fieldsPerDoc[id] + " fields) counter=" + counter);
      }

      final TopDocs hits = s.search(new TermQuery(new Term("id", ""+id)), 1);
      assertEquals(1, hits.totalHits);
      final int docID = hits.scoreDocs[0].doc;
      final Document doc = s.doc(docID);
      final int endCounter = counter + fieldsPerDoc[id];
      while(counter < endCounter) {
        // Recompute the expected flags for this counter, mirroring the
        // anonymous IndexableFieldType logic in MyField.
        final String name = "f" + counter;
        final int fieldID = counter % 10;

        final boolean stored = (counter&1) == 0 || fieldID == 3;
        final boolean binary = fieldID == 3;
        final boolean indexed = fieldID != 3;

        final String stringValue;
        if (fieldID != 3 && fieldID != 9) {
          stringValue = "text " + counter;
        } else {
          stringValue = null;
        }

        // stored:
        if (stored) {
          IndexableField f = doc.getField(name);
          assertNotNull("doc " + id + " doesn't have field f" + counter, f);
          if (binary) {
            assertNotNull("doc " + id + " doesn't have field f" + counter, f);
            final BytesRef b = f.binaryValue();
            assertNotNull(b);
            assertEquals(10, b.length);
            for(int idx=0;idx<10;idx++) {
              assertEquals((byte) (idx+counter), b.bytes[b.offset+idx]);
            }
          } else {
            assert stringValue != null;
            assertEquals(stringValue, f.stringValue());
          }
        }

        if (indexed) {
          final boolean tv = counter % 2 == 1 && fieldID != 9;
          if (tv) {
            // Term vectors were stored: each field indexes exactly two
            // terms, "text" and the counter value.
            final Terms tfv = r.getTermVectors(docID).terms(name);
            assertNotNull(tfv);
            TermsEnum termsEnum = tfv.iterator(null);
            assertEquals(new BytesRef(""+counter), termsEnum.next());
            assertEquals(1, termsEnum.totalTermFreq());
            DocsAndPositionsEnum dpEnum = termsEnum.docsAndPositions(null, null);
            assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
            assertEquals(1, dpEnum.freq());
            assertEquals(1, dpEnum.nextPosition());

            assertEquals(new BytesRef("text"), termsEnum.next());
            assertEquals(1, termsEnum.totalTermFreq());
            dpEnum = termsEnum.docsAndPositions(null, dpEnum);
            assertTrue(dpEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
            assertEquals(1, dpEnum.freq());
            assertEquals(0, dpEnum.nextPosition());

            assertNull(termsEnum.next());

            // TODO: offsets
          } else {
            Fields vectors = r.getTermVectors(docID);
            assertTrue(vectors == null || vectors.terms(name) == null);
          }

          // Both terms must be searchable in conjunction with the doc id.
          BooleanQuery bq = new BooleanQuery();
          bq.add(new TermQuery(new Term("id", ""+id)), BooleanClause.Occur.MUST);
          bq.add(new TermQuery(new Term(name, "text")), BooleanClause.Occur.MUST);
          final TopDocs hits2 = s.search(bq, 1);
          assertEquals(1, hits2.totalHits);
          assertEquals(docID, hits2.scoreDocs[0].doc);

          bq = new BooleanQuery();
          bq.add(new TermQuery(new Term("id", ""+id)), BooleanClause.Occur.MUST);
          bq.add(new TermQuery(new Term(name, ""+counter)), BooleanClause.Occur.MUST);
          final TopDocs hits3 = s.search(bq, 1);
          assertEquals(1, hits3.totalHits);
          assertEquals(docID, hits3.scoreDocs[0].doc);
        }

        counter++;
      }
    }

    r.close();
    dir.close();
  }

  // A stored-only field type that nonetheless requests term vectors, which
  // IndexWriter must reject (see LUCENE-5611 below).
  private static class CustomField implements IndexableField {
    @Override
    public BytesRef binaryValue() {
      return null;
    }

    @Override
    public String stringValue() {
      return "foobar";
    }

    @Override
    public Reader readerValue() {
      return null;
    }

    @Override
    public float boost() {
      return 1.0f;
    }

    @Override
    public Number numericValue() {
      return null;
    }

    @Override
    public String name() {
      return "field";
    }

    @Override
    public TokenStream tokenStream(Analyzer a, TokenStream reuse) {
      return null;
    }

    @Override
    public IndexableFieldType fieldType() {
      FieldType ft = new FieldType(StoredField.TYPE);
      ft.setStoreTermVectors(true);
      ft.freeze();
      return ft;
    }
  }

  // LUCENE-5611
  public void testNotIndexedTermVectors() throws Exception {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    try {
      w.addDocument(Collections.<IndexableField>singletonList(new CustomField()));
      fail("didn't hit exception");
    } catch (IllegalArgumentException iae) {
      // expected
    }
    w.close();
    dir.close();
  }
}
| {
"content_hash": "f2d2f25a68e2b0a3d921186e26a55eea",
"timestamp": "",
"source": "github",
"line_count": 369,
"max_line_length": 114,
"avg_line_length": 28.75338753387534,
"alnum_prop": 0.5920829406220547,
"repo_name": "williamchengit/TestRepo",
"id": "a2d3798890287599f8dcbdfe4c6f1bb0c1946d2c",
"size": "11411",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "lucene/core/src/test/org/apache/lucene/index/TestIndexableField.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
namespace GoogleARCoreInternal
{
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using GoogleARCore;
using GoogleARCoreInternal;
using UnityEngine;
#if UNITY_IOS && !UNITY_EDITOR
using AndroidImport = GoogleARCoreInternal.DllImportNoop;
using IOSImport = System.Runtime.InteropServices.DllImportAttribute;
#else
using AndroidImport = System.Runtime.InteropServices.DllImportAttribute;
using IOSImport = GoogleARCoreInternal.DllImportNoop;
#endif
/// <summary>
/// Thin managed wrapper around the native ArAugmentedImage_* functions of
/// the ARCore C API.
/// </summary>
internal class AugmentedImageApi
{
    // Session owning the native handles that are passed to the C API.
    private NativeSession m_NativeSession;

    public AugmentedImageApi(NativeSession nativeSession)
    {
        m_NativeSession = nativeSession;
    }

    /// <summary>
    /// Returns the image's index within its image database.
    /// </summary>
    public int GetDatabaseIndex(IntPtr augmentedImageHandle)
    {
        int databaseIndex = -1;
        ExternApi.ArAugmentedImage_getIndex(
            m_NativeSession.SessionHandle, augmentedImageHandle, ref databaseIndex);
        return databaseIndex;
    }

    /// <summary>
    /// Extracts the center pose of the image as a Unity Pose.
    /// </summary>
    public Pose GetCenterPose(IntPtr augmentedImageHandle)
    {
        IntPtr nativePoseHandle = m_NativeSession.PoseApi.Create();
        ExternApi.ArAugmentedImage_getCenterPose(
            m_NativeSession.SessionHandle, augmentedImageHandle, nativePoseHandle);
        Pose centerPose = m_NativeSession.PoseApi.ExtractPoseValue(nativePoseHandle);

        // The temporary native pose must be released once its value is copied.
        m_NativeSession.PoseApi.Destroy(nativePoseHandle);
        return centerPose;
    }

    /// <summary>
    /// Returns the estimated extent of the image along its local X axis.
    /// </summary>
    public float GetExtentX(IntPtr augmentedImageHandle)
    {
        float extentX = 0f;
        ExternApi.ArAugmentedImage_getExtentX(
            m_NativeSession.SessionHandle, augmentedImageHandle, ref extentX);
        return extentX;
    }

    /// <summary>
    /// Returns the estimated extent of the image along its local Z axis.
    /// </summary>
    public float GetExtentZ(IntPtr augmentedImageHandle)
    {
        float extentZ = 0f;
        ExternApi.ArAugmentedImage_getExtentZ(
            m_NativeSession.SessionHandle, augmentedImageHandle, ref extentZ);
        return extentZ;
    }

    /// <summary>
    /// Returns the name the image was registered under in its database.
    /// </summary>
    public string GetName(IntPtr augmentedImageHandle)
    {
        IntPtr namePointer = IntPtr.Zero;
        ExternApi.ArAugmentedImage_acquireName(
            m_NativeSession.SessionHandle, augmentedImageHandle, ref namePointer);
        string imageName = Marshal.PtrToStringAnsi(namePointer);

        // The native side allocated the string; release it after copying.
        ExternApi.ArString_release(namePointer);
        return imageName;
    }

    /// <summary>
    /// Returns how the image is currently being tracked.
    /// </summary>
    public AugmentedImageTrackingMethod GetTrackingMethod(IntPtr augmentedImageHandle)
    {
        AugmentedImageTrackingMethod trackingMethod = AugmentedImageTrackingMethod.NotTracking;
        ExternApi.ArAugmentedImage_getTrackingMethod(
            m_NativeSession.SessionHandle, augmentedImageHandle, ref trackingMethod);
        return trackingMethod;
    }

    // P/Invoke declarations; signatures must match the native ARCore API.
    private struct ExternApi
    {
#pragma warning disable 626
        [AndroidImport(ApiConstants.ARCoreNativeApi)]
        public static extern void ArAugmentedImage_getIndex(IntPtr sessionHandle,
            IntPtr augmentedImageHandle, ref int outIndex);

        [AndroidImport(ApiConstants.ARCoreNativeApi)]
        public static extern void ArAugmentedImage_getCenterPose(IntPtr sessionHandle,
            IntPtr augmentedImageHandle, IntPtr outPoseHandle);

        [AndroidImport(ApiConstants.ARCoreNativeApi)]
        public static extern void ArAugmentedImage_getExtentX(IntPtr sessionHandle,
            IntPtr augmentedImageHandle, ref float outExtentX);

        [AndroidImport(ApiConstants.ARCoreNativeApi)]
        public static extern void ArAugmentedImage_getExtentZ(IntPtr sessionHandle,
            IntPtr augmentedImageHandle, ref float outExtentZ);

        [AndroidImport(ApiConstants.ARCoreNativeApi)]
        public static extern void ArAugmentedImage_acquireName(IntPtr sessionHandle,
            IntPtr augmentedImageHandle, ref IntPtr outName);

        [AndroidImport(ApiConstants.ARCoreNativeApi)]
        public static extern void ArAugmentedImage_getTrackingMethod(IntPtr sessionHandle,
            IntPtr augmentedImageHandle, ref AugmentedImageTrackingMethod trackingMethod);

        [AndroidImport(ApiConstants.ARCoreNativeApi)]
        public static extern void ArString_release(IntPtr str);
#pragma warning restore 626
    }
}
}
| {
"content_hash": "c68e037aec6a528161258aded925468f",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 100,
"avg_line_length": 39.598214285714285,
"alnum_prop": 0.6795941375422774,
"repo_name": "googlecreativelab/sounds-in-space",
"id": "e53ebd7c95e604163d885b15a5d63b725c9d4dc1",
"size": "5275",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Assets/GoogleARCore/SDK/Scripts/Api/Wrappers/AugmentedImageApi.cs",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "1750661"
},
{
"name": "HLSL",
"bytes": "7372"
},
{
"name": "Objective-C",
"bytes": "957"
},
{
"name": "Objective-C++",
"bytes": "1989"
},
{
"name": "ShaderLab",
"bytes": "82364"
}
],
"symlink_target": ""
} |
def game_log(text):
    """Append ``text`` to the module-level game log.

    The log list is created lazily on first use so callers never need to
    initialize it themselves.

    Args:
        text: The message to record (rendered later as HTML lines).
    """
    global log
    # Explicit existence check instead of the previous try/except NameError
    # probe: same lazy-creation behavior, without exception-driven control
    # flow.
    if 'log' not in globals():
        log = []
    log.append(text)
def get_game_log_html():
    """Render the game log as an HTML fragment.

    Returns:
        str: The logged entries joined with ``<br>`` tags, or a placeholder
        message when nothing has been logged yet.
    """
    global log
    # The log is created lazily by game_log(); before the first entry it
    # does not exist at module level. Check explicitly rather than relying
    # on a NameError from the join below.
    if 'log' not in globals():
        return 'The game has not yet begun!'
    return '<br>'.join(log)
| {
"content_hash": "b0df6782a3b3a3555c019db8508f16b7",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 44,
"avg_line_length": 20.466666666666665,
"alnum_prop": 0.5667752442996743,
"repo_name": "robertecurtin/plutos-envy",
"id": "a05d9070c3e14c22d74f080855cf8807f6d6779f",
"size": "393",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "game/gamelog.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2744"
},
{
"name": "Python",
"bytes": "15684"
},
{
"name": "Shell",
"bytes": "518"
}
],
"symlink_target": ""
} |
package controllers
import (
"encoding/json"
"errors"
"github.com/gorilla/mux"
"github.com/rakutentech/dotconf-assist/src/backend/models"
"github.com/rakutentech/dotconf-assist/src/backend/settings"
"net/http"
"strconv"
"time"
)
// AddAppHandler creates a new app record from the JSON request body.
// It responds 401 when the x-auth-token header is invalid and 400 when the
// body cannot be parsed or persisted; 200 on success.
func AddAppHandler(w http.ResponseWriter, r *http.Request) {
	SetResponseHeaders(w, r)

	// Shared failure path: error response plus access-log entry.
	fail := func(code int, failure error) {
		WriteErrorResponse(w, code, failure)
		settings.WriteLog(r, code, failure, GetCurrentFuncName())
	}

	msg, userName, _ := ValidateToken(r.Header.Get("x-auth-token"))
	if userName == "" {
		fail(http.StatusUnauthorized, errors.New(msg))
		return
	}

	body, err := ParseRequest(w, r)
	if err != nil {
		fail(http.StatusBadRequest, err)
		return
	}

	var app models.App // a single object, not a slice
	if err := json.Unmarshal(body, &app); err != nil {
		fail(http.StatusBadRequest, err)
		return
	}
	if err := models.SaveApp(app); err != nil {
		fail(http.StatusBadRequest, err)
		return
	}

	WriteResponse(w, http.StatusOK, nil)
	settings.WriteLog(r, http.StatusOK, nil, GetCurrentFuncName())
}
// GetAppsHandler returns every app visible to the authenticated user,
// optionally filtered by the "env" and "user" query parameters.
func GetAppsHandler(w http.ResponseWriter, r *http.Request) {
	SetResponseHeaders(w, r)

	// Shared failure path: error response plus access-log entry.
	fail := func(code int, failure error) {
		WriteErrorResponse(w, code, failure)
		settings.WriteLog(r, code, failure, GetCurrentFuncName())
	}

	msg, userName, _ := ValidateToken(r.Header.Get("x-auth-token"))
	if userName == "" {
		fail(http.StatusUnauthorized, errors.New(msg))
		return
	}

	// An absent filter becomes "%" so it matches everything downstream.
	params := []string{
		ConvertEmpty2Percent(r.URL.Query().Get("env")),
		ConvertEmpty2Percent(r.URL.Query().Get("user")),
	}
	isAdmin := userName == "admin"

	apps, err := models.GetApps(params, true, true, true, isAdmin)
	if err != nil {
		fail(http.StatusBadRequest, err)
		return
	}
	if err := WriteResponse(w, http.StatusOK, apps); err != nil {
		fail(http.StatusBadRequest, err)
		return
	}

	settings.WriteLog(r, http.StatusOK, nil, GetCurrentFuncName())
}
// GetAppHandler returns the single app identified by the numeric {id} path
// variable. Responds 401 on an invalid token, 404 when the record does not
// exist, and 400 on any other failure (including a non-numeric id).
func GetAppHandler(w http.ResponseWriter, r *http.Request) {
	SetResponseHeaders(w, r)
	var err error
	var responseCode int = http.StatusBadRequest
	var app models.App
	var appID int
	isAdmin := false
	params := []string{"", ""} // need to improve
	vars := mux.Vars(r)
	token := r.Header.Get("x-auth-token")
	msg, userName, _ := ValidateToken(token)
	if userName == "" {
		err = errors.New(msg)
		responseCode = http.StatusUnauthorized
		goto Error
	}
	if userName == "admin" {
		isAdmin = true
	}
	// Bug fix: the Atoi error was previously discarded, so a non-numeric id
	// silently became 0 and was looked up. Reject it as a 400 instead.
	if appID, err = strconv.Atoi(vars["id"]); err != nil {
		goto Error
	}
	if app, err = models.GetApp(params, appID, false, isAdmin); err != nil {
		if err.Error() == "record not found" {
			responseCode = http.StatusNotFound
		}
		goto Error
	}
	if err = WriteResponse(w, http.StatusOK, app); err != nil {
		goto Error
	}
	responseCode = http.StatusOK
	settings.WriteLog(r, responseCode, nil, GetCurrentFuncName())
	return
Error:
	WriteErrorResponse(w, responseCode, err)
	settings.WriteLog(r, responseCode, err, GetCurrentFuncName())
}
// UpdateAppHandler replaces the app identified by the numeric {id} path
// variable with the JSON request body, stamping UpdatedAt. Responds 401 on
// an invalid token and 400 on any parse/persistence failure (including a
// non-numeric id).
func UpdateAppHandler(w http.ResponseWriter, r *http.Request) {
	SetResponseHeaders(w, r)
	var err error
	var responseCode int = http.StatusBadRequest
	var app models.App // a single object, not a slice
	var body []byte
	var appID int
	vars := mux.Vars(r)
	token := r.Header.Get("x-auth-token")
	msg, userName, _ := ValidateToken(token)
	if userName == "" {
		err = errors.New(msg)
		responseCode = http.StatusUnauthorized
		goto Error
	}
	// Bug fix: the Atoi error was previously discarded, so a non-numeric id
	// silently became 0 and was updated. Reject it as a 400 instead.
	if appID, err = strconv.Atoi(vars["id"]); err != nil {
		goto Error
	}
	if body, err = ParseRequest(w, r); err != nil {
		goto Error
	}
	if err = json.Unmarshal(body, &app); err != nil {
		goto Error
	}
	app.UpdatedAt = time.Now()
	if err = models.UpdateApp(appID, app); err != nil {
		goto Error
	}
	responseCode = http.StatusOK
	WriteResponse(w, responseCode, nil)
	settings.WriteLog(r, responseCode, nil, GetCurrentFuncName())
	return
Error:
	WriteErrorResponse(w, responseCode, err)
	settings.WriteLog(r, responseCode, err, GetCurrentFuncName())
}
// DeleteAppHandler removes the app identified by the numeric {id} path
// variable. Responds 401 on an invalid token and 400 on any failure
// (including a non-numeric id).
func DeleteAppHandler(w http.ResponseWriter, r *http.Request) {
	SetResponseHeaders(w, r)
	var err error
	var responseCode int = http.StatusBadRequest
	var appID int
	vars := mux.Vars(r)
	token := r.Header.Get("x-auth-token")
	msg, userName, _ := ValidateToken(token)
	if userName == "" {
		err = errors.New(msg)
		responseCode = http.StatusUnauthorized
		goto Error
	}
	// Bug fix: the Atoi error was previously discarded, so a non-numeric id
	// silently became 0 and was deleted. Reject it as a 400 instead.
	if appID, err = strconv.Atoi(vars["id"]); err != nil {
		goto Error
	}
	if err = models.DeleteApp(appID); err != nil {
		goto Error
	}
	responseCode = http.StatusOK
	WriteResponse(w, responseCode, nil)
	settings.WriteLog(r, responseCode, nil, GetCurrentFuncName())
	return
Error:
	WriteErrorResponse(w, responseCode, err)
	settings.WriteLog(r, responseCode, err, GetCurrentFuncName())
}
| {
"content_hash": "7fc7352392f2a2b20c8b6b11c5b01701",
"timestamp": "",
"source": "github",
"line_count": 191,
"max_line_length": 79,
"avg_line_length": 24.727748691099478,
"alnum_prop": 0.7059072623332627,
"repo_name": "rakutentech/dotconf-assist",
"id": "c522de94d66bf3bb3267f742350497d22d9f5078",
"size": "4723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/backend/controllers/log_app.go",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
var path = require('path');
var distPath = path.join(__dirname, '../../dist');
var templateCompilerPath = path.join(distPath, 'ember-template-compiler');
var module = QUnit.module;
var test = QUnit.test;
var templateCompiler;
module('ember-template-compiler.js', {
beforeEach: function() {
templateCompiler = require(templateCompilerPath);
},
afterEach: function() {
// clear the previously cached version of this module
delete require.cache[templateCompilerPath + '.js'];
},
});
test('can be required', function(assert) {
assert.ok(typeof templateCompiler.precompile === 'function', 'precompile function is present');
assert.ok(typeof templateCompiler.compile === 'function', 'compile function is present');
});
test('can access _Ember.ENV (private API used by ember-cli-htmlbars)', function(assert) {
assert.equal(typeof templateCompiler._Ember.ENV, 'object', '_Ember.ENV is present');
assert.notEqual(typeof templateCompiler._Ember.ENV, null, '_Ember.ENV is not null');
});
test('can access _Ember.FEATURES (private API used by ember-cli-htmlbars)', function(assert) {
assert.equal(typeof templateCompiler._Ember.FEATURES, 'object', '_Ember.FEATURES is present');
assert.notEqual(typeof templateCompiler._Ember.FEATURES, null, '_Ember.FEATURES is not null');
});
test('can access _Ember.VERSION (private API used by ember-cli-htmlbars)', function(assert) {
assert.equal(typeof templateCompiler._Ember.VERSION, 'string', '_Ember.VERSION is present');
});
test('can generate a template with a server side generated `id`', function(assert) {
var TemplateJSON = JSON.parse(templateCompiler.precompile('<div>simple text</div>'));
assert.ok(TemplateJSON.id, 'an `id` was generated');
});
| {
"content_hash": "5aae95a2a1f48ea41d3e34ea7eb916ff",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 97,
"avg_line_length": 38.53333333333333,
"alnum_prop": 0.7243367935409458,
"repo_name": "mike-north/ember.js",
"id": "74d5cdf7dd9e95cb047a5477f1c2e03bf0720333",
"size": "1734",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "tests/node/template-compiler-test.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "9511"
},
{
"name": "JavaScript",
"bytes": "3517274"
},
{
"name": "Shell",
"bytes": "776"
},
{
"name": "TypeScript",
"bytes": "627367"
}
],
"symlink_target": ""
} |
#include <sys/mman.h>
#include <sys/types.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <osquery/filesystem.h>
#include <osquery/flags.h>
#include <osquery/logger.h>
namespace osquery {
#define kLinuxMaxMemRead 0x10000
const std::string kLinuxMemPath = "/dev/mem";
FLAG(bool, disable_memory, false, "Disable physical memory reads");
// Reads |length| bytes of physical memory at offset |base| from an open
// /dev/mem descriptor into |buffer|. Retries interrupted reads (EINTR),
// stops on EOF, and fails unless exactly |length| bytes were read.
Status readMem(int fd, size_t base, size_t length, uint8_t* buffer) {
  if (lseek(fd, base, SEEK_SET) == -1) {
    return Status(1, "Cannot seek to physical base");
  }

  // Read from raw memory until an unrecoverable read error, EOF, or all of
  // the requested bytes are read.
  //
  // Bug fix: the previous loop initialized bytes_read to 0 while also using
  // `bytes_read != 0` in its condition, so the body never executed and every
  // call reported "Read incorrect number of bytes".
  size_t total_read = 0;
  while (total_read != length) {
    ssize_t bytes_read = read(fd, buffer + total_read, length - total_read);
    if (bytes_read == 0) {
      // EOF before the requested length was satisfied.
      break;
    }
    if (bytes_read == -1) {
      if (errno != EINTR) {
        return Status(1, "Cannot read requested length");
      }
      // EINTR: retry the read.
    } else {
      total_read += bytes_read;
    }
  }

  // The read loop finished without reading the requested number of bytes.
  if (total_read != length) {
    return Status(1, "Read incorrect number of bytes");
  }
  return Status(0, "OK");
}
/**
 * Read `length` bytes of physical memory starting at `base`.
 * On success *buffer points at a malloc'd copy owned by the caller; on any
 * failure *buffer is left null/freed and a failed Status is returned.
 * Tries mmap first, falling back to lseek/read via readMem().
 */
Status readRawMem(size_t base, size_t length, void** buffer) {
  *buffer = 0;
  if (FLAGS_disable_memory) {
    return Status(1, "Configuration has disabled physical memory reads");
  }
  // Refuse oversized requests; callers must chunk below kLinuxMaxMemRead.
  if (length > kLinuxMaxMemRead) {
    return Status(1, "Cowardly refusing to read a large number of bytes");
  }
  auto status = isReadable(kLinuxMemPath);
  if (!status.ok()) {
    // For non-su users *hopefully* raw memory is not readable.
    return status;
  }
  int fd = open(kLinuxMemPath.c_str(), O_RDONLY);
  if (fd < 0) {
    return Status(1, std::string("Cannot open ") + kLinuxMemPath);
  }
  if ((*buffer = malloc(length)) == nullptr) {
    close(fd);
    return Status(1, "Cannot allocate memory for read");
  }
  // mmap offsets must be page aligned: split base into page start + offset.
#ifdef _SC_PAGESIZE
  size_t offset = base % sysconf(_SC_PAGESIZE);
#else
  // getpagesize() is more or less deprecated.
  size_t offset = base % getpagesize();
#endif
  // Use memmap for maximum portability over read().
  auto map = mmap(0, offset + length, PROT_READ, MAP_SHARED, fd, base - offset);
  if (map == MAP_FAILED) {
    // Could fallback to a lseek/read.
    if (!readMem(fd, base, length, (uint8_t*)*buffer).ok()) {
      // Both strategies failed: release the buffer so the caller sees null.
      close(fd);
      free(*buffer);
      *buffer = nullptr;
      return Status(1, "Cannot memory map or seek/read memory");
    }
  } else {
    // Memory map succeeded, copy and unmap.
    memcpy(*buffer, (uint8_t*)map + offset, length);
    if (munmap(map, offset + length) == -1) {
      // Non-fatal: the data was already copied out.
      LOG(WARNING) << "Unable to unmap raw memory";
    }
  }
  close(fd);
  return Status(0, "OK");
}
}
| {
"content_hash": "a33ee97792001f1f291ce357025cfad2",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 80,
"avg_line_length": 26.038095238095238,
"alnum_prop": 0.6349670811997074,
"repo_name": "PickmanSec/osquery",
"id": "b67fe39654bb330c8c9a41dc4df5464f87a747b4",
"size": "3049",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "osquery/filesystem/linux/mem.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "128"
},
{
"name": "C",
"bytes": "38092"
},
{
"name": "C++",
"bytes": "1807301"
},
{
"name": "CMake",
"bytes": "82625"
},
{
"name": "Makefile",
"bytes": "4097"
},
{
"name": "Objective-C++",
"bytes": "56456"
},
{
"name": "Shell",
"bytes": "2038"
},
{
"name": "Thrift",
"bytes": "2969"
}
],
"symlink_target": ""
} |
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe "Encoding.com Queue facade" do
# Build a fresh facade over a mocked HTTP interface for every example.
before :each do
  @http = mock("Http Interface")
  @facade = EncodingDotCom::Queue.new(1234, "abcd", @http)
end
# Expect exactly one POST to the encoding.com endpoint whose XML body matches
# +xpath+; stub a successful (HTTP 200, empty body) response.
def expect_xml_with_xpath(xpath)
  @http.should_receive(:post).with(EncodingDotCom::Queue::ENDPOINT,
                                   EncodingXpathMatcher.new(xpath)).and_return(stub("Http Response", :code => "200", :body => ""))
end
# Stub the next POST to return HTTP 200 with +response_xml+ as the body.
def expect_response_xml(response_xml)
  response = stub("Http Response", :code => "200", :body => response_xml)
  @http.should_receive(:post).and_return(response)
end
# Every request type must carry the authentication envelope (userid/userkey).
describe "any xml sent to encoding.com" do
  [:add_and_process, :status].each do |method|
    it "should have a root query node for method #{method}" do
      expect_xml_with_xpath("/query")
      @facade.send(method, stub("source"))
    end
    it "should have a user_id node for method #{method}" do
      expect_xml_with_xpath("/query/userid[text()=1234]")
      @facade.send(method, stub("source"))
    end
    it "should have a user key node for method #{method}" do
      expect_xml_with_xpath("/query/userkey[text()='abcd']")
      @facade.send(method, stub("source"))
    end
  end
end
# Transport behaviour: HTTP 200 => success, non-200 => AvailabilityError,
# an <errors> element in the body => MessageError.
describe "request sent to encoding.com" do
  it "should return true if a success" do
    @http.should_receive(:post).and_return(stub("Http Response", :code => "200", :body => ""))
    @facade.add_and_process(stub("source"), {}).should be_true
  end
  it "should raise an AvailabilityError if response status from encoding.com is not a 200" do
    @http.should_receive(:post).and_return(stub("Http Response", :code => "503", :body => ""))
    lambda { @facade.add_and_process(stub("source"), {}) }.should raise_error(EncodingDotCom::AvailabilityError)
  end
  it "should raise an MessageError if response contains errors" do
    response = stub("Http Response",
                    :code => "200",
                    :body => "<?xml version=\"1.0\"?>\n<response><errors><error>Wrong query format</error></errors></response>\n")
    @http.should_receive(:post).and_return(response)
    lambda { @facade.add_and_process(stub("source"), {}) }.should raise_error(EncodingDotCom::MessageError)
  end
end
# add_and_process uses the 'AddMedia' action: queue + start processing.
describe "xml sent to encoding.com to process a video" do
  it "should have an action of 'AddMedia'." do
    expect_xml_with_xpath("/query/action[text()='AddMedia']")
    @facade.add_and_process(stub("source"), {})
  end
  it "should include the source url" do
    expect_xml_with_xpath("/query/source[text()='http://example.com/']")
    @facade.add_and_process("http://example.com/", {})
  end
  it "should include the formats provided" do
    expect_xml_with_xpath("/query/format/output[text()='flv']")
    format = EncodingDotCom::Format.create("output" => "flv")
    @facade.add_and_process(stub("source"), {stub("destination") => format})
  end
  it "should include the destination urls in the formats provided" do
    expect_xml_with_xpath("/query/format/destination[text()='http://example.com']")
    format = EncodingDotCom::Format.create("output" => "flv")
    @facade.add_and_process(stub("source"), {"http://example.com" => format})
  end
end
# add_and_process returns the MediaID allocated by encoding.com as an integer.
describe "calling add_and_process" do
  it "should return the a media id" do
    expect_response_xml("<response><MediaID>1234</MediaID></response>")
    @facade.add_and_process(stub("source"), {}).should == 1234
  end
end
# GetStatus request shape.
describe "xml sent to encoding.com to get the status of a job" do
  it "should include a action node with 'GetStatus'" do
    expect_xml_with_xpath("/query/action[text()='GetStatus']")
    @facade.status("mediaid")
  end
  it "should include a media id node" do
    expect_xml_with_xpath("/query/mediaid[text()='abcd']")
    @facade.status("abcd")
  end
end
# status returns only the plain status string out of the response.
describe "calling simple status method" do
  it "should respond with a string status from encoding.com" do
    expect_response_xml("<response><status>New</status></response>")
    @facade.status("mediaid").should == "New"
  end
end
# status_report parses the full GetStatus payload into a rich report object.
describe "calling status_report to retreive additional status details" do
  it "should parse progress properly" do
    # Fractional progress is exposed as a whole number.
    expect_response_xml("<response><status>New</status><progress>99.3</progress></response>")
    @facade.status_report("mediaid").progress.should == 99
  end
  # Example GetStatus response from encoding.com:
  # <response>
  #   <id>101</id>
  #   <userid>100</userid>
  #   <sourcefile>sourceURL</sourcefile>
  #   <status>Finished</status>
  #   <notifyurl>notifyURL</notifyurl>
  #   <created>2010-02-26 15:29:55</created>
  #   <started>2010-02-26 15:30:31</started>
  #   <finished>2010-02-26 15:30:48</finished>
  #   <downloaded>2010-02-26 15:30:07</downloaded>
  #   <filesize>4399104</filesize>
  #   <processor>RACKSPACE</processor>
  #   <time_left>0</time_left>
  #   <progress>100.0</progress>
  #   <format>
  #     <created>otherTime</created>
  #   </format>
  # </response>
  it "should parse everything properly" do
    expect_response_xml("<response><id>101</id><userid>100</userid><sourcefile>sourceURL</sourcefile><status>Finished</status><notifyurl>notifyURL</notifyurl><created>2010-02-26 15:29:55</created><started>2010-02-26 15:30:31</started><finished>2010-02-26 15:30:48</finished><downloaded>2010-02-26 15:30:07</downloaded><filesize>4399104</filesize><processor>RACKSPACE</processor><time_left>0</time_left><progress>100.0</progress><format><created>otherTime</created></format></response>")
    r = @facade.status_report("mediaid")
    r.progress.should == 100
    r.time_left.should == 0
    r.status.should == 'Finished'
    r.notify_url.should == 'notifyURL'
    r.created.should == Time.local(2010, 02, 26, 15, 29, 55)
    r.started.should < r.finished
    r.source_file.should == 'sourceURL'
    r.processor.should == 'RACKSPACE'
  end
end
# GetMediaList returns one item per queued media, exposed as snake_case
# accessors on MediaListItem values.
describe "calling get media list method" do
  it "should include an action node with 'GetMediaList'" do
    expect_xml_with_xpath("/query/action[text()='GetMediaList']")
    @facade.list
  end
  describe "returned MediaListItems" do
    before :each do
      expect_response_xml(<<-END
<response>
<media>
<mediafile>foo.wmv</mediafile>
<mediaid>1234</mediaid>
<mediastatus>Closed</mediastatus>
<createdate>2009-01-01 12:00:01</createdate>
<startdate>2009-01-01 12:00:02</startdate>
<finishdate>2009-01-01 12:00:03</finishdate>
</media>
</response>
END
      )
    end
    it "should return an array of media list values" do
      @facade.list.should be_kind_of(Enumerable)
    end
    it "should have a hash of returned attributes with a mediafile key" do
      @facade.list.first.media_file.should == "foo.wmv"
    end
    it "should have a hash of returned attributes with a mediaid key" do
      @facade.list.first.media_id.should == 1234
    end
    it "should have a hash of returned attributes with a mediastatus key" do
      @facade.list.first.media_status.should == "Closed"
    end
    it "should have a hash of returned attributes with a createdate key" do
      @facade.list.first.create_date.should == Time.local(2009, 1, 1, 12, 0, 1)
    end
    it "should have a hash of returned attributes with a startdate key" do
      @facade.list.first.start_date.should == Time.local(2009, 1, 1, 12, 0, 2)
    end
    it "should have a hash of returned attributes with a finishdate key" do
      @facade.list.first.finish_date.should == Time.local(2009, 1, 1, 12, 0, 3)
    end
  end
end
# CancelMedia removes a media item and everything attached to it.
describe "deleting specified media and all its items in the queue" do
  it "should have an action of 'CancelMedia'." do
    expect_xml_with_xpath("/query/action[text()='CancelMedia']")
    @facade.cancel(5678)
  end
  # Fixed description: the example exercises media id 5678, not 1234.
  it "should have a mediaid of 5678." do
    expect_xml_with_xpath("/query/mediaid[text()='5678']")
    @facade.cancel(5678)
  end
end
# ProcessMedia starts encoding for media that was only added to the queue.
describe "processing items already in the encoding" do
  # Fixed description: the expected action is 'ProcessMedia' (the original
  # name said 'CancelMedia', a copy-paste leftover).
  it "should have an action of 'ProcessMedia'." do
    expect_xml_with_xpath("/query/action[text()='ProcessMedia']")
    @facade.process(5678)
  end
  # Fixed description: the example exercises media id 5678, not 1234.
  it "should have a mediaid of 5678." do
    expect_xml_with_xpath("/query/mediaid[text()='5678']")
    @facade.process(5678)
  end
end
# UpdateMedia replaces the formats of an item already in the queue.
describe "updating formats of an item already in the encoding.com queue" do
  it "should have an action of 'UpdateMedia'." do
    expect_xml_with_xpath("/query/action[text()='UpdateMedia']")
    @facade.update(5678, {})
  end
  # Fixed description: the example exercises media id 5678, not 1234.
  it "should have a mediaid of 5678." do
    expect_xml_with_xpath("/query/mediaid[text()='5678']")
    @facade.update(5678, {})
  end
  it "should include the formats provided" do
    expect_xml_with_xpath("/query/format/output[text()='flv']")
    format = EncodingDotCom::Format.create("output" => "flv")
    @facade.update(5678, {stub("destination") => format})
  end
  it "should include the destination urls in the formats provided" do
    expect_xml_with_xpath("/query/format/destination[text()='http://example.com']")
    format = EncodingDotCom::Format.create("output" => "flv")
    @facade.update(5678, {"http://example.com" => format})
  end
end
# `add` queues media without starting processing; encoding.com models this
# with the 'AddMediaBenchmark' action (vs 'AddMedia' for add_and_process).
describe "adding an item to the encoding.com queue but not processing it" do
  # Fixed description: the expected action is 'AddMediaBenchmark', which is
  # what the xpath below asserts; the name previously said 'AddMedia'.
  it "should have an action of 'AddMediaBenchmark'." do
    expect_xml_with_xpath("/query/action[text()='AddMediaBenchmark']")
    @facade.add(stub("source"), {})
  end
  it "should include the source url" do
    expect_xml_with_xpath("/query/source[text()='http://example.com/']")
    @facade.add("http://example.com/", {})
  end
  it "should include the formats provided" do
    expect_xml_with_xpath("/query/format/output[text()='flv']")
    format = EncodingDotCom::Format.create("output" => "flv")
    @facade.add(stub("source"), {stub("destination") => format})
  end
  it "should include the destination urls in the formats provided" do
    expect_xml_with_xpath("/query/format/destination[text()='http://example.com']")
    format = EncodingDotCom::Format.create("output" => "flv")
    @facade.add(stub("source"), {"http://example.com" => format})
  end
  it "should allow setting arbitrary nodes (like notify)" do
    expect_xml_with_xpath("/query/notify[text()='testURL']")
    @facade.add("http://example.com/", {}, {'notify' => 'testURL'})
  end
end
# GetMediaInfo returns the media characteristics of a queued item.
describe "getting information about a specified media item" do
  it "should have an action of 'GetMediaInfo'." do
    expect_xml_with_xpath("/query/action[text()='GetMediaInfo']")
    @facade.info(5678)
  end
  # Fixed: this example previously called @facade.cancel (a copy-paste from
  # the CancelMedia specs) and claimed media id 1234; it should exercise
  # @facade.info with 5678 to match the xpath expectation.
  it "should have a mediaid of 5678." do
    expect_xml_with_xpath("/query/mediaid[text()='5678']")
    @facade.info(5678)
  end
  describe "returned media info object" do
    before :each do
      expect_response_xml(<<-END
<?xml version="1.0"?>
<response>
<bitrate>1807k</bitrate>
<duration>6464.83</duration>
<video_codec>mpeg4</video_codec>
<video_bitrate>1679k</video_bitrate>
<frame_rate>23.98</frame_rate>
<size>640x352</size>
<pixel_aspect_ratio>1:1</pixel_aspect_ratio>
<display_aspect_ratio>20:11</display_aspect_ratio>
<audio_codec>ac3</audio_codec>
<audio_sample_rate>48000</audio_sample_rate>
<audio_channels>2</audio_channels>
</response>
END
      )
    end
    it "should have a bitrate" do
      @facade.info(1234).bitrate.should == "1807k"
    end
    it "should have a duration" do
      @facade.info(1234).duration.should == 6464.83
    end
    it "should have a video_codec" do
      @facade.info(1234).video_codec.should == "mpeg4"
    end
    it "should have a video_bitrate" do
      @facade.info(1234).video_bitrate.should == "1679k"
    end
    it "should have a frame rate" do
      @facade.info(1234).frame_rate.should == 23.98
    end
    it "should have a size" do
      @facade.info(1234).size.should == "640x352"
    end
    # Fixed typo in description ("hava" -> "have").
    it "should have a pixel aspect ratio" do
      @facade.info(1234).pixel_aspect_ratio.should == "1:1"
    end
    it "should have a display aspect ratio" do
      @facade.info(1234).display_aspect_ratio.should == "20:11"
    end
    it "should have an audio codec" do
      @facade.info(1234).audio_codec.should == "ac3"
    end
    it "should have an audio sample rate" do
      @facade.info(1234).audio_sample_rate.should == 48_000
    end
    it "should have audio channels" do
      @facade.info(1234).audio_channels.should == 2
    end
  end
end
end
| {
"content_hash": "d7527b95fb279a2c8de62e6d084cb6e2",
"timestamp": "",
"source": "github",
"line_count": 354,
"max_line_length": 488,
"avg_line_length": 36.75423728813559,
"alnum_prop": 0.6237798785642917,
"repo_name": "grandcentrix/gem-encoding-dot-com",
"id": "77b26763f57751e67f3cf24f9a81588044d63001",
"size": "13011",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/queue_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "56791"
}
],
"symlink_target": ""
} |
import * as React from "react";
import { CarbonIconProps } from "../../";
/**
 * Carbon "thunderstorm--severe" icon at 32px, declared as a forward-ref
 * SVG component accepting the shared CarbonIconProps.
 */
declare const ThunderstormSevere32: React.ForwardRefExoticComponent<
  CarbonIconProps & React.RefAttributes<SVGSVGElement>
>;
export default ThunderstormSevere32;
| {
"content_hash": "aad62a3c7f5137b1b78645bcb3edc3c1",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 68,
"avg_line_length": 39.666666666666664,
"alnum_prop": 0.7899159663865546,
"repo_name": "mcliment/DefinitelyTyped",
"id": "647c93fa544aad1d1f6eeeb19ff4b468b9ced7c5",
"size": "238",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "types/carbon__icons-react/es/thunderstorm--severe/32.d.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CoffeeScript",
"bytes": "15"
},
{
"name": "Protocol Buffer",
"bytes": "678"
},
{
"name": "TypeScript",
"bytes": "17214021"
}
],
"symlink_target": ""
} |
import json
from datetime import datetime, timedelta
from unittest import mock
import redis
import responses
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.utils.timezone import get_current_timezone, now
from mockredis import mock_redis_client
from frigg.authentication.models import User
from frigg.builds.models import Build, BuildResult, Project
r = redis.Redis(**settings.REDIS_SETTINGS)
class ProjectTestCase(TestCase):
    """Unit tests for the Project model: string form, clone urls, build
    numbering, membership, approval and badge rendering."""
    fixtures = ['frigg/builds/fixtures/users.json']

    def test___str__(self):
        # String form is "<owner> / <name>".
        project = Project.objects.create(owner='frigg', name='frigg-worker')
        self.assertEqual(str(project), 'frigg / frigg-worker')

    def test_clone_url_public(self):
        # Public projects clone anonymously over https.
        project = Project(owner='frigg', name='frigg-worker', private=False)
        self.assertEqual(project.clone_url, 'https://github.com/frigg/frigg-worker.git')

    @mock.patch('frigg.builds.models.Project.github_token', '')
    def test_clone_url_private(self):
        # Private projects embed the github token (patched empty here).
        project = Project(owner='frigg', name='chewie', private=True)
        self.assertEqual(project.clone_url, 'https://@github.com/frigg/chewie.git')

    @mock.patch('frigg.builds.models.Project.github_token', '')
    def test_clone_url_ssh(self):
        project = Project(owner='frigg', name='chewie', should_clone_with_ssh=True)
        self.assertEqual(project.clone_url, 'git@github.com:frigg/chewie.git')

    def test_last_build_number(self):
        # 0 when no builds exist, otherwise the highest build_number.
        project = Project.objects.create(owner='frigg', name='frigg-worker', private=False)
        self.assertEqual(project.last_build_number, 0)
        Build.objects.create(project=project, build_number=42)
        self.assertEqual(project.last_build_number, 42)

    def test_auto_approval(self):
        project = Project.objects.create(owner='frigg', name='frigg')
        self.assertTrue(project.approved)

    @mock.patch('frigg.helpers.github.list_collaborators', lambda x: ['dumbledore'])
    def test_update_members(self):
        # Membership is synced from the github collaborators list.
        project = Project.objects.create(owner='frigg', name='frigg-worker', private=False)
        project.update_members()
        self.assertEqual(project.members.all().count(), 1)

    def test_start(self):
        # start_build copies the payload fields and allocates build number 1.
        project = Project.objects.create(owner='frigg', name='frigg')
        build = project.start_build({
            'branch': 'b',
            'sha': 's',
            'author': 'dumbledore',
            'pull_request_id': 0,
            'message': '',
        })
        self.assertEqual(build.branch, 'b')
        self.assertEqual(build.sha, 's')
        self.assertEqual(build.author, 'dumbledore')
        self.assertEqual(build.pull_request_id, 0)
        self.assertEqual(build.build_number, 1)
        self.assertEqual(project.last_build_number, 1)

    @mock.patch('frigg.builds.models.Build.start')
    def test_start_pull_request_with_earlier_build(self, mock_start):
        # A PR build for a commit that already has a branch build reuses the
        # existing build number instead of allocating a new one.
        data = {
            'branch': 'b',
            'sha': 's',
            'author': 'dumbledore',
            'pull_request_id': 0,
            'message': '',
        }
        project = Project.objects.create(owner='frigg', name='frigg')
        project.start_build(data)
        self.assertEqual(project.builds.count(), 1)
        self.assertEqual(project.last_build_number, 1)
        data['pull_request_id'] = 1
        self.assertEqual(project.builds.count(), 1)
        # NOTE(review): this assignment repeats the one above with no
        # intervening start_build call — looks like a leftover; verify intent.
        data['pull_request_id'] = 1
        build = project.start_build(data)
        self.assertEqual(build.branch, 'b')
        self.assertEqual(build.sha, 's')
        self.assertEqual(build.author, 'dumbledore')
        self.assertEqual(build.pull_request_id, 1)
        self.assertEqual(build.build_number, 1)
        self.assertEqual(project.last_build_number, 1)

    def test_average_time(self):
        # Average of a 10-minute and a 20-minute build is 15 minutes.
        project = Project.objects.create(owner='frigg', name='frigg-worker', private=False)
        build_options = dict(project=project, build_number=1,
                             start_time=datetime(2015, 5, 5, 5, 5, tzinfo=get_current_timezone()),
                             end_time=datetime(2015, 5, 5, 5, 15, tzinfo=get_current_timezone()))
        builds = [Build.objects.create(**build_options)]
        build_options = dict(project=project, build_number=2,
                             start_time=datetime(2015, 5, 5, 5, 5, tzinfo=get_current_timezone()),
                             end_time=datetime(2015, 5, 5, 5, 25, tzinfo=get_current_timezone()))
        builds += [Build.objects.create(**build_options)]
        self.assertEqual(project.average_time, timedelta(minutes=15))

    def test_number_of_members(self):
        project = Project.objects.create(owner='frigg', name='frigg-worker', private=False)
        self.assertEqual(project.number_of_members, 0)
        project.members.add(User.objects.get(pk=1))
        self.assertEqual(project.number_of_members, 1)

    @mock.patch('frigg.builds.models.get_badge')
    def test_get_badge_should_call_badge_with_last_build(self, mock_get_badge):
        # The build badge reflects the last build's success flag.
        project = Project.objects.create(owner='frigg', name='frigg-worker', private=False)
        build = Build.objects.create(project=project)
        BuildResult.objects.create(build=build, succeeded=True)
        self.assertIsNotNone(project.get_badge())
        mock_get_badge.assert_called_once_with(True)

    @mock.patch('frigg.builds.models.get_unknown_badge')
    def test_get_badge_should_call_unknown_badge_if_no_build(self, mock_get_unknown_badge):
        project = Project.objects.create(owner='frigg', name='frigg-worker', private=False)
        self.assertIsNotNone(project.get_badge())
        mock_get_unknown_badge.assert_called_once_with('build')

    @mock.patch('frigg.builds.models.get_coverage_badge')
    def test_get_coverage_badge_should_call_coverage_badge_with_last_build(self, mock_get_badge):
        # The coverage badge reflects the last build's coverage percentage.
        project = Project.objects.create(owner='frigg', name='frigg-worker', private=False)
        build = Build.objects.create(project=project)
        BuildResult.objects.create(build=build, succeeded=True, coverage=98)
        self.assertIsNotNone(project.get_coverage_badge())
        mock_get_badge.assert_called_once_with(98)

    @mock.patch('frigg.builds.models.get_unknown_badge')
    def test_get_coverage_badge_should_call_unknown_badge_if_no_buildt(self, mock_get_unknown):
        project = Project.objects.create(owner='frigg', name='frigg-worker', private=False)
        self.assertIsNotNone(project.get_coverage_badge())
        mock_get_unknown.assert_called_once_with('coverage')
class BuildTestCase(TestCase):
    """Unit tests for the Build model."""
    fixtures = ['frigg/builds/fixtures/users.json']

    def setUp(self):
        # Start every test from an empty redis queue and an approved project.
        r.flushall()
        self.project = Project.objects.create(owner='frigg', name='frigg-worker', approved=True)

    def test___str__(self):
        # String form is "<owner> / <name> / <branch> #<number>".
        build = Build.objects.create(project=self.project, branch='master', build_number=1)
        self.assertEqual(str(build), 'frigg / frigg-worker / master #1')
    @mock.patch('frigg.builds.models.Project.github_token', 'token')
    def test_queue_object(self):
        # The queue payload carries everything a worker needs to clone and
        # report back; pull_request_id is only present for PR builds.
        build = Build.objects.create(project=self.project, branch='master', sha='s', build_number=1)
        obj = build.queue_object
        self.assertEqual(obj['id'], build.pk)
        self.assertEqual(obj['branch'], build.branch)
        self.assertEqual(obj['sha'], build.sha)
        self.assertEqual(obj['image'], settings.DEFAULT_BUILD_IMAGE)
        self.assertEqual(obj['clone_url'], build.project.clone_url)
        self.assertEqual(obj['owner'], build.project.owner)
        self.assertEqual(obj['name'], build.project.name)
        self.assertEqual(obj['gh_token'], 'token')
        self.assertFalse('pull_request_id' in obj)
        build.pull_request_id = 42
        obj = build.queue_object
        self.assertEqual(obj['pull_request_id'], 42)
    def test_queue_object_have_environment_variables(self):
        # Non-secret variables travel in 'environment_variables' only.
        self.project.environment_variables.create(key='V', value=42, is_secret=False)
        build = Build.objects.create(project=self.project, branch='master', sha='s', build_number=1)
        obj = build.queue_object
        assert obj['environment_variables']['V'] == '42'
        assert 'V' not in obj['secrets']

    def test_queue_object_have_secrets_when_no_pull_request(self):
        # Secrets are exposed for regular builds on the 'master' branch.
        self.project.environment_variables.create(key='V', value=40, is_secret=True)
        build = Build.objects.create(project=self.project, branch='master', sha='s', build_number=1)
        obj = build.queue_object
        assert obj['secrets']['V'] == '40'
        assert 'V' not in obj['environment_variables']

    def test_queue_object_not_have_secrets_when_no_pull_request_and_custom_branch(self):
        # Secrets are withheld on non-master branches.
        self.project.environment_variables.create(key='V', value=40, is_secret=True)
        build = Build.objects.create(project=self.project, branch='custom', sha='s', build_number=1)
        obj = build.queue_object
        assert 'V' not in obj['secrets']
        assert 'V' not in obj['environment_variables']

    def test_queue_object_not_have_secrets_when_pull_request(self):
        # Secrets are withheld for pull-request builds (untrusted code).
        self.project.environment_variables.create(key='V', value=40, is_secret=True)
        build = Build.objects.create(
            project=self.project,
            branch='master',
            sha='s',
            build_number=1,
            pull_request_id=2
        )
        obj = build.queue_object
        assert 'V' not in obj['secrets']
    def test_queue_set_custom_image(self):
        # A project-level image overrides the default build image.
        custom_docker_image = 'frigg/frigg-test-dind'
        project = Project.objects.create(image=custom_docker_image)
        build = Build.objects.create(project=project)
        obj = build.queue_object
        self.assertEqual(obj['id'], build.pk)
        self.assertEqual(obj['image'], custom_docker_image)

    def test_color(self):
        # orange = pending/running, green = success, red = failure,
        # gray = result log present but without outcome data.
        build = Build.objects.create(project=self.project, branch='master', build_number=1)
        self.assertEqual(build.color, 'orange')
        result = BuildResult.objects.create(build=build, succeeded=True, result_log=[])
        self.assertEqual(build.color, 'green')
        result.still_running = True
        self.assertEqual(build.color, 'orange')
        result.still_running = False
        result.succeeded = False
        self.assertEqual(build.color, 'red')
        result.result_log = [{'task': ''}]
        self.assertEqual(build.color, 'gray')
    @responses.activate
    def test_send_webhook(self):
        # The webhook POST body carries sha, build url and the success state.
        responses.add(
            responses.POST,
            'http://w.frigg.io',
            body='Ok',
            content_type='application/json'
        )
        build = Build.objects.create(project=self.project, branch='master', build_number=1)
        BuildResult.objects.create(build=build, succeeded=True)
        response = build.send_webhook('http://w.frigg.io')
        request = json.loads(response.request.body)
        self.assertEqual(request['sha'], build.sha)
        self.assertEqual(request['build_url'], build.get_absolute_url())
        self.assertEqual(request['state'], build.result.succeeded)
    @mock.patch('frigg.helpers.github.set_commit_status')
    @mock.patch('redis.Redis', mock_redis_client)
    def test_start(self, mock_set_commit_status):
        # Starting wipes stale results and pushes a pending commit status.
        build = Build.objects.create(project=self.project, branch='master', build_number=1)
        BuildResult.objects.create(build=build, succeeded=True)
        build.start()
        self.assertEqual(BuildResult.objects.all().count(), 0)
        self.assertTrue(mock_set_commit_status.called)

    @mock.patch('frigg.helpers.github.set_commit_status')
    @mock.patch('redis.Redis', mock_redis_client)
    def test_start_restart_should_not_have_end_time(self, mock_set_commit_status):
        # Restarting a finished build must clear its end_time.
        build = Build.objects.create(project=self.project, branch='master', build_number=1,
                                     end_time=now())
        build.start()
        build = Build.objects.get(project=self.project, build_number=1)
        self.assertIsNone(build.end_time)
        self.assertTrue(mock_set_commit_status.called)

    @mock.patch('frigg.builds.models.BuildResult.create_not_approved')
    @mock.patch('redis.Redis', mock_redis_client)
    def test_start_not_approved(self, mock_create_not_approved):
        # Unapproved projects get a "not approved" result instead of a run.
        project = Project.objects.create(owner='tind', name='frigg', approved=False)
        build = Build.objects.create(project=project, branch='master', build_number=1)
        build.start()
        self.assertTrue(mock_create_not_approved.called)
    @mock.patch('frigg.builds.models.Build.start')
    def test_restart_should_start_if_not_in_queue(self, mock_start):
        # restart() re-enqueues only when the build is not already queued.
        project = Project.objects.create(owner='tind', name='frigg', approved=False)
        build = Build.objects.create(project=project, branch='master', build_number=1)
        build.start()
        r.rpop(project.queue_name)
        assert r.llen(project.queue_name) == 0
        build.restart()
        assert mock_start.called

    @mock.patch('frigg.builds.models.Build.start')
    def test_restart_should_not_start_if_already_in_queue(self, mock_start):
        project = Project.objects.create(owner='tind', name='frigg', approved=False)
        build = Build.objects.create(project=project, branch='master', build_number=1)
        r.lpush(project.queue_name, json.dumps(build.queue_object))
        build.restart()
        assert not mock_start.called
    def test_has_timed_out(self):
        # Without history the timeout is 60 minutes; with a known average
        # build time the threshold scales with that average instead.
        project = Project.objects.create(owner='frigg', name='frigg')
        build = Build.objects.create(project=project, build_number=1,
                                     start_time=now() - timedelta(minutes=61))
        self.assertTrue(build.has_timed_out())
        build.start_time = now()
        self.assertFalse(build.has_timed_out())
        with mock.patch('frigg.builds.models.Project.average_time', timedelta(seconds=120)):
            self.assertFalse(build.has_timed_out())
            build.start_time = now() - timedelta(seconds=60)
            self.assertFalse(build.has_timed_out())
            build.start_time = now() - timedelta(seconds=400)
            self.assertTrue(build.has_timed_out())
    def test_author_user(self):
        # author holds a username string; author_user resolves it to a User
        # instance, or None when no matching user exists.
        user = get_user_model().objects.get(pk=1)
        build = Build(
            project=self.project,
            branch='master',
            build_number=1,
            author=user.username
        )
        self.assertEqual(build.author_user, user)
        build.author = 'i'
        self.assertIsNone(build.author_user)

    def test_short_message(self):
        # short_message is the first line of the commit message.
        build = Build(
            project=self.project,
            branch='master',
            build_number=1,
            message='Multi\nLine\nMessage'
        )
        self.assertEqual(build.short_message, 'Multi')
        build = Build(
            project=self.project,
            branch='master',
            build_number=1,
            message='Single line message'
        )
        self.assertEqual(build.short_message, 'Single line message')

    def test_rendered_message(self):
        # Markdown in commit messages is rendered to HTML.
        build = Build(
            project=self.project,
            branch='master',
            build_number=1,
            message='Single **line** message'
        )
        self.assertEqual(build.rendered_message, '<p>Single <strong>line</strong> message</p>')
    @mock.patch('frigg.builds.models.Build.send_webhook')
    @mock.patch('frigg.helpers.github.set_commit_status')
    def test_handle_worker_report(self, mock_set_commit_status, mock_send_webhook):
        # A finished report sets end_time, pushes a commit status and fires
        # every registered webhook.
        build = Build.objects.create(
            project=self.project,
            branch='master',
            build_number=1,
        )
        build.handle_worker_report({
            'sha': 'superbhash',
            'clone_url': 'https://github.com/frigg/frigg-worker.git',
            'name': 'frigg-worker',
            'branch': 'master',
            'owner': 'frigg',
            'id': 1,
            'results': [
                {'task': 'make test', 'return_code': 0, 'succeeded': True, 'log': 'log'},
                {'task': 'make test'}
            ],
            'webhooks': ['http://example.com']
        })
        self.assertIsNotNone(Build.objects.get(pk=build.id).end_time)
        mock_set_commit_status.assert_called_once_with(build)
        mock_send_webhook.assert_called_once_with('http://example.com')

    @mock.patch('frigg.builds.models.Build.send_webhook')
    @mock.patch('frigg.helpers.github.set_commit_status')
    def test_handle_worker_host(self, mock_set_commit_status, mock_send_webhook):
        # NOTE(review): this is the same scenario as test_handle_worker_report
        # and never asserts on worker_host despite the name — looks like an
        # unfinished copy; confirm the intended coverage.
        build = Build.objects.create(
            project=self.project,
            branch='master',
            build_number=1,
        )
        build.handle_worker_report({
            'sha': 'superbhash',
            'clone_url': 'https://github.com/frigg/frigg-worker.git',
            'name': 'frigg-worker',
            'branch': 'master',
            'owner': 'frigg',
            'id': 1,
            'results': [
                {'task': 'make test', 'return_code': 0, 'succeeded': True, 'log': 'log'},
                {'task': 'make test'}
            ],
            'webhooks': ['http://example.com']
        })
        self.assertIsNotNone(Build.objects.get(pk=build.id).end_time)
        mock_set_commit_status.assert_called_once_with(build)
        mock_send_webhook.assert_called_once_with('http://example.com')
    @mock.patch('frigg.builds.models.Build.send_webhook')
    @mock.patch('frigg.helpers.github.set_commit_status')
    def test_handle_worker_report_still_running(self, mock_set_commit_status, mock_send_webhook):
        # A report with finished=False (pending tasks remain) must not set
        # end_time, but does record which worker host is running the build.
        build = Build.objects.create(
            project=self.project,
            branch='master',
            build_number=1,
        )
        build.handle_worker_report({
            'sha': 'superbhash',
            'clone_url': 'https://github.com/frigg/frigg-worker.git',
            'name': 'frigg-worker',
            'branch': 'master',
            'owner': 'frigg',
            'worker_host': 'albus.frigg.io',
            'finished': False,
            'id': 1,
            'results': [
                {'task': 'make test', 'return_code': 0, 'succeeded': True, 'log': 'log'},
                {'task': 'flake8', 'pending': True},
                {'task': 'make test'}
            ],
            'webhooks': ['http://example.com']
        })
        self.assertIsNone(Build.objects.get(pk=build.id).end_time)
        self.assertEqual(build.result.worker_host, 'albus.frigg.io')
    @mock.patch('frigg.builds.models.Project.average_time', timedelta(minutes=10))
    def test_estimated_finish_time(self):
        # Estimated finish = start_time + the project's average build time;
        # None until the build has started. Compared field-by-field to avoid
        # sub-minute flakiness.
        build = Build(
            project=self.project,
        )
        self.assertEqual(build.estimated_finish_time, None)
        build.start_time = now()
        self.assertEqual(build.estimated_finish_time.day, (now() + timedelta(minutes=10)).day)
        self.assertEqual(build.estimated_finish_time.hour, (now() + timedelta(minutes=10)).hour)
        self.assertEqual(build.estimated_finish_time.minute, (now() + timedelta(minutes=10)).minute)

    @mock.patch('frigg.deployments.models.PRDeployment.start')
    def test_initiate_deployment_with_specified_image(self, mock_deployment_start):
        start_time = datetime(2012, 12, 12, tzinfo=get_current_timezone())
        b1 = Build.objects.create(project=self.project, branch='master',
                                  build_number=4, start_time=start_time)
        deployment = b1.initiate_deployment({'image': 'frigg/super-image'})
        self.assertEqual(deployment.image, 'frigg/super-image')
        self.assertTrue(mock_deployment_start.called_once)

    @mock.patch('frigg.deployments.models.PRDeployment.start')
    def test_initiate_deployment_without_specified_image(self, mock_deployment_start):
        # Falls back to the configured preview image when none is given.
        start_time = datetime(2012, 12, 12, tzinfo=get_current_timezone())
        b1 = Build.objects.create(project=self.project, branch='master',
                                  build_number=4, start_time=start_time)
        deployment = b1.initiate_deployment({})
        self.assertEqual(deployment.image, settings.FRIGG_PREVIEW_IMAGE)
        self.assertTrue(mock_deployment_start.called_once)
    def test_delete_logs_should_remove_logs(self):
        """delete_logs() should blank out all four log fields on the result."""
        build = Build.objects.create(project=self.project, branch='master', build_number=4)
        result = BuildResult.objects.create(
            build=build,
            setup_log=[{"item": "something"}],
            service_log=[{"item": "something"}],
            result_log=[{"item": "something"}],
            after_log=[{"item": "something"}],
        )
        build.delete_logs()
        # Re-fetch so the asserts run against fresh DB state, not the stale
        # in-memory instance.
        result = BuildResult.objects.get(pk=result.pk)
        self.assertEqual(result.setup_log, [])
        # service_tasks / after_tasks presumably expose service_log /
        # after_log -- TODO(review): confirm the property mapping.
        self.assertEqual(result.service_tasks, [])
        self.assertEqual(result.result_log, [])
        self.assertEqual(result.after_tasks, [])
class BuildResultTestCase(TestCase):
    """Tests for the BuildResult model: string form, result evaluation,
    worker-payload ingestion, the *_tasks properties and coverage diffing."""
    def setUp(self):
        self.project = Project.objects.create(owner='frigg', name='frigg-worker')
        self.build = Build.objects.create(project=self.project, branch='master', build_number=1)
    def test___str__(self):
        result = BuildResult.objects.create(build=self.build)
        self.assertEqual(str(result), 'frigg / frigg-worker / master #1')
    def test_evaluate_results(self):
        # Expected behavior: a single failing task fails the whole result,
        # while tasks without a 'succeeded' key do not count against it.
        self.assertTrue(BuildResult.evaluate_results([{'succeeded': True}]))
        self.assertTrue(BuildResult.evaluate_results([{'succeeded': True}, {}]))
        self.assertFalse(BuildResult.evaluate_results([
            {'succeeded': True},
            {'succeeded': False}
        ]))
        self.assertFalse(BuildResult.evaluate_results([
            {'succeeded': False},
            {'succeeded': True}
        ]))
        self.assertFalse(BuildResult.evaluate_results([{'succeeded': False}, {}]))
    def test_create_not_approved(self):
        result = BuildResult.create_not_approved(self.build)
        self.assertEqual(result.build_id, self.build.pk)
        self.assertFalse(result.succeeded)
        assert result.tasks[0]['error'] == 'This project is not approved.'
        assert result.setup_tasks == []
        assert result.service_tasks == []
    def test_create_from_worker_payload(self):
        # Full payload including the optional service/setup/after results.
        BuildResult.create_from_worker_payload(self.build, {
            'sha': 'superbhash',
            'clone_url': 'https://github.com/frigg/frigg-worker.git',
            'name': 'frigg-worker',
            'branch': 'master',
            'owner': 'frigg',
            'worker_host': 'albus.frigg.io',
            'finished': False,
            'id': 1,
            'results': [
                {'task': 'make test', 'return_code': 0, 'succeeded': True, 'log': 'log'},
                {'task': 'flake8', 'pending': True},
                {'task': 'make test'}
            ],
            'service_results': [
                {'task': 'service postgresql start', 'return_code': 0, 'succeeded': True,
                 'log': 'log'},
            ],
            'setup_results': [
                {'task': 'make', 'return_code': 0, 'succeeded': True, 'log': 'log'},
            ],
            'after_results': [
                {'task': 'after', 'return_code': 0, 'succeeded': True, 'log': 'log'},
            ],
            'webhooks': ['http://example.com']
        })
        assert self.build.result.worker_host == 'albus.frigg.io'
        assert self.build.result.still_running
        assert isinstance(self.build.result.tasks, list)
        assert isinstance(self.build.result.setup_log, list)
        assert isinstance(self.build.result.service_tasks, list)
        assert isinstance(self.build.result.after_tasks, list)
    def test_create_from_worker_payload_without_optional_results(self):
        # The service/setup/after result lists are optional in the payload;
        # the properties should still yield lists.
        BuildResult.create_from_worker_payload(self.build, {
            'sha': 'superbhash',
            'clone_url': 'https://github.com/frigg/frigg-worker.git',
            'name': 'frigg-worker',
            'branch': 'master',
            'owner': 'frigg',
            'worker_host': 'albus.frigg.io',
            'finished': False,
            'id': 1,
            'results': [
                {'task': 'make test', 'return_code': 0, 'succeeded': True, 'log': 'log'},
                {'task': 'flake8', 'pending': True},
                {'task': 'make test'}
            ],
            'webhooks': ['http://example.com']
        })
        assert isinstance(self.build.result.tasks, list)
        assert isinstance(self.build.result.setup_log, list)
        assert isinstance(self.build.result.service_tasks, list)
        assert isinstance(self.build.result.after_tasks, list)
    def test_tasks(self):
        data = [
            {'task': 'tox', 'log': '{}', 'return_code': 0},
            {'task': 'tox', 'log': 'tested all the stuff\n1!"#$%&/()=?', 'return_code': 11},
            {'task': 'tox', 'return_log': 'fail', 'return_code': 'd'}
        ]
        result = BuildResult.objects.create(
            build=self.build,
            result_log=data
        )
        self.assertEqual(len(result.tasks), 3)
        self.assertEqual(result.tasks, data)
    def test_service_tasks(self):
        data = [
            {'task': 'tox', 'log': '{}', 'return_code': 0},
            {'task': 'tox', 'log': 'tested all the stuff\n1!"#$%&/()=?', 'return_code': 11},
            {'task': 'tox', 'return_log': 'fail', 'return_code': 'd'}
        ]
        result = BuildResult.objects.create(
            build=self.build,
            service_log=data
        )
        self.assertEqual(len(result.service_tasks), 3)
        self.assertEqual(result.service_tasks, data)
    def test_setup_tasks(self):
        data = [
            {'task': 'tox', 'log': '{}', 'return_code': 0},
            {'task': 'tox', 'log': 'tested all the stuff\n1!"#$%&/()=?', 'return_code': 11},
            {'task': 'tox', 'return_log': 'fail', 'return_code': 'd'}
        ]
        result = BuildResult.objects.create(
            build=self.build,
            setup_log=data
        )
        self.assertEqual(len(result.setup_tasks), 3)
        self.assertEqual(result.setup_tasks, data)
    def test_coverage_diff(self):
        # Diff is computed against the latest finished master build's coverage.
        start_time = datetime(2012, 12, 12, tzinfo=get_current_timezone())
        b1 = Build.objects.create(project=self.project, branch='i', build_number=4,
                                  start_time=start_time)
        positive_change = BuildResult.objects.create(build=b1, coverage=100)
        self.assertEqual(positive_change.coverage_diff, 100)
        master = Build.objects.create(project=self.project, branch='master', build_number=3,
                                      end_time=start_time - timedelta(hours=1))
        BuildResult.objects.create(build=master, coverage=20)
        # Need to fetch again to get around cached_property
        self.assertEqual(BuildResult.objects.get(pk=positive_change.pk).coverage_diff, 80)
        b2 = Build.objects.create(project=self.project, branch='i', build_number=5,
                                  start_time=start_time)
        negative_change = BuildResult.objects.create(build=b2, coverage=10)
        self.assertEqual(negative_change.coverage_diff, -10)
        b3 = Build.objects.create(project=self.project, branch='i', build_number=6,
                                  start_time=start_time)
        no_change = BuildResult.objects.create(build=b3, coverage=20)
        self.assertEqual(no_change.coverage_diff, 0)
| {
"content_hash": "ecb54e44e2926564950858e2ed012883",
"timestamp": "",
"source": "github",
"line_count": 616,
"max_line_length": 100,
"avg_line_length": 45.300324675324674,
"alnum_prop": 0.5979931911843755,
"repo_name": "frigg/frigg-hq",
"id": "7f9aad0bea49adc6197a2546cf9c1f1dcf825eb6",
"size": "27928",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/builds/test_models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3504"
},
{
"name": "HTML",
"bytes": "8114"
},
{
"name": "JavaScript",
"bytes": "5982"
},
{
"name": "Makefile",
"bytes": "1164"
},
{
"name": "Python",
"bytes": "182545"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_40) on Wed Apr 13 18:09:35 UTC 2016 -->
<title>RowIterator (apache-cassandra API)</title>
<meta name="date" content="2016-04-13">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="RowIterator (apache-cassandra API)";
}
}
catch(err) {
}
//-->
var methods = {"i0":18};
var tabs = {65535:["t0","All Methods"],2:["t2","Instance Methods"],16:["t5","Default Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/RowIterator.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../org/apache/cassandra/db/rows/RowDiffListener.html" title="interface in org.apache.cassandra.db.rows"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../../org/apache/cassandra/db/rows/RowIterators.html" title="class in org.apache.cassandra.db.rows"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/apache/cassandra/db/rows/RowIterator.html" target="_top">Frames</a></li>
<li><a href="RowIterator.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.apache.cassandra.db.rows</div>
<h2 title="Interface RowIterator" class="title">Interface RowIterator</h2>
</div>
<div class="contentContainer">
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Superinterfaces:</dt>
<dd>java.lang.AutoCloseable, <a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html" title="interface in org.apache.cassandra.db.rows">BaseRowIterator</a><<a href="../../../../../org/apache/cassandra/db/rows/Row.html" title="interface in org.apache.cassandra.db.rows">Row</a>>, <a href="../../../../../org/apache/cassandra/utils/CloseableIterator.html" title="interface in org.apache.cassandra.utils">CloseableIterator</a><<a href="../../../../../org/apache/cassandra/db/rows/Row.html" title="interface in org.apache.cassandra.db.rows">Row</a>>, java.util.Iterator<<a href="../../../../../org/apache/cassandra/db/rows/Row.html" title="interface in org.apache.cassandra.db.rows">Row</a>></dd>
</dl>
<dl>
<dt>All Known Implementing Classes:</dt>
<dd><a href="../../../../../org/apache/cassandra/db/transform/FilteredRows.html" title="class in org.apache.cassandra.db.transform">FilteredRows</a></dd>
</dl>
<hr>
<br>
<pre>public interface <span class="typeNameLabel">RowIterator</span>
extends <a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html" title="interface in org.apache.cassandra.db.rows">BaseRowIterator</a><<a href="../../../../../org/apache/cassandra/db/rows/Row.html" title="interface in org.apache.cassandra.db.rows">Row</a>></pre>
<div class="block">An iterator over rows belonging to a partition.
A RowIterator is an UnfilteredRowIterator to which any deletion information has been
filtered out. As such, all cells of all rows returned by this iterator are,
by definition, live, and hence code using a RowIterator doesn't have to worry
about tombstones and other deletion information.
Note that as for UnfilteredRowIterator, the rows returned must be in clustering order (or
reverse clustering order if isReverseOrder is true), and the Row objects returned
by next() are only valid until the next call to hasNext() or next().</div>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t5" class="tableTab"><span><a href="javascript:show(16);">Default Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>default boolean</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../org/apache/cassandra/db/rows/RowIterator.html#isEmpty--">isEmpty</a></span>()</code>
<div class="block">Returns whether the provided iterator has no data.</div>
</td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.org.apache.cassandra.db.rows.BaseRowIterator">
<!-- -->
</a>
<h3>Methods inherited from interface org.apache.cassandra.db.rows.<a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html" title="interface in org.apache.cassandra.db.rows">BaseRowIterator</a></h3>
<code><a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html#columns--">columns</a>, <a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html#isReverseOrder--">isReverseOrder</a>, <a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html#metadata--">metadata</a>, <a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html#partitionKey--">partitionKey</a>, <a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html#staticRow--">staticRow</a></code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.org.apache.cassandra.utils.CloseableIterator">
<!-- -->
</a>
<h3>Methods inherited from interface org.apache.cassandra.utils.<a href="../../../../../org/apache/cassandra/utils/CloseableIterator.html" title="interface in org.apache.cassandra.utils">CloseableIterator</a></h3>
<code><a href="../../../../../org/apache/cassandra/utils/CloseableIterator.html#close--">close</a></code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.util.Iterator">
<!-- -->
</a>
<h3>Methods inherited from interface java.util.Iterator</h3>
<code>forEachRemaining, hasNext, next, remove</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="isEmpty--">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>isEmpty</h4>
<pre>default boolean isEmpty()</pre>
<div class="block">Returns whether the provided iterator has no data.</div>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code><a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html#isEmpty--">isEmpty</a></code> in interface <code><a href="../../../../../org/apache/cassandra/db/rows/BaseRowIterator.html" title="interface in org.apache.cassandra.db.rows">BaseRowIterator</a><<a href="../../../../../org/apache/cassandra/db/rows/Row.html" title="interface in org.apache.cassandra.db.rows">Row</a>></code></dd>
</dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/RowIterator.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../org/apache/cassandra/db/rows/RowDiffListener.html" title="interface in org.apache.cassandra.db.rows"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../../org/apache/cassandra/db/rows/RowIterators.html" title="class in org.apache.cassandra.db.rows"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/apache/cassandra/db/rows/RowIterator.html" target="_top">Frames</a></li>
<li><a href="RowIterator.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2016 The Apache Software Foundation</small></p>
</body>
</html>
| {
"content_hash": "06202b5caa02d1753dd8265fe99c7a41",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 728,
"avg_line_length": 44.11524163568773,
"alnum_prop": 0.6516389989045251,
"repo_name": "elisska/cloudera-cassandra",
"id": "7f3fa612a6799cc8c9cf7b203ca119f0d83d6b75",
"size": "11867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DATASTAX_CASSANDRA-3.5.0/javadoc/org/apache/cassandra/db/rows/RowIterator.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "75145"
},
{
"name": "CSS",
"bytes": "4112"
},
{
"name": "HTML",
"bytes": "331372"
},
{
"name": "PowerShell",
"bytes": "77673"
},
{
"name": "Python",
"bytes": "979128"
},
{
"name": "Shell",
"bytes": "143685"
},
{
"name": "Thrift",
"bytes": "80564"
}
],
"symlink_target": ""
} |
// Project-wide Sanctuary instance: Sanctuary extended with Fluture's types
// and a few extra helpers, plus re-exports of Future/Par/seq.
const { create, env } = require('sanctuary');
const { env: flutureEnv } = require('fluture-sanctuary-types');
const { Future, Par, seq } = require('fluture');
// Runtime type checking is expensive; only enable it outside production.
const checkTypes = process.env.NODE_ENV !== 'production';
const S = create({checkTypes, env: env.concat(flutureEnv)});
// any :: (a -> Boolean) -> List a -> Boolean -- true if pred holds for some element.
S.any = S.curry2((pred, list) => S.reduce(S.or, false, S.map(pred, list)));
// all :: (a -> Boolean) -> List a -> Boolean -- true if pred holds for every element.
S.all = S.curry2((pred, list) => S.reduce(S.and, true, S.map(pred, list)));
// contains :: a -> List a -> Boolean -- membership test using S.equals.
S.contains = S.curry2((elem, list) => S.any(S.equals(elem), list));
// Left e becomes a rejected Future; Right a becomes a resolved Future.
S.eitherToFuture = either => either.isLeft ? Future.reject(either.value) : Future.of(either.value);
// Nothing rejects with err; Just a resolves with a.
S.maybeToFuture = err => S.compose(S.eitherToFuture, S.maybeToEither(err));
module.exports = { S, F: Future, P: Par, seq };
| {
"content_hash": "6ff1c4acf82866122a30c122e7563743",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 99,
"avg_line_length": 51.785714285714285,
"alnum_prop": 0.6731034482758621,
"repo_name": "CaptJakk/salt-project",
"id": "d973df4d38c38f63ae8104b6d05d015f6def025c",
"size": "725",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/utils/sanctuaryEnv.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "67905"
},
{
"name": "Shell",
"bytes": "281"
}
],
"symlink_target": ""
} |
"""Utility functions (file reading, simple IDL parsing by regexes) for IDL build.
Design doc: http://www.chromium.org/developers/design-documents/idl-build
"""
import os
import cPickle as pickle
import re
import string
import subprocess
# Components whose bindings are generated for production builds.
KNOWN_COMPONENTS = frozenset(['core', 'modules'])
# Superset that additionally allows the 'testing' component.
KNOWN_COMPONENTS_WITH_TESTING = frozenset(['core', 'modules', 'testing'])
def idl_filename_to_interface_name(idl_filename):
    """Return the interface name for an IDL file: InterfaceName.idl -> InterfaceName."""
    basename = os.path.basename(idl_filename)
    interface_name, _ = os.path.splitext(basename)
    return interface_name
def idl_filename_to_component_with_known_components(idl_filename, known_components):
    """Walk up the (real) directory path of |idl_filename| and return the first
    path segment, lowercased, that names one of |known_components|.

    Raises Exception when no segment matches.
    """
    remaining_path = os.path.dirname(os.path.realpath(idl_filename))
    while remaining_path:
        parent_dir, segment = os.path.split(remaining_path)
        if not segment:
            # Reached the filesystem root without a match.
            break
        lowered = segment.lower()
        if lowered in known_components:
            return lowered
        remaining_path = parent_dir
    raise Exception('Unknown component type for %s' % idl_filename)
def idl_filename_to_component(idl_filename):
    """Return the production component ('core' or 'modules') for |idl_filename|."""
    return idl_filename_to_component_with_known_components(
        idl_filename, KNOWN_COMPONENTS)
def is_testing_target(idl_filename):
    """True if |idl_filename| lives under the 'testing' component."""
    return idl_filename_to_component_with_known_components(
        idl_filename, KNOWN_COMPONENTS_WITH_TESTING) == 'testing'
# See whether "component" can depend on "dependency" or not:
# Suppose that we have interface X and Y:
# - if X is a partial interface and Y is the original interface,
# use is_valid_component_dependency(X, Y).
# - if X implements Y, use is_valid_component_dependency(X, Y)
# Suppose that X is a cpp file and Y is a header file:
# - if X includes Y, use is_valid_component_dependency(X, Y)
def is_valid_component_dependency(component, dependency):
    """True unless 'core' would depend on 'modules'; 'modules' may depend on
    'core', but not the other way around (see the comment block above)."""
    assert component in KNOWN_COMPONENTS
    assert dependency in KNOWN_COMPONENTS
    return not (component == 'core' and dependency == 'modules')
class ComponentInfoProvider(object):
    """Base class for component-specific code-generation information.

    Concrete subclasses expose data loaded from the per-component pickle
    files; this base class supplies empty defaults.
    """

    def __init__(self):
        pass

    @property
    def interfaces_info(self):
        return {}

    @property
    def component_info(self):
        return {}

    @property
    def enumerations(self):
        return {}

    @property
    def typedefs(self):
        return {}

    @property
    def union_types(self):
        return set()

    @property
    def include_path_for_union_types(self):
        # The base provider has no union-container header.
        return None
class ComponentInfoProviderCore(ComponentInfoProvider):
    """Info provider for the 'core' component.

    |interfaces_info| and |component_info| are the unpickled contents of
    InterfacesInfoOverall.pickle and ComponentInfoCore.pickle respectively.
    """
    def __init__(self, interfaces_info, component_info):
        super(ComponentInfoProviderCore, self).__init__()
        self._interfaces_info = interfaces_info
        self._component_info = component_info
    @property
    def interfaces_info(self):
        return self._interfaces_info
    @property
    def component_info(self):
        return self._component_info
    @property
    def enumerations(self):
        return self._component_info['enumerations']
    @property
    def typedefs(self):
        return self._component_info['typedefs']
    @property
    def union_types(self):
        return self._component_info['union_types']
    @property
    def include_path_for_union_types(self):
        return 'bindings/core/v8/UnionTypesCore.h'
    @property
    def specifier_for_export(self):
        # Macro prefix placed before generated symbols exported from core.
        return 'CORE_EXPORT '
    @property
    def include_path_for_export(self):
        return 'core/CoreExport.h'
class ComponentInfoProviderModules(ComponentInfoProvider):
    """Info provider for the 'modules' component.

    Since modules may depend on core, the enumeration and typedef views
    merge core's data with modules' data.
    """
    def __init__(self, interfaces_info, component_info_core,
                 component_info_modules):
        super(ComponentInfoProviderModules, self).__init__()
        self._interfaces_info = interfaces_info
        self._component_info_core = component_info_core
        self._component_info_modules = component_info_modules
    @property
    def interfaces_info(self):
        return self._interfaces_info
    @property
    def component_info(self):
        return self._component_info_modules
    @property
    def enumerations(self):
        # Core's enumerations, overridden by modules' where names collide.
        enums = self._component_info_core['enumerations'].copy()
        enums.update(self._component_info_modules['enumerations'])
        return enums
    @property
    def typedefs(self):
        # Core's typedefs, overridden by modules' where names collide.
        typedefs = self._component_info_core['typedefs'].copy()
        typedefs.update(self._component_info_modules['typedefs'])
        return typedefs
    @property
    def union_types(self):
        # Remove duplicate union types from component_info_modules to avoid
        # generating multiple container generation.
        return self._component_info_modules['union_types'] - self._component_info_core['union_types']
    @property
    def include_path_for_union_types(self):
        return 'bindings/modules/v8/UnionTypesModules.h'
    @property
    def specifier_for_export(self):
        # Macro prefix placed before generated symbols exported from modules.
        return 'MODULES_EXPORT '
    @property
    def include_path_for_export(self):
        return 'modules/ModulesExport.h'
def load_interfaces_info_overall_pickle(info_dir):
    """Load and return modules/InterfacesInfoOverall.pickle under |info_dir|.

    Fix: the file is now opened in binary mode, as the pickle module
    requires (text mode corrupts pickle streams on Windows).
    """
    pickle_path = os.path.join(info_dir, 'modules', 'InterfacesInfoOverall.pickle')
    with open(pickle_path, 'rb') as interface_info_file:
        return pickle.load(interface_info_file)
def merge_dict_recursively(target, diff):
    """Merge |diff| into |target| in place.

    Lists are extended, sets are unioned, nested dicts are merged
    recursively, and any other value overwrites the existing entry.
    Part of |diff| may be re-used (aliased) in |target|.
    """
    # Portability fix: .items() works on both Python 2 and 3, unlike the
    # Python-2-only .iteritems(); behavior is identical.
    for key, value in diff.items():
        if key not in target:
            target[key] = value
        elif type(value) == dict:
            merge_dict_recursively(target[key], value)
        elif type(value) == list:
            target[key].extend(value)
        elif type(value) == set:
            target[key].update(value)
        else:
            # Testing IDLs want to overwrite the values. Production code
            # doesn't need any overwriting.
            target[key] = value
def create_component_info_provider_core(info_dir):
    """Return a ComponentInfoProviderCore built from the pickles in |info_dir|."""
    interfaces_info = load_interfaces_info_overall_pickle(info_dir)
    # Fix: 'rb' -- pickle files must be read in binary mode (text mode
    # corrupts the stream on Windows).
    with open(os.path.join(info_dir, 'core', 'ComponentInfoCore.pickle'),
              'rb') as component_info_file:
        component_info = pickle.load(component_info_file)
    return ComponentInfoProviderCore(interfaces_info, component_info)
def create_component_info_provider_modules(info_dir):
    """Return a ComponentInfoProviderModules built from the pickles in |info_dir|."""
    interfaces_info = load_interfaces_info_overall_pickle(info_dir)
    # Fix: 'rb' -- pickle files must be read in binary mode (text mode
    # corrupts the stream on Windows).
    with open(os.path.join(info_dir, 'core', 'ComponentInfoCore.pickle'),
              'rb') as component_info_file:
        component_info_core = pickle.load(component_info_file)
    with open(os.path.join(info_dir, 'modules', 'ComponentInfoModules.pickle'),
              'rb') as component_info_file:
        component_info_modules = pickle.load(component_info_file)
    return ComponentInfoProviderModules(
        interfaces_info, component_info_core, component_info_modules)
def create_component_info_provider(info_dir, component):
    """Create the info provider matching |component| ('core' or 'modules');
    any other value yields the empty base provider."""
    if component == 'core':
        return create_component_info_provider_core(info_dir)
    if component == 'modules':
        return create_component_info_provider_modules(info_dir)
    return ComponentInfoProvider()
################################################################################
# Basic file reading/writing
################################################################################
def get_file_contents(filename):
    """Return the full contents of |filename| as one string."""
    with open(filename) as input_file:
        return input_file.read()
def read_file_to_list(filename):
    """Return the lines of |filename| with trailing newlines removed."""
    with open(filename) as input_file:
        lines = input_file.readlines()
    return [line.rstrip('\n') for line in lines]
def resolve_cygpath(cygdrive_names):
    """Translate cygwin /cygdrive paths to Windows paths via the `cygpath` tool.

    Returns [] for empty input without spawning a subprocess. Requires
    `cygpath` on PATH (i.e. a cygwin host).
    """
    if not cygdrive_names:
        return []
    # -f -: read file names from stdin; -wa: emit absolute Windows paths.
    cmd = ['cygpath', '-f', '-', '-wa']
    process = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    idl_file_names = []
    for file_name in cygdrive_names:
        # Write one path, then immediately read back its translation before
        # sending the next (lock-step use of the pipe).
        process.stdin.write('%s\n' % file_name)
        process.stdin.flush()
        idl_file_names.append(process.stdout.readline().rstrip())
    process.stdin.close()
    process.wait()
    return idl_file_names
def read_idl_files_list_from_file(filename):
    """Similar to read_file_to_list, but also resolves cygpath."""
    with open(filename) as input_file:
        file_names = sorted(os.path.realpath(line.rstrip('\n'))
                            for line in input_file)
    idl_file_names = []
    cygdrive_names = []
    # Partition into plain paths and /cygdrive paths needing translation.
    for file_name in file_names:
        if file_name.startswith('/cygdrive'):
            cygdrive_names.append(file_name)
        else:
            idl_file_names.append(file_name)
    idl_file_names.extend(resolve_cygpath(cygdrive_names))
    return idl_file_names
def read_pickle_files(pickle_filenames):
    """Yield the unpickled contents of each file in |pickle_filenames|.

    Fix: files are now opened in binary mode, as the pickle module
    requires (text mode corrupts pickle streams on Windows).
    """
    for pickle_filename in pickle_filenames:
        with open(pickle_filename, 'rb') as pickle_file:
            yield pickle.load(pickle_file)
def write_file(new_text, destination_filename, only_if_changed):
    """Write |new_text| to |destination_filename|, creating parent directories
    as needed. With |only_if_changed|, an identical existing file is left
    untouched (preserving its mtime)."""
    if only_if_changed and os.path.isfile(destination_filename):
        with open(destination_filename) as existing_file:
            if existing_file.read() == new_text:
                return
    parent_dir = os.path.dirname(destination_filename)
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)
    with open(destination_filename, 'w') as destination_file:
        destination_file.write(new_text)
def write_pickle_file(pickle_filename, data, only_if_changed):
    """Pickle |data| to |pickle_filename|.

    With |only_if_changed|, the file is left untouched when it already holds
    an equal object (preserving its mtime for incremental builds).
    Fix: pickle files are now opened in binary mode ('rb'/'wb') as the
    pickle module requires; text mode corrupts the stream on Windows.
    """
    if only_if_changed and os.path.isfile(pickle_filename):
        with open(pickle_filename, 'rb') as pickle_file:
            try:
                if pickle.load(pickle_file) == data:
                    return
            except Exception:
                # If trouble unpickling, overwrite
                pass
    with open(pickle_filename, 'wb') as pickle_file:
        pickle.dump(data, pickle_file)
################################################################################
# IDL parsing
#
# We use regular expressions for parsing; this is incorrect (Web IDL is not a
# regular language), but simple and sufficient in practice.
# Leading and trailing context (e.g. following '{') used to avoid false matches.
################################################################################
def is_callback_interface_from_idl(file_contents):
    """True if |file_contents| declares a callback interface."""
    return re.search(r'callback\s+interface\s+\w+\s*{', file_contents) is not None
def should_generate_impl_file_from_idl(file_contents):
    """True when a given IDL file contents could generate .h/.cpp files."""
    # FIXME: This would be error-prone and we should use AST rather than
    # improving the regexp pattern.
    return re.search(r'(interface|dictionary|exception)\s+\w+',
                     file_contents) is not None
def match_interface_extended_attributes_from_idl(file_contents):
    """Return the match for the extended-attribute block ([...]) preceding an
    interface/exception declaration, or None. Group 1 is the bracket contents."""
    # Strip comments first so bracket text inside them cannot confuse the
    # match. re.compile needed b/c Python 2.6 doesn't support flags in re.sub.
    single_line_comment_re = re.compile(r'//.*$', flags=re.MULTILINE)
    block_comment_re = re.compile(r'/\*.*?\*/', flags=re.MULTILINE | re.DOTALL)
    stripped_contents = re.sub(
        block_comment_re, '', re.sub(single_line_comment_re, '', file_contents))
    interface_with_attributes_re = (r'\[(.*)\]\s*'
                                    r'((callback|partial)\s+)?'
                                    r'(interface|exception)\s+'
                                    r'\w+\s*'
                                    r'(:\s*\w+\s*)?'
                                    r'{')
    return re.search(interface_with_attributes_re, stripped_contents,
                     flags=re.DOTALL)
def get_interface_extended_attributes_from_idl(file_contents):
    """Return a dict of extended attribute name -> value for the interface in
    |file_contents| (value is '' when the attribute has no '=value' part)."""
    match = match_interface_extended_attributes_from_idl(file_contents)
    if not match:
        return {}
    extended_attributes_string = match.group(1)
    extended_attributes = {}
    # FIXME: this splitting is WRONG: it fails on extended attributes where lists of
    # multiple values are used, which are separated by a comma and a space.
    parts = [extended_attribute.strip()
             for extended_attribute in re.split(r',\s+', extended_attributes_string)
             # Discard empty parts, which may exist due to trailing comma
             if extended_attribute.strip()]
    for part in parts:
        # Portability fix: str.strip instead of the Python-2-only
        # string.strip function; behavior is identical under Python 2.
        name, _, value = [token.strip() for token in part.partition('=')]
        extended_attributes[name] = value
    return extended_attributes
def get_interface_exposed_arguments(file_contents):
    """Return the [Exposed(...)] arguments of the interface as a list of
    {'exposed': ..., 'runtime_enabled': ...} dicts, or None when the interface
    has no extended attributes or no Exposed(...) attribute."""
    match = match_interface_extended_attributes_from_idl(file_contents)
    if not match:
        return None
    match = re.search(r'[^=]\bExposed\(([^)]*)\)', file_contents)
    if not match:
        return None
    arguments = []
    # Portability fix: str.strip instead of the Python-2-only string.strip
    # function. Also dropped the unused extended_attributes_string local.
    for argument in [arg.strip() for arg in match.group(1).split(',')]:
        exposed, runtime_enabled = argument.split()
        arguments.append({'exposed': exposed, 'runtime_enabled': runtime_enabled})
    return arguments
| {
"content_hash": "20502e8e1837bcf7ac0bd9704c8caa03",
"timestamp": "",
"source": "github",
"line_count": 373,
"max_line_length": 108,
"avg_line_length": 34.48793565683646,
"alnum_prop": 0.6442786069651741,
"repo_name": "Pluto-tv/blink-crosswalk",
"id": "7c89a62c40a9aca51392148b2c07f397b7fadd8f",
"size": "13027",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "Source/bindings/scripts/utilities.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "1835"
},
{
"name": "Assembly",
"bytes": "14768"
},
{
"name": "Batchfile",
"bytes": "35"
},
{
"name": "C",
"bytes": "128002"
},
{
"name": "C++",
"bytes": "45337051"
},
{
"name": "CSS",
"bytes": "596289"
},
{
"name": "CoffeeScript",
"bytes": "163"
},
{
"name": "GLSL",
"bytes": "11578"
},
{
"name": "Groff",
"bytes": "28067"
},
{
"name": "HTML",
"bytes": "64824312"
},
{
"name": "Java",
"bytes": "109377"
},
{
"name": "JavaScript",
"bytes": "25099309"
},
{
"name": "Objective-C",
"bytes": "45096"
},
{
"name": "Objective-C++",
"bytes": "302371"
},
{
"name": "PHP",
"bytes": "220636"
},
{
"name": "Perl",
"bytes": "115958"
},
{
"name": "Python",
"bytes": "3879209"
},
{
"name": "Ruby",
"bytes": "73952"
},
{
"name": "Shell",
"bytes": "10282"
},
{
"name": "XSLT",
"bytes": "50203"
},
{
"name": "Yacc",
"bytes": "10148"
}
],
"symlink_target": ""
} |
package cvx
import breeze.linalg.{DenseMatrix, DenseVector}
import breeze.numerics.log
/**
* Created by oar on 10.10.17.
*/
object CvxUtils {

  /** Assert that A and b are either both defined or both undefined.
    * Used to validate that an equality constraint Ax=b is specified
    * completely or not at all.
    */
  def check(A:Option[DenseMatrix[Double]], b:Option[DenseVector[Double]]): Unit =
    assert( A.isDefined && b.isDefined || A.isEmpty && b.isEmpty,
      "\n\nA is " + (if(A.isDefined) "defined" else "undefined\n")+
        "b is " + (if(b.isDefined) "defined" else "undefined.\n")
    )

  /** Starting from u(s)=x+s*dx with s=s0 backtrack s=beta*s
    * until the termination criterion tc(u(s)) is satisfied or
    * the maximum number of steps has been reached.
    *
    * Throw LineSearchFailedException if the final point u(s) does
    * not satisfy the criterion tc(u(s)).
    *
    * NOTE(review): the code tests tc(s), not tc(u); since tc has type
    * Double=>Boolean it presumably closes over x and dx and evaluates the
    * criterion at u(s) internally -- confirm with callers.
    *
    * @param tc   termination criterion as a function of the step length s
    * @param beta backtracking factor, must lie strictly in (0,1)
    * @param s0   initial step length (default 1.0)
    * @return u(s) at final value s.
    */
  def lineSearch(
    x:DenseVector[Double],dx:DenseVector[Double],
    tc:(Double)=>Boolean, beta:Double, s0:Double=1.0
  ): DenseVector[Double] = {

    assert(0<beta && beta <1,"\nbeta = "+beta+" not in (0,1)\n")
    // beta^maxSteps ~ e^{-30} < 1e-13: bound the number of backtracking steps
    // so s cannot shrink far below machine precision.
    val maxSteps = -30/log(beta) // then beta^^maxSteps < 1e-13
    var step = 0
    var s=s0
    var u = x+dx*s
    while(!tc(s) && step<maxSteps){

      s = beta*s
      u = x+dx*s
      step += 1
    }
    if(!tc(s)){
      val msg = "\nLine search unsuccessful.\n"
      throw LineSearchFailedException(msg)
    }
    u
  }

  /** Standard criterion for Barrier and PrimalDualSolver:
    * terminates optimization as soon as the duality gap and equality
    * gap are small enough (both compared against pars.tolSolver).
    * Requires a solver that reports a duality gap.
    */
  def standardTerminationCriterion(pars:SolverParams):(OptimizationState)=>Boolean =
    (optState:OptimizationState) => {
      assert(optState.dualityGap.nonEmpty,
        "\nNonempty duality gap required, Solver should be Barrier or PrimalDual.\n"
      )
      // An absent equality gap (no equality constraints) counts as satisfied.
      (optState.dualityGap.get < pars.tolSolver) &&
        (optState.equalityGap.isEmpty || optState.equalityGap.get < pars.tolSolver)
    }

  /** Criterion terminates for simple phase I analysis:
    * optimization as soon as the objective function value is less than zero
    * (sufficient to get a strictly feasible point) and the duality and equality gap
    * are reasonably small (hard-coded 1e-6 tolerance for the equality gap).
    */
  def phase_I_TerminationCriterion:(OptimizationState)=>Boolean =
    (optState:OptimizationState) => {
      assert(optState.dualityGap.nonEmpty,
        "\nNonempty duality gap required, Solver should be Barrier or PrimalDual.\n"
      )
      (optState.objectiveFunctionValue < 0) &&
        (optState.equalityGap.isEmpty || optState.equalityGap.get < 1e-6)
    }
}
| {
"content_hash": "10e0f267528c4bb74070fcaf6433e89e",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 84,
"avg_line_length": 29.235955056179776,
"alnum_prop": 0.6502690238278247,
"repo_name": "spyqqqdia/cvx",
"id": "f6611ad0c50ab976ac9ebbc08f7be02619a40c14",
"size": "2602",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/scala/cvx/CvxUtils.scala",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Scala",
"bytes": "302089"
}
],
"symlink_target": ""
} |
' Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
Imports System.Collections.Generic
Imports Microsoft.CodeAnalysis.Text
Imports Microsoft.CodeAnalysis.VisualBasic.Symbols
Imports Microsoft.CodeAnalysis.VisualBasic.Syntax
Namespace Microsoft.CodeAnalysis.VisualBasic
''' <summary>
''' A region analysis walker that computes the set of variables whose values flow into (are used in)
''' the region.
''' A variable assigned outside is used inside if an analysis
''' that leaves the variable unassigned on entry to the region would cause the
''' generation of "unassigned" errors within the region.
''' </summary>
Friend Class DataFlowsInWalker
    Inherits AbstractRegionDataFlowPass

    ' TODO: normalize the result by removing variables that are unassigned in an unmodified flow analysis.

    Public Sub New(info As FlowAnalysisInfo, region As FlowAnalysisRegionInfo, unassignedVariables As HashSet(Of Symbol))
        MyBase.New(info, region, unassignedVariables, trackStructsWithIntrinsicTypedFields:=True)
    End Sub

    ''' <summary>
    ''' Runs the data-flows-in analysis and returns the set of symbols whose
    ''' values flow into the region. On failure (or when an invalid region is
    ''' detected) an empty set is returned and <paramref name="succeeded"/> is False.
    ''' </summary>
    Friend Overloads Shared Function Analyze(info As FlowAnalysisInfo, region As FlowAnalysisRegionInfo,
                                             unassignedVariables As HashSet(Of Symbol),
                                             ByRef succeeded As Boolean?,
                                             ByRef invalidRegionDetected As Boolean) As HashSet(Of Symbol)
        ' remove static locals from unassigned, otherwise they will never reach ReportUnassigned(...)
        Dim unassignedWithoutStatic As New HashSet(Of Symbol)
        For Each var In unassignedVariables
            If var.Kind <> SymbolKind.Local OrElse Not DirectCast(var, LocalSymbol).IsStatic Then
                unassignedWithoutStatic.Add(var)
            End If
        Next

        Dim walker = New DataFlowsInWalker(info, region, unassignedWithoutStatic)
        Try
            succeeded = walker.Analyze() AndAlso Not walker.InvalidRegionDetected
            invalidRegionDetected = walker.InvalidRegionDetected
            Return If(succeeded, walker._dataFlowsIn, New HashSet(Of Symbol)())
        Finally
            ' Always release pooled walker resources, even when Analyze throws.
            walker.Free()
        End Try
    End Function

    ' Accumulates the symbols determined to flow into the region.
    Private ReadOnly _dataFlowsIn As HashSet(Of Symbol) = New HashSet(Of Symbol)()

    ' Produces a fresh reachable state to use at a region entry point.
    ' NOTE(review): when the incoming state was unreachable, slot 0 is assigned;
    ' slot 0 appears to act as the "was unreachable" marker in the base
    ' DataFlowPass slot conventions -- confirm.
    Private Function ResetState(state As LocalState) As LocalState
        Dim unreachable As Boolean = Not state.Reachable
        state = ReachableState()
        If unreachable Then
            state.Assign(0)
        End If
        Return state
    End Function

    Protected Overrides Sub EnterRegion()
        ' On region entry, pretend nothing is assigned so that any read inside
        ' the region of a variable assigned outside is reported as "unassigned"
        ' and thereby recorded as flowing in.
        Me.SetState(ResetState(Me.State))
        Me._dataFlowsIn.Clear()
        MyBase.EnterRegion()
    End Sub

    Protected Overrides Sub NoteBranch(pending As PendingBranch, stmt As BoundStatement, labelStmt As BoundLabelStatement)
        ' A branch from outside the region to a label inside it is another way
        ' of entering the region, so its pending state is reset like a normal entry.
        If stmt.Syntax IsNot Nothing AndAlso labelStmt.Syntax IsNot Nothing AndAlso Not IsInsideRegion(stmt.Syntax.Span) AndAlso IsInsideRegion(labelStmt.Syntax.Span) Then
            pending.State = ResetState(pending.State)
        End If
        MyBase.NoteBranch(pending, stmt, labelStmt)
    End Sub

    Public Overrides Function VisitRangeVariable(node As BoundRangeVariable) As BoundNode
        ' Sometimes query expressions refer to query range variable just to
        ' copy its value to a new compound variable. There is no reference
        ' to the range variable in code and, from user point of view, there is
        ' no access to it.
        ' If and only if range variable is declared outside of the region and read inside, it flows in.
        If Not node.WasCompilerGenerated AndAlso
            IsInside AndAlso
            Not IsInsideRegion(node.RangeVariable.Syntax.Span) Then

            _dataFlowsIn.Add(node.RangeVariable)
        End If

        Return Nothing
    End Function

    Protected Overrides Sub VisitAmbiguousLocalSymbol(ambiguous As DataFlowPass.AmbiguousLocalsPseudoSymbol)
        MyBase.VisitAmbiguousLocalSymbol(ambiguous)

        ' Locals from ambiguous implicit receiver can only be unassigned in *REGION* flow analysis
        ' if a new region starts after they are declared and before the implicit receiver is referenced;
        ' region data flow analysis for such regions is prohibited and should return Succeeded = False.

        ' Check if the first local in the collection was 'unassigned' by entering a region,
        ' in which case set a flag that the region is not valid
        If IsInside Then
            Dim firstLocal As LocalSymbol = ambiguous.Locals(0)
            If Not Me.State.IsAssigned(VariableSlot(firstLocal)) Then
                Me.SetInvalidRegion()
            End If
        End If
    End Sub

    ' Records a symbol as flowing in when it is reported unassigned at a use
    ' site inside the region; field accesses are mapped back to their symbol.
    Protected Overrides Sub ReportUnassigned(local As Symbol,
                                             node As VisualBasicSyntaxNode,
                                             rwContext As ReadWriteContext,
                                             Optional slot As Integer = SlotKind.NotTracked,
                                             Optional boundFieldAccess As BoundFieldAccess = Nothing)
        Debug.Assert(local.Kind <> SymbolKind.Field OrElse boundFieldAccess IsNot Nothing)

        If IsInsideRegion(node.Span) Then
            Debug.Assert(local.Kind <> SymbolKind.RangeVariable)
            If local.Kind = SymbolKind.Field Then
                Dim sym As Symbol = GetNodeSymbol(boundFieldAccess)

                ' Unreachable for AmbiguousLocalsPseudoSymbol: ambiguous implicit
                ' receiver should not ever be considered unassigned
                Debug.Assert(Not TypeOf sym Is AmbiguousLocalsPseudoSymbol)

                If sym IsNot Nothing Then
                    _dataFlowsIn.Add(sym)
                End If

            Else
                _dataFlowsIn.Add(local)
            End If
        End If

        MyBase.ReportUnassigned(local, node, rwContext, slot, boundFieldAccess)
    End Sub

    Friend Overrides Sub AssignLocalOnDeclaration(local As LocalSymbol, node As BoundLocalDeclaration)
        ' NOTE: static locals should not be considered assigned even in presence of initializer
        If Not local.IsStatic Then
            MyBase.AssignLocalOnDeclaration(local, node)
        End If
    End Sub

End Class
End Namespace
| {
"content_hash": "4f0fa34d360d08ed3cbe803c48acae6a",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 175,
"avg_line_length": 47.520833333333336,
"alnum_prop": 0.6308636562911004,
"repo_name": "MavenRain/roslyn",
"id": "2ae4bf89f8f6d20b55358340c0390ede64890c3b",
"size": "6845",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/Compilers/VisualBasic/Portable/Analysis/FlowAnalysis/DataFlowsInWalker.vb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8388"
},
{
"name": "C#",
"bytes": "72045407"
},
{
"name": "C++",
"bytes": "2298"
},
{
"name": "F#",
"bytes": "421"
},
{
"name": "PowerShell",
"bytes": "801"
},
{
"name": "Shell",
"bytes": "8169"
},
{
"name": "Visual Basic",
"bytes": "58165340"
}
],
"symlink_target": ""
} |
<div ng-controller="CrmCxnLinkDialogCtrl">
<iframe crm-ui-iframe crm-ui-iframe-src="model.url"></iframe>
</div>
| {
"content_hash": "92ca0c59356c1260f67d543079b3df25",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 63,
"avg_line_length": 38,
"alnum_prop": 0.7280701754385965,
"repo_name": "Hack4Eugene/Hack4Cause2016",
"id": "ad65640950a97fe901927fcaae6cfeaf711af01f",
"size": "114",
"binary": false,
"copies": "26",
"ref": "refs/heads/master",
"path": "src/cosmodojo/files/sites/all/modules/contrib/civicrm/ang/crmCxn/LinkDialogCtrl.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "8454"
},
{
"name": "Batchfile",
"bytes": "551"
},
{
"name": "C++",
"bytes": "138803"
},
{
"name": "CSS",
"bytes": "1327331"
},
{
"name": "HTML",
"bytes": "844470"
},
{
"name": "Java",
"bytes": "20354"
},
{
"name": "JavaScript",
"bytes": "3894404"
},
{
"name": "Makefile",
"bytes": "843"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Objective-C",
"bytes": "5003"
},
{
"name": "PHP",
"bytes": "45753869"
},
{
"name": "PostScript",
"bytes": "320927"
},
{
"name": "Python",
"bytes": "66551"
},
{
"name": "Ruby",
"bytes": "3056"
},
{
"name": "Shell",
"bytes": "43976"
},
{
"name": "Smarty",
"bytes": "5666163"
},
{
"name": "SourcePawn",
"bytes": "472616"
},
{
"name": "XSLT",
"bytes": "2135"
}
],
"symlink_target": ""
} |
"""
Python library for interacting with the T1 API. Uses third-party module Requests
(http://docs.python-requests.org/en/latest/) to get and post data, and ElementTree
to parse it.
"""
from __future__ import absolute_import
from .utils import filters
from .service import T1, T1Service
from . import errors
from .metadata import (__author__, __copyright__, __license__, __version__,
__maintainer__, __email__, __status__)
__all__ = ['T1', 'T1Service', 'filters', 'errors']
| {
"content_hash": "a5cffeb7cf0728dc505294580b74753e",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 82,
"avg_line_length": 35.57142857142857,
"alnum_prop": 0.6626506024096386,
"repo_name": "Cawb07/t1-python",
"id": "d3291d90c19ba42f76c708ed8f495be930f0e82b",
"size": "522",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "terminalone/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "177703"
}
],
"symlink_target": ""
} |
package com.datasaver.www.apis.services;
import com.datasaver.www.apis.APIAdapter;
import com.datasaver.www.apis.services.interfaces.UserServiceInterface;
/**
* UserService class.
*
* @author devetude
*/
public class UserService {
/**
* Methods to build.
*
* @return
*/
public static UserServiceInterface build() {
return (UserServiceInterface) APIAdapter.build(UserServiceInterface.class);
}
} | {
"content_hash": "c30adc8b3c51fd266b73f38087d41cae",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 83,
"avg_line_length": 22,
"alnum_prop": 0.6977272727272728,
"repo_name": "DataSaver-Dev/DataSaver-Android",
"id": "b934d5a5c6fbdd1d085c651fc15af4fb176a91c6",
"size": "440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/datasaver/www/apis/services/UserService.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "243275"
}
],
"symlink_target": ""
} |
<div class="commune_descr limited">
<p>
Foncine-le-Haut est
une commune localisée dans le département de Jura en Franche-Comté. Elle totalisait 1 014 habitants en 2008.</p>
<p>À Foncine-le-Haut, la valorisation moyenne à la vente d'un appartement se situe à 634 € du m² en vente. Le prix moyen d'une maison à l'achat se situe à 2 950 € du m². À la location la valeur moyenne se situe à 9,93 € du m² mensuel.</p>
<p>Le parc de logements, à Foncine-le-Haut, se décomposait en 2011 en 208 appartements et 502 maisons soit
un marché plutôt équilibré.</p>
<p>À proximité de Foncine-le-Haut sont positionnées géographiquement les villes de
<a href="{{VLROOT}}/immobilier/arsure-arsurette_39020/">Arsure-Arsurette</a> située à 6 km, 91 habitants,
<a href="{{VLROOT}}/immobilier/chaux-neuve_25142/">Chaux-Neuve</a> à 5 km, 253 habitants,
<a href="{{VLROOT}}/immobilier/bief-des-maisons_39052/">Bief-des-Maisons</a> à 5 km, 71 habitants,
<a href="{{VLROOT}}/immobilier/chapelle-des-bois_25121/">Chapelle-des-Bois</a> localisée à 7 km, 278 habitants,
<a href="{{VLROOT}}/immobilier/chalesmes_39091/">Les Chalesmes</a> située à 4 km, 83 habitants,
<a href="{{VLROOT}}/immobilier/petite-chaux_25451/">Petite-Chaux</a> localisée à 7 km, 129 habitants,
entre autres. De plus, Foncine-le-Haut est située à seulement 15 km de <a href="{{VLROOT}}/immobilier/champagnole_39097/">Champagnole</a>.</p>
<p>La commune compte de nombreux aménagements, elle dispose, entre autres, de un terrain de tennis, deux domaines skiables, un centre d'équitation et deux boucles de randonnée.</p>
</div>
| {
"content_hash": "cb06bd2c8bb9f8cbe0b52e9a7033a25e",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 258,
"avg_line_length": 96.82352941176471,
"alnum_prop": 0.7430133657351154,
"repo_name": "donaldinou/frontend",
"id": "11778ba165fb51ae9c4b3414a9b7a84ac7bfa423",
"size": "1684",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Viteloge/CoreBundle/Resources/descriptions/39228.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "3073"
},
{
"name": "CSS",
"bytes": "111338"
},
{
"name": "HTML",
"bytes": "58634405"
},
{
"name": "JavaScript",
"bytes": "88564"
},
{
"name": "PHP",
"bytes": "841919"
}
],
"symlink_target": ""
} |
/**
* This code was auto-generated by a Codezu.
*
* Changes to this file may cause incorrect behavior and will be lost if
* the code is regenerated.
*/
package com.mozu.api.contracts.sitesettings.shipping;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import org.joda.time.DateTime;
import com.mozu.api.contracts.core.AuditInfo;
import com.mozu.api.contracts.sitesettings.shipping.SiteShippingHandlingFee;
import com.mozu.api.contracts.sitesettings.shipping.SiteShippingSignatureRequirement;
/**
 * Properties of the shipping settings configured for an individual site.
 * Auto-generated DTO; plain getter/setter bean with no behavior.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class SiteShippingSettings implements Serializable
{
	// Default Serial Version UID
	private static final long serialVersionUID = 1L;

	/**
	 * Identifier and datetime stamp information recorded when a user or application creates, updates, or deletes a resource entity. This value is system-supplied and read-only.
	 */
	protected AuditInfo auditInfo;

	public AuditInfo getAuditInfo() {
		return this.auditInfo;
	}

	public void setAuditInfo(AuditInfo auditInfo) {
		this.auditInfo = auditInfo;
	}

	/**
	 * Handling fee configuration for orders placed on this site.
	 * NOTE(review): the generated description previously read "The combined
	 * price for all items in the order, including all selected options but
	 * excluding any discounts", which describes an order subtotal rather than
	 * a handling fee -- confirm intended semantics against the API reference.
	 */
	protected SiteShippingHandlingFee orderHandlingFee;

	public SiteShippingHandlingFee getOrderHandlingFee() {
		return this.orderHandlingFee;
	}

	public void setOrderHandlingFee(SiteShippingHandlingFee orderHandlingFee) {
		this.orderHandlingFee = orderHandlingFee;
	}

	/**
	 * Properties of the package signature requirements for the site.
	 */
	protected SiteShippingSignatureRequirement signatureRequirement;

	public SiteShippingSignatureRequirement getSignatureRequirement() {
		return this.signatureRequirement;
	}

	public void setSignatureRequirement(SiteShippingSignatureRequirement signatureRequirement) {
		this.signatureRequirement = signatureRequirement;
	}

}
| {
"content_hash": "0359282889bde7e007f0c7706f162793",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 174,
"avg_line_length": 31.828125,
"alnum_prop": 0.7756504663721159,
"repo_name": "johngatti/mozu-java",
"id": "0641ef8adf8d9b4ea53473a8d31b4780cb7679f0",
"size": "2037",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mozu-javaasync-core/src/main/java/com/mozu/api/contracts/sitesettings/shipping/SiteShippingSettings.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "102"
},
{
"name": "Java",
"bytes": "10444263"
}
],
"symlink_target": ""
} |
package de.otto.roboapp.model;
/**
 * Gadget kinds available to a robot.
 * NOTE(review): semantics inferred from the constant name only -- confirm.
 */
public enum Gadget {
    /** Presumably a temporary speed boost. */
    SUPER_SPEED
}
| {
"content_hash": "000f776eb994c1e82836cfa4d84cc68b",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 30,
"avg_line_length": 14.2,
"alnum_prop": 0.7183098591549296,
"repo_name": "phllipo/RoboRace",
"id": "bd8b8e0a8bbacb177256c44b4bda409cfb0c2d7d",
"size": "71",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "roboapp/app/src/main/java/de/otto/roboapp/model/Gadget.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1599"
},
{
"name": "HTML",
"bytes": "2267"
},
{
"name": "Java",
"bytes": "98901"
},
{
"name": "JavaScript",
"bytes": "24897"
},
{
"name": "Shell",
"bytes": "1376"
}
],
"symlink_target": ""
} |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class ConvayerInItemInteraction : Interaction
{
    // Player color this conveyor accepts items from.
    public GameColor color;

    // Item currently travelling along the belt; null while the belt is free.
    private ItemInteraction holding;

    [SerializeField]
    public MachineItemInteraction machine;

    [SerializeField]
    public GameObject vanishPoint;

    void Start()
    {
    }

    public override void Highlight(GameObject player)
    {
    }

    public override void Unhighlight(GameObject player)
    {
    }

    void Update()
    {
        if (!holding)
            return;

        Vector3 target = vanishPoint.transform.position;

        // Still en route: ease the item toward the vanish point and wait.
        if ((holding.transform.position - target).magnitude >= 0.5f)
        {
            holding.transform.position = Vector3.Lerp(holding.transform.position, target, 0.1f);
            return;
        }

        // Arrived: hand the item over once the machine can take it.
        if (!machine.ReadyToProcess())
            return;

        if (machine.StartProcessingItem(holding))
        {
            holding.MarkAsHeldBy(machine.gameObject);
            holding.transform.parent = machine.gameObject.transform;
            holding.transform.position = machine.gameObject.transform.position;
            holding = null;
        }
        else
        {
            Debug.LogWarning("i have been unable to start processing");
        }
    }

    public override bool CanInteractWith(CharacterItemInteraction player, ItemInteraction item)
    {
        // Belt must be free, the player must match our color, and the item
        // must not already carry that color.
        if (holding != null || player == null)
            return false;
        if (player.color != color || item == null)
            return false;
        return !item.hasColor(color);
    }

    public bool ReceiveItem(CharacterItemInteraction playerItemInteraction, ItemInteraction itemInteraction)
    {
        if (holding)
            return false;

        // Take ownership: parent the item to the belt and snap it into place.
        holding = itemInteraction;
        holding.MarkAsHeldBy(gameObject);
        holding.transform.parent = transform;
        holding.transform.position = transform.position;
        return true;
    }
}
| {
"content_hash": "bbaf43b0a495015e388276ee30064529",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 124,
"avg_line_length": 29.681159420289855,
"alnum_prop": 0.5810546875,
"repo_name": "Goat-Improvement-Suite/The-Chinese-Room",
"id": "88ce865a03ea32e4e97108fb079c21af3c6b1763",
"size": "2050",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Assets/Scripts/ConvayerInItemInteraction.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "48454"
}
],
"symlink_target": ""
} |
@interface CollectionViewController ()<UICollectionViewDataSource>

// Pull-to-refresh control attached to the top of the collection view.
@property(nonatomic, strong) YiRefreshHeader *refreshHeader;
// Load-more control attached to the bottom of the collection view.
@property(nonatomic, strong) YiRefreshFooter *refreshFooter;
// Number of cells the data source currently reports.
@property(nonatomic, assign) NSInteger total;

@end

@implementation CollectionViewController
@synthesize refreshHeader,refreshFooter,total;

#pragma mark - Lifecycle

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    // On iOS 7+ keep content from extending under the navigation bar.
    if ([[[UIDevice currentDevice]systemVersion] floatValue] >= 7.0) {
        self.edgesForExtendedLayout = UIRectEdgeBottom | UIRectEdgeLeft | UIRectEdgeRight;
    }
    self.view.backgroundColor=[UIColor whiteColor];
    self.title=@"collectionView刷新演示";
    self.view.backgroundColor=[UIColor whiteColor];
    total=0;
    UICollectionViewFlowLayout *layout=[[UICollectionViewFlowLayout alloc] init];
    // Larger cells on screens wider than 320pt.
    if ([[UIScreen mainScreen] bounds].size.width>320) {
        layout.itemSize=CGSizeMake(100, 100);
    }else{
        layout.itemSize=CGSizeMake(80, 80);
    }
    [layout setScrollDirection:UICollectionViewScrollDirectionVertical];
    layout.sectionInset = UIEdgeInsetsMake(10, 10, 10, 10);
    UICollectionView *collectionView=[[UICollectionView alloc] initWithFrame:CGRectMake(0, 0, [[UIScreen mainScreen] bounds].size.width, [[UIScreen mainScreen] bounds].size.height-64) collectionViewLayout:layout];
    [collectionView registerClass:[UICollectionViewCell class] forCellWithReuseIdentifier:@"cell"];
    collectionView.backgroundColor=[UIColor clearColor];
    collectionView.showsVerticalScrollIndicator=NO;
    [collectionView setUserInteractionEnabled:YES];
    collectionView.dataSource=self;
    [self.view addSubview:collectionView];

    // YiRefreshHeader: pull-to-refresh header usage.
    refreshHeader=[[YiRefreshHeader alloc] init];
    refreshHeader.scrollView=collectionView;
    [refreshHeader header];
    // Capture self weakly so the retained blocks do not create a cycle.
    typeof(self) __weak weakSelf = self;
    refreshHeader.beginRefreshingBlock=^(){
        // Simulate work on a background queue:
        dispatch_async(dispatch_get_global_queue(0, 0), ^{
            sleep(2);
            dispatch_async(dispatch_get_main_queue(), ^{
                typeof(weakSelf) __strong strongSelf = weakSelf;
                // Update the view on the main thread.
                strongSelf.total=17;
                [collectionView reloadData];
                [strongSelf.refreshHeader endRefreshing];
            });
        });
    };
    // Kick off an initial refresh so the view starts populated.
    [refreshHeader beginRefreshing];

    // YiRefreshFooter: load-more footer usage.
    refreshFooter=[[YiRefreshFooter alloc] init];
    refreshFooter.scrollView=collectionView;
    [refreshFooter footer];
    refreshFooter.beginRefreshingBlock=^(){
        // Simulate work on a background queue:
        dispatch_async(dispatch_get_global_queue(0, 0), ^{
            sleep(2);
            dispatch_async(dispatch_get_main_queue(), ^{
                typeof(weakSelf) __strong strongSelf = weakSelf;
                // Update the view on the main thread.
                // NOTE(review): the footer block never changes self.total, so
                // "load more" adds no new rows -- confirm this demo behavior.
                [collectionView reloadData];
                [strongSelf.refreshFooter endRefreshing];
            });
        });
    };
}

#pragma mark - UICollectionViewDataSource

-(NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section
{
    return total;
}

// Cells are tinted in a repeating 3-color pattern by row index.
- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath
{
    UICollectionViewCell *cell=[collectionView dequeueReusableCellWithReuseIdentifier:@"cell" forIndexPath:indexPath];
    if (indexPath.row%3==0) {
        cell.backgroundColor=[UIColor colorWithRed:0.24f green:0.72f blue:0.17f alpha:1.00f];
    }else if (indexPath.row%3==1){
        cell.backgroundColor=[UIColor colorWithRed:0.22f green:0.50f blue:0.78f alpha:1.00f];
    }else if (indexPath.row%3==2){
        cell.backgroundColor=[UIColor colorWithRed:0.00f green:0.38f blue:0.77f alpha:1.00f];
    }
    return cell;
}

@end
| {
"content_hash": "81f0b602d5baf1369ba22dd9baf20e84",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 213,
"avg_line_length": 36.5188679245283,
"alnum_prop": 0.6915525703952468,
"repo_name": "coderyi/YiRefresh",
"id": "c9d51fa7531f87045b30f367b0d7a933ca9fb42b",
"size": "4201",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "YiRefreshDemo/CollectionViewController.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "190"
},
{
"name": "Objective-C",
"bytes": "28501"
},
{
"name": "Ruby",
"bytes": "541"
}
],
"symlink_target": ""
} |
using System;
using System.Security.AccessControl;
using Xunit;
namespace Microsoft.Win32.RegistryTests
{
/// <summary>
/// Tests for <c>RegistryKey.OpenSubKey(string, RegistryRights)</c>.
/// </summary>
public class RegistryKey_OpenSubKey_str_rkpc : RegistryKeyOpenSubKeyTestsBase
{
    /// <summary>Argument validation and access-control failure cases.</summary>
    [Fact]
    public void NegativeTests()
    {
        // Should throw if passed subkey name is null
        Assert.Throws<ArgumentNullException>(() => TestRegistryKey.OpenSubKey(name: null, rights: RegistryRights.ReadKey));

        // Should throw if subkey name greater than 255 chars
        Assert.Throws<ArgumentException>(() => TestRegistryKey.OpenSubKey(new string('a', 256), RegistryRights.FullControl));

        // OpenSubKey should be read only
        const string name = "FooBar";
        TestRegistryKey.SetValue(name, 42);
        TestRegistryKey.CreateSubKey(name);
        using (var rk = Registry.CurrentUser.OpenSubKey(name: TestRegistryKeyName, rights: RegistryRights.ReadKey))
        {
            // Every mutating operation must be rejected on a ReadKey-only handle.
            Assert.Throws<UnauthorizedAccessException>(() => rk.CreateSubKey(name));
            Assert.Throws<UnauthorizedAccessException>(() => rk.SetValue(name, "String"));
            Assert.Throws<UnauthorizedAccessException>(() => rk.DeleteValue(name));
            Assert.Throws<UnauthorizedAccessException>(() => rk.DeleteSubKey(name));
            Assert.Throws<UnauthorizedAccessException>(() => rk.DeleteSubKeyTree(name));
        }

        // Should throw if RegistryKey closed
        // (Dispose happens inside the lambda, so this check must stay last.)
        Assert.Throws<ObjectDisposedException>(() =>
        {
            TestRegistryKey.Dispose();
            TestRegistryKey.OpenSubKey(TestRegistryKeyName, RegistryRights.Delete);
        });
    }

    /// <summary>Happy path: a handle opened with explicit rights can exercise them.</summary>
    [Fact]
    public void OpenSubKeyTest()
    {
        // [] Vanilla; open a subkey in read/write mode and write to it
        const string valueName = "FooBar";
        const string expectedValue = "BLAH";

        using (var rk = TestRegistryKey.OpenSubKey("", RegistryRights.SetValue | RegistryRights.QueryValues))
        {
            rk.SetValue(valueName, expectedValue);
            Assert.Equal(expectedValue, rk.GetValue(valueName));
        }

        using (var rk = TestRegistryKey.OpenSubKey("", RegistryRights.CreateSubKey))
        {
            rk.CreateSubKey(valueName);
            Assert.NotNull(rk.OpenSubKey(valueName));
        }
    }

    private const RegistryRights Writable = RegistryRights.ReadKey | RegistryRights.WriteKey;
    private const RegistryRights NonWritable = RegistryRights.ReadKey;

    // The four theories below reuse the shared name-fixup / missing-key
    // verification helpers for both writable and non-writable rights.
    [Theory]
    [MemberData(nameof(TestRegistrySubKeyNames))]
    public void OpenSubKey_Writable_KeyExists_OpensWithFixedUpName(string expected, string subKeyName) =>
        Verify_OpenSubKey_KeyExists_OpensWithFixedUpName(expected, () => TestRegistryKey.OpenSubKey(subKeyName, Writable));

    [Theory]
    [MemberData(nameof(TestRegistrySubKeyNames))]
    public void OpenSubKey_NonWritable_KeyExists_OpensWithFixedUpName(string expected, string subKeyName) =>
        Verify_OpenSubKey_KeyExists_OpensWithFixedUpName(expected, () => TestRegistryKey.OpenSubKey(subKeyName, NonWritable));

    [Theory]
    [MemberData(nameof(TestRegistrySubKeyNames))]
    public void OpenSubKey_Writable_KeyDoesNotExist_ReturnsNull(string expected, string subKeyName) =>
        Verify_OpenSubKey_KeyDoesNotExist_ReturnsNull(expected, () => TestRegistryKey.OpenSubKey(subKeyName, Writable));

    [Theory]
    [MemberData(nameof(TestRegistrySubKeyNames))]
    public void OpenSubKey_NonWritable_KeyDoesNotExist_ReturnsNull(string expected, string subKeyName) =>
        Verify_OpenSubKey_KeyDoesNotExist_ReturnsNull(expected, () => TestRegistryKey.OpenSubKey(subKeyName, NonWritable));
}
}
| {
"content_hash": "5904a4239e650383bc8f68a8fb0a7091",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 130,
"avg_line_length": 47.629629629629626,
"alnum_prop": 0.6565578019699326,
"repo_name": "alphonsekurian/corefx",
"id": "357a28bc090ae2937b4c0fa79f1bf68b6d08ce79",
"size": "4062",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Microsoft.Win32.Registry/tests/RegistryKey/RegistryKey_OpenSubKey_str_rkpc.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "1C Enterprise",
"bytes": "903"
},
{
"name": "ASP",
"bytes": "1687"
},
{
"name": "Batchfile",
"bytes": "14735"
},
{
"name": "C",
"bytes": "1110470"
},
{
"name": "C#",
"bytes": "99304584"
},
{
"name": "C++",
"bytes": "376749"
},
{
"name": "CMake",
"bytes": "50826"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "Groff",
"bytes": "4236"
},
{
"name": "Groovy",
"bytes": "22692"
},
{
"name": "HTML",
"bytes": "653"
},
{
"name": "Makefile",
"bytes": "9085"
},
{
"name": "Objective-C",
"bytes": "9335"
},
{
"name": "Perl",
"bytes": "3895"
},
{
"name": "PowerShell",
"bytes": "50639"
},
{
"name": "Python",
"bytes": "1535"
},
{
"name": "Shell",
"bytes": "50933"
},
{
"name": "Visual Basic",
"bytes": "829986"
}
],
"symlink_target": ""
} |
/* Build 'calendar year' dimension table.
   Emits one row per distinct (person, year-of-cohort-start); the calendar
   year itself doubles as the covariate id. */
INSERT INTO #dim
SELECT DISTINCT
	cp.person_id,
	YEAR(cp.cohort_start_date) AS covariate_id, -- calendar year used directly as the covariate id
	1 AS covariate_count                        -- constant 1: DISTINCT makes this a presence flag
FROM #cohort_person cp
;
| {
"content_hash": "33a96d863f1669db1eb882ca9e4c8b66",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 47,
"avg_line_length": 24.25,
"alnum_prop": 0.7061855670103093,
"repo_name": "ApproximateIdentity/hdps",
"id": "5a310fae381e060547212dc143bd91409e2ed4ab",
"size": "194",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "inst/sql/dimensions/required/calendaryear.sql",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "187"
},
{
"name": "R",
"bytes": "32916"
}
],
"symlink_target": ""
} |
<Record>
<Term>Labetalol</Term>
<SemanticType>Pharmacologic Substance</SemanticType>
<SemanticType>Organic Chemical</SemanticType>
<PrimarySemanticType>Pharmacologic Substance</PrimarySemanticType>
<Synonym>Labetalol hydrochloride</Synonym>
<Synonym>Trandate</Synonym>
<Synonym>Labetalol hydrocloride</Synonym>
<Synonym>Labetalol</Synonym>
<Synonym>Normodyne</Synonym>
<Source>FDA Registry</Source>
</Record>
| {
"content_hash": "e5c32cb1adabe7aab0ff014bca0fec57",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 66,
"avg_line_length": 35.083333333333336,
"alnum_prop": 0.7980997624703088,
"repo_name": "detnavillus/modular-informatic-designs",
"id": "549cc9ff6004fc08c339b90b5972360a0c4e85c7",
"size": "421",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pipeline/src/test/resources/thesaurus/fdarecords/labetalol.xml",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "2069134"
}
],
"symlink_target": ""
} |
/* Generated automatically. DO NOT EDIT! */
#define SIMD_HEADER "simd-avx.h"
#include "../common/t1fuv_10.c"
| {
"content_hash": "163cc7a975e23670557734445ce385e0",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 44,
"avg_line_length": 36.666666666666664,
"alnum_prop": 0.6909090909090909,
"repo_name": "jjh13/poisson",
"id": "80a4b06eb9d034c7e806735d1022d7526c0cb629",
"size": "110",
"binary": false,
"copies": "30",
"ref": "refs/heads/master",
"path": "lib/fftw-3.3.3/dft/simd/avx/t1fuv_10.c",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "9047157"
},
{
"name": "C++",
"bytes": "1709864"
},
{
"name": "CMake",
"bytes": "9194"
},
{
"name": "FORTRAN",
"bytes": "165275"
},
{
"name": "Groff",
"bytes": "17006"
},
{
"name": "HTML",
"bytes": "824679"
},
{
"name": "Makefile",
"bytes": "973860"
},
{
"name": "OCaml",
"bytes": "254027"
},
{
"name": "Perl",
"bytes": "51870"
},
{
"name": "Shell",
"bytes": "724407"
},
{
"name": "Standard ML",
"bytes": "1219"
},
{
"name": "TeX",
"bytes": "320197"
}
],
"symlink_target": ""
} |
package vrf
import (
"bytes"
"crypto"
"encoding/binary"
)
// A VRF is a pseudorandom function f_k derived from a secret key k, such that
// knowledge of k not only enables one to evaluate f_k for any message m,
// but also to provide an NP-proof that the value f_k(m) is indeed correct,
// without compromising the unpredictability of f_k for any m' != m.
// http://ieeexplore.ieee.org/stamp/stamp.jsp?tp=&arnumber=814584
// PrivateKey supports evaluating the VRF function f_k.
type PrivateKey interface {
	// Evaluate returns the output of H(f_k(m)) and its proof.
	Evaluate(m []byte) (index [32]byte, proof []byte)

	// Public returns the corresponding public key.
	Public() crypto.PublicKey
}
// PublicKey supports verifying output from the VRF function.
type PublicKey interface {
	// ProofToHash verifies the NP-proof supplied by Proof and outputs Index.
	// A non-nil error means the proof did not verify for m.
	ProofToHash(m, proof []byte) (index [32]byte, err error)
}
// UniqueID computes a unique string for a domain, userID and appID combo.
func UniqueID(userID, appID string) []byte {
b := new(bytes.Buffer)
binary.Write(b, binary.BigEndian, uint32(len(userID)))
b.WriteString(userID)
binary.Write(b, binary.BigEndian, uint32(len(appID)))
b.WriteString(appID)
return b.Bytes()
}
| {
"content_hash": "d28ee51cc809a3831967a532d0c174de",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 76,
"avg_line_length": 33.567567567567565,
"alnum_prop": 0.7359098228663447,
"repo_name": "alsophian/keytransparency",
"id": "437b64c0d17ea8c4135f4866f2ff8c59726c36cb",
"size": "1922",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "core/crypto/vrf/vrf.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "353884"
},
{
"name": "Makefile",
"bytes": "1663"
},
{
"name": "Protocol Buffer",
"bytes": "18204"
},
{
"name": "Shell",
"bytes": "12383"
}
],
"symlink_target": ""
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.addthis.hydra.data.query.engine;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import com.addthis.basis.util.Parameter;
import com.google.common.base.Objects;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class implements an LRU cache to keep our QueryEngines. It is instantiated only from MeshQuerySource.
* <p/>
* It uses guava's cache loader to do most of the work. We periodically check to see if new data is available for
* a job, and if so, asynchronously prepare the new database before swapping it in. Multiple get or refresh attempts
* will block and wait on the existing one to finish.
* <p/>
* As per guava's specs, it is not guaranteed that we will wait until we are at maximum capacity to evict engines.
* Also, we are okay with evicting non-idle engines, but we do not force them to close. Rather, we set a flag and
* trust the query using them to close it when it is finished. This means we may have a number of engines open equal
* to the cache capacity + number of running queries. It is also possible for a few engines to be transiently open
* while waiting for the eviction listener to close engines. This is somewhat balanced by guava's more aggressive
* eviction policy, but in general we should not rely on the capacity as being an absolute hard max. In practice, it
* should be more than sufficient though.
* <p/>
* Basic flow is :
* Constructed from MQSource
* MQSource calls getAndLease()
* See if we have a suitable engine
* If so, return it, if not, make one and return it
*/
public class QueryEngineCache {

    private static final Logger log = LoggerFactory.getLogger(QueryEngineCache.class);

    /**
     * 'soft cap' on the number of engines to have open. this + concurrent queries +/- a few should closely
     * resemble the real cap on open engines
     */
    private static final long DEFAULT_ENGINE_CACHE_SIZE = Parameter.longValue("queryEngineCache.engineCacheSize", 5);

    /**
     * seconds to let an engine be in cache before attempting to refresh it. Refreshing it means checking whether
     * or not the job has finished running and has a new data directory; it does not force the reopening of the same
     * directory. It is important to note that this scheduled refresh is not checked unless a get is called on it,
     * and that even if the refresh returns the old engine, it resets the fail timer.
     */
    private static final long DEFAULT_REFRESH_INTERVAL = Parameter.longValue("queryEngineCache.refreshInterval", 2 * 60);

    /**
     * seconds in between cache maintenance runs. This helps query sources and jobs in lower throughput environments.
     * It does the guava api clean up method which handles any pending expiration events, and also attempts to provoke
     * refresh attempts on cached keys by calling get on them. The latter is more important for our purposes. Without it,
     * relatively idle engines would become stale or subject to undesired eviction by the fail interval. 0 disables it.
     */
    private static final long DEFAULT_MAINTENANCE_INTERVAL = Parameter.longValue("queryEngineCache.maintenanceInterval", 20 * 60);

    /**
     * seconds to let an engine be in cache after the most recent write. This is intended only for situations
     * where re-opening that engine is failing, and thus while the refresh is not occurring. it might appear that
     * an engine is alive and up to date and this attempts to limit that disparity if desired. Note that by failing,
     * we mean that the refresh method is throwing exceptions.
     */
    private static final long DEFAULT_FAIL_INTERVAL = Parameter.longValue("queryEngineCache.failInterval", 70 * 60);

    /**
     * thread pool for cache maintenance runs. Should only need one thread.
     */
    private final ScheduledExecutorService queryEngineCacheMaintainer = MoreExecutors
            .getExitingScheduledExecutorService(new ScheduledThreadPoolExecutor(1,
                    new ThreadFactoryBuilder().setNameFormat("queryEngineCacheMaintainer=%d").build()));

    /**
     * The {@link LoadingCache} that provides the backing data structure for this class.
     * Acts like an intelligent semi-persistent Map that has logic for loading and reloading complex objects.
     */
    protected final LoadingCache<String, QueryEngine> loadingEngineCache;

    // Configuration snapshot captured at construction; see the DEFAULT_* constants above.
    private final long engineCacheSize;
    private final long refreshInterval;
    private final long failInterval;
    private final long maintenanceInterval;

    /**
     * Initialize a {@link LoadingCache} that is capable of loading and reloading
     * {@link QueryEngine}s. Reloads occur asynchronously to prevent blocking operations
     * during unrelated calls to the cache. When reload is called the current engine will be compared with the
     * newest available data directory. If the current engine is up to date it will be returned, otherwise a new
     * engine will be opened to replace the current engine with the latest available.
     * <p/>
     * On removal, we have a listener that will call closeWhenIdle on engines. It has a guard against removal events
     * generated by refreshes where we decide to keep the existing engine (no new job data is available). There is a
     * race condition where that test can be passed more than once so any clean up done there must be okay with that.
     * The race condition is such that the test will always be passed at least once, and never when the engine is still
     * available to new get calls. This meets our requirements.
     */
    public QueryEngineCache() {
        this(DEFAULT_ENGINE_CACHE_SIZE, DEFAULT_REFRESH_INTERVAL, DEFAULT_FAIL_INTERVAL, DEFAULT_MAINTENANCE_INTERVAL);
    }

    public QueryEngineCache(long engineCacheSize, long refreshInterval, long failInterval, long maintenanceInterval) {
        this(engineCacheSize, refreshInterval, failInterval, maintenanceInterval, new EngineLoader());
    }

    public QueryEngineCache(long engineCacheSize, long refreshInterval, long failInterval, long maintenanceInterval,
            EngineLoader engineLoader) {
        this.engineCacheSize = engineCacheSize;
        this.refreshInterval = refreshInterval;
        this.failInterval = failInterval;
        this.maintenanceInterval = maintenanceInterval;
        log.info("Initializing QueryEngineCache: {}", this); //using 'this' is just more efficient
        // no easy way around escaping 'this' here, but at least it is more obvious what is going on now
        // Engines are weighted by their tree's cache weight (x100 to keep integer
        // precision), so maximumWeight is the soft engine cap scaled the same way.
        loadingEngineCache = CacheBuilder.newBuilder()
                .maximumWeight(engineCacheSize * 100)
                .<String, QueryEngine>weigher(
                        (dir, engine) -> (int) (100 * engine.getTree().getAdvancedSettings().cacheWeight()))
                .refreshAfterWrite(refreshInterval, TimeUnit.SECONDS)
                .expireAfterWrite(failInterval, TimeUnit.SECONDS)
                .removalListener(new EngineRemovalListener(this))
                .build(engineLoader);

        //schedule maintenance runs
        maybeInitMaintenance();
    }

    /**
     * schedules maintenance for the cache using the maintenanceInterval parameter. Values less than 1
     * are treated as 'do not do maintenance'. Maintenance includes cache loader cleanUp() and an attempt
     * to trigger refreshes in relatively idle engines. This is done by the thread safe iterator from
     * the loading cache and performing getIfPresent calls on each entry. This will only trigger refreshes
     * if the refresh interval has passed, and avoids a potential race condition where doing refresh() could
     * end up re-loading an engine that was just evicted. This is important because in addition to being
     * incorrect cache behavior, refresh will block instead of being asynchronous while doing so -- possibly
     * leading to even more race conditions.
     * <p/>
     * since the thread safe iterator is weakly consistent, it is a good idea to configure the intervals so
     * that maintenance will be performed more than once before the fail interval occurs (if we do not desire
     * to evict and close 'relatively idle' engines). eg. maintenanceInterval * 2 < failInterval
     * <p/>
     * unfortunately, this somewhat confuses the eviction order heuristic because it considers these all to be
     * valid r/ws. This is one reason to keep this value relatively long. It is possible to optimize against this
     * somewhat, but probably at the cost of greatly increased complexity. It seems unlikely that it will have a
     * large impact if performed infrequently enough though, especially since the evictor is not a simple LRU.
     */
    private void maybeInitMaintenance() {
        if (maintenanceInterval > 0) {
            queryEngineCacheMaintainer.scheduleAtFixedRate(() -> {
                loadingEngineCache.cleanUp();
                loadingEngineCache.asMap().keySet().forEach(loadingEngineCache::getIfPresent);
            }, maintenanceInterval, maintenanceInterval, TimeUnit.SECONDS);
        }
    }

    /**
     * Takes an unresolved (usually the gold path) path to a bdb query directory. This is mostly a thin
     * layer between this class and the backing LoadingCache.
     * <p/>
     * Most importantly, it also attempts to lease the engine. This is because there is a rare race condition
     * where after acquiring the engine, but before leasing it ourselves, it is evicted from the cache. Probably
     * caused by refresh, since it is less likely that an engine we just acquired would be the target of size
     * eviction in most cases. It is relatively unlikely to happen even twice in a row, but we try three times
     * here anyway. I have never seen this exception but if we start to see it a lot, we can re-evaluate this approach.
     *
     * @param directoryPath The path of the engine directory
     * @return a QueryEngine from the cache or constructed on demand (constructing blocks this thread)
     * @throws Exception - any problem while getting the engine. Likely either an issue with leasing or with opening an engine
     */
    public QueryEngine getAndLease(String directoryPath) throws Exception {
        for (int i = 0; i < 3; i++) {
            QueryEngine qe = loadingEngineCache.get(directoryPath);
            if (qe.lease()) {
                return qe;
            }
        }
        log.warn("Tried three times but unable to get lease for engine with path: {}", directoryPath);
        throw new RuntimeException("Can't lease engine");
    }

    @Override
    public String toString() {
        // NOTE(review): com.google.common.base.Objects.toStringHelper is deprecated
        // in newer Guava versions (moved to MoreObjects) — confirm the pinned Guava
        // version before upgrading.
        return Objects.toStringHelper(this)
                .add("engineCacheSize", engineCacheSize)
                .add("refreshInterval", refreshInterval)
                .add("maintenanceInterval", maintenanceInterval)
                .add("failInterval", failInterval)
                .toString();
    }
}
| {
"content_hash": "adc024dcbfaacae34001cd8e0682b7a5",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 130,
"avg_line_length": 55.60849056603774,
"alnum_prop": 0.7211807617270337,
"repo_name": "mythguided/hydra",
"id": "12e6ca02fa41c099d8300355623fa6ff8cc95be5",
"size": "11789",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hydra-data/src/main/java/com/addthis/hydra/data/query/engine/QueryEngineCache.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "37939"
},
{
"name": "HTML",
"bytes": "103283"
},
{
"name": "Java",
"bytes": "4732762"
},
{
"name": "JavaScript",
"bytes": "582863"
},
{
"name": "Shell",
"bytes": "13923"
}
],
"symlink_target": ""
} |
# Prompt the user for age, height and weight, then echo the answers back.
print "How old are you? "
age = gets.chomp.to_i

print "How tall are you? "
height = gets.chomp.to_f

# Fixed prompt: "do you weight?" -> "do you weigh?", capitalized to match
# the other prompts.
print "How much do you weigh? "
weight = gets.chomp.to_f

puts "So, you're #{age} old, #{height} tall and #{weight} heavy."
| {
"content_hash": "c59dfbe862f6e1ad0f56f6f5e7a06a60",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 65,
"avg_line_length": 28.125,
"alnum_prop": 0.6666666666666666,
"repo_name": "Fazendaaa/ruby",
"id": "b2ef8b9b094b9dd7b35ca8146c8215da59472d6d",
"size": "225",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "the_hard_way/ex11.rb",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "437"
},
{
"name": "Ruby",
"bytes": "35152"
}
],
"symlink_target": ""
} |
using System;
using Gtk;
using Mono.Unix;
using Hyena;
using Hyena.Jobs;
using Banshee.ServiceStack;
namespace Banshee.Gui.Dialogs
{
    // Shutdown confirmation dialog shown when jobs flagged DataLossIfStopped
    // are still running: lists them and lets the user quit anyway or keep
    // Banshee running. Closes itself once no such jobs remain.
    public class ConfirmShutdownDialog : ErrorListDialog
    {
        private Scheduler scheduler;

        public ConfirmShutdownDialog ()
        {
            // Column 0 is the display text, column 1 the backing Job
            // (null for plain strings added via AddString).
            ListView.Model = new ListStore (typeof (string), typeof (Job));
            ListView.AppendColumn ("Error", new CellRendererText (), "text", 0);
            ListView.HeadersVisible = false;

            Header = Catalog.GetString ("Important tasks are running");
            Title = "";
            Message = Catalog.GetString (
                "Closing Banshee now will cancel any currently running tasks. They cannot " +
                "be resumed automatically the next time Banshee is run.");

            DialogIconNameStock = Stock.DialogWarning;
            DefaultResponse = ResponseType.Cancel;

            AddButton (Catalog.GetString ("Quit Anyway"), ResponseType.Ok, false);
            AddButton (Catalog.GetString ("Continue Running"), ResponseType.Cancel, true);

            scheduler = ServiceManager.JobScheduler;

            // Seed the list with jobs already running, then track changes
            // through the scheduler's add/remove events.
            foreach (Job job in scheduler.Jobs) {
                AddJob (job);
            }

            scheduler.JobAdded += AddJob;
            scheduler.JobRemoved += RemoveJob;
        }

        // Appends a plain message row with no associated job.
        public void AddString (string message)
        {
            (ListView.Model as ListStore).AppendValues (message, null);
        }

        // Adds a row for a job whose interruption would lose data; the model
        // mutation is marshalled onto the GTK main thread.
        private void AddJob (Job job)
        {
            if (job.Has (PriorityHints.DataLossIfStopped)) {
                ThreadAssist.ProxyToMain (delegate {
                    TreeIter iter = (ListView.Model as ListStore).Prepend ();
                    (ListView.Model as ListStore).SetValue (iter, 0, job.Title);
                    (ListView.Model as ListStore).SetValue (iter, 1, job);
                });
            }
        }

        private void RemoveJob (Job job)
        {
            // Once no data-loss jobs remain there is nothing left to warn
            // about, so close the dialog as if the user confirmed quitting.
            if (!scheduler.HasAnyDataLossJobs) {
                Respond (Gtk.ResponseType.Ok);
                return;
            }

            if (ListView == null || ListView.Model == null) {
                return;
            }

            // Find the row whose column 1 references this job (reference
            // equality — the same Job instance was stored) and remove it.
            for (int i = 0, n = ListView.Model.IterNChildren (); i < n; i++) {
                TreeIter iter;
                if (!ListView.Model.IterNthChild (out iter, i)) {
                    break;
                }

                if (ListView.Model.GetValue (iter, 1) == job) {
                    ThreadAssist.ProxyToMain (delegate {
                        (ListView.Model as ListStore).Remove (ref iter);
                    });
                    break;
                }
            }
        }
    }
}
| {
"content_hash": "33e5b62be3638376b7e5626a4a9c5ceb",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 93,
"avg_line_length": 31.558139534883722,
"alnum_prop": 0.5272660280029476,
"repo_name": "mono-soc-2011/banshee",
"id": "f418f9f2e043f436b41ea790847db63d4dfffb3b",
"size": "3927",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "src/Core/Banshee.ThickClient/Banshee.Gui.Dialogs/ConfirmShutdownDialog.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Boo",
"bytes": "3305"
},
{
"name": "C",
"bytes": "228976"
},
{
"name": "C#",
"bytes": "5553144"
},
{
"name": "JavaScript",
"bytes": "3579"
},
{
"name": "Perl",
"bytes": "10281"
},
{
"name": "Python",
"bytes": "8488"
},
{
"name": "R",
"bytes": "3362"
},
{
"name": "Shell",
"bytes": "67410"
}
],
"symlink_target": ""
} |
// Class extension declaring the cell's private subviews.
@interface CellJJFriend()
@property (nonatomic, strong) UIImageView *imgHead;  // avatar image on the left
@property (nonatomic, strong) UILabel *labTitle;     // title label beside it
@end;
@implementation CellJJFriend

// Designated initializer: builds the subview hierarchy once per reused cell.
- (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier
{
    self = [super initWithStyle:style reuseIdentifier:reuseIdentifier];
    if (self)
    {
        [self initSubView];
    }
    return self;
}

- (void)awakeFromNib {
    [super awakeFromNib];
    // Initialization code
}

- (void)setSelected:(BOOL)selected animated:(BOOL)animated {
//    [super setSelected:selected animated:animated];
    // Configure the view for the selected state
    // NOTE(review): the super call is commented out, so the cell never shows
    // a selection highlight — confirm this is intentional.
}

// Lays out the avatar (50x50, left edge) and the title label beside it
// using Masonry auto-layout constraints.
- (void)initSubView
{
    WS(ws);
    _imgHead = [[UIImageView alloc] init];
    [self.contentView addSubview:_imgHead];
    [_imgHead mas_makeConstraints:^(MASConstraintMaker *make) {
        make.left.mas_equalTo(5);
        make.centerY.mas_equalTo(0);
        make.width.mas_equalTo(50);
        make.height.mas_equalTo(50);
    }];
    _labTitle = [[UILabel alloc] init];
    [self.contentView addSubview:_labTitle];
    [_labTitle mas_makeConstraints:^(MASConstraintMaker *make) {
        // `ws` (weak self) avoids a retain cycle through the constraint block.
        make.left.mas_equalTo(ws.imgHead.mas_right).offset(5);
        make.centerY.mas_equalTo(0);
        make.right.mas_equalTo(-10);
        make.height.mas_equalTo(20);
    }];
}

// Populates the cell's image and title from the model.
- (void)setModel:(JJModelFriend *)model
{
    _model = model;
    _imgHead.image = [UIImage imageNamed:model.imgName];
    _labTitle.text = model.title;
}

@end
| {
"content_hash": "875de1c2c1e42b6f67603acb8920d67b",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 101,
"avg_line_length": 25.216666666666665,
"alnum_prop": 0.6642432253800397,
"repo_name": "wokenshin/KenshinPro",
"id": "3f5913d1cab4b79501df1ce7e6d236531df3e159",
"size": "1692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "KenshinPro/KenshinPro/Classes/Controller/subNav/five/纯代码TableCell/VIew/CellJJFriend.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7899"
},
{
"name": "CSS",
"bytes": "86"
},
{
"name": "HTML",
"bytes": "134668"
},
{
"name": "JavaScript",
"bytes": "205"
},
{
"name": "Objective-C",
"bytes": "3767679"
},
{
"name": "Objective-C++",
"bytes": "11599"
},
{
"name": "Ruby",
"bytes": "353"
},
{
"name": "Swift",
"bytes": "741"
}
],
"symlink_target": ""
} |
from django.db import models
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from sorl.thumbnail import ImageField as SorlImageField
from .utils.urls import is_absolute_url
import os
from uuid import uuid4
class SlugURLValidator(object):
    """Reject relative URL values that contain a path separator.

    Absolute URLs are accepted unchanged; relative values must be plain
    slugs with no '/' in them.
    """

    message = _("Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens.")
    code = 'invalid'

    def __init__(self):
        pass

    def __call__(self, value):
        # Absolute URLs may legitimately contain '/', so only relative
        # values are checked for the separator.
        if not is_absolute_url(value) and '/' in value:
            raise ValidationError(self.message, code=self.code)
# Shared module-level instance used as the default validator for SlugURLField.
blocks_validator_slug = SlugURLValidator()
class SlugURLField(models.CharField):
    """CharField holding either a slug or an absolute URL.

    The slug validator is skipped for redirect-type Menu instances, which
    may hold arbitrary URLs.
    """

    default_validators = [blocks_validator_slug]

    def validate(self, value, model_instance):
        # Imported here to avoid a circular import between fields and models.
        from .models import Menu
        if isinstance(model_instance, Menu):
            # NOTE(review): this rebuilds self.validators on the field object
            # shared by all model instances on every call — confirm this is
            # safe under concurrent validation.
            self.validators = []
            if model_instance.type != Menu.TYPE_REDIRECT:
                self.validators.append(blocks_validator_slug)
        super(SlugURLField, self).validate(value, model_instance)

    def to_python(self, value):
        # Relative values are normalised to lower case; absolute URLs are
        # preserved exactly as entered.
        value = super(SlugURLField, self).to_python(value)
        if value is None:
            return value
        if not is_absolute_url(value):
            value = value.lower()
        return value
class ImageField(SorlImageField):
#class ImageField(models.ImageField):
    """Image field that stores uploads under a random, collision-free name."""

    def __init__(self, verbose_name=None, name=None, upload_to=None, storage=None, **kwargs):
        # Non-callable upload_to values (plain path strings) are wrapped so
        # every upload is renamed to a random hex filename.
        # NOTE(review): upload_to=None is also wrapped, which would make
        # os.path.join fail at upload time — confirm callers always pass a path.
        if not callable(upload_to):
            upload_to = ImageField.path_and_rename(upload_to)
        super(ImageField, self).__init__(verbose_name=verbose_name, name=name, upload_to=upload_to, storage=storage, **kwargs)

    @staticmethod
    def path_and_rename(path):
        # Returns an upload_to callable producing
        # uploads/<path>/<model-name>/<uuid>.<ext>
        def wrapper(instance, filename):
            ext = filename.split('.')[-1]
            # set filename as random string
            filename = '{}.{}'.format(uuid4().hex, ext)
            # return the whole path to the file
            return os.path.join('uploads', path, instance.__class__.__name__.lower(), filename)
        return wrapper
class HiddenFormField(forms.IntegerField):
    """IntegerField that always renders as a hidden input."""

    def __init__(self, *args, **kwargs):
        # Force the hidden widget, overriding any caller-supplied widget.
        kwargs['widget'] = forms.HiddenInput
        super(HiddenFormField, self).__init__(*args, **kwargs)
class OrderField(models.PositiveSmallIntegerField):
    """Ordering column whose form field is rendered as a hidden input."""

    def formfield(self, **kwargs):
        # Default to HiddenFormField unless the caller overrides form_class.
        defaults = {'form_class': HiddenFormField}
        defaults.update(kwargs)
        return super(OrderField, self).formfield(**defaults)
# South (the legacy migrations tool) needs introspection rules registered for
# custom fields. Skip silently when South is not installed; the bare `except`
# previously swallowed every exception (including KeyboardInterrupt) — narrow
# it to ImportError, which is the only expected failure here.
try:
    from south.modelsinspector import add_introspection_rules
    add_introspection_rules([], [r"^blocks\.fields\.SlugURLField"])
    add_introspection_rules([], [r"^blocks\.fields\.ImageField"])
    add_introspection_rules([], [r"^blocks\.fields\.OrderField"])
except ImportError:
    pass
"content_hash": "575f3326d4d9ed6789f9ddd3d089af7c",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 120,
"avg_line_length": 28.87912087912088,
"alnum_prop": 0.7256468797564688,
"repo_name": "kimus/django-blocks",
"id": "18394bb0254d77cffc2246678c4d888fa0990c3a",
"size": "2628",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "blocks/fields.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2345"
},
{
"name": "JavaScript",
"bytes": "23810"
},
{
"name": "Python",
"bytes": "111560"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<Inbox_Message xmlns="http://taxii.mitre.org/messages/taxii_xml_binding-1.1"
message_id="valid1"/>
| {
"content_hash": "43dd0f57cf81df2729ec8669038fb416",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 76,
"avg_line_length": 47.666666666666664,
"alnum_prop": 0.6853146853146853,
"repo_name": "anl-cyberscience/java-taxii",
"id": "5b11083d23df3ae4130d163a65f79095ec1e08ba",
"size": "143",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/test/resources/schematron/1.1/inbox-messages/inbox1-valid.xml",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "54665"
},
{
"name": "Java",
"bytes": "339740"
},
{
"name": "XSLT",
"bytes": "196389"
}
],
"symlink_target": ""
} |
<!DOCTYPE html >
<html>
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
<title>Spark NLP 4.2.3 ScalaDoc - com.johnsnowlabs.nlp.annotators.common.PrefixedToken</title>
<meta name="description" content="Spark NLP 4.2.3 ScalaDoc - com.johnsnowlabs.nlp.annotators.common.PrefixedToken" />
<meta name="keywords" content="Spark NLP 4.2.3 ScalaDoc com.johnsnowlabs.nlp.annotators.common.PrefixedToken" />
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<link href="../../../../../lib/index.css" media="screen" type="text/css" rel="stylesheet" />
<link href="../../../../../lib/template.css" media="screen" type="text/css" rel="stylesheet" />
<link href="../../../../../lib/diagrams.css" media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
<script type="text/javascript" src="../../../../../lib/jquery.min.js"></script>
<script type="text/javascript" src="../../../../../lib/jquery.panzoom.min.js"></script>
<script type="text/javascript" src="../../../../../lib/jquery.mousewheel.min.js"></script>
<script type="text/javascript" src="../../../../../lib/index.js"></script>
<script type="text/javascript" src="../../../../../index.js"></script>
<script type="text/javascript" src="../../../../../lib/scheduler.js"></script>
<script type="text/javascript" src="../../../../../lib/template.js"></script>
<script type="text/javascript">
/* this variable can be used by the JS to determine the path to the root document */
var toRoot = '../../../../../';
</script>
</head>
<body>
<div id="search">
<span id="doc-title">Spark NLP 4.2.3 ScalaDoc<span id="doc-version"></span></span>
<span class="close-results"><span class="left"><</span> Back</span>
<div id="textfilter">
<span class="input">
<input autocapitalize="none" placeholder="Search" id="index-input" type="text" accesskey="/" />
<i class="clear material-icons"></i>
<i id="search-icon" class="material-icons"></i>
</span>
</div>
</div>
<div id="search-results">
<div id="search-progress">
<div id="progress-fill"></div>
</div>
<div id="results-content">
<div id="entity-results"></div>
<div id="member-results"></div>
</div>
</div>
<div id="content-scroll-container" style="-webkit-overflow-scrolling: touch;">
<div id="content-container" style="-webkit-overflow-scrolling: touch;">
<div id="subpackage-spacer">
<div id="packages">
<h1>Packages</h1>
<ul>
<li name="_root_.root" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="_root_"></a><a id="root:_root_"></a>
<span class="permalink">
<a href="../../../../../index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="../../../../../index.html"><span class="name">root</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../../../../../index.html" class="extype" name="_root_">root</a></dd></dl></div>
</li><li name="_root_.com" visbl="pub" class="indented1 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="com"></a><a id="com:com"></a>
<span class="permalink">
<a href="../../../../../com/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="../../../../index.html"><span class="name">com</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../../../../../index.html" class="extype" name="_root_">root</a></dd></dl></div>
</li><li name="com.johnsnowlabs" visbl="pub" class="indented2 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="johnsnowlabs"></a><a id="johnsnowlabs:johnsnowlabs"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="../../../index.html"><span class="name">johnsnowlabs</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../../../../index.html" class="extype" name="com">com</a></dd></dl></div>
</li><li name="com.johnsnowlabs.nlp" visbl="pub" class="indented3 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="nlp"></a><a id="nlp:nlp"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="../../index.html"><span class="name">nlp</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../../../index.html" class="extype" name="com.johnsnowlabs">johnsnowlabs</a></dd></dl></div>
</li><li name="com.johnsnowlabs.nlp.annotators" visbl="pub" class="indented4 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="annotators"></a><a id="annotators:annotators"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="../index.html"><span class="name">annotators</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../../index.html" class="extype" name="com.johnsnowlabs.nlp">nlp</a></dd></dl></div>
</li><li name="com.johnsnowlabs.nlp.annotators.common" visbl="pub" class="indented5 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="common"></a><a id="common:common"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/index.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<a title="" href="index.html"><span class="name">common</span></a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd><a href="../index.html" class="extype" name="com.johnsnowlabs.nlp.annotators">annotators</a></dd></dl></div>
</li><li class="current-entities indented5">
<a class="object" href="Annotated$.html" title=""></a>
<a class="trait" href="Annotated.html" title=""></a>
<a href="Annotated.html" title="">Annotated</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
              <a class="object" href="ChunkSplit$.html" title="Helper object to work with Chunks"></a>
              <a href="ChunkSplit$.html" title="Helper object to work with Chunks">ChunkSplit</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="class" href="ConllSentence.html" title=""></a>
<a href="ConllSentence.html" title="">ConllSentence</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="object" href="DatasetHelpers$.html" title=""></a>
<a href="DatasetHelpers$.html" title="">DatasetHelpers</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="object" href="DependencyParsed$.html" title=""></a>
<a href="DependencyParsed$.html" title="">DependencyParsed</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="class" href="DependencyParsedSentence.html" title=""></a>
<a href="DependencyParsedSentence.html" title="">DependencyParsedSentence</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="object" href="EmbeddingsWithSentence$.html" title=""></a>
<a href="EmbeddingsWithSentence$.html" title="">EmbeddingsWithSentence</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="class" href="IndexedTaggedWord.html" title=""></a>
<a href="IndexedTaggedWord.html" title="">IndexedTaggedWord</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="class" href="IndexedToken.html" title=""></a>
<a href="IndexedToken.html" title="">IndexedToken</a>
</li><li class="current-entities indented5">
<a class="object" href="InfixToken$.html" title=""></a>
<a class="class" href="InfixToken.html" title=""></a>
<a href="InfixToken.html" title="">InfixToken</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="object" href="LabeledDependency$.html" title=""></a>
<a href="LabeledDependency$.html" title="">LabeledDependency</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="object" href="NerTagged$.html" title=""></a>
<a href="NerTagged$.html" title="">NerTagged</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="object" href="PosTagged$.html" title=""></a>
<a href="PosTagged$.html" title="">PosTagged</a>
</li><li class="current-entities indented5">
<a class="object" href="" title=""></a>
<a class="class" href="PrefixedToken.html" title=""></a>
<a href="PrefixedToken.html" title="">PrefixedToken</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="trait" href="PreprocessingParser.html" title=""></a>
<a href="PreprocessingParser.html" title="">PreprocessingParser</a>
</li><li class="current-entities indented5">
<a class="object" href="Sentence$.html" title=""></a>
<a class="class" href="Sentence.html" title="structure representing a sentence and its boundaries"></a>
<a href="Sentence.html" title="structure representing a sentence and its boundaries">Sentence</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
              <a class="object" href="SentenceSplit$.html" title="Helper object to work with Sentence"></a>
              <a href="SentenceSplit$.html" title="Helper object to work with Sentence">SentenceSplit</a>
</li><li class="current-entities indented5">
<a class="object" href="SuffixedToken$.html" title=""></a>
<a class="class" href="SuffixedToken.html" title=""></a>
<a href="SuffixedToken.html" title="">SuffixedToken</a>
</li><li class="current-entities indented5">
<a class="object" href="TableData$.html" title=""></a>
<a class="class" href="TableData.html" title=""></a>
<a href="TableData.html" title="">TableData</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="trait" href="Tagged.html" title=""></a>
<a href="Tagged.html" title="">Tagged</a>
</li><li class="current-entities indented5">
<a class="object" href="TaggedSentence$.html" title=""></a>
<a class="class" href="TaggedSentence.html" title="Structure to hold Sentences as list of words and POS-tags"></a>
<a href="TaggedSentence.html" title="Structure to hold Sentences as list of words and POS-tags">TaggedSentence</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="class" href="TaggedWord.html" title="Word tag pair"></a>
<a href="TaggedWord.html" title="Word tag pair">TaggedWord</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="class" href="TokenPiece.html" title=""></a>
<a href="TokenPiece.html" title="">TokenPiece</a>
</li><li class="current-entities indented5">
<a class="object" href="TokenPieceEmbeddings$.html" title=""></a>
<a class="class" href="TokenPieceEmbeddings.html" title=""></a>
<a href="TokenPieceEmbeddings.html" title="">TokenPieceEmbeddings</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="class" href="TokenizedSentence.html" title="Internal structure for a sentence that is split into tokens"></a>
<a href="TokenizedSentence.html" title="Internal structure for a sentence that is split into tokens">TokenizedSentence</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="object" href="TokenizedWithSentence$.html" title=""></a>
<a href="TokenizedWithSentence$.html" title="">TokenizedWithSentence</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="class" href="WordWithDependency.html" title=""></a>
<a href="WordWithDependency.html" title="">WordWithDependency</a>
</li><li class="current-entities indented5">
<a class="object" href="WordpieceEmbeddingsSentence$.html" title=""></a>
<a class="class" href="WordpieceEmbeddingsSentence.html" title=""></a>
<a href="WordpieceEmbeddingsSentence.html" title="">WordpieceEmbeddingsSentence</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="object" href="WordpieceTokenized$.html" title=""></a>
<a href="WordpieceTokenized$.html" title="">WordpieceTokenized</a>
</li><li class="current-entities indented5">
<span class="separator"></span>
<a class="class" href="WordpieceTokenizedSentence.html" title=""></a>
<a href="WordpieceTokenizedSentence.html" title="">WordpieceTokenizedSentence</a>
</li>
</ul>
</div>
</div>
<div id="content">
<body class="object value">
<div id="definition">
<a href="PrefixedToken.html" title="See companion class"><div class="big-circle object-companion-class">o</div></a>
<p id="owner"><a href="../../../../index.html" class="extype" name="com">com</a>.<a href="../../../index.html" class="extype" name="com.johnsnowlabs">johnsnowlabs</a>.<a href="../../index.html" class="extype" name="com.johnsnowlabs.nlp">nlp</a>.<a href="../index.html" class="extype" name="com.johnsnowlabs.nlp.annotators">annotators</a>.<a href="index.html" class="extype" name="com.johnsnowlabs.nlp.annotators.common">common</a></p>
<h1><a href="PrefixedToken.html" title="See companion class">PrefixedToken</a><span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html" title="Permalink">
<i class="material-icons"></i>
</a>
</span></h1>
<h3><span class="morelinks"><div>
Companion <a href="PrefixedToken.html" title="See companion class">class PrefixedToken</a>
</div></span></h3>
</div>
<h4 id="signature" class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">object</span>
</span>
<span class="symbol">
<span class="name">PrefixedToken</span>
</span>
</h4>
<div id="comment" class="fullcommenttop"><div class="toggleContainer block">
<span class="toggle">
Linear Supertypes
</span>
<div class="superTypes hiddenContent"><span class="extype" name="scala.AnyRef">AnyRef</span>, <span class="extype" name="scala.Any">Any</span></div>
</div></div>
<div id="mbrsel">
<div class="toggle"></div>
<div id="memberfilter">
<i class="material-icons arrow"></i>
<span class="input">
<input id="mbrsel-input" placeholder="Filter all members" type="text" accesskey="/" />
</span>
<i class="clear material-icons"></i>
</div>
<div id="filterby">
<div id="order">
<span class="filtertype">Ordering</span>
<ol>
<li class="alpha in"><span>Alphabetic</span></li>
<li class="inherit out"><span>By Inheritance</span></li>
</ol>
</div>
<div class="ancestors">
<span class="filtertype">Inherited<br />
</span>
<ol id="linearization">
<li class="in" name="com.johnsnowlabs.nlp.annotators.common.PrefixedToken"><span>PrefixedToken</span></li><li class="in" name="scala.AnyRef"><span>AnyRef</span></li><li class="in" name="scala.Any"><span>Any</span></li>
</ol>
</div><div class="ancestors">
<span class="filtertype"></span>
<ol>
<li class="hideall out"><span>Hide All</span></li>
<li class="showall in"><span>Show All</span></li>
</ol>
</div>
<div id="visbl">
<span class="filtertype">Visibility</span>
<ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
</div>
</div>
</div>
<div id="template">
<div id="allMembers">
<div class="values members">
<h3>Value Members</h3>
<ol>
<li name="scala.AnyRef#!=" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="!=(x$1:Any):Boolean"></a><a id="!=(Any):Boolean"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#!=(x$1:Any):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $bang$eq" class="name">!=</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef###" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="##():Int"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html###():Int" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $hash$hash" class="name">##</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#==" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="==(x$1:Any):Boolean"></a><a id="==(Any):Boolean"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#==(x$1:Any):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $eq$eq" class="name">==</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="com.johnsnowlabs.nlp.annotators.common.PrefixedToken#apply" visbl="pub" class="indented0 " data-isabs="false" fullComment="no" group="Ungrouped">
<a id="apply(prefixes:Array[String]):com.johnsnowlabs.nlp.annotators.common.PrefixedToken"></a><a id="apply(Array[String]):PrefixedToken"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#apply(prefixes:Array[String]):com.johnsnowlabs.nlp.annotators.common.PrefixedToken" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">apply</span><span class="params">(<span name="prefixes">prefixes: <span class="extype" name="scala.Array">Array</span>[<span class="extype" name="scala.Predef.String">String</span>]</span>)</span><span class="result">: <a href="PrefixedToken.html" class="extype" name="com.johnsnowlabs.nlp.annotators.common.PrefixedToken">PrefixedToken</a></span>
</span>
</li><li name="scala.Any#asInstanceOf" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="asInstanceOf[T0]:T0"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#asInstanceOf[T0]:T0" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">asInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <span class="extype" name="scala.Any.asInstanceOf.T0">T0</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef#clone" visbl="prt" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="clone():Object"></a><a id="clone():AnyRef"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#clone():Object" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">clone</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.AnyRef">AnyRef</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<span class="extype" name="java.lang">lang</span>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.CloneNotSupportedException]">...</span>
</span>)</span>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#eq" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="eq(x$1:AnyRef):Boolean"></a><a id="eq(AnyRef):Boolean"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#eq(x$1:AnyRef):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">eq</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#equals" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="equals(x$1:Any):Boolean"></a><a id="equals(Any):Boolean"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#equals(x$1:Any):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">equals</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Any">Any</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#finalize" visbl="prt" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="finalize():Unit"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#finalize():Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">finalize</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<span class="extype" name="java.lang">lang</span>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="symbol">classOf[java.lang.Throwable]</span>
</span>)</span>
</dd></dl></div>
</li><li name="scala.AnyRef#getClass" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="getClass():Class[_]"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#getClass():Class[_]" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">getClass</span><span class="params">()</span><span class="result">: <span class="extype" name="java.lang.Class">Class</span>[_]</span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd><dt>Annotations</dt><dd>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#hashCode" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="hashCode():Int"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#hashCode():Int" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">hashCode</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Int">Int</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd><dt>Annotations</dt><dd>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.Any#isInstanceOf" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="isInstanceOf[T0]:Boolean"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#isInstanceOf[T0]:Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">isInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef#ne" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="ne(x$1:AnyRef):Boolean"></a><a id="ne(AnyRef):Boolean"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#ne(x$1:AnyRef):Boolean" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">ne</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.AnyRef">AnyRef</span></span>)</span><span class="result">: <span class="extype" name="scala.Boolean">Boolean</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#notify" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notify():Unit"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#notify():Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notify</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#notifyAll" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notifyAll():Unit"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#notifyAll():Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notifyAll</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li><li name="scala.AnyRef#synchronized" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="synchronized[T0](x$1:=>T0):T0"></a><a id="synchronized[T0](⇒T0):T0"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#synchronized[T0](x$1:=>T0):T0" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">synchronized</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="params">(<span name="arg0">arg0: ⇒ <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>)</span><span class="result">: <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#toString" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="toString():String"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#toString():String" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">toString</span><span class="params">()</span><span class="result">: <span class="extype" name="java.lang.String">String</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait():Unit"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#wait():Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">()</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.InterruptedException]">...</span>
</span>)</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long,x$2:Int):Unit"></a><a id="wait(Long,Int):Unit"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#wait(x$1:Long,x$2:Int):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Long">Long</span></span>, <span name="arg1">arg1: <span class="extype" name="scala.Int">Int</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.InterruptedException]">...</span>
</span>)</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" class="indented0 " data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long):Unit"></a><a id="wait(Long):Unit"></a>
<span class="permalink">
<a href="../../../../../com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html#wait(x$1:Long):Unit" title="Permalink">
<i class="material-icons"></i>
</a>
</span>
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <span class="extype" name="scala.Long">Long</span></span>)</span><span class="result">: <span class="extype" name="scala.Unit">Unit</span></span>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.InterruptedException]">...</span>
</span>)</span>
<span class="name">@native</span><span class="args">()</span>
</dd></dl></div>
</li>
</ol>
</div>
</div>
<div id="inheritedMembers">
<div class="parent" name="scala.AnyRef">
<h3>Inherited from <span class="extype" name="scala.AnyRef">AnyRef</span></h3>
</div><div class="parent" name="scala.Any">
<h3>Inherited from <span class="extype" name="scala.Any">Any</span></h3>
</div>
</div>
<div id="groupedMembers">
<div class="group" name="Ungrouped">
<h3>Ungrouped</h3>
</div>
</div>
</div>
<div id="tooltip"></div>
<div id="footer"> </div>
</body>
</div>
</div>
</div>
</body>
</html>
| {
"content_hash": "bc56a64a2e6e356fa2286aa376aab54f",
"timestamp": "",
"source": "github",
"line_count": 760,
"max_line_length": 442,
"avg_line_length": 55.28815789473684,
"alnum_prop": 0.559342202337038,
"repo_name": "JohnSnowLabs/spark-nlp",
"id": "f4840a2911cddb3042b3241bd06f9d7227876d2a",
"size": "42099",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/api/com/johnsnowlabs/nlp/annotators/common/PrefixedToken$.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "14452"
},
{
"name": "Java",
"bytes": "223289"
},
{
"name": "Makefile",
"bytes": "819"
},
{
"name": "Python",
"bytes": "1694517"
},
{
"name": "Scala",
"bytes": "4116435"
},
{
"name": "Shell",
"bytes": "5286"
}
],
"symlink_target": ""
} |
package org.apache.shardingsphere.db.protocol.postgresql.packet.command.query.extended.bind.protocol;
import org.apache.shardingsphere.db.protocol.postgresql.packet.command.query.extended.PostgreSQLColumnType;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertThat;
public final class PostgreSQLBinaryProtocolValueFactoryTest {
    
    @Test
    public void assertGetStringBinaryProtocolValueByVarchar() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_VARCHAR, PostgreSQLStringBinaryProtocolValue.class);
    }
    
    @Test
    public void assertGetStringBinaryProtocolValueByChar() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_CHAR, PostgreSQLStringBinaryProtocolValue.class);
    }
    
    @Test
    public void assertGetInt8BinaryProtocolValue() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_INT8, PostgreSQLInt8BinaryProtocolValue.class);
    }
    
    @Test
    public void assertGetInt4BinaryProtocolValue() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_INT4, PostgreSQLInt4BinaryProtocolValue.class);
    }
    
    @Test
    public void assertGetInt2BinaryProtocolValue() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_INT2, PostgreSQLInt2BinaryProtocolValue.class);
    }
    
    @Test
    public void assertGetDoubleBinaryProtocolValue() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_FLOAT8, PostgreSQLDoubleBinaryProtocolValue.class);
    }
    
    @Test
    public void assertGetFloatBinaryProtocolValue() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_FLOAT4, PostgreSQLFloatBinaryProtocolValue.class);
    }
    
    @Test
    public void assertGetNumericBinaryProtocolValue() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_NUMERIC, PostgreSQLNumericBinaryProtocolValue.class);
    }
    
    @Test
    public void assertGetDateBinaryProtocolValue() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_DATE, PostgreSQLDateBinaryProtocolValue.class);
    }
    
    @Test
    public void assertGetTimeBinaryProtocolValue() {
        assertBinaryProtocolValueClass(PostgreSQLColumnType.POSTGRESQL_TYPE_TIMESTAMP, PostgreSQLTimeBinaryProtocolValue.class);
    }
    
    @Test(expected = IllegalArgumentException.class)
    public void assertGetBinaryProtocolValueExThrown() {
        // Unsupported column types are expected to be rejected by the factory.
        PostgreSQLBinaryProtocolValueFactory.getBinaryProtocolValue(PostgreSQLColumnType.POSTGRESQL_TYPE_XML);
    }
    
    /**
     * Assert that the factory maps the given column type to the expected protocol value implementation.
     */
    private void assertBinaryProtocolValueClass(final PostgreSQLColumnType columnType, final Class<?> expectedClass) {
        assertThat(PostgreSQLBinaryProtocolValueFactory.getBinaryProtocolValue(columnType), instanceOf(expectedClass));
    }
}
| {
"content_hash": "5df548b72778e2e3c4f69a9bfa65ee75",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 168,
"avg_line_length": 51.935064935064936,
"alnum_prop": 0.8102025506376594,
"repo_name": "apache/incubator-shardingsphere",
"id": "bcab4ffe5488e6e966a8a4e7ca52071a29e8fdf7",
"size": "4800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shardingsphere-db-protocol/shardingsphere-db-protocol-postgresql/src/test/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/extended/bind/protocol/PostgreSQLBinaryProtocolValueFactoryTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "269258"
},
{
"name": "Batchfile",
"bytes": "3280"
},
{
"name": "CSS",
"bytes": "2842"
},
{
"name": "Dockerfile",
"bytes": "1485"
},
{
"name": "HTML",
"bytes": "2146"
},
{
"name": "Java",
"bytes": "7542761"
},
{
"name": "JavaScript",
"bytes": "92884"
},
{
"name": "Shell",
"bytes": "9837"
},
{
"name": "TSQL",
"bytes": "68705"
},
{
"name": "Vue",
"bytes": "81855"
}
],
"symlink_target": ""
} |
// Assembly metadata for the JoinLineCommon library.
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("JoinLineCommon")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("JoinLineCommon")]
[assembly: AssemblyCopyright("Copyright © 2018")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components.  If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("e916ae39-340d-4b04-b8a2-94aee763708b")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
// AssemblyVersion is the CLR binding version; AssemblyFileVersion is the Win32 file version.
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| {
"content_hash": "bebcd0994c57133062506e5f27785ba9",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 84,
"avg_line_length": 38.77777777777778,
"alnum_prop": 0.748567335243553,
"repo_name": "Microsoft/VSSDK-Extensibility-Samples",
"id": "a7beaab227c482203e09e946a3117046a7f81b15",
"size": "1399",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Backwards_Compatible_Editor_Command/src/CommandImplementation/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "1650655"
},
{
"name": "C++",
"bytes": "74277"
},
{
"name": "PowerShell",
"bytes": "1111"
}
],
"symlink_target": ""
} |
package connect.ui.activity.set;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.View;
import android.webkit.WebChromeClient;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.ProgressBar;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import connect.db.SharedPreferenceUtil;
import connect.ui.activity.R;
import connect.ui.base.BaseActivity;
import connect.utils.ActivityUtil;
import connect.view.TopToolBar;
/**
 * Help-and-feedback screen: renders the Connect FAQ web page (localized to the
 * app language) in a {@link WebView}, with a toolbar action that opens the
 * feedback form.
 *
 * Created by Administrator on 2016/12/1.
 */
public class SupportActivity extends BaseActivity {

    @Bind(R.id.toolbar_top)
    TopToolBar toolbarTop;
    @Bind(R.id.web_view)
    WebView webView;
    @Bind(R.id.myProgressBar)
    ProgressBar myProgressBar;

    private SupportActivity mActivity;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_set_support);
        ButterKnife.bind(this);
        initView();
    }

    @Override
    public void initView() {
        mActivity = this;
        toolbarTop.setBlackStyle();
        toolbarTop.setLeftImg(R.mipmap.back_white);
        toolbarTop.setTitle(null, R.string.Set_Help_and_feedback);
        toolbarTop.setRightText(R.string.Set_FeedBack);
        toolbarTop.setRightTextEnable(true);
        // Load the FAQ page localized to the user's configured app language.
        String languageCode = SharedPreferenceUtil.getInstance().getStringValue(SharedPreferenceUtil.APP_LANGUAGE_CODE);
        webView.loadUrl("https://www.connect.im/mobile/faqs?locale=" + languageCode);
        // Suppress long-press behavior (text selection / context menu) in the page.
        webView.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                return true;
            }
        });
        webView.setWebViewClient(new WebViewClient() {
            // Keep link navigation inside this WebView instead of handing
            // the URL off to an external browser.
            @Override
            public boolean shouldOverrideUrlLoading(WebView view, String url) {
                view.loadUrl(url);
                return true;
            }
        });
        webView.setWebChromeClient(new WebChromeClient() {
            @Override
            public void onProgressChanged(WebView view, int newProgress) {
                super.onProgressChanged(view, newProgress);
                // Hide the progress bar once the page is mostly loaded (>= 70%);
                // otherwise make sure it is visible and reflects current progress.
                if (newProgress >= 70) {
                    myProgressBar.setVisibility(View.GONE);
                } else {
                    if (View.GONE == myProgressBar.getVisibility()) {
                        myProgressBar.setVisibility(View.VISIBLE);
                    }
                    myProgressBar.setProgress(newProgress);
                }
            }
        });
    }

    @OnClick(R.id.left_img)
    void goback(View view) {
        ActivityUtil.goBack(mActivity);
    }

    @OnClick(R.id.right_lin)
    void goFeedBack(View view) {
        ActivityUtil.next(mActivity, FeedBackActivity.class);
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            if (webView.canGoBack()) {
                // Step back through the in-page history first.
                webView.goBack();
                return true;
            } else {
                // No web history left: leave the screen directly.
                // (Replaces the original goback(new View(mActivity)) call,
                // which allocated a throwaway View only to ignore it.)
                ActivityUtil.goBack(mActivity);
            }
        }
        return super.onKeyDown(keyCode, event);
    }
}
| {
"content_hash": "b0ed225ed814ec32b66d387ab5400274",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 120,
"avg_line_length": 31.40776699029126,
"alnum_prop": 0.6287480680061823,
"repo_name": "connectim/Android",
"id": "df9c6dd5c4a193480d0adf4213cf907668df025d",
"size": "3235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/connect/ui/activity/set/SupportActivity.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "FreeMarker",
"bytes": "51905"
},
{
"name": "HTML",
"bytes": "5531"
},
{
"name": "Java",
"bytes": "2716967"
},
{
"name": "Protocol Buffer",
"bytes": "18166"
}
],
"symlink_target": ""
} |
var React = require('react');
var ReactRouter = require('react-router');
var Router = ReactRouter.Router;
var Route = ReactRouter.Route;
var IndexRoute = ReactRouter.IndexRoute;
var hashHistory = ReactRouter.hashHistory;
var Main = require('../components/Main');
var Home = require('../components/Home');
var PromptContainer = require('../containers/PromptContainer');
var ConfirmBattleContainer = require('../containers/ConfirmBattleContainer');
var routes = (
<Router history={hashHistory}>
<Route path="/" component={Main}>
<IndexRoute component={Home}/>
<Route path="playerOne" header="Player One" component={PromptContainer}/>
<Route path="playerTwo/:playerOne" header="Player Two" component={PromptContainer}/>
<Route path="battle" component={ConfirmBattleContainer}/>
</Route>
</Router>
);
module.exports = routes; | {
"content_hash": "c3e9433dd2538d83bed4d5b79c6c888c",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 90,
"avg_line_length": 34.44,
"alnum_prop": 0.718931475029036,
"repo_name": "geordidearns/github-battle",
"id": "51e10a7ef5c485b6031559c94d3e7e1d780abf11",
"size": "861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/config/routes.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "404"
},
{
"name": "JavaScript",
"bytes": "9922"
}
],
"symlink_target": ""
} |
package com.googlecode.cqengine.index.radixinverted;
import com.googlecode.concurrenttrees.radix.node.concrete.DefaultCharArrayNodeFactory;
import com.googlecode.concurrenttrees.radix.node.concrete.SmartArrayBasedNodeFactory;
import com.googlecode.cqengine.testutil.Car;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Tests for {@link InvertedRadixTreeIndex}.
*
* Created by npgall on 13/05/2016.
*/
public class InvertedRadixTreeIndexTest {
@Test
public void testNodeFactory() {
InvertedRadixTreeIndex<String, Car> index1 = InvertedRadixTreeIndex.onAttribute(Car.MANUFACTURER);
InvertedRadixTreeIndex<String, Car> index2 = InvertedRadixTreeIndex.onAttributeUsingNodeFactory(Car.MANUFACTURER, new DefaultCharArrayNodeFactory());
InvertedRadixTreeIndex<String, Car> index3 = InvertedRadixTreeIndex.onAttributeUsingNodeFactory(Car.MANUFACTURER, new SmartArrayBasedNodeFactory());
assertTrue(index1.nodeFactory instanceof DefaultCharArrayNodeFactory);
assertTrue(index2.nodeFactory instanceof DefaultCharArrayNodeFactory);
assertTrue(index3.nodeFactory instanceof SmartArrayBasedNodeFactory);
}
} | {
"content_hash": "7986d6b63605e2d728675a25fee28aff",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 157,
"avg_line_length": 42.107142857142854,
"alnum_prop": 0.8015267175572519,
"repo_name": "npgall/cqengine",
"id": "1f2433389323c4a546ba3b48cfae0f671ed2a573",
"size": "1782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/src/test/java/com/googlecode/cqengine/index/radixinverted/InvertedRadixTreeIndexTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "49662"
},
{
"name": "Java",
"bytes": "1997856"
}
],
"symlink_target": ""
} |
"""WSGI application initialization for Nova APIs."""
import os
import sys
from oslo_config import cfg
from oslo_log import log as logging
from oslo_reports import guru_meditation_report as gmr
from oslo_reports import opts as gmr_opts
from oslo_service import _options as service_opts
from paste import deploy
from nova import config
from nova import context
from nova import exception
from nova import objects
from nova import service
from nova import utils
from nova import version
CONF = cfg.CONF
CONFIG_FILES = ['api-paste.ini', 'nova.conf']
LOG = logging.getLogger(__name__)
objects.register_all()
def _get_config_files(env=None):
    """Return the default nova config file paths for the WSGI app.

    The config directory is taken from the OS_NOVA_CONFIG_DIR environment
    variable, falling back to /etc/nova.

    :param env: mapping to read the variable from; defaults to os.environ
    """
    environment = os.environ if env is None else env
    config_dir = environment.get('OS_NOVA_CONFIG_DIR', '/etc/nova').strip()
    return [os.path.join(config_dir, name) for name in CONFIG_FILES]
def _setup_service(host, name):
    """Create or refresh the Service record for this API service.

    Aborts startup (by re-raising) if any compute service in the deployment
    is too old, unless the FFU workaround flag is set, in which case the
    condition is only logged.

    :param host: hostname to register the service under
    :param name: service name; prefixed with 'nova-' if not already
    """
    try:
        utils.raise_if_old_compute()
    except exception.TooOldComputeService as e:
        # Operators doing fast-forward upgrades may explicitly opt out of
        # this check; otherwise an old compute is a fatal startup error.
        if CONF.workarounds.disable_compute_service_check_for_ffu:
            LOG.warning(str(e))
        else:
            raise
    binary = name if name.startswith('nova-') else "nova-%s" % name
    ctxt = context.get_admin_context()
    service_ref = objects.Service.get_by_host_and_binary(
        ctxt, host, binary)
    if service_ref:
        # Record already exists: just refresh it (e.g. version bump).
        service._update_service_ref(service_ref)
    else:
        try:
            service_obj = objects.Service(ctxt)
            service_obj.host = host
            service_obj.binary = binary
            service_obj.topic = None
            service_obj.report_count = 0
            service_obj.create()
        except (exception.ServiceTopicExists,
                exception.ServiceBinaryExists):
            # If we race to create a record with a sibling, don't
            # fail here.
            pass
def error_application(exc, name):
    """Build a minimal WSGI app that reports a fatal startup error.

    Returned in place of the real application when the service cannot be
    set up (see init_application), so every request gets a 500 response
    with a short explanation instead of a crash.

    :param exc: the exception that prevented normal startup
    :param name: the service name (e.g. 'osapi_compute')
    """
    # TODO(cdent): make this something other than a stub
    def application(environ, start_response):
        start_response('500 Internal Server Error', [
            ('Content-Type', 'text/plain; charset=UTF-8')])
        # PEP 3333 requires the response body to be an iterable of
        # bytestrings; returning text breaks Python 3 WSGI servers,
        # so encode explicitly.
        return [('Out of date %s service %s\n' % (name, exc)).encode('utf-8')]
    return application
@utils.run_once('Global data already initialized, not re-initializing.',
                LOG.info)
def init_global_data(conf_files, service_name):
    """One-time process-wide initialization: config, logging, and GMR.

    Guarded by utils.run_once because mod_wsgi may re-import and re-run the
    startup script after a failure; this must not re-parse config then.

    :param conf_files: list of config file paths to load
    :param service_name: name reported by the guru meditation report
    """
    # NOTE(melwitt): parse_args initializes logging and calls global rpc.init()
    # and db_api.configure(). The db_api.configure() call does not initiate any
    # connection to the database.
    # NOTE(gibi): sys.argv is set by the wsgi runner e.g. uwsgi sets it based
    # on the --pyargv parameter of the uwsgi binary
    config.parse_args(sys.argv, default_config_files=conf_files)
    logging.setup(CONF, "nova")
    gmr_opts.set_defaults(CONF)
    gmr.TextGuruMeditation.setup_autorun(
        version, conf=CONF, service_name=service_name)
    # dump conf at debug (log_options option comes from oslo.service)
    # FIXME(mriedem): This is gross but we don't have a public hook into
    # oslo.service to register these options, so we are doing it manually for
    # now; remove this when we have a hook method into oslo.service.
    CONF.register_opts(service_opts.service_opts)
    if CONF.log_options:
        CONF.log_opt_values(
            logging.getLogger(__name__),
            logging.DEBUG)
def init_application(name):
    """WSGI entry point: build and return the application for ``name``.

    :param name: the Nova API service name (paste app name)
    :returns: a WSGI application (or a stub error app if the service
        record is too old)
    """
    conf_files = _get_config_files()
    # NOTE(melwitt): The init_application method can be called multiple times
    # within a single python interpreter instance if any exception is raised
    # during it (example: DBConnectionError while setting up the service) and
    # apache/mod_wsgi reloads the init_application script. So, we initialize
    # global data separately and decorate the method to run only once in a
    # python interpreter instance.
    init_global_data(conf_files, name)
    try:
        _setup_service(CONF.host, name)
    # NOTE(review): _setup_service re-raises TooOldComputeService; this
    # assumes ServiceTooOld is its base (or covers it) — confirm in
    # nova.exception.
    except exception.ServiceTooOld as exc:
        return error_application(exc, name)
    # This global init is safe because if we got here, we already successfully
    # set up the service and setting up the profile cannot fail.
    service.setup_profiler(name, CONF.host)
    conf = conf_files[0]
    return deploy.loadapp('config:%s' % conf, name=name)
| {
"content_hash": "a9bb7b47fe6f088d34075916eb9a2b33",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 79,
"avg_line_length": 33.60629921259842,
"alnum_prop": 0.6747891283973758,
"repo_name": "mahak/nova",
"id": "d60069ce844f91a3f68626e02873f7e74ce11622",
"size": "4840",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/api/openstack/wsgi_app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "3545"
},
{
"name": "Mako",
"bytes": "1952"
},
{
"name": "Python",
"bytes": "23261880"
},
{
"name": "Shell",
"bytes": "28113"
},
{
"name": "Smarty",
"bytes": "507244"
}
],
"symlink_target": ""
} |
from indico_piwik.piwik import PiwikRequest
class PiwikQueryBase:
    """Base class for queries issued against the Piwik HTTP API."""

    def __init__(self, query_script):
        # Imported lazily to avoid a circular import with the plugin module.
        from indico_piwik.plugin import PiwikPlugin
        settings = PiwikPlugin.settings
        self.request = PiwikRequest(server_url=settings.get('server_api_url'),
                                    site_id=settings.get('site_id_events'),
                                    api_token=settings.get('server_token'),
                                    query_script=query_script)

    def call(self, **query_params):
        """Execute the request with the given query parameters."""
        return self.request.call(**query_params)
class PiwikQueryReportBase(PiwikQueryBase):
    """Base class for queries that request Piwik reports."""

    def __init__(self):
        # Imported lazily to avoid a circular import with the plugin module.
        from indico_piwik.plugin import PiwikPlugin
        super().__init__(query_script=PiwikPlugin.report_script)

    def call(self, date=('last7',), period='day', **query_params):
        # Piwik expects the date (or date range) as a comma-separated string.
        date_param = ','.join(str(part) for part in date)
        return super().call(date=date_param, period=period, **query_params)
class PiwikQueryReportEventBase(PiwikQueryReportBase):
    """Base Piwik query to request reports of events and contributions"""

    def __init__(self, event_id, start_date, end_date, contrib_id=None):
        super().__init__()
        self.event_id = event_id
        self.contrib_id = contrib_id
        self.start_date = start_date
        self.end_date = end_date

    def call(self, segmentation_enabled=True, **query_params):
        """Run the report query for this event's date range.

        :param segmentation_enabled: restrict results to this event (and
            contribution, if set) via a Piwik segment
        """
        if segmentation_enabled:
            query_params['segment'] = self.get_segmentation()
        return super().call(module='API', date=[self.start_date, self.end_date], **query_params)

    def get_segmentation(self):
        """Build the Piwik segment string selecting this event/contribution.

        Segments are joined with ';' (logical AND in the Piwik API).
        Fix: the previous implementation collected the segments in a set,
        so the resulting string had nondeterministic ordering across runs;
        a list keeps the output stable (and stable strings are cacheable).
        """
        segments = ['customVariablePageName1==Conference',
                    'customVariablePageValue1=={}'.format(self.event_id)]
        if self.contrib_id:
            segments.append('customVariablePageName2==Contribution')
            segments.append('customVariablePageValue2=={}'.format(self.contrib_id))
        return ';'.join(segments)
"content_hash": "c02915ce0e53008e24a82ea2e73c0560",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 96,
"avg_line_length": 38.1551724137931,
"alnum_prop": 0.6095797559873475,
"repo_name": "indico/indico-plugins",
"id": "ce00289a41adb8350b03a1ffcfacee1b4d014b5e",
"size": "2456",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "piwik/indico_piwik/queries/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4278"
},
{
"name": "HTML",
"bytes": "53511"
},
{
"name": "JavaScript",
"bytes": "19822"
},
{
"name": "Python",
"bytes": "469846"
},
{
"name": "SCSS",
"bytes": "2846"
},
{
"name": "Shell",
"bytes": "2926"
}
],
"symlink_target": ""
} |
<!---
Copyright 2017 The AMP HTML Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS-IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
# <a name="`amp-form`"></a> `amp-form`
<table>
<tr>
<td width="40%"><strong>Description</strong></td>
<td>Allows you to create <code>form</code> and <code>input</code> tags.</td>
</tr>
<tr>
<td><strong>Required Script</strong></td>
<td><code><script async custom-element="amp-form" src="https://cdn.ampproject.org/v0/amp-form-0.1.js"></script></code></td>
</tr>
<tr>
<td><strong><a href="https://www.ampproject.org/docs/design/responsive/control_layout.html#the-layout-attribute">Supported Layouts</a></strong></td>
<td>N/A</td>
</tr>
<tr>
<td><strong>Examples</strong></td>
<td>See AMP By Example's <a href="https://ampbyexample.com/components/amp-form/">amp-form</a> examples.</td>
</tr>
</table>
[TOC]
## Behavior
The `amp-form` extension allows you to create forms (`<form>`) to submit input fields in an AMP document. The `amp-form` extension also provides [polyfills](#polyfills) for some missing behaviors in browsers.
{% call callout('Important', type='caution') %}
If you're submitting data in your form, your server endpoint must implement the requirements for [CORS security](https://www.ampproject.org/docs/fundamentals/amp-cors-requests#cors-security-in-amp).
{% endcall %}
Before creating a `<form>`, you must include the required script for the `<amp-form>` extension, otherwise your document will be invalid. If you're using `input` tags for purposes other than submitting their values (e.g., inputs not inside a `<form>`), you do not need to load the `amp-form` extension.
Here's an example of a basic form:
<!-- embedded sample that is rendered on ampproject.org -->
<div>
<amp-iframe height="671"
layout="fixed-height"
sandbox="allow-scripts allow-forms allow-same-origin"
resizable
src="https://ampproject-b5f4c.firebaseapp.com/examples/ampform.basic.embed.html">
<div overflow tabindex="0" role="button" aria-label="Show more">Show full code</div>
<div placeholder></div>
</amp-iframe>
</div>
## Attributes
##### target
Indicates where to display the form response after submitting the form. The value must be `_blank` or `_top`.
##### action
Specifies a server endpoint to handle the form input. The value must be an `https` URL (absolute or relative) and must not be a link to a CDN.
* For `method=GET`: use this attribute or [`action-xhr`](#action-xhr).
* For `method=POST`: use the [`action-xhr`](#action-xhr) attribute.
{% call callout('Note', type='note') %}
The `target` and `action` attributes are only used for non-xhr GET requests. The AMP runtime will use `action-xhr` to make the request and will ignore `action` and `target`. When `action-xhr` is not provided, AMP makes a GET request to the `action` endpoint and uses `target` to open a new window (if `_blank`). The AMP runtime might also fallback to using `action` and `target` in cases where the `amp-form` extension fails to load.
{% endcall %}
##### action-xhr
Specifies a server endpoint to handle the form input and submit the form via XMLHttpRequest (XHR). An XHR request (sometimes called an AJAX request) is where the browser would make the request without a full load of the page or opening a new page. Browsers will send the request in the background using the [Fetch API](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API) when available and fallback to [XMLHttpRequest API](https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest) for older browsers.
{% call callout('Important', type='caution') %}
Your XHR endpoint must implement the requirements for [CORS security](https://www.ampproject.org/docs/fundamentals/amp-cors-requests#cors-security-in-amp).
{% endcall %}
This attribute is required for `method=POST`, and is optional for `method=GET`.
The value for `action-xhr` can be the same or a different endpoint than `action` and has the same `action` requirements above.
To learn about redirecting the user after successfully submitting the form, see the [Redirecting after a submission](#redirecting-after-a-submission) section below.
##### Other form attributes
All other [form attributes](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/form) are optional.
##### custom-validation-reporting
This is an optional attribute that enables and selects a custom validation reporting strategy. Valid values are one of: `show-first-on-submit`, `show-all-on-submit` or `as-you-go`.
See the [Custom Validation](#custom-validations) section for more details.
## Inputs and fields
**Allowed**:
* Other form-related elements, including: `<textarea>`, `<select>`, `<option>`, `<fieldset>`, `<label>`, `<input type=text>`, `<input type=submit>`, and so on.
* `<input type=password>` and `<input type=file>` inside of `<form method=POST action-xhr>`.
* [`amp-selector`](https://www.ampproject.org/docs/reference/components/amp-selector)
**Not Allowed**:
* `<input type=button>`, `<input type=image>`
* Most of the form-related attributes on inputs including: `form`, `formaction`, `formtarget`, `formmethod` and others.
(Relaxing some of these rules might be reconsidered in the future - [please let us know](https://github.com/ampproject/amphtml/blob/master/CONTRIBUTING.md#suggestions-and-feature-requests) if you require these and provide use cases).
For details on valid inputs and fields, see [amp-form rules](https://github.com/ampproject/amphtml/blob/master/validator/validator-main.protoascii) in the AMP validator specification.
## Actions
The `amp-form` element exposes the following actions:
| Action | Description |
|--------|-------------|
| `submit` | Allows you to trigger the form submission on a specific action, for example, tapping a link, or [submitting a form on input change](#input-events). |
| `clear` | Empties the values from each input in the form. This can allow users to quickly fill out forms a second time. |
{% call callout('Read on', type='read') %}
Learn more about [Actions and Events in AMP](https://www.ampproject.org/docs/interaction_dynamic/amp-actions-and-events.html).
{% endcall %}
## Events
The `amp-form` exposes the following events:
| Event | Fired when |
|-------|-------------|
| `submit` | The form is submitted and before the submission is complete. |
| `submit-success` | The form submission is done and the response is a success. |
| `submit-error` | The form submission is done and the response is an error. |
| `verify` | Asynchronous verification is initiated. |
| `verify-error` | Asynchronous verification is done and the response is an error. |
| `valid` | The form's validation state changes to "valid" (in accordance with its [reporting strategy](#reporting-strategies)). |
| `invalid` | The form's validation state changes to "invalid" (in accordance with its [reporting strategy](#reporting-strategies)). |
These events can be used via the [`on` attribute](https://www.ampproject.org/docs/fundamentals/spec#on).
For example, the following listens to both the `submit-success` and `submit-error` events and shows different lightboxes depending on the event:
```html
<form ... on="submit-success:success-lightbox;submit-error:error-lightbox" ...>
</form>
```
See the [full example here](../../examples/forms.amp.html).
#### Input events
AMP exposes `change` and `input-debounced` events on child `<input>` elements. This allows you to use the [`on` attribute](https://www.ampproject.org/docs/fundamentals/spec#on) to execute an action on any element when an input value changes.
For example, a common use case is to submit a form on input change (selecting a radio button to answer a poll, choosing a language from a `select` input to translate a page, etc.).
<!-- embedded sample that is rendered on ampproject.org -->
<div>
<amp-iframe height="450"
layout="fixed-height"
sandbox="allow-scripts allow-forms allow-same-origin"
resizable
src="https://ampproject-b5f4c.firebaseapp.com/examples/ampform.inputevent.embed.html">
<div overflow tabindex="0" role="button" aria-label="Show more">Show full code</div>
<div placeholder></div>
</amp-iframe>
</div>
See the [full example here](../../examples/forms.amp.html).
### Analytics triggers
The `amp-form` extension triggers the following events that you can track in your [amp-analytics](https://www.ampproject.org/docs/reference/components/amp-analytics) config:
| Event | Fired when |
|---------------------------|-----------------------------------|
| `amp-form-submit` | A form request is initiated. |
| `amp-form-submit-success` | A successful response is received (i.e, when the response has a status of `2XX`). |
| `amp-form-submit-error` | An unsuccessful response is received (i.e, when the response doesn't have a status of `2XX`). |
You can configure your analytics to send these events as in the following example:
```html
<amp-analytics>
<script type="application/json">
{
"requests": {
"event": "https://www.example.com/analytics/event?eid=${eventId}",
"searchEvent": "https://www.example.com/analytics/search?formId=${formId}&query=${formFields[query]}"
},
"triggers": {
"formSubmit": {
"on": "amp-form-submit",
"request": "searchEvent"
},
"formSubmitSuccess": {
"on": "amp-form-submit-success",
"request": "event",
"vars": {
"eventId": "form-submit-success"
}
},
"formSubmitError": {
"on": "amp-form-submit-error",
"request": "event",
"vars": {
"eventId": "form-submit-error"
}
}
}
}
</script>
</amp-analytics>
```
All three events generate a set of variables that correspond to the specific form and the fields in the form. These variables can be used for analytics.
For example, the following form has one field:
```html
<form action-xhr="/comment" method="POST" id="submit_form">
<input type="text" name="comment" />
<input type="submit" value="Comment" />
</form>
```
When the `amp-form-submit`, `amp-form-submit-success`, or `amp-form-submit-error` event fires, it generates the following variables containing the values that were specified in the form:
* `formId`
* `formFields[comment]`
## Success/error response rendering
You can render success or error responses in your form by using [extended templates](https://www.ampproject.org/docs/fundamentals/spec#extended-templates), such as [amp-mustache](https://www.ampproject.org/docs/reference/components/amp-mustache), or success responses through data binding with [amp-bind](https://www.ampproject.org/docs/reference/components/amp-bind) and the following response attributes:
| Response attribute | Description |
|-----------|---------------------|
| `submit-success` | Can be used to display a success message if the response is successful (i.e., has a status of `2XX`). |
| `submit-error` | Can be used to display a submission error if the response is unsuccessful (i.e., does not have a status of `2XX`). |
| `submitting` | Can be used to display a message when the form is submitting. The template for this attribute has access to the form's input fields for any display purposes. Please see the [full form example below](#example-submitting) for how to use the `submitting` attribute. |
### To render responses with templating:
* Apply a response attribute to *any direct child* of the `<form>` element.
* Render the response in the child element by including a template via `<template></template>` or `<script type="text/plain"></script>` tag inside it or by referencing a template with a `template="id_of_other_template"` attribute.
* Provide a valid JSON object for responses to `submit-success` and `submit-error`. Both success and error responses should have a `Content-Type: application/json` header.
<a id="example-submitting"></a>
##### Example: Form displays success, error, and submitting messages
In the following example, the responses are rendered in an inline template inside the form.
```html
<form ...>
<fieldset>
<input type="text" name="firstName" />
...
</fieldset>
<div verify-error>
<template type="amp-mustache">
There is a mistake in the form!
{{#verifyErrors}}{{message}}{{/verifyErrors}}
</template>
</div>
<div submitting>
<template type="amp-mustache">
Form submitting... Thank you for waiting {{name}}.
</template>
</div>
<div submit-success>
<template type="amp-mustache">
Success! Thanks {{name}} for subscribing! Please make sure to check your email {{email}}
to confirm! After that we'll start sending you weekly articles on {{#interests}}<b>{{name}}</b> {{/interests}}.
</template>
</div>
<div submit-error>
<template type="amp-mustache">
Oops! {{name}}, {{message}}.
</template>
</div>
</form>
```
The publisher's `action-xhr` endpoint returns the following JSON responses:
On success:
```json
{
"name": "Jane Miller",
"interests": [{"name": "Basketball"}, {"name": "Swimming"}, {"name": "Reading"}],
"email": "email@example.com"
}
```
On error:
```json
{
"name": "Jane Miller",
"message": "The email (email@example.com) you used is already subscribed."
}
```
You can render the responses in a referenced template defined earlier in the document by using the template's id as the value of the `template` attribute, set on the elements with the `submit-success` and `submit-error` attributes.
```html
<template type="amp-mustache" id="submit_success_template">
Success! Thanks {{name}} for subscribing! Please make sure to check your email {{email}}
to confirm! After that we'll start sending you weekly articles on {{#interests}}<b>{{name}}</b> {{/interests}}.
</template>
<template type="amp-mustache" id="submit_error_template">
Oops! {{name}}, {{message}}.
</template>
<form ...>
<fieldset>
...
</fieldset>
<div submit-success template="submit_success_template"></div>
<div submit-error template="submit_error_template"></div>
</form>
```
See the [full example here](../../examples/forms.amp.html).
### To render a successful response with data binding
* Use the [on attribute](https://www.ampproject.org/docs/interaction_dynamic/amp-actions-and-events) to bind the form *submit-success* attribute to [`AMP.setState()`](https://www.ampproject.org/docs/reference/components/amp-bind#updating-state-with-amp.setstate()).
* Use the `event` property to capture the response data.
* Add the state attribute to the desired element to bind the form response.
The following example demonstrates a form `submit-success` response with [`amp-bind`](https://www.ampproject.org/docs/reference/components/amp-bind):
```html
<p [text]="'Thanks, ' + subscribe +'! You have successfully subscribed.'">Subscribe to our newsletter</p>
<form method="post"
action-xhr="/components/amp-form/submit-form-input-text-xhr"
target="_top"
on="submit-success: AMP.setState({'subscribe': event.response.name})">
<div>
<input type="text"
name="name"
placeholder="Name..."
required>
<input type="email"
name="email"
placeholder="Email..."
required>
</div>
<input type="submit" value="Subscribe">
</form>
```
When the form is submitted successfully it will return a JSON response similar to the following:
```json
{
"name": "Jane Miller",
"email": "email@example.com"
}
```
Then `amp-bind` updates the `<p>` element's text to match the `subscribe` state:
```html
...
<p [text]="'Thanks, ' + subscribe +'! You have successfully subscribed.'">Thanks Jane Miller! You have successfully subscribed.</p>
...
```
### Redirecting after a submission
You can redirect users to a new page after a successful form submission by setting the `AMP-Redirect-To` response header and specifying a redirect URL. The redirect URL must be an HTTPS URL, otherwise AMP will throw an error and the redirection won't occur. HTTP response headers are configured via your server.
Make sure to update your `Access-Control-Expose-Headers` response header to include `AMP-Redirect-To` to the list of allowed headers. Learn more about these headers in [CORS Security in AMP](https://www.ampproject.org/docs/fundamentals/amp-cors-requests#cors-security-in-amp).
*Example response headers:*
```text
AMP-Redirect-To: https://example.com/forms/thank-you
Access-Control-Expose-Headers: AMP-Access-Control-Allow-Source-Origin, AMP-Redirect-To
```
{% call callout('Tip', type='success') %}
Check out AMP By Example's [Form Submission with Update](https://ampbyexample.com/components/amp-form/#form-submission-with-page-update) and [Product Page](https://ampbyexample.com/samples_templates/product_page/#product-page) that demonstrate using redirection after a form submission.
{% endcall %}
## Custom validations
The `amp-form` extension allows you to build your own custom validation UI by using the `custom-validation-reporting` attribute along with one of the following reporting strategies: `show-first-on-submit`, `show-all-on-submit` or `as-you-go`.
To specify custom validation on your form:
1. Set the `custom-validation-reporting` attribute on your `form` to one of the [validation reporting strategies](#reporting-strategies).
2. Provide your own validation UI marked up with special attributes. AMP will discover the special attributes and report them at the right time depending on the reporting strategy you specified.
Here's an example:
<!-- embedded sample that is rendered on ampproject.org -->
<div>
<amp-iframe height="748"
layout="fixed-height"
sandbox="allow-scripts allow-forms allow-same-origin"
resizable
src="https://ampproject-b5f4c.firebaseapp.com/examples/ampform.customval.embed.html">
<div overflow tabindex="0" role="button" aria-label="Show more">Show full code</div>
<div placeholder></div>
</amp-iframe>
</div>
For more examples, see [examples/forms.amp.html](../../examples/forms.amp.html).
For validation messages, if your element contains no text content inside, AMP will fill it out with the browser's default validation message. In the example above, when the `name5` input is empty and validation is kicked off (i.e., user tried to submit the form) AMP will fill `<span visible-when-invalid="valueMissing" validation-for="name5"></span>` with the browser's validation message and show that `span` to the user.
{% call callout('Important', type='caution') %}
You must provide your own validation UI for each kind of invalid state that the input could have. If these are not present, users will not see any `custom-validation-reporting` for the missing error state. The validity states can be found in the [official W3C HTML validation reporting documentation](https://www.w3.org/TR/html50/forms.html#validitystate).
{% endcall %}
### Reporting strategies
Specify one of the following reporting options for the `custom-validation-reporting` attribute:
#### Show First on Submit
The `show-first-on-submit` reporting option mimics the browser's default behavior when default validation kicks in. It shows the first validation error it finds and stops there.
#### Show All on Submit
The `show-all-on-submit` reporting option shows all validation errors on all invalid inputs when the form is submitted. This is useful if you'd like to show a summary of validations.
#### As You Go
The `as-you-go` reporting option allows your user to see validation messages as they're interacting with the input. For example, if the user types an invalid email address, the user will see the error right away. Once they correct the value, the error goes away.
#### Interact and Submit
The `interact-and-submit` reporting option combines the behavior of `show-all-on-submit` and `as-you-go`. Individual fields will show any errors immediately after interactions, and on submit the form will show errors on all invalid fields.
## Verification
HTML5 validation gives feedback based only on information available on the page, such as if a value matches a certain pattern. With `amp-form` verification you can give the user feedback that HTML5 validation alone cannot. For example, a form can use verification to check if an email address has already been registered. Another use-case is verifying that a city field and a zip code field match each other.
Here's an example:
```html
<h4>Verification example</h4>
<form
method="post"
action-xhr="/form/verify-json/post"
verify-xhr="/form/verify-json/post"
target="_blank"
>
<fieldset>
<label>
<span>Email</span>
<input type="text" name="email" required>
</label>
<label>
<span>Zip Code</span>
<input type="tel" name="zip" required pattern="[0-9]{5}(-[0-9]{4})?">
</label>
<label>
<span>City</span>
<input type="text" name="city" required>
</label>
<label>
<span>Document</span>
<input type="file" name="document" no-verify>
</label>
<div class="spinner"></div>
<input type="submit" value="Submit">
</fieldset>
<div submit-success>
<template type="amp-mustache">
<p>Congratulations! You are registered with {{email}}</p>
</template>
</div>
<div submit-error>
<template type="amp-mustache">
{{#verifyErrors}}
<p>{{message}}</p>
{{/verifyErrors}}
{{^verifyErrors}}
<p>Something went wrong. Try again later?</p>
{{/verifyErrors}}
</template>
</div>
</form>
```
The form sends a `__amp_form_verify` field as part of the form data as a hint to
the server that the request is a verify request and not a formal submit.
This is helpful so the server knows not to store the verify request if the same
endpoint is used for verification and for submit.
Here is how an error response should look for verification:
```json
{
"verifyErrors": [
{"name": "email", "message": "That email is already taken."},
{"name": "zip", "message": "The city and zip do not match."}
]
}
```
To remove a field from the `verify-xhr` request, add the `no-verify` attribute
to the input element.
For more examples, see [examples/forms.amp.html](../../examples/forms.amp.html).
## Variable substitutions
The `amp-form` extension allows [platform variable substitutions](../../spec/amp-var-substitutions.md) for inputs that are hidden and that have the `data-amp-replace` attribute. On each form submission, `amp-form` finds all `input[type=hidden][data-amp-replace]` inside the form and applies variable substitutions to its `value` attribute and replaces it with the result of the substitution.
You must provide the variables you are using for each substitution on each input by specifying a space-separated string of the variables used in `data-amp-replace` (see example below). AMP will not replace variables that are not explicitly specified.
Here's an example of how inputs are before and after substitutions (note that you need to use platform syntax of variable substitutions and not analytics ones):
```html
<!-- Initial Load -->
<form ...>
<input name="canonicalUrl" type="hidden"
value="The canonical URL is: CANONICAL_URL - RANDOM - CANONICAL_HOSTNAME"
data-amp-replace="CANONICAL_URL RANDOM">
<input name="clientId" type="hidden"
value="CLIENT_ID(myid)"
data-amp-replace="CLIENT_ID">
...
</form>
```
Once the user tries to submit the form, AMP will try to resolve the variables and update the `value` attribute of all fields with the appropriate substitutions. For XHR submissions, all variables are likely to be substituted and resolved. However, in non-XHR GET submissions, values that require async resolution might not be available because they were not resolved previously. For example, `CLIENT_ID` would not resolve if it wasn't resolved and cached previously.
```html
<!-- User submits the form, variables values are resolved into fields' value -->
<form ...>
<input name="canonicalUrl" type="hidden"
value="The canonical URL is: https://example.com/hello - 0.242513759125 - CANONICAL_HOSTNAME"
data-amp-replace="CANONICAL_URL RANDOM">
<input name="clientId" type="hidden"
value="amp:asqar893yfaiufhbas9g879ab9cha0cja0sga87scgas9ocnas0ch"
data-amp-replace="CLIENT_ID">
...
</form>
```
Note how `CANONICAL_HOSTNAME` above did not get replaced because it was not in the whitelist through `data-amp-replace` attribute on the first field.
Substitutions will happen on every subsequent submission. Read more about [variable substitutions in AMP](../../spec/amp-var-substitutions.md).
## Polyfills
The `amp-form` extension provides polyfills for behaviors and functionality that are missing from some browsers or are still being implemented in an upcoming version of CSS.
#### Invalid submit blocking and validation message bubble
Browsers that use webkit-based engines currently (as of August 2016) do not support invalid form submissions. These include Safari on all platforms, and all iOS browsers. The `amp-form` extension polyfills this behavior to block any invalid submissions and shows validation message bubbles on invalid inputs.
#### User-interaction pseudo-classes
The `:user-invalid` and `:user-valid` pseudo classes are part of the [future CSS Selectors 4 spec](https://drafts.csswg.org/selectors-4/#user-pseudos) and are introduced to allow better hooks for styling invalid/valid fields based on a few criteria.
One of the main differences between `:invalid` and `:user-invalid` is when are they applied to the element. The `:user-invalid` class is applied after a significant interaction from the user with the field (e.g., the user types in a field, or blurs from the field).
The `amp-form` extension provides [classes](#classes-and-css-hooks) to polyfill these pseudo-classes. The `amp-form` extension also propagates these classes to ancestor `fieldset` and `form` elements.
#### `<textarea>` validation
Regular expression matching is a common validation feature supported natively on most input elements, except for `<textarea>`. We polyfill this functionality and support the `pattern` attribute on `<textarea>` elements.
AMP Form provides an `autoexpand` attribute to `<textarea>` elements. This allows the textarea
to expand and shrink to accommodate the user's rows of input, up to the field's maximum size. If the user manually resizes the field, the autoexpand behavior will be removed.
```html
<textarea autoexpand></textarea>
```
## Styling
### Classes and CSS hooks
The `amp-form` extension provides classes and CSS hooks for publishers to style their forms and inputs.
The following classes can be used to indicate the state of the form submission:
* `.amp-form-initial`
* `.amp-form-verify`
* `.amp-form-verify-error`
* `.amp-form-submitting`
* `.amp-form-submit-success`
* `.amp-form-submit-error`
The following classes are a [polyfill for the user interaction pseudo classes](#user-interaction-pseudo-classes):
* `.user-valid`
* `.user-invalid`
Publishers can use these classes to style their inputs and fieldsets to be responsive to user actions (e.g., highlighting an invalid input with a red border after user blurs from it).
See the [full example here](../../examples/forms.amp.html) on using these.
{% call callout('Tip', type='success') %}
Visit [AMP Start](https://ampstart.com/components#form-elements) for responsive, pre-styled AMP form elements that you can use in your AMP pages.
{% endcall %}
## Security considerations
### Protecting against XSRF
In addition to following the details in the [AMP CORS spec](https://www.ampproject.org/docs/fundamentals/amp-cors-requests.html), please pay extra attention to the section on ["Processing state changing requests" ](https://www.ampproject.org/docs/fundamentals/amp-cors-requests.html#processing-state-changing-requests) to protect against [XSRF attacks](https://en.wikipedia.org/wiki/Cross-site_request_forgery) where an attacker can execute unauthorized commands using the current user session without the user knowledge.
In general, keep in mind the following points when accepting input from the user:
* Only use POST for state changing requests.
* Use non-XHR GET for navigational purposes only (e.g., Search).
* non-XHR GET requests are not going to receive accurate origin/headers and backends won't be able to protect against XSRF with the above mechanism.
* In general, use XHR/non-XHR GET requests for navigational or information retrieval only.
* non-XHR POST requests are not allowed in AMP documents. This is due to inconsistencies of setting `Origin` header on these requests across browsers. And the complications supporting it would introduce in protecting against XSRF. This might be reconsidered and introduced later, please file an issue if you think this is needed.
| {
"content_hash": "b3ab5986733d96cb80353ff5f91dde92",
"timestamp": "",
"source": "github",
"line_count": 630,
"max_line_length": 521,
"avg_line_length": 47.0984126984127,
"alnum_prop": 0.7149164195200862,
"repo_name": "cvializ/amphtml",
"id": "66e5b8b71dcc4f0fd386374d70ed474170feec9d",
"size": "29672",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "extensions/amp-form/amp-form.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "409391"
},
{
"name": "Go",
"bytes": "7457"
},
{
"name": "HTML",
"bytes": "1852625"
},
{
"name": "Java",
"bytes": "37614"
},
{
"name": "JavaScript",
"bytes": "14769458"
},
{
"name": "Python",
"bytes": "72953"
},
{
"name": "Shell",
"bytes": "16742"
},
{
"name": "Yacc",
"bytes": "22883"
}
],
"symlink_target": ""
} |
layout: post
date: '2015-07-11'
title: "Justin Alexander Signature 9688 Wedding Dress"
category: Justin Alexander
tags: [Justin Alexander]
---
### Justin Alexander Signature 9688 Wedding Dress
Just **$305.99**
###
Not Available In Store. Currently available for order only.
Strapless silk dupion with 3 inch bias band and bow at natural waist, full box pleated skirt, with pockets, buttons over the back zipper and chapel length train.
Available in sizes 4-32.
<a href="https://www.eudances.com/en/justin-alexander/611-justin-alexander-signature-9688-wedding-dress.html"><img src="//www.eudances.com/1737-thickbox_default/justin-alexander-signature-9688-wedding-dress.jpg" alt="Justin Alexander Signature 9688 Wedding Dress" style="width:100%;" /></a>
<!-- break --><a href="https://www.eudances.com/en/justin-alexander/611-justin-alexander-signature-9688-wedding-dress.html"><img src="//www.eudances.com/1738-thickbox_default/justin-alexander-signature-9688-wedding-dress.jpg" alt="Justin Alexander Signature 9688 Wedding Dress" style="width:100%;" /></a>
<a href="https://www.eudances.com/en/justin-alexander/611-justin-alexander-signature-9688-wedding-dress.html"><img src="//www.eudances.com/1736-thickbox_default/justin-alexander-signature-9688-wedding-dress.jpg" alt="Justin Alexander Signature 9688 Wedding Dress" style="width:100%;" /></a>
Buy it: [https://www.eudances.com/en/justin-alexander/611-justin-alexander-signature-9688-wedding-dress.html](https://www.eudances.com/en/justin-alexander/611-justin-alexander-signature-9688-wedding-dress.html)
| {
"content_hash": "90f2ea9a0761f9b517ae67a6b9b00aae",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 304,
"avg_line_length": 78.3,
"alnum_prop": 0.7745849297573435,
"repo_name": "lastgown/lastgown.github.io",
"id": "e1701afb596234dc7b2000c017b8f19d05c4877f",
"size": "1570",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2015-07-11-Justin-Alexander-Signature-9688-Wedding-Dress.md",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "83876"
},
{
"name": "HTML",
"bytes": "14755"
},
{
"name": "Ruby",
"bytes": "897"
}
],
"symlink_target": ""
} |
/////////////////////////////////////////////////////////////////////////////
// Name: wx/graphics.h
// Purpose: graphics context header
// Author: Stefan Csomor
// Modified by:
// Created:
// Copyright: (c) Stefan Csomor
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
#ifndef _WX_GRAPHICS_H_
# define _WX_GRAPHICS_H_
# include "wx/defs.h"
# if wxUSE_GRAPHICS_CONTEXT
# include "wx/affinematrix2d.h"
# include "wx/geometry.h"
# include "wx/colour.h"
# include "wx/dynarray.h"
# include "wx/font.h"
# include "wx/image.h"
# include "wx/peninfobase.h"
# include "wx/vector.h"
// Anti-aliasing modes that can be selected on a wxGraphicsContext.
enum wxAntialiasMode {
  wxANTIALIAS_NONE, // should be 0
  wxANTIALIAS_DEFAULT // use the renderer's default anti-aliasing
};
// Interpolation quality used by a wxGraphicsContext, trading off speed
// against output quality (listed here roughly from fastest to best).
enum wxInterpolationQuality {
  // default interpolation
  wxINTERPOLATION_DEFAULT,
  // no interpolation
  wxINTERPOLATION_NONE,
  // fast interpolation, suited for interactivity
  wxINTERPOLATION_FAST,
  // better quality
  wxINTERPOLATION_GOOD,
  // best quality, not suited for interactivity
  wxINTERPOLATION_BEST
};
// Compositing (blend) modes describing how source pixels are combined with
// destination pixels; based on the classic Porter-Duff operators linked below.
enum wxCompositionMode {
  // R = Result, S = Source, D = Destination, premultiplied with alpha
  // Ra, Sa, Da their alpha components
  // classic Porter-Duff compositions
  // http://keithp.com/~keithp/porterduff/p253-porter.pdf
  wxCOMPOSITION_INVALID = -1, /* indicates invalid/unsupported mode */
  wxCOMPOSITION_CLEAR, /* R = 0 */
  wxCOMPOSITION_SOURCE, /* R = S */
  wxCOMPOSITION_OVER, /* R = S + D*(1 - Sa) */
  wxCOMPOSITION_IN, /* R = S*Da */
  wxCOMPOSITION_OUT, /* R = S*(1 - Da) */
  wxCOMPOSITION_ATOP, /* R = S*Da + D*(1 - Sa) */
  wxCOMPOSITION_DEST, /* R = D, essentially a noop */
  wxCOMPOSITION_DEST_OVER, /* R = S*(1 - Da) + D */
  wxCOMPOSITION_DEST_IN, /* R = D*Sa */
  wxCOMPOSITION_DEST_OUT, /* R = D*(1 - Sa) */
  wxCOMPOSITION_DEST_ATOP, /* R = S*(1 - Da) + D*Sa */
  wxCOMPOSITION_XOR, /* R = S*(1 - Da) + D*(1 - Sa) */
  // mathematical compositions
  wxCOMPOSITION_ADD /* R = S + D */
};
// Kind of gradient described by a wxGraphicsPenInfo (see LinearGradient() and
// RadialGradient() below).
enum wxGradientType {
  wxGRADIENT_NONE, // solid colour, no gradient
  wxGRADIENT_LINEAR, // gradient along the line (x1, y1)-(x2, y2)
  wxGRADIENT_RADIAL // gradient defined by start/end points and a radius
};
class WXDLLIMPEXP_FWD_CORE wxDC;
class WXDLLIMPEXP_FWD_CORE wxWindowDC;
class WXDLLIMPEXP_FWD_CORE wxMemoryDC;
# if wxUSE_PRINTING_ARCHITECTURE
class WXDLLIMPEXP_FWD_CORE wxPrinterDC;
# endif
# ifdef __WXMSW__
# if wxUSE_ENH_METAFILE
class WXDLLIMPEXP_FWD_CORE wxEnhMetaFileDC;
# endif
# endif
class WXDLLIMPEXP_FWD_CORE wxGraphicsContext;
class WXDLLIMPEXP_FWD_CORE wxGraphicsPath;
class WXDLLIMPEXP_FWD_CORE wxGraphicsMatrix;
class WXDLLIMPEXP_FWD_CORE wxGraphicsFigure;
class WXDLLIMPEXP_FWD_CORE wxGraphicsRenderer;
class WXDLLIMPEXP_FWD_CORE wxGraphicsPen;
class WXDLLIMPEXP_FWD_CORE wxGraphicsBrush;
class WXDLLIMPEXP_FWD_CORE wxGraphicsFont;
class WXDLLIMPEXP_FWD_CORE wxGraphicsBitmap;
/*
* notes about the graphics context apis
*
* angles : are measured in radians, 0.0 being in direction of positive x axis, PI/2 being
* in direction of positive y axis.
*/
// Base class of all objects used for drawing in the new graphics API; they always point back to their
// originating rendering engine. There is no dynamic unloading of a renderer currently allowed, and
// these references are not counted.
//
// The data used by objects like graphics pens etc is ref counted, in order to avoid unnecessary expensive
// duplication. Any operation on a shared instance that results in a modified state, uncouples this
// instance from the other instances that were shared - using copy on write semantics
//
class WXDLLIMPEXP_FWD_CORE wxGraphicsObjectRefData;
class WXDLLIMPEXP_FWD_CORE wxGraphicsBitmapData;
class WXDLLIMPEXP_FWD_CORE wxGraphicsMatrixData;
class WXDLLIMPEXP_FWD_CORE wxGraphicsPathData;
// Common base class of the graphics-API objects (pens, brushes, fonts,
// bitmaps, matrices, paths). Each instance points back to the
// wxGraphicsRenderer that created it and shares its data through wxObject's
// ref-counted, copy-on-write mechanism (see CreateRefData/CloneRefData).
class WXDLLIMPEXP_CORE wxGraphicsObject : public wxObject
{
public:
    // Creates an uninitialized (null) object not bound to any renderer.
    wxGraphicsObject();
    // Creates an object bound to the given renderer.
    wxGraphicsObject(wxGraphicsRenderer* renderer);
    virtual ~wxGraphicsObject();
    // Returns true if this object carries no data (e.g. default-constructed).
    bool IsNull() const;
    // returns the renderer that was used to create this instance, or NULL if it has not been initialized yet
    wxGraphicsRenderer* GetRenderer() const;
    // Returns the renderer-specific, ref-counted data of this object.
    wxGraphicsObjectRefData* GetGraphicsData() const;
protected:
    // wxObject copy-on-write hooks creating/cloning the ref-counted data.
    wxObjectRefData* CreateRefData() const override;
    wxObjectRefData* CloneRefData(const wxObjectRefData* data) const override;
    wxDECLARE_DYNAMIC_CLASS(wxGraphicsObject);
};
// A pen used for stroking with a wxGraphicsContext. The actual pen state is
// held in renderer-specific ref data; this class only provides the shared
// handle semantics inherited from wxGraphicsObject.
class WXDLLIMPEXP_CORE wxGraphicsPen : public wxGraphicsObject
{
public:
    wxGraphicsPen()
    {
    }
    virtual ~wxGraphicsPen()
    {
    }
private:
    wxDECLARE_DYNAMIC_CLASS(wxGraphicsPen);
};
WXDLLIMPEXP_CORE extern wxGraphicsPen wxNullGraphicsPen;
// A brush used for filling with a wxGraphicsContext. Like wxGraphicsPen,
// this is only a ref-counted handle; the brush state lives in
// renderer-specific data.
class WXDLLIMPEXP_CORE wxGraphicsBrush : public wxGraphicsObject
{
public:
    wxGraphicsBrush()
    {
    }
    virtual ~wxGraphicsBrush()
    {
    }
private:
    wxDECLARE_DYNAMIC_CLASS(wxGraphicsBrush);
};
WXDLLIMPEXP_CORE extern wxGraphicsBrush wxNullGraphicsBrush;
// A font used for text drawing with a wxGraphicsContext; a ref-counted
// handle whose state lives in renderer-specific data.
class WXDLLIMPEXP_CORE wxGraphicsFont : public wxGraphicsObject
{
public:
    wxGraphicsFont()
    {
    }
    virtual ~wxGraphicsFont()
    {
    }
private:
    wxDECLARE_DYNAMIC_CLASS(wxGraphicsFont);
};
WXDLLIMPEXP_CORE extern wxGraphicsFont wxNullGraphicsFont;
// A bitmap in the native format of the graphics renderer that created it.
// Exposes conversion to wxImage and access to the underlying native handle.
class WXDLLIMPEXP_CORE wxGraphicsBitmap : public wxGraphicsObject
{
public:
    wxGraphicsBitmap()
    {
    }
    virtual ~wxGraphicsBitmap()
    {
    }
    // Convert bitmap to wxImage: this is more efficient than converting to
    // wxBitmap first and then to wxImage and also works without X server
    // connection under Unix that wxBitmap requires.
#    if wxUSE_IMAGE
    wxImage ConvertToImage() const;
#    endif
    // Returns the underlying renderer-native bitmap handle (opaque pointer).
    void* GetNativeBitmap() const;
    // Typed access to this object's ref data (const and non-const flavours).
    const wxGraphicsBitmapData* GetBitmapData() const
    {
        return (const wxGraphicsBitmapData*) GetRefData();
    }
    wxGraphicsBitmapData* GetBitmapData()
    {
        return (wxGraphicsBitmapData*) GetRefData();
    }
private:
    wxDECLARE_DYNAMIC_CLASS(wxGraphicsBitmap);
};
WXDLLIMPEXP_CORE extern wxGraphicsBitmap wxNullGraphicsBitmap;
// A 2D affine transformation matrix (components a, b, c, d, tx, ty) used by
// the graphics API. All mutating operations modify this matrix in place.
class WXDLLIMPEXP_CORE wxGraphicsMatrix : public wxGraphicsObject
{
public:
    wxGraphicsMatrix()
    {
    }
    virtual ~wxGraphicsMatrix()
    {
    }
    // concatenates the matrix
    virtual void Concat(const wxGraphicsMatrix* t);
    // convenience overload of Concat() taking a reference
    void Concat(const wxGraphicsMatrix& t)
    {
        Concat ( &t );
    }
    // sets the matrix to the respective values
    virtual void Set(wxDouble a = 1.0, wxDouble b = 0.0, wxDouble c = 0.0, wxDouble d = 1.0, wxDouble tx = 0.0, wxDouble ty = 0.0);
    // gets the component values of the matrix
    virtual void Get(wxDouble* a = NULL, wxDouble* b = NULL, wxDouble* c = NULL, wxDouble* d = NULL, wxDouble* tx = NULL, wxDouble* ty = NULL) const;
    // makes this the inverse matrix
    virtual void Invert();
    // returns true if the elements of this matrix are equal to those of t
    virtual bool IsEqual(const wxGraphicsMatrix* t) const;
    // convenience overload of IsEqual() taking a reference
    bool IsEqual(const wxGraphicsMatrix& t) const
    {
        return IsEqual(&t);
    }
    // return true if this is the identity matrix
    virtual bool IsIdentity() const;
    //
    // transformation
    //
    // add the translation to this matrix
    virtual void Translate(wxDouble dx, wxDouble dy);
    // add the scale to this matrix
    virtual void Scale(wxDouble xScale, wxDouble yScale);
    // add the rotation to this matrix (radians)
    virtual void Rotate(wxDouble angle);
    //
    // apply the transforms
    //
    // applies that matrix to the point
    virtual void TransformPoint(wxDouble* x, wxDouble* y) const;
    // applies the matrix except for translations
    virtual void TransformDistance(wxDouble* dx, wxDouble* dy) const;
    // returns the native representation
    virtual void* GetNativeMatrix() const;
    // Typed access to this object's ref data (const and non-const flavours).
    const wxGraphicsMatrixData* GetMatrixData() const
    {
        return (const wxGraphicsMatrixData*) GetRefData();
    }
    wxGraphicsMatrixData* GetMatrixData()
    {
        return (wxGraphicsMatrixData*) GetRefData();
    }
private:
    wxDECLARE_DYNAMIC_CLASS(wxGraphicsMatrix);
};
WXDLLIMPEXP_CORE extern wxGraphicsMatrix wxNullGraphicsMatrix;
// ----------------------------------------------------------------------------
// wxGradientStop and wxGradientStops: Specify what intermediate colors are used
// and how they are spread out in a gradient
// ----------------------------------------------------------------------------
// gcc 9 gives a nonsensical warning about implicitly generated move ctor not
// throwing but not being noexcept, suppress it.
# if wxCHECK_GCC_VERSION(9, 1) && !wxCHECK_GCC_VERSION(10, 0)
# endif
// Describes a single gradient stop.
// A single colour stop of a gradient: a colour plus its position in [0, 1],
// where 0 is the start of the gradient and 1 is its end. Copyable and
// assignable with the compiler-generated members.
class wxGraphicsGradientStop
{
public:
    wxGraphicsGradientStop(wxColour col = wxTransparentColour, float pos = 0.0f)
        : m_col(col), m_pos(pos)
    {
    }

    // Colour of this stop.
    const wxColour& GetColour() const { return m_col; }
    void SetColour(const wxColour& col) { m_col = col; }

    // Position of this stop; must lie in the inclusive range [0, 1].
    float GetPosition() const { return m_pos; }
    void SetPosition(float pos)
    {
        wxASSERT_MSG( pos >= 0 && pos <= 1, "invalid gradient stop position" );
        m_pos = pos;
    }

private:
    wxColour m_col; // colour of this gradient band
    float m_pos;    // 0 = beginning of the gradient, 1 = end
};
# if wxCHECK_GCC_VERSION(9, 1) && !wxCHECK_GCC_VERSION(10, 0)
# endif
// A collection of gradient stops ordered by their positions (from lowest to
// highest). The first stop (index 0, position 0.0) is always the starting
// colour and the last one (index GetCount() - 1, position 1.0) is the end
// colour.
// Ordered collection of gradient stops. Invariant: the first element is the
// start stop (position 0) and the last element is the end stop (position 1);
// Add() keeps intermediate stops sorted between them.
class WXDLLIMPEXP_CORE wxGraphicsGradientStops
{
public:
    // Creates the collection with the mandatory start and end stops.
    wxGraphicsGradientStops(wxColour startCol = wxTransparentColour, wxColour endCol = wxTransparentColour)
    {
        // we can't use Add() here as it relies on having start/end stops as
        // first/last array elements so do it manually
        m_stops.push_back(wxGraphicsGradientStop(startCol, 0.f));
        m_stops.push_back(wxGraphicsGradientStop(endCol, 1.f));
    }
    // default copy ctor, assignment operator and dtor are ok for this class
    // Add a stop in correct order.
    void Add(const wxGraphicsGradientStop& stop);
    // Convenience overload constructing the stop from a colour and position.
    void Add(wxColour col, float pos)
    {
        Add(wxGraphicsGradientStop(col, pos));
    }
    // Get the number of stops.
    size_t GetCount() const
    {
        return m_stops.size();
    }
    // Return the stop at the given index (which must be valid).
    wxGraphicsGradientStop Item(unsigned n) const
    {
        return m_stops.at(n);
    }
    // Get/set start and end colours.
    void SetStartColour(wxColour col)
    {
        m_stops[0].SetColour(col);
    }
    wxColour GetStartColour() const
    {
        return m_stops[0].GetColour();
    }
    void SetEndColour(wxColour col)
    {
        m_stops[m_stops.size() - 1].SetColour(col);
    }
    wxColour GetEndColour() const
    {
        return m_stops[m_stops.size() - 1].GetColour();
    }
private:
    // All the stops stored in ascending order of positions.
    wxVector<wxGraphicsGradientStop> m_stops;
};
// ----------------------------------------------------------------------------
// wxGraphicsPenInfo describes a wxGraphicsPen
// ----------------------------------------------------------------------------
// Builder-style description of a wxGraphicsPen: colour/style come from
// wxPenInfoBase, this class adds a floating-point width and optional
// linear/radial gradient strokes. All setters return *this for chaining.
class wxGraphicsPenInfo : public wxPenInfoBase<wxGraphicsPenInfo>
{
public:
    // Describes a solid pen with the given colour, width and style by default.
    explicit wxGraphicsPenInfo(const wxColour& colour = wxColour(), wxDouble width = 1.0, wxPenStyle style = wxPENSTYLE_SOLID)
      : wxPenInfoBase<wxGraphicsPenInfo>(colour, style)
    {
        m_width = width;
        m_gradientType = wxGRADIENT_NONE;
    }
    // Setters
    // Sets the pen width (in the context's logical units).
    wxGraphicsPenInfo& Width(wxDouble width)
    {
        m_width = width;
        return *this;
    }
    // Two-colour linear gradient from (x1, y1) to (x2, y2); c1/c2 become the
    // start/end colours of the stop collection.
    wxGraphicsPenInfo& LinearGradient(wxDouble x1, wxDouble y1, wxDouble x2, wxDouble y2, const wxColour& c1, const wxColour& c2, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix)
    {
        m_gradientType = wxGRADIENT_LINEAR;
        m_x1 = x1;
        m_y1 = y1;
        m_x2 = x2;
        m_y2 = y2;
        m_stops.SetStartColour(c1);
        m_stops.SetEndColour(c2);
        m_matrix = matrix;
        return *this;
    }
    // Linear gradient with an arbitrary set of colour stops.
    wxGraphicsPenInfo& LinearGradient(wxDouble x1, wxDouble y1, wxDouble x2, wxDouble y2, const wxGraphicsGradientStops& stops, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix)
    {
        m_gradientType = wxGRADIENT_LINEAR;
        m_x1 = x1;
        m_y1 = y1;
        m_x2 = x2;
        m_y2 = y2;
        m_stops = stops;
        m_matrix = matrix;
        return *this;
    }
    // Two-colour radial gradient; oColor is the colour at the start point,
    // cColor the colour at the outer circle of the given radius.
    wxGraphicsPenInfo& RadialGradient(wxDouble startX, wxDouble startY, wxDouble endX, wxDouble endY, wxDouble radius, const wxColour& oColor, const wxColour& cColor, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix)
    {
        m_gradientType = wxGRADIENT_RADIAL;
        m_x1 = startX;
        m_y1 = startY;
        m_x2 = endX;
        m_y2 = endY;
        m_radius = radius;
        m_stops.SetStartColour(oColor);
        m_stops.SetEndColour(cColor);
        m_matrix = matrix;
        return *this;
    }
    // Radial gradient with an arbitrary set of colour stops.
    wxGraphicsPenInfo& RadialGradient(wxDouble startX, wxDouble startY, wxDouble endX, wxDouble endY, wxDouble radius, const wxGraphicsGradientStops& stops, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix)
    {
        m_gradientType = wxGRADIENT_RADIAL;
        m_x1 = startX;
        m_y1 = startY;
        m_x2 = endX;
        m_y2 = endY;
        m_radius = radius;
        m_stops = stops;
        m_matrix = matrix;
        return *this;
    }
    // Accessors
    wxDouble GetWidth() const
    {
        return m_width;
    }
    wxGradientType GetGradientType() const
    {
        return m_gradientType;
    }
    // Raw gradient coordinates as set by the gradient setters above.
    wxDouble GetX1() const
    {
        return m_x1;
    }
    wxDouble GetY1() const
    {
        return m_y1;
    }
    wxDouble GetX2() const
    {
        return m_x2;
    }
    wxDouble GetY2() const
    {
        return m_y2;
    }
    // Aliases for the same coordinates using radial-gradient naming:
    // Start = (m_x1, m_y1), End = (m_x2, m_y2).
    wxDouble GetStartX() const
    {
        return m_x1;
    }
    wxDouble GetStartY() const
    {
        return m_y1;
    }
    wxDouble GetEndX() const
    {
        return m_x2;
    }
    wxDouble GetEndY() const
    {
        return m_y2;
    }
    // Radius of a radial gradient (meaningful only for wxGRADIENT_RADIAL).
    wxDouble GetRadius() const
    {
        return m_radius;
    }
    const wxGraphicsGradientStops& GetStops() const
    {
        return m_stops;
    }
    // Transformation applied to the gradient, if any.
    const wxGraphicsMatrix& GetMatrix() const
    {
        return m_matrix;
    }
private:
    wxDouble m_width;
    wxGradientType m_gradientType;
    // Gradient geometry: line endpoints for linear, start/end points for radial.
    wxDouble m_x1, m_y1, m_x2, m_y2;
    wxDouble m_radius;
    wxGraphicsGradientStops m_stops;
    wxGraphicsMatrix m_matrix;
};
class WXDLLIMPEXP_CORE wxGraphicsPath : public wxGraphicsObject
{
public:
wxGraphicsPath()
{
}
virtual ~wxGraphicsPath()
{
}
//
// These are the path primitives from which everything else can be constructed
//
// begins a new subpath at (x,y)
virtual void MoveToPoint(wxDouble x, wxDouble y);
void MoveToPoint(const wxPoint2DDouble& p);
// adds a straight line from the current point to (x,y)
virtual void AddLineToPoint(wxDouble x, wxDouble y);
void AddLineToPoint(const wxPoint2DDouble& p);
// adds a cubic Bezier curve from the current point, using two control points and an end point
virtual void AddCurveToPoint(wxDouble cx1, wxDouble cy1, wxDouble cx2, wxDouble cy2, wxDouble x, wxDouble y);
void AddCurveToPoint(const wxPoint2DDouble& c1, const wxPoint2DDouble& c2, const wxPoint2DDouble& e);
// adds another path
virtual void AddPath(const wxGraphicsPath& path);
// closes the current sub-path
virtual void CloseSubpath();
// gets the last point of the current path, (0,0) if not yet set
virtual void GetCurrentPoint(wxDouble* x, wxDouble* y) const;
wxPoint2DDouble GetCurrentPoint() const;
// adds an arc of a circle centering at (x,y) with radius (r) from startAngle to endAngle
virtual void AddArc(wxDouble x, wxDouble y, wxDouble r, wxDouble startAngle, wxDouble endAngle, bool clockwise);
void AddArc(const wxPoint2DDouble& c, wxDouble r, wxDouble startAngle, wxDouble endAngle, bool clockwise);
//
// These are convenience functions which - if not available natively will be assembled
// using the primitives from above
//
// adds a quadratic Bezier curve from the current point, using a control point and an end point
virtual void AddQuadCurveToPoint(wxDouble cx, wxDouble cy, wxDouble x, wxDouble y);
// appends a rectangle as a new closed subpath
virtual void AddRectangle(wxDouble x, wxDouble y, wxDouble w, wxDouble h);
// appends a circle as a new closed subpath fitting the passed rectangle
virtual void AddCircle(wxDouble x, wxDouble y, wxDouble r);
// appends an arc to two tangents connecting (current) to (x1,y1) and (x1,y1) to (x2,y2), also a straight line from (current) to (x1,y1)
virtual void AddArcToPoint(wxDouble x1, wxDouble y1, wxDouble x2, wxDouble y2, wxDouble r);
// appends an ellipse
virtual void AddEllipse(wxDouble x, wxDouble y, wxDouble w, wxDouble h);
// appends a rounded rectangle
virtual void AddRoundedRectangle(wxDouble x, wxDouble y, wxDouble w, wxDouble h, wxDouble radius);
// returns the native path
virtual void* GetNativePath() const;
// give the native path returned by GetNativePath() back (there might be some deallocations necessary)
virtual void UnGetNativePath(void* p) const;
// transforms each point of this path by the matrix
virtual void Transform(const wxGraphicsMatrix& matrix);
// gets the bounding box enclosing all points (possibly including control points)
virtual void GetBox(wxDouble* x, wxDouble* y, wxDouble* w, wxDouble* h) const;
wxRect2DDouble GetBox() const;
// hit-test: does the path (filled per fillStyle) contain the given point?
virtual bool Contains(wxDouble x, wxDouble y, wxPolygonFillMode fillStyle = wxODDEVEN_RULE) const;
bool Contains(const wxPoint2DDouble& c, wxPolygonFillMode fillStyle = wxODDEVEN_RULE) const;
const wxGraphicsPathData* GetPathData() const
{
return (const wxGraphicsPathData*) GetRefData();
}
wxGraphicsPathData* GetPathData()
{
return (wxGraphicsPathData*) GetRefData();
}
private:
wxDECLARE_DYNAMIC_CLASS(wxGraphicsPath);
};
WXDLLIMPEXP_CORE extern wxGraphicsPath wxNullGraphicsPath;
// Abstract drawing context: concrete backends (GDI+, Direct2D, Cairo,
// CoreGraphics, ...) implement the pure-virtual primitives below.
class WXDLLIMPEXP_CORE wxGraphicsContext : public wxGraphicsObject
{
public:
wxGraphicsContext(wxGraphicsRenderer* renderer, wxWindow* window = NULL);
virtual ~wxGraphicsContext();
static wxGraphicsContext* Create(const wxWindowDC& dc);
static wxGraphicsContext* Create(const wxMemoryDC& dc);
#  if wxUSE_PRINTING_ARCHITECTURE
static wxGraphicsContext* Create(const wxPrinterDC& dc);
#  endif
#  ifdef __WXMSW__
#    if wxUSE_ENH_METAFILE
static wxGraphicsContext* Create(const wxEnhMetaFileDC& dc);
#    endif
#  endif
// Create a context from a DC of unknown type, if supported, returns NULL otherwise
static wxGraphicsContext* CreateFromUnknownDC(const wxDC& dc);
static wxGraphicsContext* CreateFromNative(void* context);
static wxGraphicsContext* CreateFromNativeWindow(void* window);
#  ifdef __WXMSW__
static wxGraphicsContext* CreateFromNativeHDC(WXHDC dc);
#  endif
static wxGraphicsContext* Create(wxWindow* window);
#  if wxUSE_IMAGE
// Create a context for drawing onto a wxImage. The image life time must be
// greater than that of the context itself as when the context is destroyed
// it will copy its contents to the specified image.
static wxGraphicsContext* Create(wxImage& image);
#  endif
// create a context that can be used for measuring texts only, no drawing allowed
static wxGraphicsContext* Create();
// Return the window this context is associated with, if any.
wxWindow* GetWindow() const
{
return m_window;
}
// begin a new document (relevant only for printing / pdf etc) if there is a progress dialog, message will be shown
virtual bool StartDoc(const wxString& message);
// done with that document (relevant only for printing / pdf etc)
virtual void EndDoc();
// opens a new page (relevant only for printing / pdf etc) with the given size in points
// (if both are null the default page size will be used)
virtual void StartPage(wxDouble width = 0, wxDouble height = 0);
// ends the current page (relevant only for printing / pdf etc)
virtual void EndPage();
// make sure that the current content of this context is immediately visible
virtual void Flush();
wxGraphicsPath CreatePath() const;
wxGraphicsPen CreatePen(const wxPen& pen) const;
// non-virtual overload forwards to the virtual DoCreatePen() to avoid
// hiding problems in derived classes (see the protected section below)
wxGraphicsPen CreatePen(const wxGraphicsPenInfo& info) const
{
return DoCreatePen(info);
}
virtual wxGraphicsBrush CreateBrush(const wxBrush& brush) const;
// sets the brush to a linear gradient, starting at (x1,y1) and ending at
// (x2,y2) with the given boundary colours or the specified stops
wxGraphicsBrush CreateLinearGradientBrush(wxDouble x1, wxDouble y1, wxDouble x2, wxDouble y2, const wxColour& c1, const wxColour& c2, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix) const;
wxGraphicsBrush CreateLinearGradientBrush(wxDouble x1, wxDouble y1, wxDouble x2, wxDouble y2, const wxGraphicsGradientStops& stops, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix) const;
// sets the brush to a radial gradient originating at (xo,yc) and ending
// on a circle around (xc,yc) with the given radius; the colours may be
// specified by just the two extremes or the full array of gradient stops
wxGraphicsBrush CreateRadialGradientBrush(wxDouble startX, wxDouble startY, wxDouble endX, wxDouble endY, wxDouble radius, const wxColour& oColor, const wxColour& cColor, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix) const;
wxGraphicsBrush CreateRadialGradientBrush(wxDouble startX, wxDouble startY, wxDouble endX, wxDouble endY, wxDouble radius, const wxGraphicsGradientStops& stops, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix) const;
// creates a font
virtual wxGraphicsFont CreateFont(const wxFont& font, const wxColour& col = *wxBLACK) const;
virtual wxGraphicsFont CreateFont(double sizeInPixels, const wxString& facename, int flags = wxFONTFLAG_DEFAULT, const wxColour& col = *wxBLACK) const;
// create a native bitmap representation
virtual wxGraphicsBitmap CreateBitmap(const wxBitmap& bitmap) const;
#  if wxUSE_IMAGE
wxGraphicsBitmap CreateBitmapFromImage(const wxImage& image) const;
#  endif
// create a native bitmap representation
virtual wxGraphicsBitmap CreateSubBitmap(const wxGraphicsBitmap& bitmap, wxDouble x, wxDouble y, wxDouble w, wxDouble h) const;
// create a 'native' matrix corresponding to these values
virtual wxGraphicsMatrix CreateMatrix(wxDouble a = 1.0, wxDouble b = 0.0, wxDouble c = 0.0, wxDouble d = 1.0, wxDouble tx = 0.0, wxDouble ty = 0.0) const;
// convenience overload: unpack a wxAffineMatrix2DBase into the six
// components expected by the virtual CreateMatrix() above
wxGraphicsMatrix CreateMatrix(const wxAffineMatrix2DBase& mat) const
{
wxMatrix2D mat2D;
wxPoint2DDouble tr;
mat.Get(&mat2D, &tr);
return CreateMatrix(mat2D.m_11, mat2D.m_12, mat2D.m_21, mat2D.m_22, tr.m_x, tr.m_y);
}
// push the current state of the context, ie the transformation matrix on a stack
virtual void PushState() = 0;
// pops a stored state from the stack
virtual void PopState() = 0;
// clips drawings to the region intersected with the current clipping region
virtual void Clip(const wxRegion& region) = 0;
// clips drawings to the rect intersected with the current clipping region
virtual void Clip(wxDouble x, wxDouble y, wxDouble w, wxDouble h) = 0;
// resets the clipping to original extent
virtual void ResetClip() = 0;
// returns bounding box of the clipping region
virtual void GetClipBox(wxDouble* x, wxDouble* y, wxDouble* w, wxDouble* h) = 0;
// returns the native context
virtual void* GetNativeContext() = 0;
// returns the current shape antialiasing mode
virtual wxAntialiasMode GetAntialiasMode() const
{
return m_antialias;
}
// sets the antialiasing mode, returns true if it supported
virtual bool SetAntialiasMode(wxAntialiasMode antialias) = 0;
// returns the current interpolation quality
virtual wxInterpolationQuality GetInterpolationQuality() const
{
return m_interpolation;
}
// sets the interpolation quality, returns true if it supported
virtual bool SetInterpolationQuality(wxInterpolationQuality interpolation) = 0;
// returns the current compositing operator
virtual wxCompositionMode GetCompositionMode() const
{
return m_composition;
}
// sets the compositing operator, returns true if it supported
virtual bool SetCompositionMode(wxCompositionMode op) = 0;
// returns the size of the graphics context in device coordinates
void GetSize(wxDouble* width, wxDouble* height) const
{
if (width)
{
*width = m_width;
}
if (height)
{
*height = m_height;
}
}
// returns the resolution of the graphics context in device points per inch
virtual void GetDPI(wxDouble* dpiX, wxDouble* dpiY) const;
// sets the current alpha on this context
virtual void SetAlpha( wxDouble alpha );
// returns the alpha on this context
virtual wxDouble GetAlpha() const;
// NOTE(review): this #endif has no matching #if visible in this chunk —
// likely an artifact of the preprocessing that produced this test file;
// verify against the original wx/graphics.h.
#endif
// all rendering is done into a fully transparent temporary context
virtual void BeginLayer(wxDouble opacity) = 0;
// composites back the drawings into the context with the opacity given at
// the BeginLayer call
virtual void EndLayer() = 0;
//
// transformation : changes the current transformation matrix CTM of the context
//
// translate
virtual void Translate(wxDouble dx, wxDouble dy) = 0;
// scale
virtual void Scale(wxDouble xScale, wxDouble yScale) = 0;
// rotate (radians)
virtual void Rotate(wxDouble angle) = 0;
// concatenates this transform with the current transform of this context
virtual void ConcatTransform(const wxGraphicsMatrix& matrix) = 0;
// sets the transform of this context
virtual void SetTransform(const wxGraphicsMatrix& matrix) = 0;
// gets the matrix of this context
virtual wxGraphicsMatrix GetTransform() const = 0;
//
// setting the paint
//
// sets the pen
virtual void SetPen(const wxGraphicsPen& pen);
void SetPen(const wxPen& pen);
// sets the brush for filling
virtual void SetBrush(const wxGraphicsBrush& brush);
void SetBrush(const wxBrush& brush);
// sets the font
virtual void SetFont(const wxGraphicsFont& font);
void SetFont(const wxFont& font, const wxColour& colour);
// strokes along a path with the current pen
virtual void StrokePath(const wxGraphicsPath& path) = 0;
// fills a path with the current brush
virtual void FillPath(const wxGraphicsPath& path, wxPolygonFillMode fillStyle = wxODDEVEN_RULE) = 0;
// draws a path by first filling and then stroking
virtual void DrawPath(const wxGraphicsPath& path, wxPolygonFillMode fillStyle = wxODDEVEN_RULE);
// paints a transparent rectangle (only useful for bitmaps or windows)
virtual void ClearRectangle(wxDouble x, wxDouble y, wxDouble w, wxDouble h);
//
// text
//
// the DrawText overloads forward to the protected Do* virtuals below to
// avoid virtual-function hiding in derived classes
void DrawText(const wxString& str, wxDouble x, wxDouble y)
{
DoDrawText(str, x, y);
}
void DrawText(const wxString& str, wxDouble x, wxDouble y, wxDouble angle)
{
DoDrawRotatedText(str, x, y, angle);
}
void DrawText(const wxString& str, wxDouble x, wxDouble y, const wxGraphicsBrush& backgroundBrush)
{
DoDrawFilledText(str, x, y, backgroundBrush);
}
void DrawText(const wxString& str, wxDouble x, wxDouble y, wxDouble angle, const wxGraphicsBrush& backgroundBrush)
{
DoDrawRotatedFilledText(str, x, y, angle, backgroundBrush);
}
virtual void GetTextExtent(const wxString& text, wxDouble* width, wxDouble* height, wxDouble* descent = NULL, wxDouble* externalLeading = NULL) const = 0;
virtual void GetPartialTextExtents(const wxString& text, wxArrayDouble& widths) const = 0;
//
// image support
//
virtual void DrawBitmap(const wxGraphicsBitmap& bmp, wxDouble x, wxDouble y, wxDouble w, wxDouble h) = 0;
virtual void DrawBitmap(const wxBitmap& bmp, wxDouble x, wxDouble y, wxDouble w, wxDouble h) = 0;
virtual void DrawIcon(const wxIcon& icon, wxDouble x, wxDouble y, wxDouble w, wxDouble h) = 0;
//
// convenience methods
//
// strokes a single line
virtual void StrokeLine(wxDouble x1, wxDouble y1, wxDouble x2, wxDouble y2);
// stroke lines connecting each of the points
virtual void StrokeLines(size_t n, const wxPoint2DDouble* points);
// stroke disconnected lines from begin to end points
virtual void StrokeLines(size_t n, const wxPoint2DDouble* beginPoints, const wxPoint2DDouble* endPoints);
// draws a polygon
virtual void DrawLines(size_t n, const wxPoint2DDouble* points, wxPolygonFillMode fillStyle = wxODDEVEN_RULE);
// draws a rectangle
virtual void DrawRectangle(wxDouble x, wxDouble y, wxDouble w, wxDouble h);
// draws an ellipse
virtual void DrawEllipse(wxDouble x, wxDouble y, wxDouble w, wxDouble h);
// draws a rounded rectangle
virtual void DrawRoundedRectangle(wxDouble x, wxDouble y, wxDouble w, wxDouble h, wxDouble radius);
// wrappers using wxPoint2DDouble TODO
// helper to determine if a 0.5 offset should be applied for the drawing operation
virtual bool ShouldOffset() const
{
return false;
}
// indicates whether the context should try to offset for pixel boundaries, this only makes sense on
// bitmap devices like screen, by default this is turned off
virtual void EnableOffset(bool enable = true);
void DisableOffset()
{
EnableOffset(false);
}
bool OffsetEnabled()
{
return m_enableOffset;
}
protected:
// These fields must be initialized in the derived class ctors.
wxDouble m_width, m_height;
wxGraphicsPen m_pen;
wxGraphicsBrush m_brush;
wxGraphicsFont m_font;
wxAntialiasMode m_antialias;
wxCompositionMode m_composition;
wxInterpolationQuality m_interpolation;
bool m_enableOffset;
// implementations of overloaded public functions: we use different names
// for them to avoid the virtual function hiding problems in the derived
// classes
virtual wxGraphicsPen DoCreatePen(const wxGraphicsPenInfo& info) const;
virtual void DoDrawText(const wxString& str, wxDouble x, wxDouble y) = 0;
virtual void DoDrawRotatedText(const wxString& str, wxDouble x, wxDouble y, wxDouble angle);
virtual void DoDrawFilledText(const wxString& str, wxDouble x, wxDouble y, const wxGraphicsBrush& backgroundBrush);
virtual void DoDrawRotatedFilledText(const wxString& str, wxDouble x, wxDouble y, wxDouble angle, const wxGraphicsBrush& backgroundBrush);
private:
// The associated window, if any, i.e. if one was passed directly to
// Create() or the associated window of the wxDC this context was created
// from.
wxWindow* const m_window;
wxDECLARE_NO_COPY_CLASS(wxGraphicsContext);
wxDECLARE_ABSTRACT_CLASS(wxGraphicsContext);
};
//
// A graphics figure allows to cache path, pen etc creations, also will be a basis for layering/grouping elements
//
// A graphics figure caches a path together with an optional transformation
// matrix so both can be drawn as one unit on a context.
class WXDLLIMPEXP_CORE wxGraphicsFigure : public wxGraphicsObject
{
public:
wxGraphicsFigure(wxGraphicsRenderer* renderer);
virtual ~wxGraphicsFigure();
// NOTE(review): the parameter types of SetPath/SetMatrix appear swapped —
// SetPath takes a wxGraphicsMatrix* while SetMatrix takes a wxGraphicsPath*,
// the opposite of what the method names and the getters below suggest.
// Fixing this would change the public interface, so confirm with callers
// before correcting.
void SetPath( wxGraphicsMatrix* matrix );
void SetMatrix( wxGraphicsPath* path);
// draws this object on the context
virtual void Draw( wxGraphicsContext* cg );
// returns the path of this object
wxGraphicsPath* GetPath() { return m_path; }
// returns the transformation matrix of this object, may be null if there is no transformation necessary
wxGraphicsMatrix* GetMatrix() { return m_matrix; }
private:
wxGraphicsMatrix* m_matrix;
wxGraphicsPath* m_path;
wxDECLARE_DYNAMIC_CLASS(wxGraphicsFigure);
};
#endif
//
// The graphics renderer is the instance corresponding to the rendering engine used, eg there is ONE core graphics renderer
// instance on OSX. This instance is pointed back to by all objects created by it. Therefore you can create eg additional
// paths at any point from a given matrix etc.
//
// Factory for every graphics object (contexts, paths, matrices, pens,
// brushes, fonts, bitmaps); one instance exists per rendering engine.
class WXDLLIMPEXP_CORE wxGraphicsRenderer : public wxObject
{
public:
wxGraphicsRenderer()
{
}
virtual ~wxGraphicsRenderer()
{
}
static wxGraphicsRenderer* GetDefaultRenderer();
static wxGraphicsRenderer* GetCairoRenderer();
#  ifdef __WXMSW__
#    if wxUSE_GRAPHICS_GDIPLUS
static wxGraphicsRenderer* GetGDIPlusRenderer();
#    endif
#    if wxUSE_GRAPHICS_DIRECT2D
static wxGraphicsRenderer* GetDirect2DRenderer();
#    endif
#  endif
// Context
virtual wxGraphicsContext* CreateContext(const wxWindowDC& dc) = 0;
virtual wxGraphicsContext* CreateContext(const wxMemoryDC& dc) = 0;
#  if wxUSE_PRINTING_ARCHITECTURE
virtual wxGraphicsContext* CreateContext(const wxPrinterDC& dc) = 0;
#  endif
#  ifdef __WXMSW__
#    if wxUSE_ENH_METAFILE
virtual wxGraphicsContext* CreateContext(const wxEnhMetaFileDC& dc) = 0;
#    endif
#  endif
wxGraphicsContext* CreateContextFromUnknownDC(const wxDC& dc);
virtual wxGraphicsContext* CreateContextFromNativeContext(void* context) = 0;
virtual wxGraphicsContext* CreateContextFromNativeWindow(void* window) = 0;
#  ifdef __WXMSW__
virtual wxGraphicsContext* CreateContextFromNativeHDC(WXHDC dc) = 0;
#  endif
virtual wxGraphicsContext* CreateContext(wxWindow* window) = 0;
#  if wxUSE_IMAGE
virtual wxGraphicsContext* CreateContextFromImage(wxImage& image) = 0;
#  endif
// create a context that can be used for measuring texts only, no drawing allowed
virtual wxGraphicsContext* CreateMeasuringContext() = 0;
// Path
virtual wxGraphicsPath CreatePath() = 0;
// Matrix
virtual wxGraphicsMatrix CreateMatrix(wxDouble a = 1.0, wxDouble b = 0.0, wxDouble c = 0.0, wxDouble d = 1.0, wxDouble tx = 0.0, wxDouble ty = 0.0) = 0;
// Paints
virtual wxGraphicsPen CreatePen(const wxGraphicsPenInfo& info) = 0;
virtual wxGraphicsBrush CreateBrush(const wxBrush& brush) = 0;
// Gradient brush creation functions may not honour all the stops specified
// stops and use just its boundary colours (this is currently the case
// under OS X)
virtual wxGraphicsBrush CreateLinearGradientBrush(wxDouble x1, wxDouble y1, wxDouble x2, wxDouble y2, const wxGraphicsGradientStops& stops, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix) = 0;
virtual wxGraphicsBrush CreateRadialGradientBrush(wxDouble startX, wxDouble startY, wxDouble endX, wxDouble endY, wxDouble radius, const wxGraphicsGradientStops& stops, const wxGraphicsMatrix& matrix = wxNullGraphicsMatrix) = 0;
// sets the font
virtual wxGraphicsFont CreateFont(const wxFont& font, const wxColour& col = *wxBLACK) = 0;
virtual wxGraphicsFont CreateFont(double sizeInPixels, const wxString& facename, int flags = wxFONTFLAG_DEFAULT, const wxColour& col = *wxBLACK) = 0;
virtual wxGraphicsFont CreateFontAtDPI(const wxFont& font, const wxRealPoint& dpi, const wxColour& col = *wxBLACK) = 0;
// create a native bitmap representation
virtual wxGraphicsBitmap CreateBitmap(const wxBitmap& bitmap) = 0;
#  if wxUSE_IMAGE
virtual wxGraphicsBitmap CreateBitmapFromImage(const wxImage& image) = 0;
virtual wxImage CreateImageFromBitmap(const wxGraphicsBitmap& bmp) = 0;
#  endif
// create a graphics bitmap from a native bitmap
virtual wxGraphicsBitmap CreateBitmapFromNativeBitmap(void* bitmap) = 0;
// create a subimage from a native image representation
virtual wxGraphicsBitmap CreateSubBitmap(const wxGraphicsBitmap& bitmap, wxDouble x, wxDouble y, wxDouble w, wxDouble h) = 0;
virtual wxString GetName() const = 0;
virtual void GetVersion(int* major, int* minor = NULL, int* micro = NULL) const = 0;
private:
wxDECLARE_NO_COPY_CLASS(wxGraphicsRenderer);
wxDECLARE_ABSTRACT_CLASS(wxGraphicsRenderer);
};
# if wxUSE_IMAGE
// Convert this graphics bitmap back to a wxImage via the renderer that
// created it; yields wxNullImage when no renderer is associated.
inline wxImage wxGraphicsBitmap::ConvertToImage() const
{
    wxGraphicsRenderer* renderer = GetRenderer();
    if (!renderer)
        return wxNullImage;
    return renderer->CreateImageFromBitmap(*this);
}
# endif
# endif
#endif
| {
"content_hash": "af891fa240df9f19db5905f1bd77f3f7",
"timestamp": "",
"source": "github",
"line_count": 937,
"max_line_length": 234,
"avg_line_length": 37.963713980789755,
"alnum_prop": 0.7161531541661982,
"repo_name": "satya-das/cppparser",
"id": "9662cc4295a65f223fda1105b88bffedd8f14ef5",
"size": "35572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/e2e/test_master/wxWidgets/include/wx/graphics.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3858548"
},
{
"name": "C++",
"bytes": "40366039"
},
{
"name": "CMake",
"bytes": "5653"
},
{
"name": "Lex",
"bytes": "39563"
},
{
"name": "Objective-C",
"bytes": "10345580"
},
{
"name": "Shell",
"bytes": "1365"
},
{
"name": "Yacc",
"bytes": "103019"
}
],
"symlink_target": ""
} |
<?php
namespace AppBundle\FormType;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\Extension\Core\Type\EmailType;
use Symfony\Component\Form\Extension\Core\Type\TextareaType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\Validator\Constraints\NotBlank;
/**
 * Form type for a support ticket: the requester's name, contact e-mail
 * and the question text. Every field is rendered with the Bootstrap
 * "form-control" class and validated as non-blank with a Czech message.
 */
class SupportTicketFormType extends AbstractType
{
    public function buildForm(FormBuilderInterface $builder, array $options)
    {
        $builder->add("name", TextType::class, [
            "label" => "Vaše jméno",
            "attr" => [
                "class" => "form-control",
            ],
            "constraints" => [
                new NotBlank(["message" => "Prosím vyplňte Vaše jméno"]),
            ],
        ]);

        $builder->add("email", EmailType::class, [
            "label" => "Váš email",
            "attr" => [
                "class" => "form-control",
            ],
            "constraints" => [
                new NotBlank(["message" => "Prosím vyplňte Váš e-mail abychom vás mohli kontaktovat s odpovědí"]),
            ],
        ]);

        $builder->add("text", TextareaType::class, [
            "label" => "Váš dotaz",
            "attr" => [
                "class" => "form-control",
            ],
            "constraints" => [
                new NotBlank(["message" => "Vyplňte prosím s čím vám můžeme pomoci."]),
            ],
        ]);
    }
}
| {
"content_hash": "a79bc105baf4d3a083edded1aeb02e1b",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 102,
"avg_line_length": 28.19047619047619,
"alnum_prop": 0.6587837837837838,
"repo_name": "czcodecamp/filtry",
"id": "cab78dda152a18a445dc3edcc41d94c8b1ca9110",
"size": "1208",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/AppBundle/FormType/SupportTicketFormType.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "3605"
},
{
"name": "HTML",
"bytes": "28377"
},
{
"name": "PHP",
"bytes": "137530"
},
{
"name": "Shell",
"bytes": "500"
}
],
"symlink_target": ""
} |
# Chef recipe: install mod_auth_openid and its libopkele dependency,
# then enable the Apache module. Resource order matters: packages must be
# installed before the module load template and apache_module resources run.

# Development packages needed to build/install mod_auth_openid, per platform family.
openid_dev_pkgs = value_for_platform_family(
'debian' => %w[automake make g++ apache2-prefork-dev libopkele-dev libopkele3 libtool],
%w[rhel fedora] => %w[gcc-c++ httpd-devel curl-devel libtidy libtidy-devel sqlite-devel pcre-devel openssl-devel make libtool],
'arch' => %w[libopkele],
'freebsd' => %w[libopkele pcre sqlite3]
)
# FreeBSD ships GNU make as "gmake"; everywhere else plain "make" suffices.
make_cmd = value_for_platform_family(
'freebsd' => { 'default' => 'gmake' },
'default' => 'make'
)
case node['platform_family']
when 'arch'
# Arch builds libopkele from the AUR; only the first entry of the package
# list is an AUR package, tidyhtml comes from the regular repositories.
include_recipe 'pacman::default'
package 'tidyhtml'
pacman_aur openid_dev_pkgs.first do
action [:build, :install]
end
else
openid_dev_pkgs.each do |pkg|
package pkg
end
end
# The module itself is packaged under different names per platform family.
case node['platform_family']
when 'rhel', 'fedora'
package 'libopkele'
package 'mod_auth_openid'
when 'debian'
package 'libapache2-mod-auth-openid'
end
# Drop the LoadModule stanza where Apache's mods-available layout expects it.
template "#{node['apache']['dir']}/mods-available/authopenid.load" do
source 'mods/authopenid.load.erb'
owner 'root'
group node['apache']['root_group']
mode '0644'
end
# Enable the module (symlinks mods-available -> mods-enabled on Debian-style layouts).
apache_module 'authopenid' do
filename 'mod_auth_openid.so'
end
# NOTE(review): make_cmd is computed but never used in this recipe — confirm
# whether a source-build step was removed or this assignment is dead code.
| {
"content_hash": "1d0dd94fe6d5884aa8cc33732843cf47",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 129,
"avg_line_length": 24.57777777777778,
"alnum_prop": 0.6763110307414105,
"repo_name": "weidongshao/compass-adapters",
"id": "922f833a21ac25160d54d7d49a72a9543c62129f",
"size": "1749",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chef/cookbooks/apache2/recipes/mod_auth_openid.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "111630"
},
{
"name": "Clojure",
"bytes": "3024"
},
{
"name": "Cucumber",
"bytes": "900"
},
{
"name": "HTML",
"bytes": "461917"
},
{
"name": "Perl",
"bytes": "847"
},
{
"name": "Python",
"bytes": "1862857"
},
{
"name": "Ruby",
"bytes": "2080546"
},
{
"name": "Shell",
"bytes": "33826"
}
],
"symlink_target": ""
} |
package raft
import (
"bufio"
"errors"
"fmt"
"io"
"os"
"sync"
"github.com/influxdb/influxdb/_vendor/raft/protobuf"
)
//------------------------------------------------------------------------------
//
// Typedefs
//
//------------------------------------------------------------------------------
// A log is a collection of log entries that are persisted to durable storage.
type Log struct {
	ApplyFunc   func(*LogEntry, Command) (interface{}, error) // applies a committed entry to the state machine
	file        *os.File                                      // backing file entries are persisted to (nil until open())
	path        string                                        // filesystem path of the log file
	entries     []*LogEntry                                   // in-memory entries following startIndex
	commitIndex uint64                                        // index of the highest committed entry
	mutex       sync.RWMutex                                  // guards the fields of this struct
	startIndex  uint64                                        // the index before the first entry in the Log entries
	startTerm   uint64                                        // term paired with startIndex (used when entries is empty)
	initialized bool                                          // set once open() has completed
}
// The results of the applying a log entry.
// NOTE(review): presumably carries the (returnValue, err) pair produced by
// ApplyFunc back to a waiter — confirm at the call sites outside this chunk.
type logResult struct {
	returnValue interface{} // value produced by applying the entry
	err         error       // error produced by applying the entry
}
//------------------------------------------------------------------------------
//
// Constructor
//
//------------------------------------------------------------------------------
// newLog constructs an empty, not-yet-opened log.
func newLog() *Log {
	return &Log{entries: []*LogEntry{}}
}
//------------------------------------------------------------------------------
//
// Accessors
//
//------------------------------------------------------------------------------
//--------------------------------------
// Log Indices
//--------------------------------------
// CommitIndex returns the index of the last committed entry in the log.
func (l *Log) CommitIndex() uint64 {
	l.mutex.RLock()
	index := l.commitIndex
	l.mutex.RUnlock()
	return index
}
// currentIndex returns the index of the newest entry, taking the read lock.
func (l *Log) currentIndex() uint64 {
	l.mutex.RLock()
	index := l.internalCurrentIndex()
	l.mutex.RUnlock()
	return index
}
// internalCurrentIndex returns the index of the newest entry without taking
// the lock; callers must already hold it (or otherwise guarantee exclusion).
func (l *Log) internalCurrentIndex() uint64 {
	if n := len(l.entries); n > 0 {
		return l.entries[n-1].Index()
	}
	return l.startIndex
}
// The next index in the log, i.e. the index the next appended entry will get.
func (l *Log) nextIndex() uint64 {
	return l.currentIndex() + 1
}
// isEmpty reports whether the log holds nothing at all: no in-memory entries
// and no snapshot boundary (startIndex still zero).
func (l *Log) isEmpty() bool {
	l.mutex.RLock()
	defer l.mutex.RUnlock()
	return len(l.entries) == 0 && l.startIndex == 0
}
// lastCommandName returns the command name of the newest entry, or the empty
// string when there are no entries (or the newest slot is nil).
func (l *Log) lastCommandName() string {
	l.mutex.RLock()
	defer l.mutex.RUnlock()

	n := len(l.entries)
	if n == 0 {
		return ""
	}
	last := l.entries[n-1]
	if last == nil {
		return ""
	}
	return last.CommandName()
}
//--------------------------------------
// Log Terms
//--------------------------------------
// currentTerm returns the term of the newest entry, falling back to the
// snapshot term when no entries are held in memory.
func (l *Log) currentTerm() uint64 {
	l.mutex.RLock()
	defer l.mutex.RUnlock()

	if n := len(l.entries); n > 0 {
		return l.entries[n-1].Term()
	}
	return l.startTerm
}
//------------------------------------------------------------------------------
//
// Methods
//
//------------------------------------------------------------------------------
//--------------------------------------
// State
//--------------------------------------
// Opens the log file and reads existing entries. The log can remain open and
// continue to append entries to the end of the log. If the file does not
// exist yet it is created empty. On a partially-written (corrupt) trailing
// entry the file is truncated back to the last fully-decoded entry.
func (l *Log) open(path string) error {
	// Read all the entries from the log if one exists.
	var readBytes int64
	var err error
	debugln("log.open.open ", path)
	// open log file
	l.file, err = os.OpenFile(path, os.O_RDWR, 0600)
	l.path = path
	if err != nil {
		// if the log file does not exist before
		// we create the log file and set commitIndex to 0
		if os.IsNotExist(err) {
			l.file, err = os.OpenFile(path, os.O_WRONLY|os.O_CREATE, 0600)
			debugln("log.open.create ", path)
			if err == nil {
				l.initialized = true
			}
			return err
		}
		return err
	}
	debugln("log.open.exist ", path)
	// Read the file and decode entries.
	for {
		// Instantiate log entry and decode into it.
		entry, _ := newLogEntry(l, nil, 0, 0, nil)
		entry.Position, _ = l.file.Seek(0, os.SEEK_CUR)
		n, err := entry.Decode(l.file)
		if err != nil {
			if err == io.EOF {
				debugln("open.log.append: finish ")
			} else {
				// Non-EOF decode failure: discard the partial entry by
				// truncating to the bytes read so far (readBytes only
				// advances after a successful decode).
				if err = os.Truncate(path, readBytes); err != nil {
					return fmt.Errorf("raft.Log: Unable to recover: %v", err)
				}
			}
			break
		}
		// Entries at or below startIndex were compacted into a snapshot;
		// only keep those beyond it.
		if entry.Index() > l.startIndex {
			// Append entry.
			l.entries = append(l.entries, entry)
			// Replay already-committed entries into the state machine.
			if entry.Index() <= l.commitIndex {
				command, err := newCommand(entry.CommandName(), entry.Command())
				if err != nil {
					// NOTE(review): entries whose command cannot be
					// reconstructed are silently skipped during replay —
					// confirm this is intentional.
					continue
				}
				l.ApplyFunc(entry, command)
			}
			debugln("open.log.append log index ", entry.Index())
		}
		readBytes += int64(n)
	}
	debugln("open.log.recovery number of log ", len(l.entries))
	l.initialized = true
	return nil
}
// close shuts the backing file (if one is open) and drops every in-memory
// entry, leaving the log ready to be reopened.
func (l *Log) close() {
	l.mutex.Lock()
	defer l.mutex.Unlock()

	if f := l.file; f != nil {
		f.Close()
		l.file = nil
	}
	l.entries = []*LogEntry{}
}
// sync flushes the log file's contents to stable storage (fsync).
func (l *Log) sync() error {
	return l.file.Sync()
}
//--------------------------------------
// Entries
//--------------------------------------
// Creates a log entry associated with this log, assigning it the next index
// and the given term; e carries the originating event (nil is accepted, as
// open() demonstrates when decoding entries).
func (l *Log) createEntry(term uint64, command Command, e *ev) (*LogEntry, error) {
	return newLogEntry(l, e, l.nextIndex(), term, command)
}
// getEntry fetches the entry at the given index. It returns nil when the
// index lies outside the log — either compacted away by a snapshot
// (index <= startIndex) or beyond the newest entry.
func (l *Log) getEntry(index uint64) *LogEntry {
	l.mutex.RLock()
	defer l.mutex.RUnlock()

	last := l.startIndex + uint64(len(l.entries))
	if index <= l.startIndex || index > last {
		return nil
	}
	return l.entries[index-l.startIndex-1]
}
// containsEntry reports whether the log holds an entry with exactly this
// index/term combination.
func (l *Log) containsEntry(index uint64, term uint64) bool {
	if entry := l.getEntry(index); entry != nil {
		return entry.Term() == term
	}
	return false
}
// getEntriesAfter retrieves the entries after the given index together with
// the term of the entry at that index. A nil slice (and zero term) is
// returned when the index predates the start of the log, i.e. it was
// compacted away by a snapshot. At most maxLogEntriesPerRequest entries are
// returned per call. Panics if index lies beyond the end of the log.
func (l *Log) getEntriesAfter(index uint64, maxLogEntriesPerRequest uint64) ([]*LogEntry, uint64) {
	l.mutex.RLock()
	defer l.mutex.RUnlock()

	// Return nil if index is before the start of the log.
	if index < l.startIndex {
		traceln("log.entriesAfter.before: ", index, " ", l.startIndex)
		return nil, 0
	}

	// Asking for entries past the end of the log is a programming error.
	if index > (uint64(len(l.entries)) + l.startIndex) {
		panic(fmt.Sprintf("raft: Index is beyond end of log: %v %v", len(l.entries), index))
	}

	// If we're going from the beginning of the log then return the whole log.
	if index == l.startIndex {
		traceln("log.entriesAfter.beginning: ", index, " ", l.startIndex)
		return l.entries, l.startTerm
	}

	// BUG FIX: the original passed the method value `.Index` (a func) to the
	// trace call instead of invoking it; call Index() so the actual last
	// index is logged.
	traceln("log.entriesAfter.partial: ", index, " ", l.entries[len(l.entries)-1].Index())

	entries := l.entries[index-l.startIndex:]
	traceln("log.entriesAfter: startIndex:", l.startIndex, " length", len(l.entries))

	// Cap the slice at maxLogEntriesPerRequest entries per request.
	if uint64(len(entries)) >= maxLogEntriesPerRequest {
		entries = entries[:maxLogEntriesPerRequest]
	}
	// The term of the entry at `index` itself (the one preceding the slice).
	return entries, l.entries[index-1-l.startIndex].Term()
}
//--------------------------------------
// Commit
//--------------------------------------
// Retrieves the last index and term that has been committed to the log.
func (l *Log) commitInfo() (index uint64, term uint64) {
	l.mutex.RLock()
	defer l.mutex.RUnlock()
	// If we don't have any committed entries then just return zeros.
	if l.commitIndex == 0 {
		return 0, 0
	}
	// No new commit log after snapshot
	if l.commitIndex == l.startIndex {
		return l.startIndex, l.startTerm
	}
	// Return the last index & term from the last committed entry.
	// NOTE(review): the indexing below assumes startIndex < commitIndex <=
	// startIndex+len(entries); the two guards above handle the lower bound,
	// and setCommitIndex clamps the upper bound — confirm no other writer
	// can violate this, otherwise the access would panic.
	debugln("commitInfo.get.[", l.commitIndex, "/", l.startIndex, "]")
	entry := l.entries[l.commitIndex-1-l.startIndex]
	return entry.Index(), entry.Term()
}
// lastInfo returns the index and term of the last entry appended to the
// log, falling back to the snapshot boundary when the in-memory log is
// empty.
func (l *Log) lastInfo() (index uint64, term uint64) {
	l.mutex.RLock()
	defer l.mutex.RUnlock()
	n := len(l.entries)
	if n == 0 {
		// Empty in-memory log: report the snapshot boundary.
		return l.startIndex, l.startTerm
	}
	last := l.entries[n-1]
	return last.Index(), last.Term()
}
// updateCommitIndex advances the in-memory commit index. The index only
// ever moves forward; a smaller value is ignored.
func (l *Log) updateCommitIndex(index uint64) {
	l.mutex.Lock()
	defer l.mutex.Unlock()
	if l.commitIndex < index {
		l.commitIndex = index
	}
	debugln("update.commit.index ", index)
}
// Updates the commit index and writes entries after that index to the stable storage.
// Each newly committed entry is decoded and applied to the state machine via
// ApplyFunc; clients waiting on an entry's event channel are notified with
// the apply result.
func (l *Log) setCommitIndex(index uint64) error {
	l.mutex.Lock()
	defer l.mutex.Unlock()
	// this is not error any more after limited the number of sending entries
	// commit up to what we already have
	if index > l.startIndex+uint64(len(l.entries)) {
		debugln("raft.Log: Commit index", index, "set back to ", len(l.entries))
		index = l.startIndex + uint64(len(l.entries))
	}
	// Do not allow previous indices to be committed again.
	// This could happens, since the guarantee is that the new leader has up-to-dated
	// log entries rather than has most up-to-dated committed index
	// For example, Leader 1 send log 80 to follower 2 and follower 3
	// follower 2 and follow 3 all got the new entries and reply
	// leader 1 committed entry 80 and send reply to follower 2 and follower3
	// follower 2 receive the new committed index and update committed index to 80
	// leader 1 fail to send the committed index to follower 3
	// follower 3 promote to leader (server 1 and server 2 will vote, since leader 3
	// has up-to-dated the entries)
	// when new leader 3 send heartbeat with committed index = 0 to follower 2,
	// follower 2 should reply success and let leader 3 update the committed index to 80
	if index < l.commitIndex {
		return nil
	}
	// Find all entries whose index is between the previous index and the current index.
	for i := l.commitIndex + 1; i <= index; i++ {
		// Translate the 1-based log index into an offset into l.entries.
		entryIndex := i - 1 - l.startIndex
		entry := l.entries[entryIndex]
		// Update commit index.
		// Done before applying so the commit point is durable in memory even
		// if the apply below fails partway through the batch.
		l.commitIndex = entry.Index()
		// Decode the command.
		command, err := newCommand(entry.CommandName(), entry.Command())
		if err != nil {
			return err
		}
		// Apply the changes to the state machine and store the error code.
		returnValue, err := l.ApplyFunc(entry, command)
		debugf("setCommitIndex.set.result index: %v, entries index: %v", i, entryIndex)
		// Notify any client blocked on this entry with the apply result.
		if entry.event != nil {
			entry.event.returnValue = returnValue
			entry.event.c <- err
		}
		_, isJoinCommand := command.(JoinCommand)
		// we can only commit up to the most recent join command
		// if there is a join in this batch of commands.
		// after this commit, we need to recalculate the majority.
		if isJoinCommand {
			return nil
		}
	}
	return nil
}
// Set the commitIndex at the head of the log file to the current
// commit Index. This should be called after obtained a log lock
func (l *Log) flushCommitIndex() {
	// Overwrite the fixed-width (8 hex digits + newline) header at offset 0,
	// then restore the cursor to the end of the file for future appends.
	// NOTE(review): Seek/Fprintf errors are silently discarded here; a failed
	// write leaves a stale commit index on disk — consider surfacing them.
	l.file.Seek(0, os.SEEK_SET)
	fmt.Fprintf(l.file, "%8x\n", l.commitIndex)
	l.file.Seek(0, os.SEEK_END)
}
//--------------------------------------
// Truncation
//--------------------------------------
// Truncates the log to the given index and term. This only works if the log
// at the index has not been committed.
// On success both the on-disk file and the in-memory entry slice are cut
// back, and clients waiting on any discarded entry are notified of failure.
func (l *Log) truncate(index uint64, term uint64) error {
	l.mutex.Lock()
	defer l.mutex.Unlock()
	debugln("log.truncate: ", index)
	// Do not allow committed entries to be truncated.
	if index < l.commitIndex {
		debugln("log.truncate.before")
		return fmt.Errorf("raft.Log: Index is already committed (%v): (IDX=%v, TERM=%v)", l.commitIndex, index, term)
	}
	// Do not truncate past end of entries.
	if index > l.startIndex+uint64(len(l.entries)) {
		debugln("log.truncate.after")
		return fmt.Errorf("raft.Log: Entry index does not exist (MAX=%v): (IDX=%v, TERM=%v)", len(l.entries), index, term)
	}
	// If we're truncating everything then just clear the entries.
	if index == l.startIndex {
		debugln("log.truncate.clear")
		l.file.Truncate(0)
		l.file.Seek(0, os.SEEK_SET)
		// notify clients if this node is the previous leader
		for _, entry := range l.entries {
			if entry.event != nil {
				entry.event.c <- errors.New("command failed to be committed due to node failure")
			}
		}
		l.entries = []*LogEntry{}
	} else {
		// Do not truncate if the entry at index does not have the matching term.
		// NOTE(review): the len(l.entries) > 0 guard below is redundant — in
		// this branch index > startIndex, so the slice is already non-empty
		// (the indexing above would have panicked otherwise).
		entry := l.entries[index-l.startIndex-1]
		if len(l.entries) > 0 && entry.Term() != term {
			debugln("log.truncate.termMismatch")
			return fmt.Errorf("raft.Log: Entry at index does not have matching term (%v): (IDX=%v, TERM=%v)", entry.Term(), index, term)
		}
		// Otherwise truncate up to the desired entry.
		// If index equals the last entry there is nothing to cut, hence the
		// strict comparison.
		if index < l.startIndex+uint64(len(l.entries)) {
			debugln("log.truncate.finish")
			// Cut the file at the byte offset of the first discarded entry.
			position := l.entries[index-l.startIndex].Position
			l.file.Truncate(position)
			l.file.Seek(position, os.SEEK_SET)
			// notify clients if this node is the previous leader
			for i := index - l.startIndex; i < uint64(len(l.entries)); i++ {
				entry := l.entries[i]
				if entry.event != nil {
					entry.event.c <- errors.New("command failed to be committed due to node failure")
				}
			}
			l.entries = l.entries[0 : index-l.startIndex]
		}
	}
	return nil
}
//--------------------------------------
// Append
//--------------------------------------
// appendEntries appends a batch of protobuf log entries to the log file
// through a single buffered writer, then syncs the file. Each entry's
// Position is the byte offset at which it begins.
//
// Fix: the original discarded the error from w.Flush(). A failed flush
// means the buffered entries never reached the file, yet the call reported
// success — now the error is returned to the caller.
func (l *Log) appendEntries(entries []*protobuf.LogEntry) error {
	l.mutex.Lock()
	defer l.mutex.Unlock()
	startPosition, _ := l.file.Seek(0, os.SEEK_CUR)
	w := bufio.NewWriter(l.file)
	var size int64
	var err error
	// Append each entry but exit if we hit an error.
	for i := range entries {
		logEntry := &LogEntry{
			log:      l,
			Position: startPosition,
			pb:       entries[i],
		}
		if size, err = l.writeEntry(logEntry, w); err != nil {
			return err
		}
		startPosition += size
	}
	// Surface buffered-write failures instead of silently dropping them.
	if err = w.Flush(); err != nil {
		return err
	}
	// A failed sync is unrecoverable for a durability-critical log, so keep
	// the original panic behavior.
	err = l.sync()
	if err != nil {
		panic(err)
	}
	return nil
}
// appendEntry encodes a single entry directly to the end of the log file
// and, on success, records it in the in-memory entry list.
func (l *Log) appendEntry(entry *LogEntry) error {
	l.mutex.Lock()
	defer l.mutex.Unlock()
	if l.file == nil {
		return errors.New("raft.Log: Log is not open")
	}
	// Reject entries that would move the log backwards relative to the
	// last appended entry.
	if n := len(l.entries); n > 0 {
		last := l.entries[n-1]
		switch {
		case entry.Term() < last.Term():
			return fmt.Errorf("raft.Log: Cannot append entry with earlier term (%x:%x <= %x:%x)", entry.Term(), entry.Index(), last.Term(), last.Index())
		case entry.Term() == last.Term() && entry.Index() <= last.Index():
			return fmt.Errorf("raft.Log: Cannot append entry with earlier index in the same term (%x:%x <= %x:%x)", entry.Term(), entry.Index(), last.Term(), last.Index())
		}
	}
	// Remember the byte offset at which this entry starts so truncation can
	// seek back to it later.
	position, _ := l.file.Seek(0, os.SEEK_CUR)
	entry.Position = position
	// Write to storage.
	if _, err := entry.Encode(l.file); err != nil {
		return err
	}
	// Append to entries list if stored on disk.
	l.entries = append(l.entries, entry)
	return nil
}
// writeEntry is the buffered-io variant of appendEntry: it encodes the
// entry into the supplied writer (the caller flushes and sets Position)
// and records it in the in-memory entry list. Returns the encoded size.
func (l *Log) writeEntry(entry *LogEntry, w io.Writer) (int64, error) {
	if l.file == nil {
		return -1, errors.New("raft.Log: Log is not open")
	}
	// Reject entries that would move the log backwards relative to the
	// last appended entry.
	if n := len(l.entries); n > 0 {
		last := l.entries[n-1]
		switch {
		case entry.Term() < last.Term():
			return -1, fmt.Errorf("raft.Log: Cannot append entry with earlier term (%x:%x <= %x:%x)", entry.Term(), entry.Index(), last.Term(), last.Index())
		case entry.Term() == last.Term() && entry.Index() <= last.Index():
			return -1, fmt.Errorf("raft.Log: Cannot append entry with earlier index in the same term (%x:%x <= %x:%x)", entry.Term(), entry.Index(), last.Term(), last.Index())
		}
	}
	// Write to storage.
	size, err := entry.Encode(w)
	if err != nil {
		return -1, err
	}
	// Append to entries list if stored on disk.
	l.entries = append(l.entries, entry)
	return int64(size), nil
}
//--------------------------------------
// Log compaction
//--------------------------------------
// compact drops every log entry up to and including `index` (typically the
// point captured by a snapshot) by rewriting the surviving tail into a new
// log file that atomically replaces the old one, then advances
// startIndex/startTerm to the compaction point.
//
// Fixes: (1) entry positions were recorded from the OLD file handle
// (l.file.Seek) while entries were encoded into the NEW file, leaving
// entry.Position pointing at meaningless offsets after the rename — the
// offsets truncate() later relies on. Positions now come from the file the
// entries are actually written to. (2) the file.Sync() error was ignored;
// a failed sync before the rename could swap in a non-durable log.
func (l *Log) compact(index uint64, term uint64) error {
	var entries []*LogEntry
	l.mutex.Lock()
	defer l.mutex.Unlock()
	if index == 0 {
		return nil
	}
	// nothing to compact: the index may be greater than the current index
	// if we just recovered from a snapshot.
	if index >= l.internalCurrentIndex() {
		entries = make([]*LogEntry, 0)
	} else {
		// keep all log entries after the compaction index
		entries = l.entries[index-l.startIndex:]
	}
	// create a new log file and add all the surviving entries
	new_file_path := l.path + ".new"
	file, err := os.OpenFile(new_file_path, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0600)
	if err != nil {
		return err
	}
	for _, entry := range entries {
		// Record the offset in the NEW file, where this entry is encoded.
		position, _ := file.Seek(0, os.SEEK_CUR)
		entry.Position = position
		if _, err = entry.Encode(file); err != nil {
			file.Close()
			os.Remove(new_file_path)
			return err
		}
	}
	// Make sure the compacted log is durable before swapping it in.
	if err = file.Sync(); err != nil {
		file.Close()
		os.Remove(new_file_path)
		return err
	}
	old_file := l.file
	// rename the new log file over the old one (atomic on POSIX)
	err = os.Rename(new_file_path, l.path)
	if err != nil {
		file.Close()
		os.Remove(new_file_path)
		return err
	}
	l.file = file
	// close the old log file
	old_file.Close()
	// compact the in-memory log to match
	l.entries = entries
	l.startIndex = index
	l.startTerm = term
	return nil
}
| {
"content_hash": "7300903fb26ccd9d8607f5e1933700f9",
"timestamp": "",
"source": "github",
"line_count": 632,
"max_line_length": 176,
"avg_line_length": 27.454113924050635,
"alnum_prop": 0.6210016713734079,
"repo_name": "clarktrimble/influxdb",
"id": "2fea2add8c94521cfd7d5135de244e44e6ac507b",
"size": "17351",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "_vendor/raft/log.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7587"
},
{
"name": "CSS",
"bytes": "62"
},
{
"name": "Go",
"bytes": "4566625"
},
{
"name": "HTML",
"bytes": "18210"
},
{
"name": "Protocol Buffer",
"bytes": "1833"
},
{
"name": "Ruby",
"bytes": "4708"
},
{
"name": "Shell",
"bytes": "9599"
}
],
"symlink_target": ""
} |
package com.yakami.light.bean;
import com.yakami.light.bean.base.Entity;
import java.util.List;
/**
 * Plain data container for a raw time-based ranking payload: identifies a
 * ranked item (id, name, title), the time value it is ranked by, and the
 * list of per-disc rankings it holds.
 *
 * <p>Mutable JavaBean with a field/getter/setter per property; extends the
 * project's {@code Entity} base class.
 *
 * Created by Yakami on 2016/6/6, enjoying it!
 */
public class RawTimeRankContainer extends Entity {
    // Display name of the ranked item.
    private String name;
    // Numeric identifier of the ranked item.
    private int id;
    // Time value the ranking is based on (units not established here —
    // presumably epoch millis or accumulated seconds; TODO confirm at caller).
    private long time;
    // Human-readable title.
    private String title;
    // Per-disc ranking entries belonging to this container.
    private List<RawDiscRank> discs;
    /** @return the display name */
    public String getName() {
        return name;
    }
    /** @param name the display name to set */
    public void setName(String name) {
        this.name = name;
    }
    /** @return the numeric identifier */
    public int getId() {
        return id;
    }
    /** @param id the numeric identifier to set */
    public void setId(int id) {
        this.id = id;
    }
    /** @return the time value the ranking is based on */
    public long getTime() {
        return time;
    }
    /** @param time the time value to set */
    public void setTime(long time) {
        this.time = time;
    }
    /** @return the human-readable title */
    public String getTitle() {
        return title;
    }
    /** @param title the title to set */
    public void setTitle(String title) {
        this.title = title;
    }
    /** @return the list of per-disc rankings (may be null if never set) */
    public List<RawDiscRank> getDiscs() {
        return discs;
    }
    /** @param discs the list of per-disc rankings to set */
    public void setDiscs(List<RawDiscRank> discs) {
        this.discs = discs;
    }
}
| {
"content_hash": "946756e823c14cc9b3f4cd5c260ec5dd",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 51,
"avg_line_length": 17.275862068965516,
"alnum_prop": 0.5898203592814372,
"repo_name": "hanFengSan/light",
"id": "8bb3331d5ed4e016ce34143703869b5d67fb823f",
"size": "1002",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/yakami/light/bean/RawTimeRankContainer.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "913630"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="initial-scale=1, maximum-scale=1, user-scalable=no, width=device-width">
<!-- TODO: the page title is empty — set an app name here -->
<title></title>
<link href="lib/ionic/css/ionic.css" rel="stylesheet">
<link href="css/style.css" rel="stylesheet">
<!-- IF using Sass (run gulp sass first), then uncomment below and remove the CSS includes above
<link href="css/ionic.app.css" rel="stylesheet">
-->
<!-- ionic/angularjs js -->
<script src="lib/ionic/js/ionic.bundle.js"></script>
<script src="lib/ionic/js/angular/angular-resource.min.js" ></script>
<!-- cordova script (this will be a 404 during development) -->
<script src="cordova.js"></script>
<!-- your app's js -->
<script src="js/app.js"></script>
<script src="js/controllers.js"></script>
</head>
<!-- ng-app bootstraps the AngularJS "starter" module; ion-nav-view is the
     Ionic navigation container that the router renders views into -->
<body ng-app="starter">
<ion-nav-view></ion-nav-view>
</body>
</html>
| {
"content_hash": "f51297546aae687047d11191ac01491f",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 107,
"avg_line_length": 29.967741935483872,
"alnum_prop": 0.6318622174381054,
"repo_name": "tkssharma/Ionic-conferenceApp",
"id": "c1bfef60c1b84c196487e9ca056a6ba68a49e45a",
"size": "929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ionicapp/www/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "563206"
},
{
"name": "HTML",
"bytes": "6786"
},
{
"name": "JavaScript",
"bytes": "3204064"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.