text stringlengths 2 1.04M | meta dict |
|---|---|
layout: post
title: "Eurospec MP5"
date: 2016-03-26 23:38:00
categories: lock
tags: euro ebay
image: /images/lock-32.png
picked: true
number: 32
---
# {{page.number}} {{ page.title }}
{% if page.picked %}
### Status: <i class="fa fa-unlock"/>
{% else %}
### Status: <i class="fa fa-lock"/>
{% endif %}
### Description
E*S Euro Cylinder from ebay. 5-pin
### Methods
- short side: raked (city, snake)
### Notes
| {
"content_hash": "ca4cdd72d3fe5d8eeccfbd6d5b3f160f",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 37,
"avg_line_length": 15.481481481481481,
"alnum_prop": 0.6244019138755981,
"repo_name": "phonyduck/phonyduck.github.io",
"id": "e6e9ab5c039eac12c27753473e28ae3e483f6390",
"size": "422",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2016-03-26-eurospec-mp5.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "72120"
},
{
"name": "HTML",
"bytes": "15123"
},
{
"name": "JavaScript",
"bytes": "14838"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "1342c60c99fe13d5051539bf8c847728",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "7144b450ac5841558319576b707ddd174a7c5c59",
"size": "186",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Pteridophyta/Polypodiopsida/Cyatheales/Dicksoniaceae/Dicksonia/Dicksonia cicutaria/Dicksonia cicutaria remota/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""Tests for record_input_op."""
import os
from tensorflow.python.framework import test_util
from tensorflow.python.framework.errors_impl import NotFoundError
from tensorflow.python.lib.io import tf_record
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class RecordInputOpTest(test.TestCase):
  """Tests for the RecordInput op (`data_flow_ops.RecordInput`)."""

  def generateTestData(self,
                       prefix,
                       n,
                       m,
                       compression_type=tf_record.TFRecordCompressionType.NONE):
    """Writes `n` TFRecord files named `<prefix>.<i>` with `m` records each.

    Each record holds its global index (i * m + j) zero-padded to width 10,
    so every record across all generated files is unique.
    """
    options = tf_record.TFRecordOptions(compression_type)
    for i in range(n):
      f = os.path.join(self.get_temp_dir(), prefix + "." + str(i))
      w = tf_record.TFRecordWriter(f, options=options)
      for j in range(m):
        w.write("{0:0{width}}".format(i * m + j, width=10).encode("utf-8"))
      w.close()

  def _assertEpochsHaveNoDuplicates(self, yield_op, files, records_per_file,
                                    batches):
    """Evaluates `yield_op` over 3 full epochs, asserting that no record
    value repeats within any single epoch."""
    for _ in range(3):
      epoch_set = set()
      for _ in range(files * records_per_file // batches):
        op_list = self.evaluate(yield_op)
        # Bug fix: the original compared lengths with `is` (object identity),
        # which is only incidentally true for small cached ints. Use value
        # equality instead.
        self.assertEqual(len(op_list), batches)
        for r in op_list:
          self.assertNotIn(r[0], epoch_set)
          epoch_set.add(r[0])

  def testRecordInputSimple(self):
    """A single uncompressed record round-trips through RecordInput."""
    with self.cached_session():
      self.generateTestData("basic", 1, 1)
      yield_op = data_flow_ops.RecordInput(
          file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
          parallelism=1,
          buffer_size=1,
          batch_size=1,
          name="record_input").get_yield_op()
      self.assertEqual(self.evaluate(yield_op), b"0000000000")

  def testRecordInputSimpleGzip(self):
    """A single GZIP-compressed record round-trips through RecordInput."""
    with self.cached_session():
      self.generateTestData(
          "basic",
          1,
          1,
          compression_type=tf_record.TFRecordCompressionType.GZIP)
      yield_op = data_flow_ops.RecordInput(
          file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
          parallelism=1,
          buffer_size=1,
          batch_size=1,
          name="record_input",
          compression_type=tf_record.TFRecordCompressionType.GZIP).get_yield_op(
          )
      self.assertEqual(self.evaluate(yield_op), b"0000000000")

  def testRecordInputSimpleZlib(self):
    """A single ZLIB-compressed record round-trips through RecordInput."""
    with self.cached_session():
      self.generateTestData(
          "basic",
          1,
          1,
          compression_type=tf_record.TFRecordCompressionType.ZLIB)
      yield_op = data_flow_ops.RecordInput(
          file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
          parallelism=1,
          buffer_size=1,
          batch_size=1,
          name="record_input",
          compression_type=tf_record.TFRecordCompressionType.ZLIB).get_yield_op(
          )
      self.assertEqual(self.evaluate(yield_op), b"0000000000")

  @test_util.run_deprecated_v1
  def testRecordInputEpochs(self):
    """Records are not duplicated within an epoch across multiple epochs."""
    files = 100
    records_per_file = 100
    batches = 2
    with self.cached_session():
      self.generateTestData("basic", files, records_per_file)
      records = data_flow_ops.RecordInput(
          file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
          parallelism=2,
          buffer_size=2000,
          batch_size=1,
          shift_ratio=0.33,
          seed=10,
          name="record_input",
          batches=batches)
      self._assertEpochsHaveNoDuplicates(records.get_yield_op(), files,
                                         records_per_file, batches)

  @test_util.run_deprecated_v1
  def testDoesNotDeadlock(self):
    """Repeatedly reads past the dataset size to surface deadlocks."""
    # Iterate multiple times to cause deadlock if there is a chance it can
    # occur.
    for _ in range(30):
      with self.cached_session():
        self.generateTestData("basic", 1, 1)
        records = data_flow_ops.RecordInput(
            file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
            parallelism=1,
            buffer_size=100,
            batch_size=1,
            name="record_input")
        yield_op = records.get_yield_op()
        for _ in range(50):
          self.evaluate(yield_op)

  @test_util.run_deprecated_v1
  def testEmptyGlob(self):
    """A file pattern matching nothing raises NotFoundError on evaluation."""
    with self.cached_session():
      record_input = data_flow_ops.RecordInput(file_pattern="foo")
      yield_op = record_input.get_yield_op()
      self.evaluate(variables.global_variables_initializer())
      with self.assertRaises(NotFoundError):
        self.evaluate(yield_op)

  @test_util.run_deprecated_v1
  def testBufferTooSmall(self):
    """Epoch uniqueness still holds with a small dataset.

    NOTE(review): despite the name, buffer_size=2000 here exceeds the total
    record count (10 * 10), and the parameters otherwise mirror
    testRecordInputEpochs — confirm whether a deliberately undersized buffer
    was intended.
    """
    files = 10
    records_per_file = 10
    batches = 2
    with self.cached_session():
      self.generateTestData("basic", files, records_per_file)
      records = data_flow_ops.RecordInput(
          file_pattern=os.path.join(self.get_temp_dir(), "basic.*"),
          parallelism=2,
          buffer_size=2000,
          batch_size=1,
          shift_ratio=0.33,
          seed=10,
          name="record_input",
          batches=batches)
      self._assertEpochsHaveNoDuplicates(records.get_yield_op(), files,
                                         records_per_file, batches)


if __name__ == "__main__":
  test.main()
| {
"content_hash": "66183f1f7518593d9a4db19cffce6647",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 80,
"avg_line_length": 32.06547619047619,
"alnum_prop": 0.6047893075923519,
"repo_name": "gautam1858/tensorflow",
"id": "8ab4e45bf09bcfa968e00d41be256d69b98cca5a",
"size": "6076",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "tensorflow/python/kernel_tests/io_ops/record_input_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "47492"
},
{
"name": "C",
"bytes": "1129549"
},
{
"name": "C#",
"bytes": "13496"
},
{
"name": "C++",
"bytes": "116904214"
},
{
"name": "CMake",
"bytes": "165809"
},
{
"name": "Cython",
"bytes": "5003"
},
{
"name": "Dockerfile",
"bytes": "341994"
},
{
"name": "Go",
"bytes": "2052513"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "1053827"
},
{
"name": "JavaScript",
"bytes": "5772"
},
{
"name": "Jupyter Notebook",
"bytes": "787371"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "9549263"
},
{
"name": "Makefile",
"bytes": "2760"
},
{
"name": "Objective-C",
"bytes": "180638"
},
{
"name": "Objective-C++",
"bytes": "295149"
},
{
"name": "Pawn",
"bytes": "5336"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "43775271"
},
{
"name": "Roff",
"bytes": "5034"
},
{
"name": "Ruby",
"bytes": "7854"
},
{
"name": "Shell",
"bytes": "566970"
},
{
"name": "Smarty",
"bytes": "89664"
},
{
"name": "SourcePawn",
"bytes": "8509"
},
{
"name": "Starlark",
"bytes": "6897556"
},
{
"name": "Swift",
"bytes": "78435"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
package org.wso2.developerstudio.eclipse.esb.mediators.impl;
import java.util.Collection;
import java.util.Map;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
import org.w3c.dom.Element;
import org.wso2.developerstudio.eclipse.esb.impl.ModelObjectImpl;
import org.wso2.developerstudio.eclipse.esb.mediators.MediatorsPackage;
import org.wso2.developerstudio.eclipse.esb.mediators.RuleFact;
import org.wso2.developerstudio.eclipse.esb.mediators.RuleFactsConfiguration;
import org.wso2.developerstudio.eclipse.esb.util.ObjectValidator;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Rule Facts Configuration</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.wso2.developerstudio.eclipse.esb.mediators.impl.RuleFactsConfigurationImpl#getFacts <em>Facts</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class RuleFactsConfigurationImpl extends ModelObjectImpl implements RuleFactsConfiguration {
    /**
     * The cached value of the '{@link #getFacts() <em>Facts</em>}' containment reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getFacts()
     * @generated
     * @ordered
     */
    protected EList<RuleFact> facts;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected RuleFactsConfigurationImpl() {
        super();
    }

    /**
     * Deserializes this configuration from a DOM element: each nested
     * "fact" child element is loaded as a {@link RuleFact} and appended
     * to the {@link #getFacts() facts} list.
     * {@inheritDoc}
     */
    protected void doLoad(Element self) throws Exception {
        loadObjects(self, "fact", RuleFact.class, new ObjectHandler<RuleFact>() {
            public void handle(RuleFact object) {
                getFacts().add(object);
            }
        });
        super.doLoad(self);
    }

    /**
     * Serializes this configuration as a "facts" child element of the given
     * parent, saving each contained {@link RuleFact} underneath it.
     * {@inheritDoc}
     */
    protected Element doSave(Element parent) throws Exception {
        Element self = createChildElement(parent, "facts");
        for (RuleFact fact : getFacts()) {
            fact.save(self);
        }
        addComments(self);
        return self;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return MediatorsPackage.Literals.RULE_FACTS_CONFIGURATION;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList<RuleFact> getFacts() {
        // Lazily create the containment list on first access (standard EMF pattern).
        if (facts == null) {
            facts = new EObjectContainmentEList<RuleFact>(RuleFact.class, this, MediatorsPackage.RULE_FACTS_CONFIGURATION__FACTS);
        }
        return facts;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case MediatorsPackage.RULE_FACTS_CONFIGURATION__FACTS:
                return ((InternalEList<?>)getFacts()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case MediatorsPackage.RULE_FACTS_CONFIGURATION__FACTS:
                return getFacts();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case MediatorsPackage.RULE_FACTS_CONFIGURATION__FACTS:
                getFacts().clear();
                getFacts().addAll((Collection<? extends RuleFact>)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case MediatorsPackage.RULE_FACTS_CONFIGURATION__FACTS:
                getFacts().clear();
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case MediatorsPackage.RULE_FACTS_CONFIGURATION__FACTS:
                return facts != null && !facts.isEmpty();
        }
        return super.eIsSet(featureID);
    }

    /**
     * NOTE(review): validation is not implemented; callers currently
     * receive null — confirm that this is intended.
     */
    public Map<String, ObjectValidator> validate() {
        // TODO Auto-generated method stub
        return null;
    }
} //RuleFactsConfigurationImpl
| {
"content_hash": "fcf9e8313eb66f5810d0fe1f871afd01",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 124,
"avg_line_length": 24.27027027027027,
"alnum_prop": 0.6750556792873051,
"repo_name": "splinter/developer-studio",
"id": "e17436f23fb3b8c1e2f62c584746b43ea176f6ad",
"size": "5102",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "esb/org.wso2.developerstudio.eclipse.esb/src/org/wso2/developerstudio/eclipse/esb/mediators/impl/RuleFactsConfigurationImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
/*
* Created on Apr 27, 2006
*
* TODO To change the template for this generated file go to
* Window - Preferences - Java - Code Style - Code Templates
*/
package edu.wustl.common.tree;
import java.io.Serializable;
import java.util.Vector;
/**
* @author gautam_shetty
*
* TODO To change the template for this generated type comment go to
* Window - Preferences - Java - Code Style - Code Templates
*/
/**
 * Simple serializable tree node holding an identifier, a display value,
 * a parent reference and a list of child nodes.
 */
public class TreeNodeImpl implements Serializable, TreeNode
{
    /**
     * serialVersionUID.
     */
    private static final long serialVersionUID = 1L;

    /**
     * Identifier for the node (may be null for unsaved nodes).
     */
    private Long identifier;

    /**
     * Name of the node.
     */
    private String value;

    /**
     * Parent node of this node.
     */
    private TreeNode parentNode;

    /**
     * List of child nodes.
     */
    private Vector childNodes = new Vector();

    /**
     * Default Constructor.
     */
    public TreeNodeImpl()
    {
        // Empty Constructor TreeNodeImpl.
    }

    /**
     * Constructor initializing identifier and value.
     * @param identifier Long object
     * @param value String value
     */
    public TreeNodeImpl(Long identifier, String value)
    {
        this.identifier = identifier;
        this.value = value;
    }

    /**
     * @return Returns the identifier.
     */
    public Long getIdentifier()
    {
        return identifier;
    }

    /**
     * @param identifier The identifier to set.
     */
    public void setIdentifier(Long identifier)
    {
        this.identifier = identifier;
    }

    /**
     * @return Returns the value.
     */
    public String getValue()
    {
        return value;
    }

    /**
     * @param value The value to set.
     */
    public void setValue(String value)
    {
        this.value = value;
    }

    /**
     * @return Returns the parentNode.
     */
    public TreeNode getParentNode()
    {
        return parentNode;
    }

    /**
     * @param parentNode The parentNode to set.
     */
    public void setParentNode(TreeNode parentNode)
    {
        this.parentNode = parentNode;
    }

    /**
     * @return Returns the childNodes.
     */
    public Vector getChildNodes()
    {
        return childNodes;
    }

    /**
     * @param childNodes The childNodes to set.
     */
    public void setChildNodes(Vector childNodes)
    {
        this.childNodes = childNodes;
    }

    /**
     * Equality is based on the identifier only.
     * Bug fix: the previous implementation dereferenced getIdentifier()
     * unconditionally and threw NullPointerException for nodes with a null
     * identifier; two nodes with null identifiers now compare equal, which
     * is consistent with the null-safe hashCode() below.
     * @param obj Object
     * @return boolean result.
     */
    public boolean equals(Object obj)
    {
        boolean flag = false;
        if (obj instanceof TreeNodeImpl)
        {
            TreeNodeImpl treeNodeImpl = (TreeNodeImpl) obj;
            if (this.identifier == null)
            {
                flag = (treeNodeImpl.getIdentifier() == null);
            }
            else
            {
                flag = this.identifier.equals(treeNodeImpl.getIdentifier());
            }
        }
        return flag;
    }

    /**
     * Hash code derived from the identifier; 0 when identifier is null,
     * keeping the equals/hashCode contract.
     * @return integer hash code.
     */
    public int hashCode()
    {
        int i = 0;
        if (getIdentifier() != null)
        {
            i += getIdentifier().hashCode();
        }
        return i;
    }

    /**
     * To display Tooltip for the Tree node. By default it will return value,
     * override this method if need different tool tip.
     * @return The tooltip to display
     */
    public String getToolTip()
    {
        return this.value;
    }

    /**
     * Value alone, or "value : identifier" when a non-zero identifier exists.
     * Bug fix: previously threw NullPointerException when identifier was null.
     * @return String value.
     */
    public String toString()
    {
        String str = this.value;
        if (this.identifier != null && this.identifier.longValue() != 0)
        {
            str = this.value + " : " + this.identifier;
        }
        return str;
    }
}
| {
"content_hash": "71d59a28de7cceca0af638f7be86e29c",
"timestamp": "",
"source": "github",
"line_count": 166,
"max_line_length": 74,
"avg_line_length": 19.07831325301205,
"alnum_prop": 0.6182507104515315,
"repo_name": "NCIP/commons-module",
"id": "485b5a1205fbd88114aa356aaf4d37e1fa2f0ef8",
"size": "3406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "software/washu-commons/src/main/java/edu/wustl/common/tree/TreeNodeImpl.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "1719980"
},
{
"name": "XSLT",
"bytes": "243925"
}
],
"symlink_target": ""
} |
<?php
namespace Synapse\Cmf\Framework\Media\File\Repository\InMemory;
use Synapse\Cmf\Framework\Media\File\Entity\File;
use Synapse\Cmf\Framework\Media\File\Repository\RepositoryInterface;
use Majora\Framework\Repository\InMemory\AbstractInMemoryRepository;
use Majora\Framework\Repository\InMemory\InMemoryRepositoryTrait;
/**
* File persistence implementation using InMemory Orm.
*/
class InMemoryRepository extends AbstractInMemoryRepository implements RepositoryInterface
{
    use InMemoryRepositoryTrait;

    /**
     * @see EventSubscriberInterface::getSubscribedEvents()
     * @codeCoverageIgnore : configuration method
     */
    public static function getSubscribedEvents()
    {
        // This repository subscribes to no events.
        return array();
    }

    /**
     * Proxy for persist() repository general method.
     *
     * Stores the given File entity in the in-memory backing store.
     *
     * @see RepositoryInterface::save()
     */
    public function save(File $file)
    {
        return $this->persist($file);
    }

    /**
     * Proxy for remove() repository general method.
     *
     * Deletes the given File entity from the in-memory backing store.
     *
     * @see RepositoryInterface::delete()
     */
    public function delete(File $file)
    {
        return $this->remove($file);
    }
}
| {
"content_hash": "3c226508eda4da9763755080e2331bd3",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 90,
"avg_line_length": 25.377777777777776,
"alnum_prop": 0.6943957968476357,
"repo_name": "Synapse-Cmf/synapse-cmf",
"id": "9651fbf6ef221b84db1c3d614190381ed248e2d0",
"size": "1142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Synapse/Cmf/Framework/Media/File/Repository/InMemory/InMemoryRepository.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "3605"
},
{
"name": "CSS",
"bytes": "4518"
},
{
"name": "HTML",
"bytes": "34283"
},
{
"name": "JavaScript",
"bytes": "12664"
},
{
"name": "Makefile",
"bytes": "4478"
},
{
"name": "PHP",
"bytes": "644363"
}
],
"symlink_target": ""
} |
require 'net/http'
module Selenium
  module WebDriver
    # Chrome-specific WebDriver namespace: lazily loads the Chrome driver
    # classes and exposes module-level configuration for the browser binary
    # path and (deprecated) the chromedriver path.
    module Chrome
      autoload :Bridge, 'selenium/webdriver/chrome/bridge'
      autoload :Driver, 'selenium/webdriver/chrome/driver'
      autoload :Profile, 'selenium/webdriver/chrome/profile'
      autoload :Options, 'selenium/webdriver/chrome/options'
      autoload :Service, 'selenium/webdriver/chrome/service'

      # Deprecated writer: logs a deprecation warning and forwards the
      # chromedriver path to Chrome::Service.driver_path=.
      def self.driver_path=(path)
        WebDriver.logger.deprecate 'Selenium::WebDriver::Chrome#driver_path=',
                                   'Selenium::WebDriver::Chrome::Service#driver_path='
        Selenium::WebDriver::Chrome::Service.driver_path = path
      end

      # Deprecated reader: logs a deprecation warning and reads the
      # chromedriver path from Chrome::Service.driver_path.
      def self.driver_path
        WebDriver.logger.deprecate 'Selenium::WebDriver::Chrome#driver_path',
                                   'Selenium::WebDriver::Chrome::Service#driver_path'
        Selenium::WebDriver::Chrome::Service.driver_path
      end

      # Sets the path to the Chrome browser binary after asserting it is
      # executable.
      def self.path=(path)
        Platform.assert_executable path
        @path = path
      end

      # Returns the configured Chrome binary path, or nil when unset
      # (the ||= nil avoids an uninitialized-ivar warning).
      def self.path
        @path ||= nil
      end
    end # Chrome
  end # WebDriver
end # Selenium
| {
"content_hash": "cd4ae262e012ffc01cae955d1c4c51c9",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 86,
"avg_line_length": 32.64705882352941,
"alnum_prop": 0.6351351351351351,
"repo_name": "oddui/selenium",
"id": "02c9a7f48cc2f51d9d9531ae1ec1125bc0a37fa6",
"size": "1929",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "rb/lib/selenium/webdriver/chrome.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "825"
},
{
"name": "Batchfile",
"bytes": "347"
},
{
"name": "C",
"bytes": "47126"
},
{
"name": "C#",
"bytes": "3507419"
},
{
"name": "C++",
"bytes": "2219970"
},
{
"name": "CSS",
"bytes": "11660"
},
{
"name": "HTML",
"bytes": "1625237"
},
{
"name": "Java",
"bytes": "5362660"
},
{
"name": "JavaScript",
"bytes": "3575635"
},
{
"name": "Makefile",
"bytes": "4655"
},
{
"name": "Python",
"bytes": "1108508"
},
{
"name": "Ragel",
"bytes": "3086"
},
{
"name": "Ruby",
"bytes": "809667"
},
{
"name": "Shell",
"bytes": "15964"
},
{
"name": "XSLT",
"bytes": "1047"
}
],
"symlink_target": ""
} |
module Gitlab
  module GithubImport
    module Representation
      # Lightweight representation of a GitHub user, exposing only the
      # attributes the importer needs (:id and :login).
      class User
        include ToHash
        include ExposeAttribute

        # Hash of user details; keys (including nested ones) are symbols.
        attr_reader :attributes

        expose_attribute :id, :login

        # Builds a user from a GitHub API response.
        #
        # user - An instance of `Sawyer::Resource` containing the user details.
        def self.from_api_response(user)
          new(id: user.id, login: user.login)
        end

        # Builds a user using a Hash that was built from a JSON payload
        # (string keys are symbolized before construction).
        def self.from_json_hash(raw_hash)
          new(Representation.symbolize_hash(raw_hash))
        end

        # attributes - A Hash containing the user details. The keys of this
        #              Hash (and any nested hashes) must be symbols.
        def initialize(attributes)
          @attributes = attributes
        end
      end
    end
  end
end
| {
"content_hash": "e9318590620cc4312f7d083b43d9157b",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 79,
"avg_line_length": 27.6875,
"alnum_prop": 0.6060948081264108,
"repo_name": "mmkassem/gitlabhq",
"id": "e00dcfca33d0a773aceec65a1cbe32c3551242f4",
"size": "917",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "lib/gitlab/github_import/representation/user.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "113683"
},
{
"name": "CoffeeScript",
"bytes": "139197"
},
{
"name": "Cucumber",
"bytes": "119759"
},
{
"name": "HTML",
"bytes": "447030"
},
{
"name": "JavaScript",
"bytes": "29805"
},
{
"name": "Ruby",
"bytes": "2417833"
},
{
"name": "Shell",
"bytes": "14336"
}
],
"symlink_target": ""
} |
title: Comparison to Other Systems
kind: misc
weight: 16
---
Often the easiest way to understand a new language is by comparing
it to languages you already know. Here we show how policies from
several existing policy systems can be implemented with the Open
Policy Agent.
## Role-based access control (RBAC)
Role-based access control (RBAC) is pervasive today for authorization.
To use RBAC for authorization, you write down two different kinds of
information.
* Which users have which roles
* Which roles have which permissions
Once you provide RBAC with both those assignments, RBAC tells you
how to make an authorization decision. A user is authorized for
all those permissions assigned to any of the roles she is assigned to.
For example, we might have the following user/role assignments:
| User | Role |
| --- | --- |
| ``alice`` | ``engineering`` |
| ``alice`` | ``webdev`` |
| ``bob`` | ``hr`` |
And the following role/permission assignments:
| Role | Permission | Resource |
| --- | --- | --- |
| ``engineering`` | ``read`` | ``server123`` |
| ``webdev`` | ``write`` | ``server123`` |
| ``webdev`` | ``read`` | ``server123`` |
| ``hr`` | ``read`` | ``database456`` |
In this example, RBAC makes the following authorization decisions:
| User | Operation | Resource | Decision |
| --- | --- | --- | --- |
| ``alice`` | ``read`` | ``server123`` | ``allow`` because ``alice`` is in ``engineering`` |
| ``alice`` | ``write`` | ``server123`` | ``allow`` because ``alice`` is in ``webdev`` |
| ``bob`` | ``read`` | ``database456`` | ``allow`` because ``bob`` is in ``hr`` |
| ``bob`` | ``read`` | ``server123`` | ``deny`` because ``bob`` is not in ``engineering`` or ``webdev`` |
With OPA, you can write the following snippets to implement the
example RBAC policy shown above.
```live:rbac:module:openable
package rbac.authz
# user-role assignments
user_roles := {
"alice": ["engineering", "webdev"],
"bob": ["hr"]
}
# role-permissions assignments
role_permissions := {
"engineering": [{"action": "read", "object": "server123"}],
"webdev": [{"action": "read", "object": "server123"},
{"action": "write", "object": "server123"}],
"hr": [{"action": "read", "object": "database456"}]
}
# logic that implements RBAC.
default allow = false
allow {
# lookup the list of roles for the user
roles := user_roles[input.user]
# for each role in that list
r := roles[_]
# lookup the permissions list for role r
permissions := role_permissions[r]
# for each permission
p := permissions[_]
# check if the permission granted to r matches the user's request
p == {"action": input.action, "object": input.object}
}
```
```live:rbac:query:hidden
allow
```
As you can see, querying the `allow` rule with the following input
```live:rbac:input
{
"user": "bob",
"action": "read",
"object": "server123"
}
```
Results in the response you'd expect.
```live:rbac:output
```
### RBAC Separation of duty (SOD)
Separation of duty (SOD) refers to the idea that there are certain
combinations of permissions that no one should have at the same time.
For example, no one should be able to both create payments and approve payments.
In RBAC, that means there are some pairs of roles that no one should be
assigned simultaneously. For example, any user assigned both of the roles
in each pair below would violate SOD.
* create-payment and approve-payment
* create-vendor and pay-vendor
OPA's API does not yet let you enforce SOD by rejecting improper role-assignments,
but it does let you express SOD constraints and ask for all SOD violations,
as shown below. (Here we assume the statements below are added to the RBAC
statements above.)
```live:rbac/sod:module:openable
# Pairs of roles that no user can be assigned to simultaneously
sod_roles := [
["create-payment", "approve-payment"],
["create-vendor", "pay-vendor"],
]
# Find all users violating SOD
sod_violation[user] {
some user
# grab one role for a user
role1 := user_roles[user][_]
# grab another role for that same user
role2 := user_roles[user][_]
# check if those roles are forbidden by SOD
sod_roles[_] == [role1, role2]
}
```
(For those familiar with SOD, this is the static version since SOD violations
happen whenever a user is assigned two conflicting roles. The dynamic version of SOD allows
a single user to be assigned two conflicting roles but requires that the same user not
utilize those roles on the same transaction, which is out of scope for this document.)
## Attribute-based access control (ABAC)
With attribute-based access control, you make policy decisions using the
attributes of the users, objects, and actions involved in the request.
It has three main components:
* Attributes for users
* Attributes for objects
* Logic dictating which attribute combinations are authorized
For example, we might know the following attributes for our users
* alice
* joined the company 15 years ago
* is a trader
* bob
* joined the company 5 years ago
* is an analyst
We would also have attributes for the objects, in this case stock ticker symbols.
* MSFT
* is sold on NASDAQ
* sells at $59.20 per share
* AMZN
* is sold on NASDAQ
* sells at $813.64 per share
An example ABAC policy in english might be:
* Traders may purchase NASDAQ stocks for under $2M
* Traders with 10+ years experience may purchase NASDAQ stocks for under $5M
OPA supports ABAC policies as shown below.
```live:abac:module:openable
package abac
# User attributes
user_attributes := {
"alice": {"tenure": 15, "title": "trader"},
"bob": {"tenure": 5, "title": "analyst"}
}
# Stock attributes
ticker_attributes := {
"MSFT": {"exchange": "NASDAQ", "price": 59.20},
"AMZN": {"exchange": "NASDAQ", "price": 813.64}
}
default allow = false
# all traders may buy NASDAQ under $2M
allow {
# lookup the user's attributes
user := user_attributes[input.user]
# check that the user is a trader
user.title == "trader"
# check that the stock being purchased is sold on the NASDAQ
ticker_attributes[input.ticker].exchange == "NASDAQ"
# check that the purchase amount is under $2M
input.amount <= 2000000
}
# traders with 10+ years experience may buy NASDAQ under $5M
allow {
# lookup the user's attributes
user := user_attributes[input.user]
# check that the user is a trader
user.title == "trader"
# check that the stock being purchased is sold on the NASDAQ
ticker_attributes[input.ticker].exchange == "NASDAQ"
# check that the user has at least 10 years of experience
user.tenure > 10
# check that the purchase amount is under $5M
input.amount <= 5000000
}
```
```live:abac:query:hidden
allow
```
```live:abac:input
{
"user": "alice",
"ticker": "MSFT",
"action": "buy",
"amount": 1000000
}
```
Querying the `allow` rule with the input above returns the following answer:
```live:abac:output
```
In OPA, there's nothing special about users and objects. You can attach
attributes to anything. And the attributes can themselves be structured JSON objects
and have attributes on attributes on attributes, etc. Because OPA was designed to work
with arbitrarily nested JSON data, it supports incredibly rich ABAC policies.
## Amazon Web Services IAM
Amazon Web Services (AWS) lets you create policies that can be attached to users, roles, groups,
and selected resources. You write `allow` and `deny` statements to enforce which users/roles can/can't
execute which API calls on which resources under certain conditions.
By default all API access requests are implicitly denied (i.e., not allowed). Policy statements
can explicitly allow or deny API requests. If a request is both allowed and denied, it is always denied.
Let's assume that the following [customer managed policy](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_managed-vs-inline.html#customer-managed-policies) is defined in AWS:
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "FirstStatement",
"Effect": "Allow",
"Action": ["iam:ChangePassword"],
"Resource": "*"
},
{
"Sid": "SecondStatement",
"Effect": "Allow",
"Action": "s3:ListAllMyBuckets",
"Resource": "*"
},
{
"Sid": "ThirdStatement",
"Effect": "Allow",
"Action": [
"s3:List*",
"s3:Get*"
],
"Resource": [
"arn:aws:s3:::confidential-data",
"arn:aws:s3:::confidential-data/*"
]
}
]
}
```
And the above policy is attached to principal alice in AWS using
[attach-user-policy](https://docs.aws.amazon.com/cli/latest/reference/iam/attach-user-policy.html) API.
In OPA, you write each of the AWS `allow` statements as a separate statement, and you
expect the input to have `principal`, `action`, and `resource` fields.
```live:iam:module:openable
package aws
default allow = false
# FirstStatement
allow {
principals_match
input.action == "iam:ChangePassword"
}
# SecondStatement
allow {
principals_match
input.action == "s3:ListAllMyBuckets"
}
# ThirdStatement
# Use helpers to handle implicit OR in the AWS policy.
# Below all of the 'principals_match', 'actions_match' and 'resources_match' must be true.
allow {
principals_match
actions_match
resources_match
}
# principals_match is true if input.principal matches
principals_match {
input.principal == "alice"
}
# actions_match is true if input.action matches one in the list
actions_match {
# iterate over the actions in the list
actions := ["s3:List.*","s3:Get.*"]
action := actions[_]
# check if input.action matches an action
regex.globs_match(input.action, action)
}
# resources_match is true if input.resource matches one in the list
resources_match {
# iterate over the resources in the list
resources := ["arn:aws:s3:::confidential-data","arn:aws:s3:::confidential-data/.*"]
resource := resources[_]
# check if input.resource matches a resource
regex.globs_match(input.resource, resource)
}
```
```live:iam:input
{
"principal": "alice",
"action": "ec2:StartInstance",
"resource": "arn:aws:ec2:::instance/i78999879"
}
```
Querying `allow` with the input above returns the following answer:
```live:iam:query:hidden
allow
```
```live:iam:output
```
## XACML
eXtensible Access Control Markup Language (XACML) was designed to express security policies: allow/deny decisions using attributes of users, resources, actions, and the environment.
The following policy says that users from the organization Curtiss or Packard who are US or GreatBritain nationals and who work on DetailedDesign or Simulation are permitted access to documents about NavigationSystems.
```xml
<Policy PolicyId="urn:curtiss:ba:taa:taa-1.1" RuleCombiningAlgId="urn:oasis:names:tc:xacml:1.0:rule-combining-algorithm:deny-overrides">
<Description>Policy for Business Authorization category TAA-1.1</Description>
<Target>
<AnyOf>
<AllOf>
<Match
MatchId="urn:oasis:names:tc:xacml:1.0:function:string-equal">
<AttributeValue DataType="http://www.w3.org/2001/XMLSchema#string">NavigationSystem</AttributeValue>
<AttributeDesignator
MustBePresent="true"
Category="urn:oasis:names:tc:xacml:3.0:attribute-category:resource"
AttributeId="urn:curtiss:names:tc:xacml:1.0:resource:Topics"
DataType="http://www.w3.org/2001/XMLSchema#string"/>
</Match>
</AllOf>
</AnyOf>
</Target>
<Rule Effect="Permit">
<Description />
<Target>
<Actions>
<Action>
<ActionMatch MatchId="urn:oasis:names:tc:xacml:1.0:function:string-equal">
<ActionAttributeDesignator
AttributeId="urn:oasis:names:tc:xacml:1.0:action:action-id"
DataType="http://www.w3.org/2001/XMLSchema#string" />
<AttributeValue DataType="http://www.w3.org/2001/XMLSchema#string">Any</AttributeValue>
</ActionMatch>
</Action>
</Actions>
</Target>
<Condition FunctionId="urn:oasis:names:tc:xacml:1.0:function:and">
<Apply xsi:type="AtLeastMemberOf" functionId="urn:oasis:names:tc:xacml:1.0:function:string-at-least-one-member-of">
<Apply functionId="urn:oasis:names:tc:xacml:1.0:function:string-bag">
<AttributeValue DataType="http://www.w3.org/2001/XMLSchema#string">Curtiss</AttributeValue>
<AttributeValue DataType="http://www.w3.org/2001/XMLSchema#string">Packard</AttributeValue>
</Apply>
<AttributeDesignator AttributeId="http://schemas.tscp.org/2012-03/claims/OrganizationID" DataType="http://www.w3.org/2001/XMLSchema#string" />
</Apply>
<Apply xsi:type="AtLeastMemberOf" functionId="urn:oasis:names:tc:xacml:1.0:function:string-at-least-one-member-of">
<Apply functionId="urn:oasis:names:tc:xacml:1.0:function:string-bag">
<AttributeValue DataType="http://www.w3.org/2001/XMLSchema#string">US</AttributeValue>
<AttributeValue DataType="http://www.w3.org/2001/XMLSchema#string">GB</AttributeValue>
</Apply>
<AttributeDesignator AttributeId="http://schemas.tscp.org/2012-03/claims/Nationality" DataType="http://www.w3.org/2001/XMLSchema#string" />
</Apply>
<Apply xsi:type="AtLeastMemberOf" functionId="urn:oasis:names:tc:xacml:1.0:function:string-at-least-one-member-of">
<Apply functionId="urn:oasis:names:tc:xacml:1.0:function:string-bag">
<AttributeValue DataType="http://www.w3.org/2001/XMLSchema#string">DetailedDesign</AttributeValue>
<AttributeValue DataType="http://www.w3.org/2001/XMLSchema#string">Simulation</AttributeValue>
</Apply>
<AttributeDesignator AttributeId="http://schemas.tscp.org/2012-03/claims/Work-Effort" DataType="http://www.w3.org/2001/XMLSchema#string" />
</Apply>
<Apply xsi:type="AndFunction" functionId="urn:oasis:names:tc:xacml:1.0:function:and" />
</Condition>
</Rule>
</Policy>
```
The same statement is shown below in OPA. Here the inputs are assumed to be
roughly the same as for XACML: attributes of users, actions, and resources.
```live:xacml:module:openable
package xacml
permit {
# Check that resource has a "NavigationSystem" entry
input.resource["NavigationSystem"]
# Check that organization is one of the options (underscore implements "any")
org_options := ["Packard", "Curtiss"]
input.user.organization == org_options[_]
# Check that nationality is one of the options (underscore implements "any")
nationality_options := ["GB", "US"]
input.user.nationality == nationality_options[_]
# Check that work_effort is one of the options (underscore implements "any")
work_options := ["DetailedDesign", "Simulation"]
input.user.work_effort == work_options[_]
}
```
```live:xacml:input
{
"user": {
"name": "alice",
"organization": "Packard",
"nationality": "GB",
"work_effort": "DetailedDesign"
},
"resource": {
"NavigationSystem": true
},
"action": {
"name": "read"
}
}
```
Querying `permit` with the input above returns the following answer:
```live:xacml:query:hidden
permit
```
```live:xacml:output
```
| {
"content_hash": "1568968390930323ca1761f7eb756437",
"timestamp": "",
"source": "github",
"line_count": 479,
"max_line_length": 218,
"avg_line_length": 31.989561586638832,
"alnum_prop": 0.6851791424655747,
"repo_name": "tsandall/opa",
"id": "3e357b3af4951475aef69f50ddd7ae00d459c982",
"size": "15327",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "docs/content/comparison-to-other-systems.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "894440"
},
{
"name": "C++",
"bytes": "707511"
},
{
"name": "Dockerfile",
"bytes": "2718"
},
{
"name": "Go",
"bytes": "5505691"
},
{
"name": "JavaScript",
"bytes": "14203"
},
{
"name": "Makefile",
"bytes": "21894"
},
{
"name": "Open Policy Agent",
"bytes": "15272"
},
{
"name": "Python",
"bytes": "6039"
},
{
"name": "Shell",
"bytes": "14785"
}
],
"symlink_target": ""
} |
/*@z37.c:Font Service:Declarations@*******************************************/
/* */
/* THE LOUT DOCUMENT FORMATTING SYSTEM (VERSION 3.24) */
/* COPYRIGHT (C) 1991, 2000 Jeffrey H. Kingston */
/* */
/* Jeffrey H. Kingston (jeff@cs.usyd.edu.au) */
/* Basser Department of Computer Science */
/* The University of Sydney 2006 */
/* AUSTRALIA */
/* */
/* This program is free software; you can redistribute it and/or modify */
/* it under the terms of the GNU General Public License as published by */
/* the Free Software Foundation; either Version 2, or (at your option) */
/* any later version. */
/* */
/* This program is distributed in the hope that it will be useful, */
/* but WITHOUT ANY WARRANTY; without even the implied warranty of */
/* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
/* GNU General Public License for more details. */
/* */
/* You should have received a copy of the GNU General Public License */
/* along with this program; if not, write to the Free Software */
/* Foundation, Inc., 59 Temple Place, Suite 330, Boston MA 02111-1307 USA */
/* */
/* FILE: z37.c */
/* MODULE: Font Service */
/* EXTERNS: FontInit(), FontDefine(), FontChange(), FontWordSize(), */
/* FontSize(), FontHalfXHeight(), FontEncoding(), */
/* FontMapping(), FontFamilyAndFace(), FontNeeded() */
/* */
/* This module implements fonts, using encoding vectors and Adobe font */
/* metrics files (.AFM files, version 2). */
/* */
/*****************************************************************************/
#include "externs.h"
#define DEFAULT_XHEIGHT 500 /* the default XHeight if font has none */
#define NO_FONT 0 /* the not-a-font font number */
#define SZ_DFT 1000 /* default lout size is 50p */
#define INIT_FINFO_SIZE 100 /* initial number of sized fonts set aside */
/*****************************************************************************/
/* */
/* These definitions have been moved to "externs.h" since z24.c needs them: */
/* */
/* struct metrics { */
/* SHORT_LENGTH up; */
/* SHORT_LENGTH down; */
/* SHORT_LENGTH left; */
/* SHORT_LENGTH right; */
/* SHORT_LENGTH last_adjust; */
/* }; */
/* */
/* typedef struc composite_rec { */
/* FULL_CHAR char_code; */
/* SHORT_LENGTH x_offset; */
/* SHORT_LENGTH y_offset; */
/* } COMPOSITE; */
/* */
/* typedef struct font_rec { */
/* struct metrics *size_table; metrics of sized fonts */
/* FULL_CHAR *lig_table; ligatures */
/* unsigned short *composite; non-zero means composite */
/* COMPOSITE *cmp_table; composites to build */
/* int cmp_top; length of cmp_table */
/* OBJECT font_table; record of sized fonts */
/* OBJECT original_face; face object of font */
/* SHORT_LENGTH underline_pos; position of underline */
/* SHORT_LENGTH underline_thick; thickness of underline */
/* unsigned short *kern_table; first kerning chars */
/* FULL_CHAR *kern_chars; second kerning chars */
/* unsigned char *kern_value; points into kern_lengths */
/* SHORT_LENGTH *kern_sizes; sizes of kernings */
/* } FONT_INFO; */
/* */
/*****************************************************************************/
/*****************************************************************************/
/* */
/* Private data structures of this module */
/* */
/* +++++++++++++++++++++++++++ */
/* + + */
/* root -> + ACAT + */
/* + + */
/* + + */
/* +++++++++++++++++++++++++++ */
/* | font families... */
/* | */
/* +-----+-----------------------------------------------+ ... */
/* | | */
/* | | */
/* +++++++++++++++++++++++++++ */
/* + + */
/* family -> + WORD + */
/* + string (family name) + */
/* + + */
/* +++++++++++++++++++++++++++ */
/* | faces of this family... */
/* | */
/* +-----+-----------------------------------------------+ ... */
/* | | */
/* | | */
/* +++++++++++++++++++++++++++++++++ */
/* + + */
/* face -> + WORD + */
/* + string (face name) + */
/* + font_recoded + */
/* + font_mapping + */
/* + font_page + */
/* + + */
/* +++++++++++++++++++++++++++++++++ */
/* | size records... */
/* | */
/* +----------+---------+--------------------+-----------------------+ */
/* | | | | */
/* | | | | */
/* +++++++++++++++++++ +++++++++++++++++++ +++++++++++++++++++++ */
/* + + + + + + */
/* + WORD + + WORD + + WORD + */
/* + string (font + + string (AFM + + string (short + */
/* + name) + + file name) + + font name) + */
/* + + + + + font_num + */
/* +++++++++++++++++++ +++++++++++++++++++ + font_size + */
/* | + font_xheight2 + */
/* | + font_recoded + */
/* ++++++++++++++++++++ + font_mapping + */
/* + + + font_spacewidth + */
/* (optional) + WORD + + + */
/* + string (extra + +++++++++++++++++++++ */
/* + AFM file name) + */
/* + + */
/* ++++++++++++++++++++ */
/* */
/*****************************************************************************/
/* Module state.  The font tree rooted at font_root has the family / face /  */
/* size structure diagrammed above; finfo[] holds the record of each sized   */
/* font, indexed by font number, and is grown on demand (see FontRead).      */
int font_curr_page; /* current page number */
FONT_INFO *finfo; /* all the font table info */
static int finfo_size; /* current finfo array size */
static OBJECT font_root; /* root of tree of fonts */
static OBJECT font_used; /* fonts used on this page */
static FONT_NUM font_count; /* number of sized fonts */
static int font_seqnum; /* unique number for a font */
static OBJECT FontDefSym; /* symtab entry for @FontDef */
static OBJECT fd_tag; /* @FontDef @Tag entry */
static OBJECT fd_family; /* @FontDef @Family entry */
static OBJECT fd_face; /* @FontDef @Face entry */
static OBJECT fd_name; /* @FontDef @Name entry */
static OBJECT fd_metrics; /* @FontDef @Metrics entry */
static OBJECT fd_extra_metrics; /* @FontDef @ExtraMetrics */
static OBJECT fd_mapping; /* @FontDef @Mapping entry */
static OBJECT fd_recode; /* @FontDef @Recode entry */
/*@::FontInit(), FontDebug()@*************************************************/
/* */
/* FontInit() */
/* */
/* Initialise this module. */
/* */
/*****************************************************************************/
/*****************************************************************************/
/*                                                                           */
/*  load(name, dtype, encl, compulsory)                                      */
/*                                                                           */
/*  Insert one symbol called name with this type into enclosing symbol       */
/*  encl, giving it an empty word as its value.  NPAR symbols are made       */
/*  visible; a compulsory symbol is flagged as such and counted in encl.     */
/*                                                                           */
/*****************************************************************************/

static OBJECT load(FULL_CHAR *name, unsigned dtype, OBJECT encl, BOOLEAN compulsory)
{ OBJECT sym;
  sym = InsertSym(name, dtype, no_fpos, DEFAULT_PREC, FALSE, FALSE, 0, encl,
    MakeWord(WORD, STR_EMPTY, no_fpos));
  if( dtype == NPAR )
    visible(sym) = TRUE;
  if( compulsory )
  { is_compulsory(sym) = TRUE;
    has_compulsory(encl)++;
  }
  return sym;
} /* end load */
void FontInit(void)
{
debug0(DFT, D, "FontInit()");
font_curr_page = 1;
font_count = 0;
New(font_root, ACAT);
New(font_used, ACAT);
font_seqnum = 0;
finfo = (FONT_INFO *) malloc(INIT_FINFO_SIZE * sizeof(FONT_INFO));
finfo_size = INIT_FINFO_SIZE;
ifdebug(DMA, D,
DebugRegisterUsage(MEM_FONTS, 1, INIT_FINFO_SIZE * sizeof(FONT_INFO)));
/* set up FontDefSym */
FontDefSym = load(KW_FONTDEF, LOCAL, StartSym, FALSE);
fd_tag = load(KW_TAG, NPAR, FontDefSym, TRUE);
fd_family = load(KW_FAMILY, NPAR, FontDefSym, TRUE);
fd_face = load(KW_FACE, NPAR, FontDefSym, TRUE);
fd_name = load(KW_NAME, NPAR, FontDefSym, TRUE);
fd_metrics = load(KW_METRICS, NPAR, FontDefSym, TRUE);
fd_extra_metrics = load(KW_EXTRA_METRICS, NPAR, FontDefSym, FALSE);
fd_mapping = load(KW_MAPPING, NPAR, FontDefSym, TRUE);
fd_recode = load(KW_RECODE, NPAR, FontDefSym, FALSE);
debug0(DFT, D, "FontInit returning.");
}
/*****************************************************************************/
/*                                                                           */
/*  FontDebug()                                                              */
/*                                                                           */
/*  Print out font tree (not currently used).  For each family in the font   */
/*  tree print its faces and the children of each face, then dump the        */
/*  font_table entry of every sized font in finfo[].                         */
/*                                                                           */
/*****************************************************************************/
#if DEBUG_ON
static void FontDebug(void)
{ OBJECT family, face, link, flink, zlink, z; int i;
  assert(font_root!=nilobj && type(font_root)==ACAT, "FontDebug: font_root!");
  /* walk families, then the faces of each family, then each face's children */
  for( link = Down(font_root); link != font_root; link = NextDown(link) )
  { Child(family, link);
    assert( is_word(type(family)), "FontDebug: family!" );
    debug1(DFS, D, "family %s:", string(family));
    for( flink = Down(family); flink != family; flink = NextDown(flink) )
    { Child(face, flink);
      assert( is_word(type(face)), "FontDebug: face!" );
      debug1(DFS, D, " face %s:", string(face));
      for( zlink = Down(face); zlink != face; zlink = NextDown(zlink) )
      { Child(z, zlink);
        if( is_word(type(z)) )
        { debug2(DFS, D, " %s%s", string(z), Down(z) != z ? " child" : "");
        }
        else
        { debug1(DFS, D, " %s", Image(type(z)));
        }
      }
    }
  }
  /* dump the record of every sized font */
  for( i = 1; i <= font_count; i++ )
    fprintf(stderr, " finfo[%d].font_table = %s\n", i,
      EchoObject(finfo[i].font_table));
} /* end FontDebug */
/*****************************************************************************/
/*                                                                           */
/*  DebugKernTable(fnum)                                                     */
/*                                                                           */
/*  Print debug output of kern table for font fnum.  For each first          */
/*  character i that has kerning pairs, kern_table[i] indexes the run of     */
/*  second characters in kern_chars[] (terminated by '\0'), and              */
/*  kern_value[j] selects the pair's entry in kern_sizes[].                  */
/*                                                                           */
/*****************************************************************************/
static void DebugKernTable(FONT_NUM fnum)
{ int i, j;
  unsigned short *kt = finfo[fnum].kern_table;
  FULL_CHAR *kc = finfo[fnum].kern_chars;
  unsigned char *kv = finfo[fnum].kern_value;
  SHORT_LENGTH *ks = finfo[fnum].kern_sizes;
  debug1(DFT, DD, "DebugKernTable(%d)", fnum);
  for( i = 0; i < MAX_CHARS; i++ )
  { if( kt[i] != 0 )  /* non-zero means character i begins kerning pairs */
    { debug1(DFT, DD, "kt[%d]:", i);
      for( j = kt[i]; kc[j] != '\0'; j++ )
      { debug3(DFT, DD, "KPX %c %c %d", i, kc[j], ks[kv[j]]);
      }
    }
  }
  debug1(DFT, DD, "DebugKernTable(%d) returning", fnum);
} /* DebugKernTable */
#endif
/*****************************************************************************/
/*                                                                           */
/*  ReadCharMetrics(face, fixed_pitch, xheight2,lig,ligtop,fnum,fnt,lnum,fp) */
/*                                                                           */
/*  Read a sequence of character metrics lines.  The font record is          */
/*  face, its ligatures are lig[0..ligtop], font number fnum, metrics fnt.   */
/*  The line number is lnum; input is to be read from file fp.               */
/*                                                                           */
/*  Reading stops at EndCharMetrics or EndExtraCharMetrics (or EOF).  Each   */
/*  line holds semicolon-separated fields; the ones acted on here are        */
/*  N (character name), WX (width), B (bounding box) and L (ligature).       */
/*                                                                           */
/*****************************************************************************/
static void ReadCharMetrics(OBJECT face, BOOLEAN fixed_pitch, int xheight2,
  FULL_CHAR *lig, int *ligtop, FILE_NUM fnum, struct metrics *fnt,
  int *lnum, FILE *fp)
{ FULL_CHAR buff[MAX_BUFF], command[MAX_BUFF], ch, ligchar;
  int i, wx, llx, lly, urx, ury;
  float fl_wx, fl_llx, fl_lly, fl_urx, fl_ury;
  BOOLEAN wxfound, bfound;
  OBJECT AFMfilename;
  /* second child of face is the AFM file name; used for error positions */
  Child(AFMfilename, NextDown(Down(face)));
  while( StringFGets(buff, MAX_BUFF, fp) != NULL &&
    !StringBeginsWith(buff, AsciiToFull("EndCharMetrics")) &&
    !StringBeginsWith(buff, AsciiToFull("EndExtraCharMetrics")) )
  {
    /* read one line containing metric info for one character */
    debug1(DFT, DD, " ReadCharMetrics: %s", buff);
    (*lnum)++; ch = '\0';
    wxfound = bfound = FALSE;
    i = 0; while( buff[i] == ' ' ) i++;
    while( buff[i] != '\n' )
    {
      /* each iteration consumes one semicolon-terminated field */
      debug2(DFT, DDD, " ch = %d, &buff[i] = %s", ch, &buff[i]);
      sscanf( (char *) &buff[i], "%s", command);
      if( StringEqual(command, "N") )
      { /* character name: map it to its code in this font's encoding */
        sscanf( (char *) &buff[i], "N %s", command);
        ch = MapCharEncoding(command, font_mapping(face));
      }
      else if( StringEqual(command, "WX") )
      { sscanf( (char *) &buff[i], "WX %f", &fl_wx);
        wx = fl_wx;
        wxfound = TRUE;
      }
      else if( StringEqual(command, "B") )
      { sscanf( (char *) &buff[i], "B %f %f %f %f",
          &fl_llx, &fl_lly, &fl_urx, &fl_ury);
        llx = fl_llx;
        lly = fl_lly;
        urx = fl_urx;
        ury = fl_ury;
        bfound = TRUE;
      }
      else if( StringEqual(command, "L") &&
        BackEnd->uses_font_metrics && ch != '\0' )
      { /* ligature sequence: append ch and its successors to lig[];        */
        /* lig[ch] holds (first sequence index) - MAX_CHARS, 1 = unknown    */
        if( lig[ch] == 1 ) lig[ch] = (*ligtop) - MAX_CHARS;
        lig[(*ligtop)++] = ch;
        i++;  /* skip L */
        while( buff[i] == ' ' ) i++;
        while( buff[i] != ';' && buff[i] != '\n' )
        { sscanf( (char *) &buff[i], "%s", command);
          ligchar = MapCharEncoding(command, font_mapping(face));
          if( ligchar != '\0' ) lig[(*ligtop)++] = ligchar;
          else
          { Error(37, 1, "ignoring unencoded ligature character %s in font file %s (line %d)",
              WARN, &fpos(AFMfilename), command, FileName(fnum), *lnum);
            lig[ch] = 1;
          }
          if( *ligtop > 2*MAX_CHARS - 5 )
            Error(37, 2, "too many ligature characters in font file %s (line %d)",
              FATAL, &fpos(AFMfilename), FileName(fnum), *lnum);
          while( buff[i] != ' ' && buff[i] != ';' ) i++;
          while( buff[i] == ' ' ) i++;
        }
        lig[(*ligtop)++] = '\0';
      }
      /* skip to the start of the next field, past the ';' separator */
      while( buff[i] != ';' && buff[i] != '\n' ) i++;
      if( buff[i] == ';' )
      { i++; while( buff[i] == ' ' ) i++;
      }
    }
    if( ch > '\0' )
    {
      /* WX and B are compulsory for every encoded character */
      if( !wxfound )
      { Error(37, 3, "WX missing in font file %s (line %d)",
          FATAL, &fpos(AFMfilename), FileName(fnum), *lnum);
      }
      if( !bfound )
      { Error(37, 4, "B missing in font file %s (line %d)",
          FATAL, &fpos(AFMfilename), FileName(fnum), *lnum);
      }
      if( lig[ch] == 1 ) lig[ch] = 0; /* set to known if unknown */
      else if( lig[ch] > 1 ) /* add '\0' to end of ligs */
        lig[(*ligtop)++] = '\0';
      if( BackEnd->uses_font_metrics )
      {
        /* vertical extents are stored relative to half the x-height */
        fnt[ch].left = llx;
        fnt[ch].down = lly - xheight2;
        fnt[ch].right = wx;
        fnt[ch].up = ury - xheight2;
        fnt[ch].last_adjust = (urx==0 || wx==0 || fixed_pitch) ? 0 : urx - wx;
      }
      else
      {
        /* non-metric back end: every character gets the same plain box */
        fnt[ch].left = 0;
        fnt[ch].down = - PlainCharHeight / 2;
        fnt[ch].right = PlainCharWidth;
        fnt[ch].up = PlainCharHeight / 2;
        fnt[ch].last_adjust = 0;
      }
      debug6(DFT, DDD, " fnt[%c] = (%d,%d,%d,%d,%d)",ch, fnt[ch].left,
        fnt[ch].down, fnt[ch].right, fnt[ch].up, fnt[ch].last_adjust);
    }
  }
} /* end ReadCharMetrics */
/*****************************************************************************/
/*                                                                           */
/*  ReadCompositeMetrics(face, Extrafilename, extra_fnum, lnum, composite,   */
/*    cmp, cmptop, fp)                                                       */
/*                                                                           */
/*  Read a sequence of composite metrics lines.  The font record is face.    */
/*  The line number is lnum; input is to be read from file fp.               */
/*                                                                           */
/*  Each CC line names a composite character and its pieces (PCC fields).    */
/*  composite[code] is set to the index of the character's first piece in    */
/*  cmp[], and the run of pieces is terminated by a char_code of '\0'.       */
/*                                                                           */
/*****************************************************************************/
static void ReadCompositeMetrics(OBJECT face, OBJECT Extrafilename,
  FILE_NUM extra_fnum, int *lnum, unsigned short composite[],
  COMPOSITE cmp[], int *cmptop, FILE *fp)
{ char *status;
  FULL_CHAR buff[MAX_BUFF], composite_name[100], name[100];
  int composite_num, x_offset, y_offset, i, count;
  FULL_CHAR composite_code, code;
  /* build composites */
  while( (status = StringFGets(buff, MAX_BUFF, fp)) != (char *) NULL
    && StringBeginsWith(buff, AsciiToFull("CC")) )
  {
    (*lnum)++;
    debug1(DFT, D, " composite: %s", buff);
    /* read CC <charname> <number_of_pieces> ; and move i to after it */
    if( sscanf((char *)buff, "CC %s %d ", composite_name, &composite_num) != 2 )
      Error(37, 5, "syntax error in extra font file %s (line %d)",
        FATAL, &fpos(Extrafilename), FileName(extra_fnum), *lnum);
    for( i = 0; buff[i] != ';' && buff[i] != '\n' && buff[i] != '\0'; i++ );
    if( buff[i] != ';' )
      Error(37, 5, "syntax error in extra font file %s (line %d)",
        FATAL, &fpos(Extrafilename), FileName(extra_fnum), *lnum);
    i++;
    /* add entry for this character to composite */
    composite_code = MapCharEncoding(composite_name,font_mapping(face));
    if( composite_code == (FULL_CHAR) '\0' )
      Error(37, 6, "unknown character name %s in font file %s (line %d)",
        FATAL, &fpos(Extrafilename), FileName(extra_fnum), *lnum);
    composite[composite_code] = *cmptop;
    for( count = 0; count < composite_num; count++ )
    {
      /* read one PCC <charname> <xoffset> <yoffset> ; and move i to after it */
      if( sscanf((char *)&buff[i]," PCC %s %d %d",name,&x_offset,&y_offset)!=3 )
        Error(37, 5, "syntax error in extra font file %s (line %d)",
          FATAL, &fpos(Extrafilename), FileName(extra_fnum), *lnum);
      for( ; buff[i] != ';' && buff[i] != '\n' && buff[i] != '\0'; i++ );
      if( buff[i] != ';' )
        Error(37, 5, "syntax error in extra font file %s (line %d)",
          FATAL, &fpos(Extrafilename), FileName(extra_fnum), *lnum);
      i++;
      /* load this piece into cmp */
      if( *cmptop >= MAX_CHARS )
        Error(37, 7, "too many composites in file %s (at line %d)",
          FATAL, &fpos(Extrafilename), FileName(extra_fnum), *lnum);
      code = MapCharEncoding(name, font_mapping(face));
      /* NOTE(review): unlike composite_code above, code is not checked     */
      /* against '\0', so an unencoded piece name is stored silently        */
      cmp[*cmptop].char_code = code;
      cmp[*cmptop].x_offset = x_offset;
      cmp[*cmptop].y_offset = y_offset;
      (*cmptop)++;
    }
    /* add null terminating component */
    if( *cmptop >= MAX_CHARS )
      Error(37, 8, "too many composites in file %s (at line %d)",
        FATAL, &fpos(Extrafilename), FileName(extra_fnum), *lnum);
    cmp[*cmptop].char_code = (FULL_CHAR) '\0';
    (*cmptop)++;
  }
  /* the CC lines must be followed by EndBuildComposites */
  if( status == (char *) NULL ||
    !StringBeginsWith(buff, AsciiToFull("EndBuildComposites")) )
    Error(37, 9, "missing EndBuildComposites in extra font file %s (line %d)",
      FATAL, &fpos(Extrafilename), FileName(extra_fnum), *lnum);
} /* end ReadCompositeMetrics */
/*@::FontRead()@**************************************************************/
/* */
/* static OBJECT FontRead(FULL_CHAR *family_name, *face_name, OBJECT err) */
/* */
/* Search the font databases for a font with this family and face name. */
/* If found, read the font and update this module's data structures, then */
/* return the face object. */
/* */
/* If an error occurs, use fpos(err) for reporting its location if nothing */
/* better suggests itself. */
/* */
/*****************************************************************************/
static OBJECT FontRead(FULL_CHAR *family_name, FULL_CHAR *face_name, OBJECT err)
{
OBJECT cs, link, db, fontdef_obj, y, ylink;
FULL_CHAR tag[100], seq[100];
FILE_NUM dfnum; long dfpos, cont; int dlnum;
BOOLEAN font_name_found;
OBJECT family, face, font_name, AFMfilename, Extrafilename, LCMfilename;
OBJECT recode, first_size;
FULL_CHAR buff[MAX_BUFF], command[MAX_BUFF], ch;
char *status;
int xheight2, i, lnum, ligtop, cmptop;
float fl_xheight2, fl_under_pos, fl_under_thick;
int under_pos, under_thick;
BOOLEAN upfound, utfound, xhfound;
BOOLEAN fixed_pitch = FALSE;
FILE_NUM fnum, extra_fnum; FILE *fp, *extra_fp;
struct metrics *fnt;
FULL_CHAR *lig; unsigned short *composite; COMPOSITE *cmp;
unsigned short *kt; FULL_CHAR *kc; unsigned char *kv; SHORT_LENGTH *ks;
debug2(DFT, D, "FontRead(%s, %s)", family_name, face_name);
/***************************************************************************/
/* */
/* Get the @FontDef object with tag family_name-face_name from databases */
/* */
/***************************************************************************/
/* if no databases available, fatal error */
cs = cross_sym(FontDefSym);
if( cs == nilobj )
{ Error(37, 10, "unable to set font %s %s (no font databases loaded)",
FATAL, no_fpos, family_name, face_name);
}
/* search the databases for @FontDef @Tag { family-face } */
sprintf( (char *) tag, "%s-%s", family_name, face_name);
for( link = NextUp(Up(cs)); link != cs; link = NextUp(link) )
{ Parent(db, link);
if( DbRetrieve(db, FALSE, FontDefSym,tag,seq,&dfnum,&dfpos,&dlnum,&cont) )
break;
}
/* if not found, return nilobj */
if( link == cs )
{ debug0(DFT, D, "FontRead returning nilobj (not in any database)");
return nilobj;
}
/* found it; read @FontDef object from database file */
SwitchScope(nilobj);
fontdef_obj = ReadFromFile(dfnum, dfpos, dlnum);
UnSwitchScope(nilobj);
if( fontdef_obj == nilobj )
Error(37, 11, "cannot read %s for %s", INTERN, no_fpos, KW_FONTDEF, tag);
/***************************************************************************/
/* */
/* Extract the attributes of fontdef_obj, and check that they are OK. */
/* */
/***************************************************************************/
/* extract the various attributes */
family = face = font_name = AFMfilename = nilobj;
Extrafilename = LCMfilename = recode = nilobj;
for( ylink=Down(fontdef_obj); ylink != fontdef_obj; ylink=NextDown(ylink) )
{ Child(y, ylink);
assert( type(y) == PAR, "FontRead: type(y) != PAR!" );
if( actual(y) == fd_tag )
{
/* do nothing with this one */
}
else if( actual(y) == fd_family )
{ Child(family, Down(y));
if( !is_word(type(family)) || !StringEqual(string(family), family_name) )
Error(37, 12, "font family name %s incompatible with %s value %s",
FATAL, &fpos(fontdef_obj), string(family), KW_TAG, tag);
}
else if( actual(y) == fd_face )
{ Child(face, Down(y));
if( !is_word(type(face)) || !StringEqual(string(face), face_name) )
Error(37, 13, "font face name %s incompatible with %s value %s",
FATAL, &fpos(fontdef_obj), string(face), KW_TAG, tag);
}
else if( actual(y) == fd_name )
{ Child(font_name, Down(y));
font_name = ReplaceWithTidy(font_name, TRUE);
if( !is_word(type(font_name)) )
Error(37, 14, "illegal font name (quotes needed?)",
FATAL, &fpos(font_name));
}
else if( actual(y) == fd_metrics )
{ Child(AFMfilename, Down(y));
AFMfilename = ReplaceWithTidy(AFMfilename, TRUE);
if( !is_word(type(AFMfilename)) )
Error(37, 15, "illegal font metrics file name (quotes needed?)",
FATAL, &fpos(AFMfilename));
}
else if( actual(y) == fd_extra_metrics )
{ Child(Extrafilename, Down(y));
Extrafilename = ReplaceWithTidy(Extrafilename, TRUE);
if( !is_word(type(Extrafilename)) )
Error(37, 16, "illegal font extra metrics file name (quotes needed?)",
FATAL, &fpos(Extrafilename));
}
else if( actual(y) == fd_mapping )
{ Child(LCMfilename, Down(y));
LCMfilename = ReplaceWithTidy(LCMfilename, TRUE);
if( !is_word(type(LCMfilename)) )
Error(37, 17, "illegal mapping file name (quotes needed?)",
FATAL, &fpos(LCMfilename));
}
else if( actual(y) == fd_recode )
{ Child(recode, Down(y));
recode = ReplaceWithTidy(recode, TRUE);
if( !is_word(type(recode)) )
Error(37, 18, "illegal value of %s", FATAL, &fpos(recode),
SymName(fd_recode));
}
else
{ assert(FALSE, "FontRead: cannot identify component of FontDef")
}
}
/* check that all the compulsory ones were found */
/* a warning message will have already been given if not */
if( family == nilobj || face == nilobj || font_name == nilobj ||
AFMfilename == nilobj || LCMfilename == nilobj )
{
debug0(DFT, D, "FontRead returning nilobj (missing compulsory)");
return nilobj;
}
/***************************************************************************/
/* */
/* Update font tree to have this family, face and first_size. */
/* */
/***************************************************************************/
/* insert family into font tree if not already present */
for( link = Down(font_root); link != font_root; link = NextDown(link) )
{ Child(y, link);
if( StringEqual(string(y), string(family)) )
{ family = y;
break;
}
}
if( link == font_root )
MoveLink(Up(family), font_root, PARENT);
/* insert face into family, or error if already present */
for( link = Down(family); link != family; link = NextDown(link) )
{ Child(y, link);
if( StringEqual(string(y), string(face)) )
{ Error(37, 19, "font %s %s already defined, at%s", WARN, &fpos(face),
string(family), string(face), EchoFilePos(&fpos(y)));
debug0(DFT, D, "FontRead returning: font already defined");
DisposeObject(fontdef_obj);
return y;
}
}
MoveLink(Up(face), family, PARENT);
/* PostScript name and AFM file name are first two children of face */
Link(face, font_name);
Link(face, AFMfilename);
/* AFM file name has extra file name as optional child */
if( Extrafilename != nilobj )
Link(AFMfilename, Extrafilename);
/* load character mapping file */
if( recode != nilobj && StringEqual(string(recode), AsciiToFull("No")) )
{ font_recoded(face) = FALSE;
font_mapping(face) = MapLoad(LCMfilename, FALSE);
}
else if( recode == nilobj || StringEqual(string(recode), AsciiToFull("Yes")) )
{ font_recoded(face) = TRUE;
font_mapping(face) = MapLoad(LCMfilename, TRUE);
}
else Error(37, 20, "expecting either Yes or No here", FATAL, &fpos(recode));
/* say that this font is currently unused on any page */
font_page(face) = 0;
/* get a new number for this (default) font size */
if( ++font_count >= finfo_size )
{ if( font_count > MAX_FONT )
Error(37, 21, "too many different fonts and sizes (maximum is %d)",
FATAL, &fpos(err),MAX_FONT);
ifdebug(DMA, D,
DebugRegisterUsage(MEM_FONTS, -1, -finfo_size * sizeof(FONT_INFO)));
finfo_size *= 2;
ifdebug(DMA, D,
DebugRegisterUsage(MEM_FONTS, 1, finfo_size * sizeof(FONT_INFO)));
finfo = (FONT_INFO *) realloc(finfo, finfo_size * sizeof(FONT_INFO));
if( finfo == (FONT_INFO *) NULL )
Error(37, 22, "run out of memory when increasing font table size",
FATAL, &fpos(err));
}
/* build the first size record, and initialize it with what we know now */
first_size = MakeWordTwo(WORD, AsciiToFull("fnt"), StringInt(++font_seqnum),
no_fpos);
Link(face, first_size);
font_num(first_size) = font_count;
font_size(first_size) = BackEnd->uses_font_metrics ? SZ_DFT : PlainCharHeight;
font_recoded(first_size) = font_recoded(face);
font_mapping(first_size) = font_mapping(face);
font_num(face) = font_num(first_size); /* Uwe's suggestion, helps PDF */
/* leaves font_xheight2 and font_spacewidth still to do */
/***************************************************************************/
/* */
/* Read the Adobe font metrics file, and record what's in it. */
/* */
/***************************************************************************/
/* open the Adobe font metrics (AFM) file of the font */
debug0(DFS, D, " calling DefineFile from FontRead");
fnum = DefineFile(string(AFMfilename), STR_EMPTY, &fpos(AFMfilename),
FONT_FILE, FONT_PATH);
fp = OpenFile(fnum, FALSE, FALSE);
if( fp == NULL )
Error(37, 23, "cannot open font file %s", FATAL, &fpos(AFMfilename),
FileName(fnum));
/* check that the AFM file begins, as it should, with "StartFontMetrics" */
if( StringFGets(buff, MAX_BUFF, fp) == NULL ||
sscanf( (char *) buff, "%s", command) != 1 ||
!StringEqual(command, "StartFontMetrics") )
{ debug1(DFT, DD, "first line of AFM file:%s", buff);
debug1(DFT, DD, "command:%s", command);
Error(37, 24, "font file %s does not begin with StartFontMetrics",
FATAL, &fpos(AFMfilename), FileName(fnum));
}
/* initialise font metrics table for the new font */
ifdebug(DMA, D,
DebugRegisterUsage(MEM_FONTS, 1, MAX_CHARS * sizeof(struct metrics)));
fnt = (struct metrics *) malloc(MAX_CHARS * sizeof(struct metrics));
if( fnt == (struct metrics *) NULL )
Error(37, 25, "run out of memory while reading font file %s",
FATAL, &fpos(err), FileName(fnum));
ifdebug(DMA, D,
DebugRegisterUsage(MEM_FONTS, 0, 2*MAX_CHARS*sizeof(FULL_CHAR)));
/* initialise ligature table for the new font */
lig = (FULL_CHAR *) malloc(2*MAX_CHARS*sizeof(FULL_CHAR));
if( lig == (FULL_CHAR *) NULL )
Error(37, 25, "run out of memory while reading font file %s",
FATAL, &fpos(err), FileName(fnum));
for( i = 0; i < MAX_CHARS; i++ ) lig[i] = 1; /* i.e. char unknown */
ligtop = MAX_CHARS+2; /* must avoid ligtop - MAX_CHARS == 0 or 1 */
/* initialise composites table for the new font */
composite = (unsigned short *) malloc(MAX_CHARS * sizeof(unsigned short));
if( composite == (unsigned short *) NULL )
Error(37, 25, "run out of memory while reading font file %s",
FATAL, &fpos(err), FileName(fnum));
cmp = (COMPOSITE *) malloc(MAX_CHARS * sizeof(COMPOSITE));
if( cmp == (COMPOSITE *) NULL )
Error(37, 25, "run out of memory while reading font file %s",
FATAL, &fpos(err), FileName(fnum));
for( i = 0; i < MAX_CHARS; i++ ) composite[i] = 0; /* i.e. not composite */
cmptop = 1; /* must avoid cmptop == 0 */
/* initialise kerning table for the new font */
ifdebug(DMA, D,
DebugRegisterUsage(MEM_FONTS, 0, MAX_CHARS * sizeof(unsigned short)));
kt = (unsigned short *) malloc(MAX_CHARS * sizeof(unsigned short));
if( kt == (unsigned short *) NULL )
Error(37, 25, "run out of memory while reading font file %s",
FATAL, &fpos(err), FileName(fnum));
for( i = 0; i < MAX_CHARS; i++ ) kt[i] = 0; /* i.e. no kerns */
ks = (SHORT_LENGTH *) NULL; /* i.e. no kern sizes */
/* read font metrics file fp */
xhfound = upfound = utfound = FALSE;
xheight2 = under_thick = under_pos = 0;
kc = (FULL_CHAR *) NULL;
kv = (unsigned char *) NULL;
ks = (SHORT_LENGTH *) NULL;
font_name_found = FALSE; lnum = 1;
while( (status = StringFGets(buff, MAX_BUFF, fp)) != (char *) NULL &&
!(buff[0] == 'E' && StringEqual(buff, AsciiToFull("EndFontMetrics\n"))) )
{
lnum++;
sscanf( (char *) buff, "%s", command);
switch( command[0] )
{
case 'U':
if( StringEqual(command, AsciiToFull("UnderlinePosition")) )
{ if( upfound )
{ Error(37, 26, "UnderlinePosition found twice in font file (line %d)",
FATAL, &fpos(AFMfilename), lnum);
}
sscanf( (char *) buff, "UnderlinePosition %f", &fl_under_pos);
under_pos = fl_under_pos;
upfound = TRUE;
}
else if( StringEqual(command, AsciiToFull("UnderlineThickness")) )
{ if( utfound )
{ Error(37, 27, "UnderlineThickness found twice in font file (line %d)",
FATAL, &fpos(AFMfilename), lnum);
}
sscanf( (char *) buff, "UnderlineThickness %f", &fl_under_thick);
under_thick = fl_under_thick;
utfound = TRUE;
}
break;
case 'X':
if( StringEqual(command, AsciiToFull("XHeight")) )
{ if( xhfound )
{ Error(37, 28, "XHeight found twice in font file (line %d)",
FATAL, &fpos(AFMfilename), lnum);
}
sscanf( (char *) buff, "XHeight %f", &fl_xheight2);
xheight2 = fl_xheight2 / 2;
xhfound = TRUE;
}
break;
case 'F':
if( StringEqual(command, AsciiToFull("FontName")) )
{ if( font_name_found )
{ Error(37, 29, "FontName found twice in font file %s (line %d)",
FATAL, &fpos(AFMfilename), FileName(fnum), lnum);
}
sscanf( (char *) buff, "FontName %s", command);
if( StringEqual(command, STR_EMPTY) )
{ Error(37, 30, "FontName empty in font file %s (line %d)",
FATAL, &fpos(AFMfilename), FileName(fnum), lnum);
}
Child(y, Down(face));
if( !StringEqual(command, string(y)) )
Error(37, 31, "FontName in font file (%s) and %s (%s) disagree",
WARN, &fpos(AFMfilename), command, KW_FONTDEF, string(y));
font_name_found = TRUE;
}
break;
case 'I':
if( StringEqual(command, AsciiToFull("IsFixedPitch")) )
{
sscanf( (char *) buff, "IsFixedPitch %s", command);
if( StringEqual(command, AsciiToFull("true")) )
{ fixed_pitch = TRUE;
}
}
break;
case 'S':
if( StringEqual(command, AsciiToFull("StartCharMetrics")) )
{
if( !font_name_found )
Error(37, 32, "FontName missing in file %s",
FATAL, &fpos(AFMfilename), FileName(fnum));
if( !xhfound ) xheight2 = DEFAULT_XHEIGHT / 2;
ReadCharMetrics(face, fixed_pitch, xheight2, lig, &ligtop,
fnum, fnt, &lnum, fp);
}
else if( BackEnd->uses_font_metrics && Kern &&
StringEqual(command, AsciiToFull("StartKernPairs")) )
{ FULL_CHAR ch1, ch2, last_ch1;
FULL_CHAR name1[30], name2[30];
int kc_top, ks_top, pos, num_pairs, ksize; float fl_ksize;
if( sscanf( (char *) buff, "StartKernPairs %d", &num_pairs) != 1 )
Error(37, 33, "syntax error on StartKernPairs line in font file %s (line %d)",
FATAL, &fpos(AFMfilename), FileName(fnum), lnum);
kc_top = 1; ks_top = 1;
ifdebug(DMA, D,
DebugRegisterUsage(MEM_FONTS, 0, 2*num_pairs * sizeof(FULL_CHAR)));
kc = (FULL_CHAR *) malloc(2 * num_pairs * sizeof(FULL_CHAR));
ifdebug(DMA, D, DebugRegisterUsage(MEM_FONTS, 0,
2 * num_pairs * sizeof(unsigned char)));
kv = (unsigned char *) malloc(2 * num_pairs * sizeof(unsigned char));
ifdebug(DMA, D, DebugRegisterUsage(MEM_FONTS, 0,
num_pairs * sizeof(SHORT_LENGTH)));
ks = (SHORT_LENGTH *) malloc(num_pairs * sizeof(SHORT_LENGTH));
last_ch1 = '\0';
while( StringFGets(buff, MAX_BUFF, fp) == (char *) buff &&
!StringBeginsWith(buff, AsciiToFull("EndKernPairs")) )
{
debug1(DFT, DD, "FontRead reading %s", buff);
lnum++;
if( StringBeginsWith(buff, AsciiToFull("KPX")) )
{
/* get the two character names and kern size from buff */
if( sscanf((char *)buff, "KPX %s %s %f",name1,name2,&fl_ksize)!=3 )
Error(37, 34, "syntax error in font file %s (line %d): %s",
FATAL, &fpos(AFMfilename), FileName(fnum), lnum, buff);
/* ignore size 0 kern pairs (they are frequent, why?) */
ksize = fl_ksize;
if( ksize == 0 ) continue;
/* check that both characters are encoded */
ch1 = MapCharEncoding(name1, font_mapping(face));
if( ch1 == '\0' )
{
continue;
}
ch2 = MapCharEncoding(name2, font_mapping(face));
if( ch2 == '\0' )
{
continue;
}
/* check that ch1 is contiguous with previous occurrences */
if( ch1 != last_ch1 && kt[ch1] != 0 )
{ Error(37, 35, "non-contiguous kerning pair %s %s in font file %s (line %d)",
WARN, &fpos(AFMfilename), name1, name2, FileName(fnum), lnum);
continue;
}
last_ch1 = ch1;
/* if ch1 never seen before, make new entry in kt[] and kc[] */
if( kt[ch1] == 0 )
{ debug2(DFT, DD, " kt[%d] = %d", ch1, kc_top);
kt[ch1] = kc_top;
kc[kc_top] = (FULL_CHAR) '\0';
kv[kc_top] = 0;
kc_top++;
}
/* find kerning size in ks[] or else add it to the end */
for( pos = 1; pos < ks_top; pos++ )
{ if( ks[pos] == ksize ) break;
}
if( pos == ks_top )
{ if( ks_top == num_pairs )
Error(37, 36, "too many kerning pairs in font file %s (line %d)",
FATAL, &fpos(AFMfilename), FileName(fnum), lnum);
debug2(DFT, DD, " ks[%d] = %d", pos, ksize);
ks[pos] = ksize;
ks_top++;
}
/* insert ch2 into the kc entries (sorted decreasing) for ch1 */
for( i = kc_top-1; i >= kt[ch1] && kc[i] < ch2; i-- )
{ kc[i+1] = kc[i];
kv[i+1] = kv[i];
}
if( i >= kt[ch1] && kc[i] == ch2 )
Error(37, 37, "kerning pair %s %s appears twice in font file %s (line %d)",
FATAL, &fpos(AFMfilename), name1, name2, FileName(fnum), lnum);
kc[i+1] = ch2;
kv[i+1] = pos;
kc_top++;
}
}
ks[0] = ks_top;
}
break;
default:
break;
}
}
/* make sure we terminated the font metrics file gracefully */
if( status == (char *) NULL )
Error(37, 38, "EndFontMetrics missing from font file %s",
FATAL, &fpos(AFMfilename), FileName(fnum));
fclose(fp);
fp = (FILE *) NULL;
/* complete the initialization of first_size */
font_xheight2(first_size) =
BackEnd->uses_font_metrics ? xheight2 : PlainCharHeight / 4;
ch = MapCharEncoding(STR_PS_SPACENAME, font_mapping(first_size));
font_spacewidth(first_size) = ch == '\0' ? 0 : fnt[ch].right;
/***************************************************************************/
/* */
/* Read the optional Extra font metrics file, and record what's in it. */
/* */
/***************************************************************************/
if( Extrafilename != nilobj )
{ debug0(DFS, D, " calling DefineFile from FontRead (extra_filename)");
extra_fnum = DefineFile(string(Extrafilename), STR_EMPTY,
&fpos(Extrafilename), FONT_FILE, FONT_PATH);
extra_fp = OpenFile(extra_fnum, FALSE, FALSE);
if( extra_fp == NULL )
Error(37, 39, "cannot open extra font file %s", FATAL,
&fpos(Extrafilename), FileName(extra_fnum));
lnum = 0;
while( StringFGets(buff, MAX_BUFF, extra_fp) != (char *) NULL )
{
debug1(DFT, D, " Extra: %s", buff);
lnum++;
sscanf( (char *) buff, "%s", command);
if( command[0] == 'S' )
{
if( StringEqual(command, AsciiToFull("StartExtraCharMetrics")) )
{
/* get extra character metrics, just like the others */
debug0(DFT, D, " StartExtraCharMetrics calling ReadCharMetrics");
ReadCharMetrics(face, fixed_pitch, xheight2, lig, &ligtop,
extra_fnum, fnt, &lnum, extra_fp);
}
else if( StringEqual(command, AsciiToFull("StartBuildComposites")) )
{
/* build composites */
debug0(DFT, D, " StartBuildComposites");
ReadCompositeMetrics(face, Extrafilename, extra_fnum, &lnum,
composite, cmp, &cmptop, extra_fp);
}
}
}
fclose(extra_fp);
extra_fp = (FILE *) NULL;
}
/***************************************************************************/
/* */
/* Set finfo[fontcount] and exit. */
/* */
/***************************************************************************/
finfo[font_count].font_table = first_size;
finfo[font_count].original_face = face;
finfo[font_count].underline_pos = xheight2 - under_pos;
finfo[font_count].underline_thick = under_thick;
finfo[font_count].size_table = fnt;
finfo[font_count].lig_table = lig;
finfo[font_count].composite = composite;
finfo[font_count].cmp_table = cmp;
finfo[font_count].cmp_top = cmptop;
finfo[font_count].kern_table = kt;
finfo[font_count].kern_chars = kc;
finfo[font_count].kern_value = kv;
finfo[font_count].kern_sizes = ks;
ifdebug(DFT, DD, DebugKernTable(font_count));
debug4(DFT, D, "FontRead returning: %d, name %s, fs %d, xh2 %d",
font_count, string(first_size), font_size(first_size), xheight2);
return face;
} /* end FontRead */
/*@::FontChange()@************************************************************/
/* */
/* FontChange(style, x) */
/* */
/* Returns an internal font number which is the current font changed */
/* according to word object x. e.g. if current font is Roman 12p and x is */
/* "-3p", then FontChange returns the internal font number of Roman 9p. */
/* */
/* FontChange permits empty and null objects within x; these have no */
/* effect. */
/* */
/*****************************************************************************/
void FontChange(STYLE *style, OBJECT x)
{ /* register */ int i;
  OBJECT requested_family, requested_face, requested_size;
  OBJECT par[3], family, face, fsize, y, link, new, old, tmpf;
  GAP gp; SHORT_LENGTH flen; int num, c; unsigned inc;
  struct metrics *newfnt, *oldfnt;
  FULL_CHAR *lig;
  int cmptop;
  COMPOSITE *oldcmp, *newcmp;
  SHORT_LENGTH *oldks, *newks; int klen;
  debug2(DFT, D, "FontChange( %s, %s )", EchoStyle(style), EchoObject(x));
  assert( font(*style) <= font_count, "FontChange: font_count!");
  ifdebug(DFT, DD, FontDebug());

  /***************************************************************************/
  /*                                                                         */
  /*  Analyse x, doing any small-caps style changes immediately, and putting */
  /*  all the other words of x into par[0 .. num-1] for further analysis.    */
  /*                                                                         */
  /***************************************************************************/

  /* num counts the words collected into par[]; at most 3 are meaningful */
  /* (family, face, size)                                                */
  num = 0;
  if( type(x) == NULL_CLOS )
  { /* acceptable, but do nothing */
  }
  else if( is_word(type(x)) )
  {
    /* a single word: either a small-caps toggle or one font parameter */
    if( StringEqual(string(x), STR_SMALL_CAPS_ON) )
      small_caps(*style) = SMALL_CAPS_ON;
    else if( StringEqual(string(x), STR_SMALL_CAPS_OFF) )
      small_caps(*style) = SMALL_CAPS_OFF;
    else if( !StringEqual(string(x), STR_EMPTY) )
      par[num++] = x;
  }
  else if( type(x) == ACAT )
  { /* a sequence of words: classify each child in turn */
    for( link = Down(x); link != x; link = NextDown(link) )
    { Child(y, link);
      debug1(DFT, DDD, " pars examining y = %s", EchoObject(y));
      if( type(y) == GAP_OBJ || type(y) == NULL_CLOS ) continue;
      if( is_word(type(y)) )
      {
	if( StringEqual(string(y), STR_SMALL_CAPS_ON) )
	  small_caps(*style) = SMALL_CAPS_ON;
	else if( StringEqual(string(y), STR_SMALL_CAPS_OFF) )
	  small_caps(*style) = SMALL_CAPS_OFF;
	else if( !StringEqual(string(y), STR_EMPTY) )
	{
	  if( num >= 3 )
	  { Error(37, 40, "error in left parameter of %s",
	      WARN, &fpos(x), KW_FONT);
	    debug0(DFT, D, "FontChange returning: ACAT children");
	    return;
	  }
	  par[num++] = y;
	}
      }
      else
      { Error(37, 41, "error in left parameter of %s",
	  WARN, &fpos(x), KW_FONT);
	debug0(DFT, D, "FontChange returning: ACAT children");
	return;
      }
    }
  }
  else
  { Error(37, 42, "error in left parameter of %s", WARN, &fpos(x), KW_FONT);
    debug0(DFT, D, "FontChange returning: wrong type");
    return;
  }
  debug1(DFT, DDD, " found pars, num = %d", num);
  if( num == 0 )
  { debug1(DFT, D, "FontChange returning %s", EchoStyle(style));
    return;
  }

  /***************************************************************************/
  /*                                                                         */
  /*  Extract size, family, and face changes (if any) from par[0 .. num-1].  */
  /*                                                                         */
  /***************************************************************************/

  /* extract fsize parameter, if any */
  /* a size parameter is recognised by its first character: +, -, or digit */
  assert( num >= 1 && num <= 3, "FontChange: num!" );
  requested_size = nilobj;
  for( i = 0; i < num; i++ )
  {
    c = string(par[i])[0];
    if( c == CH_INCGAP || c == CH_DECGAP || decimaldigit(c) )
    {
      /* extract fsize, shuffle the rest down */
      requested_size = par[i];
      for( i = i + 1; i < num; i++ )
	par[i-1] = par[i];
      num--;
    }
  }

  /* what remains must be family and face */
  switch( num )
  {
    case 0:
      requested_family = requested_face = nilobj;
      break;

    case 1:
      /* one remaining word is taken as a face name within current family */
      requested_family = nilobj;
      requested_face = par[0];
      break;

    case 2:
      requested_family = par[0];
      requested_face = par[1];
      break;

    default:
      Error(37, 43, "error in left parameter of %s", WARN, &fpos(x), KW_FONT);
      debug0(DFT, D, "FontChange returning: too many parameters");
      return;
      break;
  }

  /* check for initial font case: must have family, face, and size */
  if( font(*style) == NO_FONT && (requested_size == nilobj ||
	requested_family == nilobj || requested_face == nilobj) )
    Error(37, 44, "initial font must have family, face and size",
      FATAL, &fpos(x));

  /***************************************************************************/
  /*                                                                         */
  /*  Either find the family and face already existing, or load them.        */
  /*                                                                         */
  /***************************************************************************/

  /* get font family */
  family = nilobj;
  if( requested_family != nilobj )
  {
    /* search for this family among the children of font_root */
    for( link = Down(font_root); link != font_root; link = NextDown(link) )
    { Child(y, link);
      if( StringEqual(string(requested_family), string(y)) ) break;
    }
    if( link != font_root )
      family = y;
  }
  else
  {
    /* preserve current family: navigate font_table -> face -> family */
    assert( Up(finfo[font(*style)].font_table)!=finfo[font(*style)].font_table,
      "FontChange: Up(finfo[font(*style)].font_table) !" );
    Parent(tmpf, Up(finfo[font(*style)].font_table));
    assert( is_word(type(tmpf)), "FontChange: type(tmpf)!" );
    assert( Up(tmpf) != tmpf, "FontChange: Up(tmpf)!" );
    Parent(family, Up(tmpf));
    assert( is_word(type(family)), "FontChange: type(family)!" );
  }

  /* get font face, if have family */
  face = nilobj;
  if( family != nilobj )
  {
    if( requested_face != nilobj )
    {
      /* search for this face in family */
      for( link = Down(family); link != family; link = NextDown(link) )
      { Child(y, link);
	if( StringEqual(string(requested_face), string(y)) ) break;
      }
      if( link != family )
	face = y;
    }
    else
    {
      /* preserve current face */
      Parent(face, Up(finfo[font(*style)].font_table));
      assert( is_word(type(face)), "FontChange: type(face)!" );
      assert( Up(face) != face, "FontChange: Up(face)!" );
    }
  }

  if( face == nilobj )
  {
    /* face not loaded, try the font databases */
    assert( family != nilobj || requested_family != nilobj, "FontChange fr!" );
    assert( requested_face != nilobj, "FontChange requested_face!");
    if( family != nilobj )
      requested_family = family;
    face = FontRead(string(requested_family), string(requested_face), x);
    if( face == nilobj )
    {
      /* missing face name error; check whether a family name was intended */
      for( link = Down(font_root); link != font_root; link = NextDown(link) )
      { Child(y, link);
	if( StringEqual(string(y), string(requested_face)) ) break;
      }
      if( link != font_root )
	Error(37, 45, "font family name %s must be followed by a face name",
	  WARN, &fpos(requested_face), string(requested_face));
      else
	Error(37, 46, "there is no font with family name %s and face name %s",
	  WARN, &fpos(requested_face), string(requested_family),
	  string(requested_face));
      debug0(DFT, D, "FontChange returning (unable to set face)");
      return;
    }
  }
  /* a loaded face has at least 3 children: ps_name, AFM name, first size */
  assert( Down(face) != face, "FontChange: no children!" );
  assert( NextDown(Down(face)) != face, "FontChange: 1 child!" );
  assert( NextDown(NextDown(Down(face))) != face, "FontChange: 2 children!" );

  /***************************************************************************/
  /*                                                                         */
  /*  Now have family and face; search for size and return it if found.      */
  /*                                                                         */
  /***************************************************************************/

  /* get font size as integer flen */
  if( requested_size == nilobj )
    flen = font_size(finfo[font(*style)].font_table);
  else
  { GetGap(requested_size, style, &gp, &inc);
    if( mode(gp) != EDGE_MODE || units(gp) != FIXED_UNIT )
    { Error(37, 47, "syntax error in font size %s; ignoring it",
	WARN, &fpos(requested_size), string(requested_size));
      flen = font_size(finfo[font(*style)].font_table);
    }
    else if( inc == GAP_ABS )
      flen = width(gp);
    else if( font(*style) == NO_FONT )
    { Error(37, 48, "no current font on which to base size change %s",
	FATAL, &fpos(requested_size), string(requested_size));
    }
    else if( inc == GAP_INC )
      flen = font_size(finfo[font(*style)].font_table) + width(gp);
    else if( inc == GAP_DEC )
      flen = font_size(finfo[font(*style)].font_table) - width(gp);
    else Error(37, 49, "FontChange: %d", INTERN, &fpos(x), inc);
  }

  if( flen <= 0 )
  { Error(37, 50, "%s %s ignored (result is not positive)",
      WARN, &fpos(requested_size), string(requested_size), KW_FONT);
    debug0(DFT, D,"FontChange returning (non-positive size)");
    return;
  }

  /* search fonts of face for desired size; return if already present */
  /* back ends without font metrics use the single plain character height */
  if( !(BackEnd->uses_font_metrics) ) flen = PlainCharHeight;
  for( link=NextDown(NextDown(Down(face))); link!=face; link = NextDown(link) )
  { Child(fsize, link);
    if( font_size(fsize) == flen )
    { font(*style) = font_num(fsize);
      SetGap(space_gap(*style), nobreak(space_gap(*style)), FALSE, TRUE,
	FIXED_UNIT, EDGE_MODE, font_spacewidth(fsize));
      debug2(DFT, D,"FontChange returning (old) %d (XHeight2 = %d)",
	font(*style), font_xheight2(finfo[font(*style)].font_table));
      return;
    }
  }

  /***************************************************************************/
  /*                                                                         */
  /*  No suitable size right now, so scale the original size and exit.       */
  /*                                                                         */
  /***************************************************************************/

  /* get a new number for this new size; grow finfo[] (doubling) if full */
  if( ++font_count >= finfo_size )
  { if( font_count > MAX_FONT )
      Error(37, 51, "too many different fonts and sizes (max is %d)",
	FATAL, &fpos(x), MAX_FONT);
    ifdebug(DMA, D, DebugRegisterUsage(MEM_FONTS, -1,
      -finfo_size * sizeof(FONT_INFO)));
    finfo_size *= 2;
    ifdebug(DMA, D, DebugRegisterUsage(MEM_FONTS, 1,
      finfo_size * sizeof(FONT_INFO)));
    finfo = (FONT_INFO *) realloc(finfo, finfo_size * sizeof(FONT_INFO));
    if( finfo == (FONT_INFO *) NULL )
      Error(37, 52, "run out of memory when increasing font table size",
	FATAL, &fpos(x));
  }

  /* create a new sized font record, scaled from the face's first size */
  Child(old, NextDown(NextDown(Down(face))));
  assert( is_word(type(old)), "FontChange: old!" );
  new = MakeWord(WORD, string(old), no_fpos);
  Link(face, new);
  font_num(new) = font_count;
  font_size(new) = BackEnd->uses_font_metrics ? flen : font_size(old);
  font_xheight2(new) = font_xheight2(old) * font_size(new) / font_size(old);
  font_recoded(new) = font_recoded(old);
  font_mapping(new) = font_mapping(old);
  font_spacewidth(new) = font_spacewidth(old) * font_size(new)/font_size(old);
  finfo[font_count].font_table = new;
  finfo[font_count].original_face = face;
  finfo[font_count].underline_pos =
    (finfo[font_num(old)].underline_pos * font_size(new)) / font_size(old);
  finfo[font_count].underline_thick =
    (finfo[font_num(old)].underline_thick * font_size(new)) / font_size(old);
  ifdebug(DMA, D, DebugRegisterUsage(MEM_FONTS, 1,
    MAX_CHARS * sizeof(struct metrics)));
  finfo[font_count].size_table =
    (struct metrics *) malloc(MAX_CHARS * sizeof(struct metrics));
  if( finfo[font_count].size_table == (struct metrics *) NULL )
    Error(37, 53, "run out of memory when changing font or font size",
      FATAL, &fpos(x));
  /* ligature table is size-independent, so it is shared, not copied */
  finfo[font_count].lig_table = lig = finfo[font_num(old)].lig_table;

  /* scale old font to new size; lig[i] == 1 marks chars with no glyph */
  newfnt = finfo[font_num(new)].size_table;
  oldfnt = finfo[font_num(old)].size_table;
  for( i = 0; i < MAX_CHARS; i++ ) if( lig[i] != 1 )
  { newfnt[i].left = (oldfnt[i].left * font_size(new)) / font_size(old);
    newfnt[i].right = (oldfnt[i].right * font_size(new)) / font_size(old);
    newfnt[i].down = (oldfnt[i].down * font_size(new)) / font_size(old);
    newfnt[i].up = (oldfnt[i].up * font_size(new)) / font_size(old);
    newfnt[i].last_adjust = (oldfnt[i].last_adjust * font_size(new)) / font_size(old);
  }

  /* copy and scale composite table */
  finfo[font_count].composite = finfo[font_num(old)].composite;
  finfo[font_count].cmp_top = cmptop = finfo[font_num(old)].cmp_top;
  oldcmp = finfo[font_num(old)].cmp_table;
  newcmp = (COMPOSITE *) malloc(cmptop*sizeof(COMPOSITE));
  if( newcmp == (COMPOSITE *) NULL )
    Error(37, 54, "run out of memory when changing font or font size",
      FATAL, &fpos(x));
  for( i = 1; i < cmptop; i++ ) /* NB position 0 is unused */
  { newcmp[i].char_code = oldcmp[i].char_code;
    if( newcmp[i].char_code != (FULL_CHAR) '\0' )
    { newcmp[i].x_offset = (oldcmp[i].x_offset*font_size(new)) / font_size(old);
      newcmp[i].y_offset = (oldcmp[i].y_offset*font_size(new)) / font_size(old);
      debug5(DFT, D, "FontChange scales composite %d from (%d, %d) to (%d, %d)",
	(int) newcmp[i].char_code, oldcmp[i].x_offset, oldcmp[i].y_offset,
	newcmp[i].x_offset, newcmp[i].y_offset);
    }
  }
  finfo[font_count].cmp_table = newcmp;

  /* copy and scale kerning tables; only the sizes array depends on size, */
  /* so kern_table/kern_chars/kern_value are shared with the old font     */
  finfo[font_count].kern_table = finfo[font_num(old)].kern_table;
  finfo[font_count].kern_chars = finfo[font_num(old)].kern_chars;
  finfo[font_count].kern_value = finfo[font_num(old)].kern_value;
  oldks = finfo[font_num(old)].kern_sizes;
  if( oldks != (SHORT_LENGTH *) NULL )
  { klen = oldks[0];  /* entry 0 holds the table length, not a size */
    ifdebug(DMA, D, DebugRegisterUsage(MEM_FONTS, 0, klen * sizeof(SHORT_LENGTH)));
    finfo[font_count].kern_sizes = newks =
      (SHORT_LENGTH *) malloc(klen * sizeof(SHORT_LENGTH));
    if( newks == (SHORT_LENGTH *) NULL )
      Error(37, 55, "run out of memory when changing font or font size",
	FATAL, &fpos(x));
    newks[0] = klen;
    for( i = 1; i < klen; i++ )
      newks[i] = (oldks[i] * font_size(new)) / font_size(old);
  }
  else finfo[font_count].kern_sizes = (SHORT_LENGTH *) NULL;

  /* return new font number and exit */
  font(*style) = font_count;
  SetGap(space_gap(*style), nobreak(space_gap(*style)), FALSE, TRUE,
    FIXED_UNIT, EDGE_MODE, font_spacewidth(new));
  debug2(DFT, D,"FontChange returning (scaled) %d (XHeight2 = %d)",
    font(*style), font_xheight2(finfo[font(*style)].font_table));
  /* FontDebug(); */
} /* end FontChange */
/*****************************************************************************/
/* */
/*  KernLength(fnum, mp, ch1, ch2, res)                                      */
/* */
/* Set res to the kern length between ch1 and ch2 in font fnum, or 0 if */
/* none. Actually we first convert ch1 and ch2 to corresponding unaccented */
/* characters, because metrics files don't seem to contain kerning pairs */
/* for accented characters. */
/* */
/*****************************************************************************/
#define KernLength(fnum, mp, ch1, ch2, res) \
{ int ua_ch1 = mp[ch1]; \
int ua_ch2 = mp[ch2]; \
int i = finfo[fnum].kern_table[ua_ch1], j; \
if( i == 0 ) res = 0; \
else \
{ FULL_CHAR *kc = finfo[fnum].kern_chars; \
for( j = i; kc[j] > ua_ch2; j++ ); \
res = (kc[j] == ua_ch2) ? \
finfo[fnum].kern_sizes[finfo[fnum].kern_value[j]] : 0; \
} \
} /* end KernLength */
/*@::FontWordSize()@**********************************************************/
/* */
/* FontWordSize(x) */
/* */
/* Calculate the horizontal and vertical size of WORD or QWORD x, including */
/* the effect of ligature sequences but not replacing them with ligatures. */
/* */
/*****************************************************************************/
void FontWordSize(OBJECT x)
{ FULL_CHAR *p, *q, *a, *b, *lig, *unacc, *acc; OBJECT tmp;
  FULL_CHAR buff[MAX_BUFF]; MAPPING m;
  int r, u, d, ksize; struct metrics *fnt;
  debug2(DFT, D, "FontWordSize( %s ), font = %d", string(x), word_font(x));
  assert( is_word(type(x)), "FontWordSize: !is_word(type(x))!" );
  /* p scans the word's string; q writes a copy into buff for kerning */
  p = string(x);
  q = buff;
  if( *p )
  { if( word_font(x) < 1 || word_font(x) > font_count )
      Error(37, 56, "no current font at word %s", FATAL, &fpos(x), string(x));
    if( word_colour(x) == 0 && BackEnd->colour_avail )
      Error(37, 57, "no current colour at word %s", FATAL, &fpos(x), string(x));
    if( word_language(x) == 0 )
      Error(37, 58, "no current language at word %s", FATAL,&fpos(x),string(x));
    fnt = finfo[word_font(x)].size_table;
    lig = finfo[word_font(x)].lig_table;
    m = font_mapping(finfo[word_font(x)].font_table);
    unacc = MapTable[m]->map[MAP_UNACCENTED];
    acc = MapTable[m]->map[MAP_ACCENT];
    /* NOTE(review): acc is fetched but not used below in this function */
    /* u/d accumulate max ascent / min descent; r accumulates width */
    d = u = r = 0;
    do
    {
      /* check for missing glyph (lig[] == 1) or ligatures (lig[] > 1) */
      debug2(DFT, D, " examining `%c' lig = %d", *p, lig[*p]);
      if( lig[*q = *p++] )
      {
	if( lig[*q] == 1 )
	{ tmp = MakeWord(QWORD, STR_SPACE, &fpos(x));
	  string(tmp)[0] = *q;
	  /* bug fix: unaccented version exists if unacc differs from self */
	  if( unacc[*q] != *q )
	  {
	    /* *** this is acceptable now, let this char through
	    Error(37, 59, "accent dropped from character %s (it has no glyph in font %s)",
	      WARN, &fpos(x),
	      StringQuotedWord(tmp), FontFamilyAndFace(word_font(x)));
	    *(p-1) = *q = unacc[*q];
	    *** */
	    /* copy the unaccented character's metrics and clear the   */
	    /* missing-glyph mark so this is done only once per char   */
	    debug2(DFT, D, " unacc[%c] = `%c'", *q, unacc[*q]);
	    fnt[*q].up = fnt[unacc[*q]].up;
	    fnt[*q].down = fnt[unacc[*q]].down;
	    fnt[*q].left = fnt[unacc[*q]].left;
	    fnt[*q].right = fnt[unacc[*q]].right;
	    fnt[*q].last_adjust = fnt[unacc[*q]].last_adjust;
	    lig[*q] = 0;
	  }
	  else
	  {
	    debug1(DFT, D, " unacc[%c] = 0, replacing by space", *q);
	    Error(37, 60, "character %s replaced by space (it has no glyph in font %s)",
	      WARN, &fpos(x),
	      StringQuotedWord(tmp), FontFamilyAndFace(word_font(x)));
	    *(p-1) = *q = CH_SPACE;
	  }
	  Dispose(tmp);
	}
	else
	{
	  /* try to match each ligature sequence starting at *q against  */
	  /* the input; on a full match, substitute the ligature char    */
	  /* and advance p past the consumed characters                  */
	  debug1(DFT, D, " processing ligature beginning at %c", *q);
	  a = &lig[ lig[*(p-1)] + MAX_CHARS ];
	  while( *a++ == *(p-1) )
	  { b = p;
	    while( *a == *b && *(a+1) != '\0' && *b != '\0' ) a++, b++;
	    if( *(a+1) == '\0' )
	    { *q = *a;
	      p = b;
	      break;
	    }
	    else
	    { while( *++a );  /* skip to the next candidate sequence */
	      a++;
	    }
	  }
	}
      }

      /* accumulate size of *q */
      if( fnt[*q].up > u ) u = fnt[*q].up;
      if( fnt[*q].down < d ) d = fnt[*q].down;
      r += fnt[*q++].right;
    } while( *p );
    *q = '\0';

    /* adjust for last character */
    r += fnt[*(q-1)].last_adjust;

    /* add kern lengths to r, for each adjacent pair in the copied string */
    for( p = buff, q = p+1; *q; p++, q++ )
    { KernLength(word_font(x), unacc, *p, *q, ksize);
      debugcond3(DFT, D, ksize != 0, " KernLength(fnum, %c, %c) = %d",
	*p, *q, ksize);
      r += ksize;
    }

    /* set sizes of x */
    back(x, COLM) = 0;
    fwd(x, COLM) = r;
    back(x, ROWM) = u;
    fwd(x, ROWM) = -d;
  }
  else back(x, COLM) = fwd(x, COLM) = back(x, ROWM) = fwd(x, ROWM) = 0;
  debug4(DFT, D, "FontWordSize returning %hd %hd %hd %hd",
    back(x, COLM), fwd(x, COLM), back(x, ROWM), fwd(x, ROWM));
} /* end FontWordSize */
/*@::FontSize(), FontHalfXHeight(), FontEncoding(), FontName()@***************/
/* */
/* FULL_LENGTH FontSize(fnum, x) */
/* */
/* Return the size of this font. x is for error messages only. */
/* */
/*****************************************************************************/
FULL_LENGTH FontSize(FONT_NUM fnum, OBJECT x)
{ FULL_LENGTH res;  /* the size of font fnum */
  debug1(DFT, D, "FontSize( %d )", fnum);
  assert( fnum <= font_count, "FontSize!" );
  /* fnum 0 means no font has been set yet; x locates the error */
  if( fnum <= 0 )
    Error(37, 61, "no current font at this point", FATAL, &fpos(x));
  res = font_size(finfo[fnum].font_table);
  debug1(DFT, D, "FontSize returning %d", res);
  return res;
} /* end FontSize */
/*****************************************************************************/
/* */
/* FULL_LENGTH FontHalfXHeight(fnum) */
/* */
/* Return the xheight2 value of this font. */
/* */
/*****************************************************************************/
FULL_LENGTH FontHalfXHeight(FONT_NUM fnum)
{ FULL_LENGTH res;  /* half the x-height of font fnum */
  debug1(DFT, DD, "FontHalfXHeight( %d )", fnum);
  assert( fnum <= font_count, "FontHalfXHeight!" );
  res = font_xheight2(finfo[fnum].font_table);
  debug1(DFT, DD, "FontHalfXHeight returning %d", res);
  return res;
} /* end FontHalfXHeight */
/*****************************************************************************/
/* */
/* MAPPING FontMapping(fnum, xfpos) */
/* */
/* Return the character mapping of this font, to use for small caps, etc. */
/* xfpos is the file position for error messages. */
/* */
/*****************************************************************************/
MAPPING FontMapping(FONT_NUM fnum, FILE_POS *xfpos)
{ MAPPING res;  /* the character mapping of font fnum */
  debug1(DFT, DD, "FontMapping( %d )", fnum);
  assert( fnum <= font_count, "FontMapping!" );
  /* fnum 0 means no font has been set yet; xfpos locates the error */
  if( fnum <= 0 )
    Error(37, 62, "no current font at this point", FATAL, xfpos);
  res = font_mapping(finfo[fnum].font_table);
  debug1(DFT, DD, "FontMapping returning %d", res);
  return res;
} /* end FontMapping */
/*****************************************************************************/
/* */
/* FULL_CHAR *FontName(fnum) */
/* */
/* Return the short PostScript name of this font. */
/* */
/*****************************************************************************/
FULL_CHAR *FontName(FONT_NUM fnum)
{ FULL_CHAR *res;  /* the short PostScript name of font fnum */
  debug1(DFT, D, "FontName( %d )", fnum);
  assert( fnum <= font_count, "FontName!" );
  res = string(finfo[fnum].font_table);
  debug1(DFT, D, "FontName returning %s", res);
  return res;
} /* end FontName */
/*@::FontFamily(), FontFace@**************************************************/
/*                                                                           */
/*  FULL_CHAR *FontFamily(fnum)                                              */
/*  FULL_CHAR *FontFace(fnum)                                                */
/*                                                                           */
/*  Return the family name (resp. face name) of font fnum.                   */
/*                                                                           */
/*****************************************************************************/
/* Return the family name of font fnum (e.g. "Times").                      */
FULL_CHAR *FontFamily(FONT_NUM fnum)
{ OBJECT face, family;
  debug1(DFT, D, "FontFamily( %d )", fnum);
  assert( fnum <= font_count, "FontFamily!" );  /* was misspelt "FontFamiliy!" */
  /* navigate up the font tree: sized font record -> face -> family */
  Parent(face, Up(finfo[fnum].font_table));
  Parent(family, Up(face));
  debug1(DFT, D, "FontFamily returning %s", string(family));
  return string(family);
} /* end FontFamily */
/* Return the face name of font fnum (e.g. "Bold").                         */
FULL_CHAR *FontFace(FONT_NUM fnum)
{ OBJECT face;
  debug1(DFT, D, "FontFace( %d )", fnum);  /* was misspelt "FontFacec" */
  assert( fnum <= font_count, "FontFace!" );
  /* the face record is the parent of the sized font record; the family */
  /* (computed by the previous version) is not needed here              */
  Parent(face, Up(finfo[fnum].font_table));
  debug1(DFT, D, "FontFace returning %s", string(face));
  return string(face);
} /* end FontFace */
/*@::FontFamilyAndFace(), FontPrintAll()@*************************************/
/* */
/* FULL_CHAR *FontFamilyAndFace(fnum) */
/* */
/* Return a static string of the current font family and face. */
/* */
/*****************************************************************************/
/* Return a static string "<family> <face>" for font fnum.  The result     */
/* points to a static buffer, overwritten by the next call.                */
FULL_CHAR *FontFamilyAndFace(FONT_NUM fnum)
{ OBJECT face, family; static FULL_CHAR buff[80];
  debug1(DFT, D, "FontFamilyAndFace( %d )", fnum);
  assert( fnum <= font_count, "FontFamilyAndFace!" );
  Parent(face, Up(finfo[fnum].font_table));
  Parent(family, Up(face));
  /* buff needs len(family) + 1 (space) + len(face) + 1 (NUL) bytes;      */
  /* the previous "+ 1" check allowed a one-byte overflow of buff[80]     */
  if( StringLength(string(family)) + StringLength(string(face)) + 2 > 80 )
    Error(37, 63, "family and face names %s %s are too long",
      FATAL, no_fpos, string(family), string(face));
  StringCopy(buff, string(family));
  StringCat(buff, STR_SPACE);
  StringCat(buff, string(face));
  debug1(DFT, D, "FontFamilyAndFace returning %s", buff);
  return buff;
} /* end FontFamilyAndFace */
/*****************************************************************************/
/* */
/* FontPrintAll(fp) */
/* */
/* Print all font encoding commands on output file fp */
/* */
/*****************************************************************************/
void FontPrintAll(FILE *fp)
{ OBJECT family, face, first_size, ps_name, famlink, facelink;
  assert(font_root!=nilobj && type(font_root)==ACAT, "FontDebug: font_root!");
  debug0(DFT, DD, "FontPrintAll(fp)");
  /* visit every face of every family under font_root */
  for( famlink = Down(font_root); famlink != font_root;
       famlink = NextDown(famlink) )
  { Child(family, famlink);
    assert( is_word(type(family)), "FontPrintAll: family!" );
    for( facelink = Down(family); facelink != family;
         facelink = NextDown(facelink) )
    { Child(face, facelink);
      assert( is_word(type(face)), "FontPrintAll: face!" );
      assert( Down(face) != face && NextDown(Down(face)) != face &&
        NextDown(NextDown(Down(face))) != face, "FontDebug: Down(face)!");
      Child(ps_name, Down(face));
      assert( is_word(type(ps_name)), "FontPrintAll: ps_name!" );
      Child(first_size, NextDown(NextDown(Down(face))));
      assert( is_word(type(first_size)), "FontPrintAll: first_size!" );
      /* a recoded face first defines a re-encoded variant of the font */
      if( !font_recoded(face) )
        fprintf(fp, "/%s { /%s LoutFont } def\n", string(first_size),
          string(ps_name));
      else
      { fprintf(fp, "/%s%s %s /%s LoutRecode\n",
          string(ps_name), string(first_size),
          MapEncodingName(font_mapping(face)), string(ps_name));
        fprintf(fp, "/%s { /%s%s LoutFont } def\n", string(first_size),
          string(ps_name), string(first_size));
      }
    }
  }
  fputs("\n", fp);
  debug0(DFT, DD, "FontPrintAll returning.");
} /* end FontPrintAll */
/*@@**************************************************************************/
/* */
/* FontPrintPageSetup(fp) */
/* */
/* Print all font encoding commands needed for the current page onto fp. */
/* */
/*****************************************************************************/
void FontPrintPageSetup(FILE *fp)
{ OBJECT face, first_size, ps_name, flink;
  assert(font_root!=nilobj && type(font_root)==ACAT, "FontDebug: font_root!");
  assert(font_used!=nilobj && type(font_used)==ACAT, "FontDebug: font_used!");
  debug0(DFT, DD, "FontPrintPageSetup(fp)");
  /* emit one setup command for each face used on the current page */
  for( flink = Down(font_used); flink != font_used; flink = NextDown(flink) )
  {
    Child(face, flink);
    assert( is_word(type(face)), "FontPrintPageSetup: face!" );
    assert( Down(face) != face, "FontDebug: Down(face)!");

    /* print font encoding command */
    Child(ps_name, Down(face));
    assert( is_word(type(ps_name)), "FontPrintPageSetup: ps_name!" );
    Child(first_size, NextDown(NextDown(Down(face))));
    assert( is_word(type(first_size)), "FontPrintPageSetup: first_size!" );
    BackEnd->PrintPageSetupForFont(face, font_curr_page,
      string(ps_name), string(first_size));
  }
  debug0(DFT, DD, "FontPrintPageSetup returning.");
} /* end FontPrintPageSetup */
/*@@**************************************************************************/
/* */
/* FontPrintPageResources(fp) */
/* */
/* Print all page resources (i.e. fonts needed or supplied) onto fp. */
/* */
/*****************************************************************************/
void FontPrintPageResources(FILE *fp)
{ OBJECT face, ps_name, link, pface, pname, plink;
  BOOLEAN first;  /* TRUE until the first resource line has been printed */
  assert(font_root!=nilobj && type(font_root)==ACAT, "FontDebug: font_root!");
  assert(font_used!=nilobj && type(font_used)==ACAT, "FontDebug: font_used!");
  debug0(DFT, DD, "FontPrintPageResources(fp)");
  first = TRUE;
  for( link = Down(font_used); link != font_used; link = NextDown(link) )
  {
    Child(face, link);
    assert( is_word(type(face)), "FontPrintPageResources: face!" );
    assert( Down(face) != face, "FontDebug: Down(face)!");
    Child(ps_name, Down(face));
    assert( is_word(type(ps_name)), "FontPrintPageResources: ps_name!" );

    /* make sure this ps_name has not been printed before (ugly, I know). */
    /* Repeats arise when the font appears twice in the database under    */
    /* different family-face names, perhaps because of synonyms like      */
    /* Italic and Slope, or perhaps because of different encoding vectors */
    /* (scan only the earlier entries; if the name occurs there, skip it) */
    for( plink = Down(font_used); plink != link; plink = NextDown(plink) )
    {
      Child(pface, plink);
      Child(pname, Down(pface));
      if( StringEqual(string(pname), string(ps_name)) )
	break;
    }
    if( plink == link )
    {
      /* not seen before, so print it */
      BackEnd->PrintPageResourceForFont(string(ps_name), first);
      first = FALSE;
    }
  }
  debug0(DFT, DD, "FontPrintPageResources returning.");
} /* end FontPrintPageResources */
/*@@**************************************************************************/
/* */
/* FontAdvanceCurrentPage() */
/* */
/* Advance the current page. */
/* */
/*****************************************************************************/
void FontAdvanceCurrentPage(void)
{ debug0(DFT, DD, "FontAdvanceCurrentPage()");
while( Down(font_used) != font_used ) DeleteLink(Down(font_used));
font_curr_page++;
debug0(DFT, DD, "FontAdvanceCurrentPage() returning.");
} /* end FontAdvanceCurrentPage */
/*@::FontPageUsed()@**********************************************************/
/* */
/*  void FontPageUsed(face)                                                  */
/* */
/* Declares that font face is used on the current page. */
/* */
/*****************************************************************************/
void FontPageUsed(OBJECT face)
{ debug1(DFT, DD, "FontPageUsed(%d)", font_num(face));
  /* a face must not be declared used twice on the same page; font_page() */
  /* records the last page it was declared on                             */
  assert( font_page(face) < font_curr_page, "FontPageUsed!" );
  /* add the face to the list of fonts used on the current page and */
  /* stamp it with the current page number                          */
  Link(font_used, face);
  font_page(face) = font_curr_page;
  debug0(DFT, DD, "FontPageUsed returning");
} /* end FontPageUsed */
/*@::FontNeeded()@************************************************************/
/* */
/* OBJECT FontNeeded(fp) */
/* */
/* Writes font needed resources onto file out_fp. Returns TRUE if none. */
/* Now that we are using a database, every font that is actually loaded */
/* is really needed. */
/* */
/*****************************************************************************/
BOOLEAN FontNeeded(FILE *fp)
{ BOOLEAN first_need = TRUE;	/* TRUE until the first resource is printed */
  OBJECT link, flink, family, face, ps_name;
  /* Walk every face of every family under font_root; each loaded font is */
  /* really needed, so emit one DSC "needed resources" line per face.     */
  /* Returns TRUE iff nothing was printed.                                */
  for( link = Down(font_root); link != font_root; link = NextDown(link) )
  { Child(family, link);
    for( flink = Down(family); flink != family; flink = NextDown(flink) )
    { Child(face, flink);
      Child(ps_name, Down(face));
      /* fix: assert message previously named FontPrintPageResources */
      assert( is_word(type(ps_name)), "FontNeeded: ps_name!" );
      /* first line gets the full DSC keyword, continuation lines get %%+ */
      fprintf(fp, "%s font %s\n",
	first_need ? "%%DocumentNeededResources:" : "%%+", string(ps_name));
      first_need = FALSE;
    }
  }
  return first_need;
} /* end FontNeeded */
| {
"content_hash": "c00be1abbb3b06b56a58a265542bd71c",
"timestamp": "",
"source": "github",
"line_count": 1874,
"max_line_length": 87,
"avg_line_length": 43.25186766275347,
"alnum_prop": 0.45980457472795916,
"repo_name": "shaotuanchen/sunflower_exp",
"id": "db5d25cb4ecba32f1fc172ade6fce4f2d45da58e",
"size": "81054",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "benchmarks/source/shaotuanb/consumer/typeset/lout-3.24/z37.c",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "459993"
},
{
"name": "Awk",
"bytes": "6562"
},
{
"name": "Batchfile",
"bytes": "9028"
},
{
"name": "C",
"bytes": "50326113"
},
{
"name": "C++",
"bytes": "2040239"
},
{
"name": "CSS",
"bytes": "2355"
},
{
"name": "Clarion",
"bytes": "2484"
},
{
"name": "Coq",
"bytes": "61440"
},
{
"name": "DIGITAL Command Language",
"bytes": "69150"
},
{
"name": "Emacs Lisp",
"bytes": "186910"
},
{
"name": "Fortran",
"bytes": "5364"
},
{
"name": "HTML",
"bytes": "2171356"
},
{
"name": "JavaScript",
"bytes": "27164"
},
{
"name": "Logos",
"bytes": "159114"
},
{
"name": "M",
"bytes": "109006"
},
{
"name": "M4",
"bytes": "100614"
},
{
"name": "Makefile",
"bytes": "5409865"
},
{
"name": "Mercury",
"bytes": "702"
},
{
"name": "Module Management System",
"bytes": "56956"
},
{
"name": "OCaml",
"bytes": "253115"
},
{
"name": "Objective-C",
"bytes": "57800"
},
{
"name": "Papyrus",
"bytes": "3298"
},
{
"name": "Perl",
"bytes": "70992"
},
{
"name": "Perl 6",
"bytes": "693"
},
{
"name": "PostScript",
"bytes": "3440120"
},
{
"name": "Python",
"bytes": "40729"
},
{
"name": "Redcode",
"bytes": "1140"
},
{
"name": "Roff",
"bytes": "3794721"
},
{
"name": "SAS",
"bytes": "56770"
},
{
"name": "SRecode Template",
"bytes": "540157"
},
{
"name": "Shell",
"bytes": "1560436"
},
{
"name": "Smalltalk",
"bytes": "10124"
},
{
"name": "Standard ML",
"bytes": "1212"
},
{
"name": "TeX",
"bytes": "385584"
},
{
"name": "WebAssembly",
"bytes": "52904"
},
{
"name": "Yacc",
"bytes": "510934"
}
],
"symlink_target": ""
} |
import React,{Component,PropTypes} from 'react'
import {formatDate} from '../../utils'
export default class Reply extends Component{
constructor(props){
super(props)
}
static propTypes = {
replys: PropTypes.array.isRequired,
k: PropTypes.number.isRequired,
showReply: PropTypes.func.isRequired
}
render(){
const {replys,k,showReply} = this.props
return(
<div className="reply-list">
{replys.map((reply,i)=>
<div className="reply-item" key={i}>
<p className="reply-content">
<a className="reply-user link-light">{reply.user_info.nickname}</a>:
{reply.content}
</p>
<div className="reply-footer text-right">
<a className="reply" href="javascript:;" onClick={e=>showReply(e,k,reply.user_info.nickname)} >回复</a>
<span className="reply-time pull-left">{formatDate(reply.created)}</span>
</div>
</div>
)}
</div>
)
}
} | {
"content_hash": "fcef7d6f0484dbf71ca4dfacbcec291f",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 114,
"avg_line_length": 29.939393939393938,
"alnum_prop": 0.5951417004048583,
"repo_name": "icezeros/ice-react",
"id": "e554875a1c6cce5993c8fcf2a012e8029248e559",
"size": "994",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/components/Article/reply.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16"
},
{
"name": "HTML",
"bytes": "1246"
},
{
"name": "JavaScript",
"bytes": "91929"
}
],
"symlink_target": ""
} |
using namespace llvm;
using namespace gsym;
// Construct a GsymCreator whose string table uses ELF conventions.
// The empty path is inserted first so the empty FileEntry occupies
// index 0 of the file table (encode() later asserts Files[0] is empty).
GsymCreator::GsymCreator() : StrTab(StringTableBuilder::ELF) {
  insertFile(StringRef());
}
uint32_t GsymCreator::insertFile(StringRef Path,
                                 llvm::sys::path::Style Style) {
  // Split the path and intern both halves in the string table. Note that
  // insertString() takes the (recursive) mutex itself.
  llvm::StringRef Dir = llvm::sys::path::parent_path(Path, Style);
  llvm::StringRef Base = llvm::sys::path::filename(Path, Style);
  FileEntry Entry(insertString(Dir), insertString(Base));

  std::lock_guard<std::recursive_mutex> Guard(Mutex);
  // Tentatively assign the next free index; keep it only if the entry
  // was not already in the map, otherwise reuse the existing index.
  const auto Index = Files.size();
  auto InsertResult = FileEntryToIndex.insert(std::make_pair(Entry, Index));
  if (InsertResult.second)
    Files.emplace_back(Entry);
  return InsertResult.first->second;
}
llvm::Error GsymCreator::save(StringRef Path,
                              llvm::support::endianness ByteOrder) const {
  // Open the output file; surface any OS-level failure as an llvm::Error.
  std::error_code ErrCode;
  raw_fd_ostream OutStream(Path, ErrCode);
  if (ErrCode)
    return llvm::errorCodeToError(ErrCode);
  // Serialization itself is handled entirely by encode().
  FileWriter Writer(OutStream, ByteOrder);
  return encode(Writer);
}
// Serialize the finalized GSYM data to the given FileWriter.
//
// Layout written: header, address-offset table, a zero-filled
// AddrInfoOffsets table (fixed up at the end), file table, string table,
// then each FunctionInfo. Returns an error if there is nothing to encode,
// finalize() was not called, or limits are exceeded.
llvm::Error GsymCreator::encode(FileWriter &O) const {
  std::lock_guard<std::recursive_mutex> Guard(Mutex);
  if (Funcs.empty())
    return createStringError(std::errc::invalid_argument,
                             "no functions to encode");
  if (!Finalized)
    return createStringError(std::errc::invalid_argument,
                             "GsymCreator wasn't finalized prior to encoding");
  if (Funcs.size() > UINT32_MAX)
    return createStringError(std::errc::invalid_argument,
                             "too many FunctionInfos");

  // Funcs is sorted by finalize(), so front()/back() bound the addresses.
  const uint64_t MinAddr = Funcs.front().startAddress();
  const uint64_t MaxAddr = Funcs.back().startAddress();
  const uint64_t AddrDelta = MaxAddr - MinAddr;
  Header Hdr;
  Hdr.Magic = GSYM_MAGIC;
  Hdr.Version = GSYM_VERSION;
  Hdr.AddrOffSize = 0;
  Hdr.UUIDSize = static_cast<uint8_t>(UUID.size());
  Hdr.BaseAddress = MinAddr;
  Hdr.NumAddresses = static_cast<uint32_t>(Funcs.size());
  Hdr.StrtabOffset = 0; // We will fix this up later.
  // Bug fix: this line previously re-assigned StrtabOffset, leaving
  // StrtabSize uninitialized when the header was first written below.
  Hdr.StrtabSize = 0; // We will fix this up later.
  memset(Hdr.UUID, 0, sizeof(Hdr.UUID));
  if (UUID.size() > sizeof(Hdr.UUID))
    return createStringError(std::errc::invalid_argument,
                             "invalid UUID size %u", (uint32_t)UUID.size());
  // Set the address offset size correctly in the GSYM header: use the
  // smallest integer width that can represent MaxAddr - MinAddr.
  if (AddrDelta <= UINT8_MAX)
    Hdr.AddrOffSize = 1;
  else if (AddrDelta <= UINT16_MAX)
    Hdr.AddrOffSize = 2;
  else if (AddrDelta <= UINT32_MAX)
    Hdr.AddrOffSize = 4;
  else
    Hdr.AddrOffSize = 8;
  // Copy the UUID value if we have one.
  if (UUID.size() > 0)
    memcpy(Hdr.UUID, UUID.data(), UUID.size());
  // Write out the header.
  llvm::Error Err = Hdr.encode(O);
  if (Err)
    return Err;
  // Write out the address offsets, each relative to Hdr.BaseAddress and
  // truncated to the chosen AddrOffSize.
  O.alignTo(Hdr.AddrOffSize);
  for (const auto &FuncInfo : Funcs) {
    uint64_t AddrOffset = FuncInfo.startAddress() - Hdr.BaseAddress;
    switch(Hdr.AddrOffSize) {
      case 1: O.writeU8(static_cast<uint8_t>(AddrOffset)); break;
      case 2: O.writeU16(static_cast<uint16_t>(AddrOffset)); break;
      case 4: O.writeU32(static_cast<uint32_t>(AddrOffset)); break;
      case 8: O.writeU64(AddrOffset); break;
    }
  }
  // Write out all zeros for the AddrInfoOffsets; the real offsets are
  // patched in after the FunctionInfos are encoded below.
  O.alignTo(4);
  const off_t AddrInfoOffsetsOffset = O.tell();
  for (size_t i = 0, n = Funcs.size(); i < n; ++i)
    O.writeU32(0);
  // Write out the file table. Entry 0 is always the empty file inserted
  // by the constructor.
  O.alignTo(4);
  assert(!Files.empty());
  assert(Files[0].Dir == 0);
  assert(Files[0].Base == 0);
  size_t NumFiles = Files.size();
  if (NumFiles > UINT32_MAX)
    return createStringError(std::errc::invalid_argument,
                             "too many files");
  O.writeU32(static_cast<uint32_t>(NumFiles));
  for (const auto &File : Files) { // const ref: avoid copying each entry
    O.writeU32(File.Dir);
    O.writeU32(File.Base);
  }
  // Write out the string table.
  const off_t StrtabOffset = O.tell();
  StrTab.write(O.get_stream());
  const off_t StrtabSize = O.tell() - StrtabOffset;
  std::vector<uint32_t> AddrInfoOffsets;
  // Write out the address infos for each function info.
  for (const auto &FuncInfo : Funcs) {
    if (Expected<uint64_t> OffsetOrErr = FuncInfo.encode(O))
      AddrInfoOffsets.push_back(OffsetOrErr.get());
    else
      return OffsetOrErr.takeError();
  }
  // Fixup the string table offset and size in the header
  O.fixup32((uint32_t)StrtabOffset, offsetof(Header, StrtabOffset));
  O.fixup32((uint32_t)StrtabSize, offsetof(Header, StrtabSize));
  // Fixup all address info offsets
  uint64_t Offset = 0;
  for (auto AddrInfoOffset: AddrInfoOffsets) {
    O.fixup32(AddrInfoOffset, AddrInfoOffsetsOffset + Offset);
    Offset += 4;
  }
  return ErrorSuccess();
}
// Prepare the collected FunctionInfos for encoding: sort them, freeze the
// string table, and prune duplicate/overlapping entries, logging each
// decision to OS. Must be called exactly once before encode().
llvm::Error GsymCreator::finalize(llvm::raw_ostream &OS) {
  std::lock_guard<std::recursive_mutex> Guard(Mutex);
  if (Finalized)
    return createStringError(std::errc::invalid_argument,
                             "already finalized");
  Finalized = true;

  // Sort function infos so we can emit sorted functions.
  llvm::sort(Funcs.begin(), Funcs.end());

  // Don't let the string table indexes change by finalizing in order.
  StrTab.finalizeInOrder();

  // Remove duplicates function infos that have both entries from debug info
  // (DWARF or Breakpad) and entries from the SymbolTable.
  //
  // Also handle overlapping function. Usually there shouldn't be any, but they
  // can and do happen in some rare cases.
  //
  // (a)          (b)         (c)
  //     ^  ^       ^            ^
  //     |X |Y      |X ^         |X
  //     |  |       |  |Y        |  ^
  //     |  |       |  v         v  |Y
  //     v  v       v               v
  //
  // In (a) and (b), Y is ignored and X will be reported for the full range.
  // In (c), both functions will be included in the result and lookups for an
  // address in the intersection will return Y because of binary search.
  //
  // Note that in case of (b), we cannot include Y in the result because then
  // we wouldn't find any function for range (end of Y, end of X)
  // with binary search
  auto NumBefore = Funcs.size();
  auto Curr = Funcs.begin();
  auto Prev = Funcs.end(); // Funcs.end() acts as "no previous entry yet"
  while (Curr != Funcs.end()) {
    // Can't check for overlaps or same address ranges if we don't have a
    // previous entry
    if (Prev != Funcs.end()) {
      if (Prev->Range.intersects(Curr->Range)) {
        // Overlapping address ranges.
        if (Prev->Range == Curr->Range) {
          // Same address range. Check if one is from debug info and the other
          // is from a symbol table. If so, then keep the one with debug info.
          // Our sorting guarantees that entries with matching address ranges
          // that have debug info are last in the sort.
          if (*Prev == *Curr) {
            // FunctionInfo entries match exactly (range, lines, inlines)
            OS << "warning: duplicate function info entries, removing "
                  "duplicate:\n"
               << *Curr << '\n';
            // erase(Prev) returns an iterator to the element after Prev,
            // i.e. the entry Curr pointed at, so Curr stays valid.
            Curr = Funcs.erase(Prev);
          } else {
            if (!Prev->hasRichInfo() && Curr->hasRichInfo()) {
              // Same address range, one with no debug info (symbol) and the
              // next with debug info. Keep the latter.
              Curr = Funcs.erase(Prev);
            } else {
              OS << "warning: same address range contains different debug "
                 << "info. Removing:\n"
                 << *Prev << "\nIn favor of this one:\n"
                 << *Curr << "\n";
              // Both branches drop Prev; they differ only in the warning.
              Curr = Funcs.erase(Prev);
            }
          }
        } else {
          // print warnings about overlaps
          OS << "warning: function ranges overlap:\n"
             << *Prev << "\n"
             << *Curr << "\n";
        }
      } else if (Prev->Range.size() == 0 &&
                 Curr->Range.contains(Prev->Range.Start)) {
        // A zero-sized symbol swallowed by the next function's range.
        OS << "warning: removing symbol:\n"
           << *Prev << "\nKeeping:\n"
           << *Curr << "\n";
        Curr = Funcs.erase(Prev);
      }
    }

    if (Curr == Funcs.end())
      break;
    Prev = Curr++;
  }

  OS << "Pruned " << NumBefore - Funcs.size() << " functions, ended with "
     << Funcs.size() << " total\n";
  return Error::success();
}
uint32_t GsymCreator::insertString(StringRef S) {
  // Intern S in the string table and return its offset; the empty string
  // always maps to offset zero without touching the table.
  std::lock_guard<std::recursive_mutex> Guard(Mutex);
  return S.empty() ? 0 : StrTab.add(S);
}
// Append a FunctionInfo to the set of functions to encode. Thread-safe.
// Fix: the parameter is an rvalue reference, but the original body copied
// it (emplace_back(FI)); move from it instead to honor the && contract.
void GsymCreator::addFunctionInfo(FunctionInfo &&FI) {
  std::lock_guard<std::recursive_mutex> Guard(Mutex);
  Funcs.emplace_back(std::move(FI));
}
void GsymCreator::forEachFunctionInfo(
    std::function<bool(FunctionInfo &)> const &Callback) {
  // Visit each FunctionInfo under the lock; a false return from the
  // callback stops the traversal early.
  std::lock_guard<std::recursive_mutex> Guard(Mutex);
  for (size_t Idx = 0, Count = Funcs.size(); Idx != Count; ++Idx) {
    if (!Callback(Funcs[Idx]))
      return;
  }
}
void GsymCreator::forEachFunctionInfo(
    std::function<bool(const FunctionInfo &)> const &Callback) const {
  // Read-only traversal under the lock; stops as soon as the callback
  // returns false.
  std::lock_guard<std::recursive_mutex> Guard(Mutex);
  auto It = Funcs.begin();
  const auto End = Funcs.end();
  while (It != End && Callback(*It))
    ++It;
}
| {
"content_hash": "8fdb2655d36491d6d928551247c18789",
"timestamp": "",
"source": "github",
"line_count": 256,
"max_line_length": 79,
"avg_line_length": 35.15234375,
"alnum_prop": 0.6092899211023447,
"repo_name": "llvm-mirror/llvm",
"id": "f371426f201042bb2edbd949864e9a66d13f0396",
"size": "9698",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "lib/DebugInfo/GSYM/GsymCreator.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "53797008"
},
{
"name": "Batchfile",
"bytes": "9834"
},
{
"name": "C",
"bytes": "852170"
},
{
"name": "C++",
"bytes": "86305007"
},
{
"name": "CMake",
"bytes": "536242"
},
{
"name": "CSS",
"bytes": "12605"
},
{
"name": "Dockerfile",
"bytes": "5884"
},
{
"name": "Emacs Lisp",
"bytes": "10556"
},
{
"name": "Go",
"bytes": "149205"
},
{
"name": "HTML",
"bytes": "37873"
},
{
"name": "LLVM",
"bytes": "139035668"
},
{
"name": "Logos",
"bytes": "28"
},
{
"name": "OCaml",
"bytes": "306665"
},
{
"name": "Objective-C",
"bytes": "10226"
},
{
"name": "PHP",
"bytes": "2667"
},
{
"name": "Perl",
"bytes": "25574"
},
{
"name": "Python",
"bytes": "1014377"
},
{
"name": "Roff",
"bytes": "39"
},
{
"name": "Shell",
"bytes": "97425"
},
{
"name": "Swift",
"bytes": "271"
},
{
"name": "Vim script",
"bytes": "17497"
}
],
"symlink_target": ""
} |
package ee.ria.xroad.proxy.clientproxy;
import ee.ria.xroad.common.CodedException;
import ee.ria.xroad.common.ErrorCodes;
/**
* This is exception for errors caused by the client, for example,
* client auth failure, invalid XML, etc.
*/
class ClientException extends CodedException {

    /**
     * Wraps an existing CodedException as a client-side error, copying its
     * fault code, string, actor and detail, and prefixing the fault code
     * with {@link ErrorCodes#CLIENT_X}.
     * @param ex the exception to wrap
     */
    ClientException(CodedException ex) {
        super(ex.getFaultCode(), ex.getFaultString());

        faultActor = ex.getFaultActor();
        faultDetail = ex.getFaultDetail();

        // All the client messages have prefix Client...
        withPrefix(ErrorCodes.CLIENT_X);
    }

    /**
     * Creates a client-side error from a fault code and an underlying cause.
     * @param faultCode the fault code
     * @param cause the causing throwable
     */
    ClientException(String faultCode, Throwable cause) {
        super(faultCode, cause);

        // All the client messages have prefix Client...
        withPrefix(ErrorCodes.CLIENT_X);
    }

    /**
     * Creates a client-side error from a fault code and a formatted message.
     * @param faultCode the fault code
     * @param format format string for the fault message
     * @param args arguments for the format string
     */
    ClientException(String faultCode, String format, Object... args) {
        super(faultCode, format, args);

        // All the client messages have prefix Client...
        withPrefix(ErrorCodes.CLIENT_X);
    }
}
| {
"content_hash": "345124c9cca2de324ea50391f88365ac",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 70,
"avg_line_length": 27.52777777777778,
"alnum_prop": 0.6750756811301716,
"repo_name": "vrk-kpa/X-Road",
"id": "d6c468d099d7f050d25d857d4512b5051cb22d7c",
"size": "2192",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "src/proxy/src/main/java/ee/ria/xroad/proxy/clientproxy/ClientException.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AMPL",
"bytes": "1602"
},
{
"name": "C",
"bytes": "54401"
},
{
"name": "CSS",
"bytes": "62804"
},
{
"name": "HTML",
"bytes": "135934"
},
{
"name": "Java",
"bytes": "4233585"
},
{
"name": "JavaScript",
"bytes": "407789"
},
{
"name": "Makefile",
"bytes": "4405"
},
{
"name": "Perl",
"bytes": "1376"
},
{
"name": "Python",
"bytes": "238897"
},
{
"name": "Roff",
"bytes": "2170"
},
{
"name": "Ruby",
"bytes": "1007868"
},
{
"name": "Scala",
"bytes": "14352"
},
{
"name": "Shell",
"bytes": "142230"
},
{
"name": "XSLT",
"bytes": "1244"
}
],
"symlink_target": ""
} |
<?php
/**
* @ingroup API
*/
class ApiUpload extends ApiBase {
/** @var UploadBase|UploadFromChunks */
protected $mUpload = null;
protected $mParams;
	/**
	 * Entry point for action=upload: validates that uploading is enabled,
	 * selects an upload module (file / chunk / stash / url), verifies the
	 * upload and the user's title permissions, then performs or stashes the
	 * upload and adds the outcome to the API result.
	 */
	public function execute() {
		// Check whether upload is enabled
		if ( !UploadBase::isEnabled() ) {
			$this->dieUsageMsg( 'uploaddisabled' );
		}

		$user = $this->getUser();

		// Parameter handling
		$this->mParams = $this->extractRequestParams();
		$request = $this->getMain()->getRequest();
		// Check if async mode is actually supported (jobs done in cli mode)
		$this->mParams['async'] = ( $this->mParams['async'] && $this->getConfig()->get( 'EnableAsyncUploads' ) );
		// Add the uploaded file to the params array
		$this->mParams['file'] = $request->getFileName( 'file' );
		$this->mParams['chunk'] = $request->getFileName( 'chunk' );

		// Copy the session key to the file key, for backward compatibility.
		if ( !$this->mParams['filekey'] && $this->mParams['sessionkey'] ) {
			$this->logFeatureUsage( 'action=upload&sessionkey' );
			$this->mParams['filekey'] = $this->mParams['sessionkey'];
		}

		// Select an upload module
		try {
			if ( !$this->selectUploadModule() ) {
				return; // not a true upload, but a status request or similar
			} elseif ( !isset( $this->mUpload ) ) {
				$this->dieUsage( 'No upload module set', 'nomodule' );
			}
		} catch ( UploadStashException $e ) { // XXX: don't spam exception log
			$this->handleStashException( $e );
		}

		// First check permission to upload
		$this->checkPermissions( $user );

		// Fetch the file (usually a no-op)
		/** @var $status Status */
		$status = $this->mUpload->fetchFile();
		if ( !$status->isGood() ) {
			$errors = $status->getErrorsArray();
			$error = array_shift( $errors[0] );
			$this->dieUsage( 'Error fetching file from remote source', $error, 0, $errors[0] );
		}

		// Check if the uploaded file is sane
		if ( $this->mParams['chunk'] ) {
			// For chunks only size/title checks are done here; full
			// verification happens when the chunks are assembled.
			$maxSize = UploadBase::getMaxUploadSize();
			if ( $this->mParams['filesize'] > $maxSize ) {
				$this->dieUsage( 'The file you submitted was too large', 'file-too-large' );
			}
			if ( !$this->mUpload->getTitle() ) {
				$this->dieUsage( 'Invalid file title supplied', 'internal-error' );
			}
		} elseif ( $this->mParams['async'] && $this->mParams['filekey'] ) {
			// defer verification to background process
		} else {
			wfDebug( __METHOD__ . " about to verify\n" );
			$this->verifyUpload();
		}

		// Check if the user has the rights to modify or overwrite the requested title
		// (This check is irrelevant if stashing is already requested, since the errors
		// can always be fixed by changing the title)
		if ( !$this->mParams['stash'] ) {
			$permErrors = $this->mUpload->verifyTitlePermissions( $user );
			if ( $permErrors !== true ) {
				$this->dieRecoverableError( $permErrors[0], 'filename' );
			}
		}

		// Get the result based on the current upload context:
		try {
			$result = $this->getContextResult();
			if ( $result['result'] === 'Success' ) {
				$result['imageinfo'] = $this->mUpload->getImageInfo( $this->getResult() );
			}
		} catch ( UploadStashException $e ) { // XXX: don't spam exception log
			$this->handleStashException( $e );
		}

		$this->getResult()->addValue( null, $this->getModuleName(), $result );

		// Cleanup any temporary mess
		$this->mUpload->cleanupTempFile();
	}
/**
* Get an upload result based on upload context
* @return array
*/
private function getContextResult() {
$warnings = $this->getApiWarnings();
if ( $warnings && !$this->mParams['ignorewarnings'] ) {
// Get warnings formatted in result array format
return $this->getWarningsResult( $warnings );
} elseif ( $this->mParams['chunk'] ) {
// Add chunk, and get result
return $this->getChunkResult( $warnings );
} elseif ( $this->mParams['stash'] ) {
// Stash the file and get stash result
return $this->getStashResult( $warnings );
}
// Check throttle after we've handled warnings
if ( UploadBase::isThrottled( $this->getUser() )
) {
$this->dieUsageMsg( 'actionthrottledtext' );
}
// This is the most common case -- a normal upload with no warnings
// performUpload will return a formatted properly for the API with status
return $this->performUpload( $warnings );
}
/**
* Get Stash Result, throws an exception if the file could not be stashed.
* @param array $warnings Array of Api upload warnings
* @return array
*/
private function getStashResult( $warnings ) {
$result = array();
// Some uploads can request they be stashed, so as not to publish them immediately.
// In this case, a failure to stash ought to be fatal
try {
$result['result'] = 'Success';
$result['filekey'] = $this->performStash();
$result['sessionkey'] = $result['filekey']; // backwards compatibility
if ( $warnings && count( $warnings ) > 0 ) {
$result['warnings'] = $warnings;
}
} catch ( UploadStashException $e ) {
$this->handleStashException( $e );
} catch ( Exception $e ) {
$this->dieUsage( $e->getMessage(), 'stashfailed' );
}
return $result;
}
/**
* Get Warnings Result
* @param array $warnings Array of Api upload warnings
* @return array
*/
private function getWarningsResult( $warnings ) {
$result = array();
$result['result'] = 'Warning';
$result['warnings'] = $warnings;
// in case the warnings can be fixed with some further user action, let's stash this upload
// and return a key they can use to restart it
try {
$result['filekey'] = $this->performStash();
$result['sessionkey'] = $result['filekey']; // backwards compatibility
} catch ( Exception $e ) {
$result['warnings']['stashfailed'] = $e->getMessage();
}
return $result;
}
/**
* Get the result of a chunk upload.
* @param array $warnings Array of Api upload warnings
* @return array
*/
	/**
	 * Append the current chunk to a chunked-upload session and report its
	 * state: 'Continue' (more chunks expected), 'Poll' (async assembly
	 * queued) or 'Success' (all chunks concatenated).
	 */
	private function getChunkResult( $warnings ) {
		$result = array();

		if ( $warnings && count( $warnings ) > 0 ) {
			$result['warnings'] = $warnings;
		}

		$request = $this->getMain()->getRequest();
		$chunkPath = $request->getFileTempname( 'chunk' );
		$chunkSize = $request->getUpload( 'chunk' )->getSize();
		$totalSoFar = $this->mParams['offset'] + $chunkSize;
		$minChunkSize = $this->getConfig()->get( 'MinUploadChunkSize' );

		// Sanity check sizing
		if ( $totalSoFar > $this->mParams['filesize'] ) {
			$this->dieUsage(
				'Offset plus current chunk is greater than claimed file size', 'invalid-chunk'
			);
		}

		// Enforce minimum chunk size
		if ( $totalSoFar != $this->mParams['filesize'] && $chunkSize < $minChunkSize ) {
			$this->dieUsage(
				"Minimum chunk size is $minChunkSize bytes for non-final chunks", 'chunk-too-small'
			);
		}

		if ( $this->mParams['offset'] == 0 ) {
			// First chunk: create the stash entry that later chunks append to.
			try {
				$filekey = $this->performStash();
			} catch ( UploadStashException $e ) {
				$this->handleStashException( $e );
			} catch ( Exception $e ) {
				// FIXME: Error handling here is wrong/different from rest of this
				$this->dieUsage( $e->getMessage(), 'stashfailed' );
			}
		} else {
			$filekey = $this->mParams['filekey'];

			// Don't allow further uploads to an already-completed session
			$progress = UploadBase::getSessionStatus( $this->getUser(), $filekey );
			if ( !$progress ) {
				// Probably can't get here, but check anyway just in case
				$this->dieUsage( 'No chunked upload session with this key', 'stashfailed' );
			} elseif ( $progress['result'] !== 'Continue' || $progress['stage'] !== 'uploading' ) {
				$this->dieUsage(
					'Chunked upload is already completed, check status for details', 'stashfailed'
				);
			}

			$status = $this->mUpload->addChunk(
				$chunkPath, $chunkSize, $this->mParams['offset'] );
			if ( !$status->isGood() ) {
				$extradata = array(
					'offset' => $this->mUpload->getOffset(),
				);

				$this->dieUsage( $status->getWikiText(), 'stashfailed', 0, $extradata );
			}
		}

		// Check we added the last chunk:
		if ( $totalSoFar == $this->mParams['filesize'] ) {
			if ( $this->mParams['async'] ) {
				// Queue a background job to assemble the chunks; the client
				// polls with checkstatus until the job reports completion.
				UploadBase::setSessionStatus(
					$this->getUser(),
					$filekey,
					array( 'result' => 'Poll',
						'stage' => 'queued', 'status' => Status::newGood() )
				);
				JobQueueGroup::singleton()->push( new AssembleUploadChunksJob(
					Title::makeTitle( NS_FILE, $filekey ),
					array(
						'filename' => $this->mParams['filename'],
						'filekey' => $filekey,
						'session' => $this->getContext()->exportSession()
					)
				) );
				$result['result'] = 'Poll';
				$result['stage'] = 'queued';
			} else {
				// Assemble synchronously.
				$status = $this->mUpload->concatenateChunks();
				if ( !$status->isGood() ) {
					UploadBase::setSessionStatus(
						$this->getUser(),
						$filekey,
						array( 'result' => 'Failure', 'stage' => 'assembling', 'status' => $status )
					);
					$this->dieUsage( $status->getWikiText(), 'stashfailed' );
				}

				// The fully concatenated file has a new filekey. So remove
				// the old filekey and fetch the new one.
				UploadBase::setSessionStatus( $this->getUser(), $filekey, false );
				$this->mUpload->stash->removeFile( $filekey );
				$filekey = $this->mUpload->getLocalFile()->getFileKey();

				$result['result'] = 'Success';
			}
		} else {
			// More chunks to come: record progress for status polling.
			UploadBase::setSessionStatus(
				$this->getUser(),
				$filekey,
				array(
					'result' => 'Continue',
					'stage' => 'uploading',
					'offset' => $totalSoFar,
					'status' => Status::newGood(),
				)
			);
			$result['result'] = 'Continue';
			$result['offset'] = $totalSoFar;
		}

		$result['filekey'] = $filekey;

		return $result;
	}
/**
* Stash the file and return the file key
* Also re-raises exceptions with slightly more informative message strings (useful for API)
* @throws MWException
* @return string File key
*/
private function performStash() {
try {
$stashFile = $this->mUpload->stashFile( $this->getUser() );
if ( !$stashFile ) {
throw new MWException( 'Invalid stashed file' );
}
$fileKey = $stashFile->getFileKey();
} catch ( Exception $e ) {
$message = 'Stashing temporary file failed: ' . get_class( $e ) . ' ' . $e->getMessage();
wfDebug( __METHOD__ . ' ' . $message . "\n" );
$className = get_class( $e );
throw new $className( $message );
}
return $fileKey;
}
/**
* Throw an error that the user can recover from by providing a better
* value for $parameter
*
* @param array $error Error array suitable for passing to dieUsageMsg()
* @param string $parameter Parameter that needs revising
* @param array $data Optional extra data to pass to the user
* @throws UsageException
*/
private function dieRecoverableError( $error, $parameter, $data = array() ) {
try {
$data['filekey'] = $this->performStash();
$data['sessionkey'] = $data['filekey'];
} catch ( Exception $e ) {
$data['stashfailed'] = $e->getMessage();
}
$data['invalidparameter'] = $parameter;
$parsed = $this->parseMsg( $error );
$this->dieUsage( $parsed['info'], $parsed['code'], 0, $data );
}
/**
* Select an upload module and set it to mUpload. Dies on failure. If the
* request was a status request and not a true upload, returns false;
* otherwise true
*
* @return bool
*/
	/**
	 * Select an upload module and set it to mUpload. Dies on failure. If the
	 * request was a status request and not a true upload, returns false;
	 * otherwise true.
	 *
	 * Dispatch order: checkstatus/statuskey polling requests (return false),
	 * then chunked upload, upload-from-stash, direct file upload, and
	 * finally upload-by-URL.
	 */
	protected function selectUploadModule() {
		$request = $this->getMain()->getRequest();

		// chunk or one and only one of the following parameters is needed
		if ( !$this->mParams['chunk'] ) {
			$this->requireOnlyOneParameter( $this->mParams,
				'filekey', 'file', 'url', 'statuskey' );
		}

		// Status report for "upload to stash"/"upload from stash"
		if ( $this->mParams['filekey'] && $this->mParams['checkstatus'] ) {
			$progress = UploadBase::getSessionStatus( $this->getUser(), $this->mParams['filekey'] );
			if ( !$progress ) {
				$this->dieUsage( 'No result in status data', 'missingresult' );
			} elseif ( !$progress['status']->isGood() ) {
				$this->dieUsage( $progress['status']->getWikiText(), 'stashfailed' );
			}
			if ( isset( $progress['status']->value['verification'] ) ) {
				$this->checkVerification( $progress['status']->value['verification'] );
			}
			unset( $progress['status'] ); // remove Status object
			$this->getResult()->addValue( null, $this->getModuleName(), $progress );

			return false;
		}

		if ( $this->mParams['statuskey'] ) {
			$this->checkAsyncDownloadEnabled();

			// Status request for an async upload
			$sessionData = UploadFromUrlJob::getSessionData( $this->mParams['statuskey'] );
			if ( !isset( $sessionData['result'] ) ) {
				$this->dieUsage( 'No result in session data', 'missingresult' );
			}
			if ( $sessionData['result'] == 'Warning' ) {
				$sessionData['warnings'] = $this->transformWarnings( $sessionData['warnings'] );
				$sessionData['sessionkey'] = $this->mParams['statuskey'];
			}
			$this->getResult()->addValue( null, $this->getModuleName(), $sessionData );

			return false;
		}

		// The following modules all require the filename parameter to be set
		if ( is_null( $this->mParams['filename'] ) ) {
			$this->dieUsageMsg( array( 'missingparam', 'filename' ) );
		}

		if ( $this->mParams['chunk'] ) {
			// Chunk upload
			$this->mUpload = new UploadFromChunks();
			if ( isset( $this->mParams['filekey'] ) ) {
				// A filekey means this is a continuation; offset 0 would
				// contradict that.
				if ( $this->mParams['offset'] === 0 ) {
					$this->dieUsage( 'Cannot supply a filekey when offset is 0', 'badparams' );
				}

				// handle new chunk
				$this->mUpload->continueChunks(
					$this->mParams['filename'],
					$this->mParams['filekey'],
					$request->getUpload( 'chunk' )
				);
			} else {
				if ( $this->mParams['offset'] !== 0 ) {
					$this->dieUsage( 'Must supply a filekey when offset is non-zero', 'badparams' );
				}

				// handle first chunk
				$this->mUpload->initialize(
					$this->mParams['filename'],
					$request->getUpload( 'chunk' )
				);
			}
		} elseif ( isset( $this->mParams['filekey'] ) ) {
			// Upload stashed in a previous request
			if ( !UploadFromStash::isValidKey( $this->mParams['filekey'] ) ) {
				$this->dieUsageMsg( 'invalid-file-key' );
			}

			$this->mUpload = new UploadFromStash( $this->getUser() );
			// This will not download the temp file in initialize() in async mode.
			// We still have enough information to call checkWarnings() and such.
			$this->mUpload->initialize(
				$this->mParams['filekey'], $this->mParams['filename'], !$this->mParams['async']
			);
		} elseif ( isset( $this->mParams['file'] ) ) {
			// Direct multipart file upload.
			$this->mUpload = new UploadFromFile();
			$this->mUpload->initialize(
				$this->mParams['filename'],
				$request->getUpload( 'file' )
			);
		} elseif ( isset( $this->mParams['url'] ) ) {
			// Make sure upload by URL is enabled:
			if ( !UploadFromUrl::isEnabled() ) {
				$this->dieUsageMsg( 'copyuploaddisabled' );
			}

			if ( !UploadFromUrl::isAllowedHost( $this->mParams['url'] ) ) {
				$this->dieUsageMsg( 'copyuploadbaddomain' );
			}

			if ( !UploadFromUrl::isAllowedUrl( $this->mParams['url'] ) ) {
				$this->dieUsageMsg( 'copyuploadbadurl' );
			}

			$async = false;
			if ( $this->mParams['asyncdownload'] ) {
				$this->checkAsyncDownloadEnabled();

				if ( $this->mParams['leavemessage'] && !$this->mParams['ignorewarnings'] ) {
					$this->dieUsage( 'Using leavemessage without ignorewarnings is not supported',
						'missing-ignorewarnings' );
				}

				if ( $this->mParams['leavemessage'] ) {
					$async = 'async-leavemessage';
				} else {
					$async = 'async';
				}
			}
			$this->mUpload = new UploadFromUrl;
			$this->mUpload->initialize( $this->mParams['filename'],
				$this->mParams['url'], $async );
		}

		return true;
	}
/**
* Checks that the user has permissions to perform this upload.
* Dies with usage message on inadequate permissions.
* @param User $user The user to check.
*/
protected function checkPermissions( $user ) {
// Check whether the user has the appropriate permissions to upload anyway
$permission = $this->mUpload->isAllowed( $user );
if ( $permission !== true ) {
if ( !$user->isLoggedIn() ) {
$this->dieUsageMsg( array( 'mustbeloggedin', 'upload' ) );
}
$this->dieUsageMsg( 'badaccess-groups' );
}
}
/**
* Performs file verification, dies on error.
*/
protected function verifyUpload() {
$verification = $this->mUpload->verifyUpload();
if ( $verification['status'] === UploadBase::OK ) {
return;
}
$this->checkVerification( $verification );
}
/**
* Performs file verification, dies on error.
* @param array $verification
*/
	/**
	 * Performs file verification, dies on error.
	 *
	 * Recoverable statuses (fixable by changing the filename) go through
	 * dieRecoverableError() so the client gets a stash filekey back;
	 * everything else dies outright with a status-specific code.
	 *
	 * @param array $verification
	 */
	protected function checkVerification( array $verification ) {
		// @todo Move them to ApiBase's message map
		switch ( $verification['status'] ) {
			// Recoverable errors
			case UploadBase::MIN_LENGTH_PARTNAME:
				$this->dieRecoverableError( 'filename-tooshort', 'filename' );
				break;
			case UploadBase::ILLEGAL_FILENAME:
				$this->dieRecoverableError( 'illegal-filename', 'filename',
					array( 'filename' => $verification['filtered'] ) );
				break;
			case UploadBase::FILENAME_TOO_LONG:
				$this->dieRecoverableError( 'filename-toolong', 'filename' );
				break;
			case UploadBase::FILETYPE_MISSING:
				$this->dieRecoverableError( 'filetype-missing', 'filename' );
				break;
			case UploadBase::WINDOWS_NONASCII_FILENAME:
				$this->dieRecoverableError( 'windows-nonascii-filename', 'filename' );
				break;

			// Unrecoverable errors
			case UploadBase::EMPTY_FILE:
				$this->dieUsage( 'The file you submitted was empty', 'empty-file' );
				break;
			case UploadBase::FILE_TOO_LARGE:
				$this->dieUsage( 'The file you submitted was too large', 'file-too-large' );
				break;

			case UploadBase::FILETYPE_BADTYPE:
				// Include the rejected extension plus the configured allowed
				// (and, if known, blacklisted) extensions in the error data.
				$extradata = array(
					'filetype' => $verification['finalExt'],
					'allowed' => array_values( array_unique( $this->getConfig()->get( 'FileExtensions' ) ) )
				);
				ApiResult::setIndexedTagName( $extradata['allowed'], 'ext' );

				$msg = "Filetype not permitted: ";
				if ( isset( $verification['blacklistedExt'] ) ) {
					$msg .= join( ', ', $verification['blacklistedExt'] );
					$extradata['blacklisted'] = array_values( $verification['blacklistedExt'] );
					ApiResult::setIndexedTagName( $extradata['blacklisted'], 'ext' );
				} else {
					$msg .= $verification['finalExt'];
				}
				$this->dieUsage( $msg, 'filetype-banned', 0, $extradata );
				break;
			case UploadBase::VERIFICATION_ERROR:
				ApiResult::setIndexedTagName( $verification['details'], 'detail' );
				$this->dieUsage( 'This file did not pass file verification', 'verification-error',
					0, array( 'details' => $verification['details'] ) );
				break;
			case UploadBase::HOOK_ABORTED:
				$this->dieUsage( "The modification you tried to make was aborted by an extension hook",
					'hookaborted', 0, array( 'error' => $verification['error'] ) );
				break;
			default:
				$this->dieUsage( 'An unknown error occurred', 'unknown-error',
					0, array( 'code' => $verification['status'] ) );
				break;
		}
	}
/**
 * Check warnings.
 * Returns a suitable array for inclusion into API results if there were warnings
 * Returns the empty array if there were no warnings
 *
 * @return array
 */
protected function getApiWarnings() {
	// Fetch raw warnings from the upload handler and normalize them
	// for API output in one step.
	return $this->transformWarnings( $this->mUpload->checkWarnings() );
}
/**
 * Convert the warnings returned by UploadBase::checkWarnings() into a
 * structure suitable for inclusion in the API result.
 *
 * - Adds XML tag names so the array serializes cleanly.
 * - Flattens the 'duplicate' warning into a list of file names.
 * - Replaces the 'exists' warning entry with a key named after the
 *   specific warning type, mapped to the affected file's name.
 *
 * Fix: removed an unused local (`$result = $this->getResult();`) whose
 * value was never referenced — a leftover dead call.
 *
 * @param array $warnings Raw warnings from UploadBase::checkWarnings()
 * @return array Transformed warnings; unchanged (empty) if there were none
 */
protected function transformWarnings( $warnings ) {
	if ( $warnings ) {
		// Add indices
		ApiResult::setIndexedTagName( $warnings, 'warning' );

		if ( isset( $warnings['duplicate'] ) ) {
			$dupes = array();
			/** @var File $dupe */
			foreach ( $warnings['duplicate'] as $dupe ) {
				$dupes[] = $dupe->getName();
			}
			ApiResult::setIndexedTagName( $dupes, 'duplicate' );
			$warnings['duplicate'] = $dupes;
		}

		if ( isset( $warnings['exists'] ) ) {
			$warning = $warnings['exists'];
			unset( $warnings['exists'] );
			/** @var LocalFile $localFile */
			$localFile = isset( $warning['normalizedFile'] )
				? $warning['normalizedFile']
				: $warning['file'];
			// Re-key under the specific warning type; value is the file name
			$warnings[$warning['warning']] = $localFile->getName();
		}
	}

	return $warnings;
}
/**
 * Handles a stash exception, giving a useful error to the user.
 *
 * @param Exception $e The exception we encountered.
 */
protected function handleStashException( $e ) {
	// Known stash exception classes mapped to ( message prefix, API
	// error code ); anything unrecognized falls through to a generic
	// 'stasherror' response.
	static $stashErrors = array(
		'UploadStashFileNotFoundException' =>
			array( 'Could not find the file in the stash: ', 'stashedfilenotfound' ),
		'UploadStashBadPathException' =>
			array( 'File key of improper format or otherwise invalid: ', 'stashpathinvalid' ),
		'UploadStashFileException' =>
			array( 'Could not store upload in the stash: ', 'stashfilestorage' ),
		'UploadStashZeroLengthFileException' =>
			array( 'File is of zero length, and could not be stored in the stash: ', 'stashzerolength' ),
		'UploadStashNotLoggedInException' =>
			array( 'Not logged in: ', 'stashnotloggedin' ),
		'UploadStashWrongOwnerException' =>
			array( 'Wrong owner: ', 'stashwrongowner' ),
		'UploadStashNoSuchKeyException' =>
			array( 'No such filekey: ', 'stashnosuchfilekey' ),
	);

	$exceptionType = get_class( $e );
	if ( isset( $stashErrors[$exceptionType] ) ) {
		list( $prefix, $code ) = $stashErrors[$exceptionType];
		$this->dieUsage( $prefix . $e->getMessage(), $code );
	} else {
		$this->dieUsage( $exceptionType . ": " . $e->getMessage(), 'stasherror' );
	}
}
/**
 * Perform the actual upload. Returns a suitable result array on success;
 * dies on failure.
 *
 * In async mode a PublishStashedFileJob is queued and the client is told
 * to poll ('result' => 'Poll'); otherwise the upload is published
 * synchronously and 'result' => 'Success' is returned.
 *
 * @param array $warnings Array of Api upload warnings
 * @return array
 */
protected function performUpload( $warnings ) {
	// Use comment as initial page text by default
	if ( is_null( $this->mParams['text'] ) ) {
		$this->mParams['text'] = $this->mParams['comment'];
	}

	/** @var $file File */
	$file = $this->mUpload->getLocalFile();

	// For preferences mode, we want to watch if 'watchdefault' is set or
	// if the *file* doesn't exist and 'watchcreations' is set. But
	// getWatchlistValue()'s automatic handling checks if the *title*
	// exists or not, so we need to check both prefs manually.
	$watch = $this->getWatchlistValue(
		$this->mParams['watchlist'], $file->getTitle(), 'watchdefault'
	);
	if ( !$watch && $this->mParams['watchlist'] == 'preferences' && !$file->exists() ) {
		$watch = $this->getWatchlistValue(
			$this->mParams['watchlist'], $file->getTitle(), 'watchcreations'
		);
	}

	// Deprecated parameters
	if ( $this->mParams['watch'] ) {
		$this->logFeatureUsage( 'action=upload&watch' );
		$watch = true;
	}

	// No errors, no warnings: do the upload
	if ( $this->mParams['async'] ) {
		// Asynchronous path: refuse if a publish for this file key is
		// already pending, otherwise mark it queued and push a job.
		$progress = UploadBase::getSessionStatus( $this->getUser(), $this->mParams['filekey'] );
		if ( $progress && $progress['result'] === 'Poll' ) {
			$this->dieUsage( "Upload from stash already in progress.", 'publishfailed' );
		}
		UploadBase::setSessionStatus(
			$this->getUser(),
			$this->mParams['filekey'],
			array( 'result' => 'Poll', 'stage' => 'queued', 'status' => Status::newGood() )
		);
		JobQueueGroup::singleton()->push( new PublishStashedFileJob(
			Title::makeTitle( NS_FILE, $this->mParams['filename'] ),
			array(
				'filename' => $this->mParams['filename'],
				'filekey' => $this->mParams['filekey'],
				'comment' => $this->mParams['comment'],
				'text' => $this->mParams['text'],
				'watch' => $watch,
				// Exported session data accompanies the job so it can run
				// on the uploading user's behalf
				'session' => $this->getContext()->exportSession()
			)
		) );
		$result['result'] = 'Poll';
		$result['stage'] = 'queued';
	} else {
		// Synchronous path: publish the upload immediately.
		/** @var $status Status */
		$status = $this->mUpload->performUpload( $this->mParams['comment'],
			$this->mParams['text'], $watch, $this->getUser() );

		if ( !$status->isGood() ) {
			$error = $status->getErrorsArray();

			if ( count( $error ) == 1 && $error[0][0] == 'async' ) {
				// The upload can not be performed right now, because the user
				// requested so
				return array(
					'result' => 'Queued',
					'statuskey' => $error[0][1],
				);
			}

			ApiResult::setIndexedTagName( $error, 'error' );
			$this->dieUsage( 'An internal error occurred', 'internal-error', 0, $error );
		}
		$result['result'] = 'Success';
	}

	$result['filename'] = $file->getName();
	if ( $warnings && count( $warnings ) > 0 ) {
		$result['warnings'] = $warnings;
	}

	return $result;
}
/**
 * Checks if asynchronous copy uploads are enabled and throws an error if they are not.
 */
protected function checkAsyncDownloadEnabled() {
	$enabled = $this->getConfig()->get( 'AllowAsyncCopyUploads' );
	if ( !$enabled ) {
		// dieUsage() does not return
		$this->dieUsage( 'Asynchronous copy uploads disabled', 'asynccopyuploaddisabled' );
	}
}
/**
 * Uploads modify wiki state, so this module requires HTTP POST.
 *
 * @return bool Always true
 */
public function mustBePosted() {
	return true;
}
/**
 * This module writes to the wiki (creates/overwrites files).
 *
 * @return bool Always true
 */
public function isWriteMode() {
	return true;
}
/**
 * Parameter definitions for this module.
 *
 * @return array Map of parameter name => definition (or scalar default)
 */
public function getAllowedParams() {
	// Returned as a single literal so the complete set of supported
	// upload parameters is visible in one place.
	return array(
		'filename' => array(
			ApiBase::PARAM_TYPE => 'string',
		),
		'comment' => array(
			ApiBase::PARAM_DFLT => ''
		),
		'text' => null,
		'watch' => array(
			ApiBase::PARAM_DFLT => false,
			ApiBase::PARAM_DEPRECATED => true,
		),
		'watchlist' => array(
			ApiBase::PARAM_DFLT => 'preferences',
			ApiBase::PARAM_TYPE => array(
				'watch',
				'preferences',
				'nochange'
			),
		),
		'ignorewarnings' => false,
		'file' => array(
			ApiBase::PARAM_TYPE => 'upload',
		),
		'url' => null,
		'filekey' => null,
		'sessionkey' => array(
			ApiBase::PARAM_DFLT => null,
			ApiBase::PARAM_DEPRECATED => true,
		),
		'stash' => false,
		'filesize' => array(
			ApiBase::PARAM_TYPE => 'integer',
			ApiBase::PARAM_MIN => 0,
			ApiBase::PARAM_MAX => UploadBase::getMaxUploadSize(),
		),
		'offset' => array(
			ApiBase::PARAM_TYPE => 'integer',
			ApiBase::PARAM_MIN => 0,
		),
		'chunk' => array(
			ApiBase::PARAM_TYPE => 'upload',
		),
		'async' => false,
		'asyncdownload' => false,
		'leavemessage' => false,
		'statuskey' => null,
		'checkstatus' => false,
	);
}
/**
 * @return string Token type required by this module ('csrf')
 */
public function needsToken() {
	return 'csrf';
}
/**
 * Example requests for the auto-generated help page.
 *
 * @return array Map of example query string => i18n message key
 */
protected function getExamplesMessages() {
	$examples = array();
	$examples[ 'action=upload&filename=Wiki.png' .
		'&url=http%3A//upload.wikimedia.org/wikipedia/en/b/bc/Wiki.png&token=123ABC' ] =
		'apihelp-upload-example-url';
	$examples['action=upload&filename=Wiki.png&filekey=filekey&ignorewarnings=1&token=123ABC'] =
		'apihelp-upload-example-filekey';
	return $examples;
}
/**
 * @return string URL of the on-wiki documentation for this module
 */
public function getHelpUrls() {
	return 'https://www.mediawiki.org/wiki/API:Upload';
}
}
| {
"content_hash": "b03a9f11e05a3c20a036de78a01e5c8f",
"timestamp": "",
"source": "github",
"line_count": 833,
"max_line_length": 126,
"avg_line_length": 31.73469387755102,
"alnum_prop": 0.6275770758464158,
"repo_name": "spicy-tmw/wiki-raw",
"id": "7661625c09bf482c779eb52738e6d2690bfbb317",
"size": "27321",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "includes/api/ApiUpload.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "326"
},
{
"name": "Batchfile",
"bytes": "45"
},
{
"name": "C++",
"bytes": "1865"
},
{
"name": "CSS",
"bytes": "485186"
},
{
"name": "Cucumber",
"bytes": "5728"
},
{
"name": "HTML",
"bytes": "4033"
},
{
"name": "JavaScript",
"bytes": "2367631"
},
{
"name": "Makefile",
"bytes": "4495"
},
{
"name": "PHP",
"bytes": "15834807"
},
{
"name": "PLSQL",
"bytes": "61551"
},
{
"name": "PLpgSQL",
"bytes": "31212"
},
{
"name": "Perl",
"bytes": "27998"
},
{
"name": "Python",
"bytes": "14510"
},
{
"name": "Ruby",
"bytes": "26351"
},
{
"name": "SQLPL",
"bytes": "2159"
},
{
"name": "Shell",
"bytes": "9772"
}
],
"symlink_target": ""
} |
/*!
* Author : Matteo Bruni
* MIT license: https://opensource.org/licenses/MIT
* Demo / Generator : https://particles.js.org/
* GitHub : https://www.github.com/matteobruni/tsparticles
* How to use? : Check the GitHub README
* v1.43.0
*/
(function webpackUniversalModuleDefinition(root, factory) {
if(typeof exports === 'object' && typeof module === 'object')
module.exports = factory(require("tsparticles"));
else if(typeof define === 'function' && define.amd)
define(["tsparticles"], factory);
else {
var a = typeof exports === 'object' ? factory(require("tsparticles")) : factory(root["window"]);
for(var i in a) (typeof exports === 'object' ? exports : root)[i] = a[i];
}
})(this, function(__WEBPACK_EXTERNAL_MODULE__764__) {
return /******/ (function() { // webpackBootstrap
/******/ "use strict";
/******/ var __webpack_modules__ = ({
/***/ 764:
/***/ (function(module) {
module.exports = __WEBPACK_EXTERNAL_MODULE__764__;
/***/ })
/******/ });
/************************************************************************/
/******/ // The module cache
/******/ var __webpack_module_cache__ = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ var cachedModule = __webpack_module_cache__[moduleId];
/******/ if (cachedModule !== undefined) {
/******/ return cachedModule.exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = __webpack_module_cache__[moduleId] = {
/******/ // no module.id needed
/******/ // no module.loaded needed
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ __webpack_modules__[moduleId](module, module.exports, __webpack_require__);
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/************************************************************************/
/******/ /* webpack/runtime/make namespace object */
/******/ !function() {
/******/ // define __esModule on exports
/******/ __webpack_require__.r = function(exports) {
/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) {
/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' });
/******/ }
/******/ Object.defineProperty(exports, '__esModule', { value: true });
/******/ };
/******/ }();
/******/
/************************************************************************/
var __webpack_exports__ = {};
// This entry need to be wrapped in an IIFE because it need to be isolated against other modules in the chunk.
!function() {
// ESM COMPAT FLAG
__webpack_require__.r(__webpack_exports__);
// EXTERNAL MODULE: external {"commonjs":"tsparticles","commonjs2":"tsparticles","amd":"tsparticles","root":"window"}
var external_commonjs_tsparticles_commonjs2_tsparticles_amd_tsparticles_root_window_ = __webpack_require__(764);
;// CONCATENATED MODULE: ./dist/browser/RoundedRectDrawer.js
// Trace a rounded-rectangle path on `ctx`: four straight edges joined by
// quadratic curves at the corners, starting at the top-left corner and
// proceeding clockwise. The caller is responsible for beginning/closing
// the path and for stroking/filling it.
const drawRoundedRect = (ctx, info, radius = {
  topRight: 4,
  bottomRight: 4,
  bottomLeft: 4,
  topLeft: 4
}) => {
  const { x: left, y: top, width, height } = info;
  const right = left + width;
  const bottom = top + height;
  // top edge, then top-right corner
  ctx.moveTo(left + radius.topLeft, top);
  ctx.lineTo(right - radius.topRight, top);
  ctx.quadraticCurveTo(right, top, right, top + radius.topRight);
  // right edge, then bottom-right corner
  ctx.lineTo(right, bottom - radius.bottomRight);
  ctx.quadraticCurveTo(right, bottom, right - radius.bottomRight, bottom);
  // bottom edge, then bottom-left corner
  ctx.lineTo(left + radius.bottomLeft, bottom);
  ctx.quadraticCurveTo(left, bottom, left, bottom - radius.bottomLeft);
  // left edge, then top-left corner back to the start
  ctx.lineTo(left, top + radius.topLeft);
  ctx.quadraticCurveTo(left, top, left + radius.topLeft, top);
};
// Shape drawer that renders particles as squares with rounded corners.
class RoundedRectDrawer {
  // Resolve the configured corner radius (default 4 when shapeData or
  // its radius is missing) and scale it by the container's pixel ratio,
  // caching the result on the particle as `borderRadius`.
  particleInit(container, particle) {
    const shapeData = particle.shapeData;
    const rawRadius = shapeData == null ? undefined : shapeData.radius;
    const resolvedRadius = rawRadius == null ? 4 : rawRadius;
    particle.borderRadius = resolvedRadius * container.retina.pixelRatio;
  }
  // Draw a square of side `radius` with all four corners rounded by the
  // per-particle border radius computed in particleInit.
  draw(context, particle, radius) {
    const cornerRadius = particle.borderRadius;
    drawRoundedRect(context, {
      x: 0,
      y: 0,
      height: radius,
      width: radius
    }, {
      topLeft: cornerRadius,
      topRight: cornerRadius,
      bottomLeft: cornerRadius,
      bottomRight: cornerRadius
    });
  }
}
;// CONCATENATED MODULE: ./dist/browser/shape.js
// Register the "rounded-rect" shape drawer with the given tsParticles engine.
function loadRoundedRectShape(engine) {
  const drawer = new RoundedRectDrawer();
  engine.addShape("rounded-rect", drawer);
}
;// CONCATENATED MODULE: ./dist/browser/index.js
loadRoundedRectShape(external_commonjs_tsparticles_commonjs2_tsparticles_amd_tsparticles_root_window_.tsParticles);
}();
/******/ return __webpack_exports__;
/******/ })()
;
}); | {
"content_hash": "7a336092df9585411ea1209f4b534825",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 176,
"avg_line_length": 33.829787234042556,
"alnum_prop": 0.5821802935010482,
"repo_name": "cdnjs/cdnjs",
"id": "9b895f0c2916f7baa2652d556228bae25a8fc179",
"size": "4770",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ajax/libs/tsparticles-shape-rounded-rect/1.43.0/tsparticles.shape.rounded-rect.js",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<link rel="SHORTCUT ICON" href="../../../../../img/clover.ico" />
<link rel="stylesheet" href="../../../../../aui/css/aui.min.css" media="all"/>
<link rel="stylesheet" href="../../../../../aui/css/aui-experimental.min.css" media="all"/>
<!--[if IE 9]><link rel="stylesheet" href="../../../../../aui/css/aui-ie9.min.css" media="all"/><![endif]-->
<style type="text/css" media="all">
@import url('../../../../../style.css');
@import url('../../../../../tree.css');
</style>
<script src="../../../../../jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui-experimental.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui-soy.min.js" type="text/javascript"></script>
<script src="../../../../../package-nodes-tree.js" type="text/javascript"></script>
<script src="../../../../../clover-tree.js" type="text/javascript"></script>
<script src="../../../../../clover.js" type="text/javascript"></script>
<script src="../../../../../clover-descriptions.js" type="text/javascript"></script>
<script src="../../../../../cloud.js" type="text/javascript"></script>
<title>ABA Route Transit Number Validator 1.0.1-SNAPSHOT</title>
</head>
<body>
<div id="page">
<header id="header" role="banner">
<nav class="aui-header aui-dropdown2-trigger-group" role="navigation">
<div class="aui-header-inner">
<div class="aui-header-primary">
<h1 id="logo" class="aui-header-logo aui-header-logo-clover">
<a href="http://openclover.org" title="Visit OpenClover home page"><span class="aui-header-logo-device">OpenClover</span></a>
</h1>
</div>
<div class="aui-header-secondary">
<ul class="aui-nav">
<li id="system-help-menu">
<a class="aui-nav-link" title="Open online documentation" target="_blank"
href="http://openclover.org/documentation">
<span class="aui-icon aui-icon-small aui-iconfont-help"> Help</span>
</a>
</li>
</ul>
</div>
</div>
</nav>
</header>
<div class="aui-page-panel">
<div class="aui-page-panel-inner">
<div class="aui-page-panel-nav aui-page-panel-nav-clover">
<div class="aui-page-header-inner" style="margin-bottom: 20px;">
<div class="aui-page-header-image">
<a href="http://cardatechnologies.com" target="_top">
<div class="aui-avatar aui-avatar-large aui-avatar-project">
<div class="aui-avatar-inner">
<img src="../../../../../img/clover_logo_large.png" alt="Clover icon"/>
</div>
</div>
</a>
</div>
<div class="aui-page-header-main" >
<h1>
<a href="http://cardatechnologies.com" target="_top">
ABA Route Transit Number Validator 1.0.1-SNAPSHOT
</a>
</h1>
</div>
</div>
<nav class="aui-navgroup aui-navgroup-vertical">
<div class="aui-navgroup-inner">
<ul class="aui-nav">
<li class="">
<a href="../../../../../dashboard.html">Project overview</a>
</li>
</ul>
<div class="aui-nav-heading packages-nav-heading">
<strong>Packages</strong>
</div>
<div class="aui-nav project-packages">
<form method="get" action="#" class="aui package-filter-container">
<input type="text" autocomplete="off" class="package-filter text"
placeholder="Type to filter packages..." name="package-filter" id="package-filter"
title="Start typing package name (or part of the name) to search through the tree. Use arrow keys and the Enter key to navigate."/>
</form>
<p class="package-filter-no-results-message hidden">
<small>No results found.</small>
</p>
<div class="packages-tree-wrapper" data-root-relative="../../../../../" data-package-name="com.cardatechnologies.utils.validators.abaroutevalidator">
<div class="packages-tree-container"></div>
<div class="clover-packages-lozenges"></div>
</div>
</div>
</div>
</nav> </div>
<section class="aui-page-panel-content">
<div class="aui-page-panel-content-clover">
<div class="aui-page-header-main"><ol class="aui-nav aui-nav-breadcrumbs">
<li><a href="../../../../../dashboard.html"> Project Clover database Sat Aug 7 2021 12:29:33 MDT</a></li>
<li><a href="test-pkg-summary.html">Package com.cardatechnologies.utils.validators.abaroutevalidator</a></li>
<li><a href="test-Test_AbaRouteValidator_15.html">Class Test_AbaRouteValidator_15</a></li>
</ol></div>
<h1 class="aui-h2-clover">
Test testAbaNumberCheck_33072_bad
</h1>
<table class="aui">
<thead>
<tr>
<th>Test</th>
<th><label title="The test result. Either a Pass, Fail or Error.">Status</label></th>
<th><label title="When the test execution was started">Start time</label></th>
<th><label title="The total time in seconds taken to run this test.">Time (seconds)</label></th>
<th><label title="A failure or error message if the test is not successful.">Message</label></th>
</tr>
</thead>
<tbody>
<tr>
<td>
<a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_15.html?line=13508#src-13508" >testAbaNumberCheck_33072_bad</a>
</td>
<td>
<span class="sortValue">1</span><span class="aui-lozenge aui-lozenge-success">PASS</span>
</td>
<td>
7 Aug 12:44:54
</td>
<td>
0.001 </td>
<td>
<div></div>
<div class="errorMessage"></div>
</td>
</tr>
</tbody>
</table>
<div> </div>
<table class="aui aui-table-sortable">
<thead>
<tr>
<th style="white-space:nowrap;"><label title="A class that was directly hit by this test.">Target Class</label></th>
<th colspan="4"><label title="The percentage of coverage contributed by each single test.">Coverage contributed by</label> testAbaNumberCheck_33072_bad</th>
</tr>
</thead>
<tbody>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.exceptions.AbaRouteValidationException</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/exceptions/AbaRouteValidationException.html?id=36232#AbaRouteValidationException" title="AbaRouteValidationException" name="sl-43">com.cardatechnologies.utils.validators.abaroutevalidator.exceptions.AbaRouteValidationException</a>
</td>
<td>
<span class="sortValue">0.5714286</span>57.1%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="57.1% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:57.1%"></div></div></div> </td>
</tr>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.ErrorCodes</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/ErrorCodes.html?id=36232#ErrorCodes" title="ErrorCodes" name="sl-42">com.cardatechnologies.utils.validators.abaroutevalidator.ErrorCodes</a>
</td>
<td>
<span class="sortValue">0.5714286</span>57.1%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="57.1% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:57.1%"></div></div></div> </td>
</tr>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/AbaRouteValidator.html?id=36232#AbaRouteValidator" title="AbaRouteValidator" name="sl-47">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</a>
</td>
<td>
<span class="sortValue">0.29411766</span>29.4%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="29.4% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:29.4%"></div></div></div> </td>
</tr>
</tbody>
</table>
</div> <!-- class="aui-page-panel-content-clover" -->
<footer id="footer" role="contentinfo">
<section class="footer-body">
<ul>
<li>
Report generated by <a target="_new" href="http://openclover.org">OpenClover</a> v 4.4.1
on Sat Aug 7 2021 12:49:26 MDT using coverage data from Sat Aug 7 2021 12:47:23 MDT.
</li>
</ul>
<ul>
<li>OpenClover is free and open-source software. </li>
</ul>
</section>
</footer> </section> <!-- class="aui-page-panel-content" -->
</div> <!-- class="aui-page-panel-inner" -->
</div> <!-- class="aui-page-panel" -->
</div> <!-- id="page" -->
</body>
</html> | {
"content_hash": "9c46a91087e6ff516ae8f5ed721eb485",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 359,
"avg_line_length": 46.761702127659575,
"alnum_prop": 0.5304395304395304,
"repo_name": "dcarda/aba.route.validator",
"id": "6b1a9b877f2957670b081699c9c85af31681bd4b",
"size": "10989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "target13/site/clover/com/cardatechnologies/utils/validators/abaroutevalidator/Test_AbaRouteValidator_15_testAbaNumberCheck_33072_bad_ryg.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "18715254"
}
],
"symlink_target": ""
} |
{% extends 'registration/base_login.html' %}
{% load i18n %}
{% load bootstrap3 %}
{% block title %}{% trans 'Login' %} | Seeder {% endblock %}
{% block extrahead %}
<link href="{{ STATIC_URL }}login.css" rel="stylesheet">
{% endblock %}
{% block content %}
<form class="login-box" method="post">
{% csrf_token %}
{% bootstrap_form form %}
<button class="btn btn-lg btn-primary btn-block" type="submit">{% trans 'Login to Seeder' %}</button>
{% if form.errors %}
<a href="{% url 'password_reset' %}" class="btn btn-lg btn-primary btn-block btn-danger">{% trans 'Reset password' %}</a>
{% endif %}
</form>
{% endblock %} | {
"content_hash": "794da7a8e5623954eb5ad5819618f854",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 133,
"avg_line_length": 34.3,
"alnum_prop": 0.5728862973760933,
"repo_name": "WebArchivCZ/Seeder",
"id": "0adfc57135aed5b76116b8c60fb062f7c9bcff0f",
"size": "686",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Seeder/templates/registration/login.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "40916"
},
{
"name": "HTML",
"bytes": "191411"
},
{
"name": "JavaScript",
"bytes": "35092"
},
{
"name": "PHP",
"bytes": "996"
},
{
"name": "Python",
"bytes": "298522"
},
{
"name": "Shell",
"bytes": "691"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- JBoss, Home of Professional Open Source Copyright 2017, Red Hat, Inc.
and/or its affiliates, and individual contributors by the @authors tag. See
the copyright.txt in the distribution for a full listing of individual contributors.
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>sap-trfc-server-spring-boot</artifactId>
<name>Spring Boot Transactional RFC Server Example</name>
<description>This Quick Start demonstrates how to use the JBoss Fuse SAP Transactional RFC Server endpoint.</description>
<url>http://http://www.jboss.org/products/fuse/overview/</url>
<parent>
<groupId>org.jboss.quickstarts.fuse</groupId>
<artifactId>sap-quickstarts-spring-boot-parent</artifactId>
<version>7.1.0.fuse-710023-redhat-00001</version>
<relativePath>..</relativePath>
</parent>
<dependencies>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-core-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.fusesource</groupId>
<artifactId>camel-sap-starter</artifactId>
</dependency>
<dependency>
<groupId>org.fusesource</groupId>
<artifactId>camel-sap</artifactId>
</dependency>
<dependency>
<groupId>com.sap.conn.jco</groupId>
<artifactId>sapjco3</artifactId>
<version>${sapjco3-version}</version>
<scope>system</scope>
<systemPath>${basedir}/lib/sapjco3.jar</systemPath>
</dependency>
<dependency>
<groupId>com.sap.conn.idoc</groupId>
<artifactId>sapidoc3</artifactId>
<version>${sapidoc3-version}</version>
<scope>system</scope>
<systemPath>${basedir}/lib/sapidoc3.jar</systemPath>
</dependency>
</dependencies>
<build>
<defaultGoal>spring-boot:run</defaultGoal>
<resources>
<resource>
<directory>src/main/resources</directory>
<filtering>true</filtering>
</resource>
</resources>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.camel</groupId>
<artifactId>camel-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
| {
"content_hash": "ae7178c537a143c63f0b33fe49aced6d",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 122,
"avg_line_length": 32.823008849557525,
"alnum_prop": 0.7158263682933406,
"repo_name": "punkhorn/sap-quickstarts",
"id": "a8046a1913e8f71e7fd8e1f8deda522534fb2561",
"size": "3709",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spring-boot/sap-trfc-server-spring-boot/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "201908"
}
],
"symlink_target": ""
} |
#ifndef Application_DEFINED
#define Application_DEFINED

namespace sk_app {

// Abstract base class for an sk_app application. Platform code supplies
// the concrete subclass via the Create() factory.
class Application {
public:
    // Factory implemented per platform. `platformData` is an opaque,
    // platform-specific handle (semantics not visible here -- see the
    // platform's implementation).
    static Application* Create(int argc, char** argv, void* platformData);

    virtual ~Application() {}

    // Idle callback; `ms` is a time value in milliseconds (presumably
    // elapsed time -- confirm against the caller).
    virtual void onIdle(double ms) = 0;
};

}   // namespace sk_app

#endif
| {
"content_hash": "8db86eb545b07fe69a41c36f806c349d",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 74,
"avg_line_length": 15.263157894736842,
"alnum_prop": 0.6931034482758621,
"repo_name": "tmpvar/skia.cc",
"id": "235ff09ed4ed73e7b78ce5d958ecde27d0d54104",
"size": "428",
"binary": false,
"copies": "1",
"ref": "refs/heads/no-webp",
"path": "tools/viewer/sk_app/Application.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1133"
},
{
"name": "C",
"bytes": "900542"
},
{
"name": "C++",
"bytes": "19621631"
},
{
"name": "Go",
"bytes": "7147"
},
{
"name": "HTML",
"bytes": "477"
},
{
"name": "Java",
"bytes": "27980"
},
{
"name": "JavaScript",
"bytes": "7593"
},
{
"name": "Lua",
"bytes": "25531"
},
{
"name": "Makefile",
"bytes": "8868"
},
{
"name": "Objective-C",
"bytes": "22088"
},
{
"name": "Objective-C++",
"bytes": "97189"
},
{
"name": "PHP",
"bytes": "116206"
},
{
"name": "Python",
"bytes": "371459"
},
{
"name": "Shell",
"bytes": "52874"
}
],
"symlink_target": ""
} |
.class Landroid/os/IUserManager$Stub$Proxy;
.super Ljava/lang/Object;
.source "IUserManager.java"
# interfaces
.implements Landroid/os/IUserManager;
# annotations
.annotation system Ldalvik/annotation/EnclosingClass;
value = Landroid/os/IUserManager$Stub;
.end annotation
.annotation system Ldalvik/annotation/InnerClass;
accessFlags = 0xa
name = "Proxy"
.end annotation
# instance fields
.field private mRemote:Landroid/os/IBinder;
# direct methods
.method constructor <init>(Landroid/os/IBinder;)V
.locals 0
.parameter "remote"
.prologue
.line 290
invoke-direct {p0}, Ljava/lang/Object;-><init>()V
.line 291
iput-object p1, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
.line 292
return-void
.end method
# virtual methods
.method public asBinder()Landroid/os/IBinder;
.locals 1
.prologue
.line 295
iget-object v0, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
return-object v0
.end method
.method public createUser(Ljava/lang/String;I)Landroid/content/pm/UserInfo;
.locals 6
.parameter "name"
.parameter "flags"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 303
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 304
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 307
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 308
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeString(Ljava/lang/String;)V
.line 309
invoke-virtual {v0, p2}, Landroid/os/Parcel;->writeInt(I)V
.line 310
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/4 v4, 0x1
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 311
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 312
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
move-result v3
if-eqz v3, :cond_0
.line 313
sget-object v3, Landroid/content/pm/UserInfo;->CREATOR:Landroid/os/Parcelable$Creator;
invoke-interface {v3, v1}, Landroid/os/Parcelable$Creator;->createFromParcel(Landroid/os/Parcel;)Ljava/lang/Object;
move-result-object v2
check-cast v2, Landroid/content/pm/UserInfo;
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 320
.local v2, _result:Landroid/content/pm/UserInfo;
:goto_0
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 321
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 323
return-object v2
.line 316
.end local v2 #_result:Landroid/content/pm/UserInfo;
:cond_0
const/4 v2, 0x0
.restart local v2 #_result:Landroid/content/pm/UserInfo;
goto :goto_0
.line 320
.end local v2 #_result:Landroid/content/pm/UserInfo;
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 321
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public getApplicationRestrictions(Ljava/lang/String;)Landroid/os/Bundle;
.locals 6
.parameter "packageName"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 615
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 616
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 619
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 620
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeString(Ljava/lang/String;)V
.line 621
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v4, 0x11
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 622
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 623
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
move-result v3
if-eqz v3, :cond_0
.line 624
sget-object v3, Landroid/os/Bundle;->CREATOR:Landroid/os/Parcelable$Creator;
invoke-interface {v3, v1}, Landroid/os/Parcelable$Creator;->createFromParcel(Landroid/os/Parcel;)Ljava/lang/Object;
move-result-object v2
check-cast v2, Landroid/os/Bundle;
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 631
.local v2, _result:Landroid/os/Bundle;
:goto_0
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 632
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 634
return-object v2
.line 627
.end local v2 #_result:Landroid/os/Bundle;
:cond_0
const/4 v2, 0x0
.restart local v2 #_result:Landroid/os/Bundle;
goto :goto_0
.line 631
.end local v2 #_result:Landroid/os/Bundle;
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 632
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public getApplicationRestrictionsForUser(Ljava/lang/String;I)Landroid/os/Bundle;
.locals 6
.parameter "packageName"
.parameter "userHandle"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 638
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 639
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 642
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 643
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeString(Ljava/lang/String;)V
.line 644
invoke-virtual {v0, p2}, Landroid/os/Parcel;->writeInt(I)V
.line 645
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v4, 0x12
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 646
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 647
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
move-result v3
if-eqz v3, :cond_0
.line 648
sget-object v3, Landroid/os/Bundle;->CREATOR:Landroid/os/Parcelable$Creator;
invoke-interface {v3, v1}, Landroid/os/Parcelable$Creator;->createFromParcel(Landroid/os/Parcel;)Ljava/lang/Object;
move-result-object v2
check-cast v2, Landroid/os/Bundle;
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 655
.local v2, _result:Landroid/os/Bundle;
:goto_0
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 656
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 658
return-object v2
.line 651
.end local v2 #_result:Landroid/os/Bundle;
:cond_0
const/4 v2, 0x0
.restart local v2 #_result:Landroid/os/Bundle;
goto :goto_0
.line 655
.end local v2 #_result:Landroid/os/Bundle;
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 656
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public getInterfaceDescriptor()Ljava/lang/String;
.locals 1
.prologue
.line 299
const-string v0, "android.os.IUserManager"
return-object v0
.end method
.method public getUserHandle(I)I
.locals 6
.parameter "userSerialNumber"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 529
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 530
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 533
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 534
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeInt(I)V
.line 535
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v4, 0xd
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 536
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 537
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
move-result v2
.line 540
.local v2, _result:I
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 541
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 543
return v2
.line 540
.end local v2 #_result:I
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 541
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public getUserIcon(I)Landroid/graphics/Bitmap;
.locals 6
.parameter "userHandle"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 383
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 384
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 387
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 388
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeInt(I)V
.line 389
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/4 v4, 0x5
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 390
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 391
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
move-result v3
if-eqz v3, :cond_0
.line 392
sget-object v3, Landroid/graphics/Bitmap;->CREATOR:Landroid/os/Parcelable$Creator;
invoke-interface {v3, v1}, Landroid/os/Parcelable$Creator;->createFromParcel(Landroid/os/Parcel;)Ljava/lang/Object;
move-result-object v2
check-cast v2, Landroid/graphics/Bitmap;
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 399
.local v2, _result:Landroid/graphics/Bitmap;
:goto_0
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 400
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 402
return-object v2
.line 395
.end local v2 #_result:Landroid/graphics/Bitmap;
:cond_0
const/4 v2, 0x0
.restart local v2 #_result:Landroid/graphics/Bitmap;
goto :goto_0
.line 399
.end local v2 #_result:Landroid/graphics/Bitmap;
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 400
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public getUserInfo(I)Landroid/content/pm/UserInfo;
.locals 6
.parameter "userHandle"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 424
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 425
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 428
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 429
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeInt(I)V
.line 430
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/4 v4, 0x7
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 431
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 432
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
move-result v3
if-eqz v3, :cond_0
.line 433
sget-object v3, Landroid/content/pm/UserInfo;->CREATOR:Landroid/os/Parcelable$Creator;
invoke-interface {v3, v1}, Landroid/os/Parcelable$Creator;->createFromParcel(Landroid/os/Parcel;)Ljava/lang/Object;
move-result-object v2
check-cast v2, Landroid/content/pm/UserInfo;
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 440
.local v2, _result:Landroid/content/pm/UserInfo;
:goto_0
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 441
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 443
return-object v2
.line 436
.end local v2 #_result:Landroid/content/pm/UserInfo;
:cond_0
const/4 v2, 0x0
.restart local v2 #_result:Landroid/content/pm/UserInfo;
goto :goto_0
.line 440
.end local v2 #_result:Landroid/content/pm/UserInfo;
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 441
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public getUserRestrictions(I)Landroid/os/Bundle;
.locals 6
.parameter "userHandle"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 547
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 548
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 551
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 552
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeInt(I)V
.line 553
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v4, 0xe
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 554
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 555
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
move-result v3
if-eqz v3, :cond_0
.line 556
sget-object v3, Landroid/os/Bundle;->CREATOR:Landroid/os/Parcelable$Creator;
invoke-interface {v3, v1}, Landroid/os/Parcelable$Creator;->createFromParcel(Landroid/os/Parcel;)Ljava/lang/Object;
move-result-object v2
check-cast v2, Landroid/os/Bundle;
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 563
.local v2, _result:Landroid/os/Bundle;
:goto_0
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 564
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 566
return-object v2
.line 559
.end local v2 #_result:Landroid/os/Bundle;
:cond_0
const/4 v2, 0x0
.restart local v2 #_result:Landroid/os/Bundle;
goto :goto_0
.line 563
.end local v2 #_result:Landroid/os/Bundle;
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 564
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public getUserSerialNumber(I)I
.locals 6
.parameter "userHandle"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 511
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 512
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 515
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 516
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeInt(I)V
.line 517
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v4, 0xc
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 518
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 519
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
move-result v2
.line 522
.local v2, _result:I
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 523
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 525
return v2
.line 522
.end local v2 #_result:I
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 523
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public getUsers(Z)Ljava/util/List;
.locals 6
.parameter "excludeDying"
.annotation system Ldalvik/annotation/Signature;
value = {
"(Z)",
"Ljava/util/List",
"<",
"Landroid/content/pm/UserInfo;",
">;"
}
.end annotation
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
const/4 v3, 0x0
.line 406
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 407
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 410
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v4, "android.os.IUserManager"
invoke-virtual {v0, v4}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 411
if-eqz p1, :cond_0
const/4 v3, 0x1
:cond_0
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInt(I)V
.line 412
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/4 v4, 0x6
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 413
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 414
sget-object v3, Landroid/content/pm/UserInfo;->CREATOR:Landroid/os/Parcelable$Creator;
invoke-virtual {v1, v3}, Landroid/os/Parcel;->createTypedArrayList(Landroid/os/Parcelable$Creator;)Ljava/util/ArrayList;
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
move-result-object v2
.line 417
.local v2, _result:Ljava/util/List;,"Ljava/util/List<Landroid/content/pm/UserInfo;>;"
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 418
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 420
return-object v2
.line 417
.end local v2 #_result:Ljava/util/List;,"Ljava/util/List<Landroid/content/pm/UserInfo;>;"
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 418
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public isGuestEnabled()Z
.locals 6
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
const/4 v2, 0x0
.line 479
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 480
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 483
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 484
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v4, 0xa
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 485
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 486
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
move-result v3
if-eqz v3, :cond_0
const/4 v2, 0x1
.line 489
.local v2, _result:Z
:cond_0
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 490
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 492
return v2
.line 489
.end local v2 #_result:Z
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 490
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public isRestricted()Z
.locals 6
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
const/4 v2, 0x0
.line 447
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 448
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 451
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 452
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v4, 0x8
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 453
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 454
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
move-result v3
if-eqz v3, :cond_0
const/4 v2, 0x1
.line 457
.local v2, _result:Z
:cond_0
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 458
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 460
return v2
.line 457
.end local v2 #_result:Z
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 458
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public removeUser(I)Z
.locals 6
.parameter "userHandle"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
const/4 v2, 0x0
.line 327
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 328
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 331
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 332
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeInt(I)V
.line 333
iget-object v3, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/4 v4, 0x2
const/4 v5, 0x0
invoke-interface {v3, v4, v0, v1, v5}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 334
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
.line 335
invoke-virtual {v1}, Landroid/os/Parcel;->readInt()I
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
move-result v3
if-eqz v3, :cond_0
const/4 v2, 0x1
.line 338
.local v2, _result:Z
:cond_0
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 339
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 341
return v2
.line 338
.end local v2 #_result:Z
:catchall_0
move-exception v3
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 339
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v3
.end method
.method public setApplicationRestrictions(Ljava/lang/String;Landroid/os/Bundle;I)V
.locals 5
.parameter "packageName"
.parameter "restrictions"
.parameter "userHandle"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 592
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 593
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 595
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v2, "android.os.IUserManager"
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 596
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeString(Ljava/lang/String;)V
.line 597
if-eqz p2, :cond_0
.line 598
const/4 v2, 0x1
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInt(I)V
.line 599
const/4 v2, 0x0
invoke-virtual {p2, v0, v2}, Landroid/os/Bundle;->writeToParcel(Landroid/os/Parcel;I)V
.line 604
:goto_0
invoke-virtual {v0, p3}, Landroid/os/Parcel;->writeInt(I)V
.line 605
iget-object v2, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v3, 0x10
const/4 v4, 0x0
invoke-interface {v2, v3, v0, v1, v4}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 606
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 609
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 610
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 612
return-void
.line 602
:cond_0
const/4 v2, 0x0
:try_start_1
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInt(I)V
:try_end_1
.catchall {:try_start_1 .. :try_end_1} :catchall_0
goto :goto_0
.line 609
:catchall_0
move-exception v2
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 610
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v2
.end method
.method public setGuestEnabled(Z)V
.locals 5
.parameter "enable"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
const/4 v2, 0x0
.line 464
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 465
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 467
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v3, "android.os.IUserManager"
invoke-virtual {v0, v3}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 468
if-eqz p1, :cond_0
const/4 v2, 0x1
:cond_0
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInt(I)V
.line 469
iget-object v2, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v3, 0x9
const/4 v4, 0x0
invoke-interface {v2, v3, v0, v1, v4}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 470
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 473
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 474
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 476
return-void
.line 473
:catchall_0
move-exception v2
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 474
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v2
.end method
.method public setUserIcon(ILandroid/graphics/Bitmap;)V
.locals 5
.parameter "userHandle"
.parameter "icon"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 361
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 362
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 364
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v2, "android.os.IUserManager"
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 365
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeInt(I)V
.line 366
if-eqz p2, :cond_0
.line 367
const/4 v2, 0x1
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInt(I)V
.line 368
const/4 v2, 0x0
invoke-virtual {p2, v0, v2}, Landroid/graphics/Bitmap;->writeToParcel(Landroid/os/Parcel;I)V
.line 373
:goto_0
iget-object v2, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/4 v3, 0x4
const/4 v4, 0x0
invoke-interface {v2, v3, v0, v1, v4}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 374
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 377
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 378
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 380
return-void
.line 371
:cond_0
const/4 v2, 0x0
:try_start_1
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInt(I)V
:try_end_1
.catchall {:try_start_1 .. :try_end_1} :catchall_0
goto :goto_0
.line 377
:catchall_0
move-exception v2
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 378
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v2
.end method
.method public setUserName(ILjava/lang/String;)V
.locals 5
.parameter "userHandle"
.parameter "name"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 345
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 346
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 348
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v2, "android.os.IUserManager"
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 349
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeInt(I)V
.line 350
invoke-virtual {v0, p2}, Landroid/os/Parcel;->writeString(Ljava/lang/String;)V
.line 351
iget-object v2, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/4 v3, 0x3
const/4 v4, 0x0
invoke-interface {v2, v3, v0, v1, v4}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 352
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 355
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 356
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 358
return-void
.line 355
:catchall_0
move-exception v2
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 356
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v2
.end method
.method public setUserRestrictions(Landroid/os/Bundle;I)V
.locals 5
.parameter "restrictions"
.parameter "userHandle"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 570
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 571
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 573
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v2, "android.os.IUserManager"
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 574
if-eqz p1, :cond_0
.line 575
const/4 v2, 0x1
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInt(I)V
.line 576
const/4 v2, 0x0
invoke-virtual {p1, v0, v2}, Landroid/os/Bundle;->writeToParcel(Landroid/os/Parcel;I)V
.line 581
:goto_0
invoke-virtual {v0, p2}, Landroid/os/Parcel;->writeInt(I)V
.line 582
iget-object v2, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v3, 0xf
const/4 v4, 0x0
invoke-interface {v2, v3, v0, v1, v4}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 583
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 586
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 587
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 589
return-void
.line 579
:cond_0
const/4 v2, 0x0
:try_start_1
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInt(I)V
:try_end_1
.catchall {:try_start_1 .. :try_end_1} :catchall_0
goto :goto_0
.line 586
:catchall_0
move-exception v2
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 587
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v2
.end method
.method public wipeUser(I)V
.locals 5
.parameter "userHandle"
.annotation system Ldalvik/annotation/Throws;
value = {
Landroid/os/RemoteException;
}
.end annotation
.prologue
.line 496
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v0
.line 497
.local v0, _data:Landroid/os/Parcel;
invoke-static {}, Landroid/os/Parcel;->obtain()Landroid/os/Parcel;
move-result-object v1
.line 499
.local v1, _reply:Landroid/os/Parcel;
:try_start_0
const-string v2, "android.os.IUserManager"
invoke-virtual {v0, v2}, Landroid/os/Parcel;->writeInterfaceToken(Ljava/lang/String;)V
.line 500
invoke-virtual {v0, p1}, Landroid/os/Parcel;->writeInt(I)V
.line 501
iget-object v2, p0, Landroid/os/IUserManager$Stub$Proxy;->mRemote:Landroid/os/IBinder;
const/16 v3, 0xb
const/4 v4, 0x0
invoke-interface {v2, v3, v0, v1, v4}, Landroid/os/IBinder;->transact(ILandroid/os/Parcel;Landroid/os/Parcel;I)Z
.line 502
invoke-virtual {v1}, Landroid/os/Parcel;->readException()V
:try_end_0
.catchall {:try_start_0 .. :try_end_0} :catchall_0
.line 505
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 506
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
.line 508
return-void
.line 505
:catchall_0
move-exception v2
invoke-virtual {v1}, Landroid/os/Parcel;->recycle()V
.line 506
invoke-virtual {v0}, Landroid/os/Parcel;->recycle()V
throw v2
.end method
| {
"content_hash": "0d211a0f05d9e1d2f877281f6bc62999",
"timestamp": "",
"source": "github",
"line_count": 1585,
"max_line_length": 124,
"avg_line_length": 24.062460567823344,
"alnum_prop": 0.6499908230420305,
"repo_name": "baidurom/devices-Coolpad8720L",
"id": "4e4f8559589cfc2a86b7eb0df0f5ac1003f3c205",
"size": "38139",
"binary": false,
"copies": "2",
"ref": "refs/heads/coron-4.3",
"path": "framework.jar.out/smali/android/os/IUserManager$Stub$Proxy.smali",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "13619"
},
{
"name": "Shell",
"bytes": "1917"
}
],
"symlink_target": ""
} |
<?php
namespace HWI\Bundle\OAuthBundle\Tests\Security;
use HWI\Bundle\OAuthBundle\Security\OAuthUtils;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\Security\Http\HttpUtils;
/**
 * Tests for OAuthUtils: authorization-URL generation against a resource
 * owner map, and OAuth 1.0a request signing.
 *
 * Fixes in this revision:
 *  - renamed private helper getAutorizationChecker() to the correctly
 *    spelled getAuthorizationChecker() (internal only; call sites updated);
 *  - provideInvalidData() now wraps each dataset in an outer array so that
 *    PHPUnit passes ONE $parameters array per test run instead of splatting
 *    each associative value as a separate string argument;
 *  - normalized assignment spacing.
 */
class OAuthUtilsTest extends \PHPUnit_Framework_TestCase
{
    /**
     * When a redirect URL is passed explicitly, it is returned as-is and the
     * request is not tagged with a 'service' attribute.
     */
    public function testGetAuthorizationUrlWithRedirectUrl()
    {
        $url = 'http://localhost:8080/login/check-instagram';
        $request = $this->getRequest($url);
        $redirect = 'https://api.instagram.com/oauth/authorize?redirect='.rawurlencode($url);
        // Inline checker mock without an isGranted() expectation: the
        // one-URL flow never queries the user's authentication state.
        if (interface_exists('Symfony\Component\Security\Core\Authorization\AuthorizationCheckerInterface')) {
            $authorizationChecker = $this->getMockBuilder('Symfony\Component\Security\Core\Authorization\AuthorizationCheckerInterface')
                ->disableOriginalConstructor()
                ->getMock();
        } else {
            // Symfony < 2.6 fallback interface.
            $authorizationChecker = $this->getMockBuilder('Symfony\Component\Security\Core\SecurityContextInterface')
                ->disableOriginalConstructor()
                ->getMock();
        }
        $utils = new OAuthUtils($this->getHttpUtils($url), $authorizationChecker, true);
        $utils->addResourceOwnerMap($this->getMap($url, $redirect, false, true));
        $this->assertEquals(
            $redirect,
            $utils->getAuthorizationUrl($request, 'instagram', $url)
        );
        $this->assertNull($request->attributes->get('service'));
    }

    /**
     * With "connect" enabled and an authenticated user, the request is
     * tagged with the resource owner's name under the 'service' attribute.
     */
    public function testGetAuthorizationUrlWithConnectAndUserToken()
    {
        $url = 'http://localhost:8080/login/check-instagram';
        $request = $this->getRequest($url);
        $redirect = 'https://api.instagram.com/oauth/authorize?redirect='.rawurlencode($url);
        $utils = new OAuthUtils($this->getHttpUtils($url), $this->getAuthorizationChecker(true), true);
        $utils->addResourceOwnerMap($this->getMap($url, $redirect, true));
        $this->assertEquals(
            $redirect,
            $utils->getAuthorizationUrl($request, 'instagram')
        );
        $this->assertEquals(
            'instagram',
            $request->attributes->get('service')
        );
    }

    /**
     * Without an authenticated user the check path is resolved from the map
     * and no 'service' attribute is set.
     */
    public function testGetAuthorizationUrlWithoutUserToken()
    {
        $url = 'http://localhost:8080/login/check-instagram';
        $request = $this->getRequest($url);
        $redirect = 'https://api.instagram.com/oauth/authorize?redirect='.rawurlencode($url);
        $utils = new OAuthUtils($this->getHttpUtils($url), $this->getAuthorizationChecker(false), true);
        $utils->addResourceOwnerMap($this->getMap($url, $redirect));
        $this->assertEquals(
            $redirect,
            $utils->getAuthorizationUrl($request, 'instagram')
        );
        $this->assertNull($request->attributes->get('service'));
    }

    /**
     * @dataProvider provideValidData
     */
    public function testSignatureIsGeneratedCorrectly($signature, $url)
    {
        // Parameters from http://oauth.net/core/1.0a/#anchor46
        $parameters = array(
            'oauth_consumer_key' => 'dpf43f3p2l4k3l03',
            'oauth_token' => 'nnch734d00sl2jdk',
            'oauth_signature_method' => 'HMAC-SHA1',
            'oauth_timestamp' => '1191242096',
            'oauth_nonce' => 'kllo9940pd9333jh',
            'oauth_version' => '1.0',
        );
        $this->assertEquals(
            $signature,
            OAuthUtils::signRequest('GET', $url, $parameters, 'kd94hf93k423kf44', 'pfkkdhi9sl3r4s00')
        );
    }

    /**
     * Each dataset omits exactly one required oauth_* parameter.
     *
     * @dataProvider provideInvalidData
     * @expectedException \RuntimeException
     */
    public function testThrowsExceptionIfRequiredParameterIsMissing($parameters)
    {
        OAuthUtils::signRequest('GET', 'http://example.com', $parameters, 'client_secret');
    }

    /**
     * Expected signatures from the OAuth 1.0a specification examples.
     */
    public function provideValidData()
    {
        return array(
            array('iflJZCKxEsZ58FFDyCysxfLbuKM=', 'http://photos.example.net/photos'),
            array('tR3+Ty81lMeYAr/Fid0kMTYa/WM=', 'http://photos.example.net/photos?file=vacation.jpg&size=original'),
        );
    }

    /**
     * Parameter sets each missing one required key. Every dataset is wrapped
     * so PHPUnit passes it as a single $parameters argument.
     */
    public function provideInvalidData()
    {
        return array(
            array(array('oauth_timestamp' => '', 'oauth_nonce' => '', 'oauth_version' => '', 'oauth_signature_method' => '')),
            array(array('oauth_consumer_key' => '', 'oauth_nonce' => '', 'oauth_version' => '', 'oauth_signature_method' => '')),
            array(array('oauth_consumer_key' => '', 'oauth_timestamp' => '', 'oauth_version' => '', 'oauth_signature_method' => '')),
            array(array('oauth_consumer_key' => '', 'oauth_timestamp' => '', 'oauth_nonce' => '', 'oauth_signature_method' => '')),
            array(array('oauth_consumer_key' => '', 'oauth_timestamp' => '', 'oauth_nonce' => '', 'oauth_version' => '')),
        );
    }

    /** Builds a GET request against $url on port 8080. */
    private function getRequest($url)
    {
        return Request::create($url, 'get', array(), array(), array(), array('SERVER_PORT' => 8080));
    }

    /** Mocked ResourceOwnerMap returning one Instagram resource owner. */
    private function getMap($url, $redirect, $hasUser = false, $hasOneRedirectUrl = false)
    {
        $resource = $this->getMockBuilder('HWI\Bundle\OAuthBundle\OAuth\ResourceOwnerInterface')
            ->getMock();
        $resource
            ->expects($this->once())
            ->method('getAuthorizationUrl')
            ->with($url, array())
            ->will($this->returnValue($redirect));
        $resource
            ->expects($this->any())
            ->method('getOption')
            ->with('auth_with_one_url')
            ->will($this->returnValue($hasOneRedirectUrl));
        $mapMock = $this->getMockBuilder('HWI\Bundle\OAuthBundle\Security\Http\ResourceOwnerMap')
            ->disableOriginalConstructor()
            ->getMock();
        $mapMock
            ->expects($this->once())
            ->method('getResourceOwnerByName')
            ->with('instagram')
            ->will($this->returnValue($resource));
        // The check path is only resolved on the anonymous, multi-URL flow.
        if (!$hasUser && !$hasOneRedirectUrl) {
            $mapMock
                ->expects($this->once())
                ->method('getResourceOwnerCheckPath')
                ->with('instagram')
                ->will($this->returnValue('/login/check-instagram'));
        }
        if ($hasUser) {
            $resource
                ->expects($this->once())
                ->method('getName')
                ->will($this->returnValue('instagram'));
        }
        return $mapMock;
    }

    /** HttpUtils whose URL generator always yields $generatedUrl. */
    private function getHttpUtils($generatedUrl = '/')
    {
        $urlGenerator = $this->getMockBuilder('Symfony\Component\Routing\Generator\UrlGeneratorInterface')
            ->disableOriginalConstructor()
            ->getMock();
        $urlGenerator
            ->expects($this->any())
            ->method('generate')
            ->will($this->returnValue($generatedUrl))
        ;
        return new HttpUtils($urlGenerator);
    }

    /**
     * Authorization checker mock whose isGranted('IS_AUTHENTICATED_REMEMBERED')
     * reports $hasUser. (Spelling fixed from getAutorizationChecker.)
     */
    private function getAuthorizationChecker($hasUser)
    {
        if (interface_exists('Symfony\Component\Security\Core\Authorization\AuthorizationCheckerInterface')) {
            $mock = $this->getMockBuilder('Symfony\Component\Security\Core\Authorization\AuthorizationCheckerInterface')
                ->disableOriginalConstructor()
                ->getMock();
        } else {
            $mock = $this->getMockBuilder('Symfony\Component\Security\Core\SecurityContextInterface')
                ->disableOriginalConstructor()
                ->getMock();
        }
        $mock->expects($this->once())
            ->method('isGranted')
            ->with('IS_AUTHENTICATED_REMEMBERED')
            ->will($this->returnValue($hasUser));
        return $mock;
    }
}
| {
"content_hash": "5d6fa4beafc6a9de1c4fbf60075094e5",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 136,
"avg_line_length": 37.150485436893206,
"alnum_prop": 0.5833006664053313,
"repo_name": "alpacinocj/SymfonyTest",
"id": "7817481b546eea93e140cdda111eaa7993934f1d",
"size": "7892",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "vendor/hwi/oauth-bundle/Tests/Security/OAuthUtilsTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8220"
},
{
"name": "HTML",
"bytes": "18801"
},
{
"name": "JavaScript",
"bytes": "73"
},
{
"name": "PHP",
"bytes": "220368"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Security.Cryptography;
using System.Threading.Tasks;
using Microsoft.AspNet.SignalR.Infrastructure;
namespace Microsoft.AspNet.SignalR.Messaging
{
/// <summary>
/// Subscription implementation that keeps one <see cref="Cursor"/> (the
/// last-consumed message id) per event key and drains new messages from the
/// corresponding topic message stores.
/// </summary>
internal class DefaultSubscription : Subscription
{
    // Random per-process prefix used when serializing/parsing cursors.
    internal static string _defaultCursorPrefix = GetCursorPrefix();

    // Parallel lists: _cursorTopics[i] is the Topic backing _cursors[i].
    // Both are mutated only while locking _cursors.
    private List<Cursor> _cursors;
    private List<Topic> _cursorTopics;
    // Reusable buffer holding the next cursor ids computed by PerformWork;
    // re-used across calls to avoid per-call allocation.
    private ulong[] _cursorsState;

    private readonly IStringMinifier _stringMinifier;

    public DefaultSubscription(string identity,
                               IList<string> eventKeys,
                               TopicLookup topics,
                               string cursor,
                               Func<MessageResult, object, Task<bool>> callback,
                               int maxMessages,
                               IStringMinifier stringMinifier,
                               IPerformanceCounterManager counters,
                               object state) :
        base(identity, eventKeys, callback, maxMessages, counters, state)
    {
        _stringMinifier = stringMinifier;

        // No serialized cursor: start fresh from the topics' current counts.
        if (String.IsNullOrEmpty(cursor))
        {
            _cursors = GetCursorsFromEventKeys(EventKeys, topics);
        }
        else
        {
            // Ensure delegate continues to use the C# Compiler static delegate caching optimization.
            _cursors = Cursor.GetCursors(cursor, _defaultCursorPrefix, (k, s) => UnminifyCursor(k, s), stringMinifier) ?? GetCursorsFromEventKeys(EventKeys, topics);
        }

        _cursorTopics = new List<Topic>();

        if (!String.IsNullOrEmpty(cursor))
        {
            // Update all of the cursors so we're within the range
            // (drop cursors for keys we no longer subscribe to; reset ids
            // that point past the end of the store).
            for (int i = _cursors.Count - 1; i >= 0; i--)
            {
                Cursor c = _cursors[i];
                Topic topic;
                if (!EventKeys.Contains(c.Key))
                {
                    _cursors.Remove(c);
                }
                else if (!topics.TryGetValue(_cursors[i].Key, out topic) || _cursors[i].Id > topic.Store.GetMessageCount())
                {
                    UpdateCursor(c.Key, 0);
                }
            }
        }

        // Add dummy entries so they can be filled in
        for (int i = 0; i < _cursors.Count; i++)
        {
            _cursorTopics.Add(null);
        }
    }

    // Static-shaped helper so the lambda above can stay allocation-free.
    private static string UnminifyCursor(string key, object state)
    {
        return ((IStringMinifier)state).Unminify(key);
    }

    /// <summary>
    /// Starts tracking <paramref name="eventKey"/>; returns true only if the
    /// key was not already tracked.
    /// </summary>
    public override bool AddEvent(string eventKey, Topic topic)
    {
        base.AddEvent(eventKey, topic);

        lock (_cursors)
        {
            // O(n), but small n and it's not common
            var index = FindCursorIndex(eventKey);
            if (index == -1)
            {
                _cursors.Add(new Cursor(eventKey, GetMessageId(topic), _stringMinifier.Minify(eventKey)));
                _cursorTopics.Add(topic);
                return true;
            }
            return false;
        }
    }

    /// <summary>Stops tracking <paramref name="eventKey"/> if present.</summary>
    public override void RemoveEvent(string eventKey)
    {
        base.RemoveEvent(eventKey);

        lock (_cursors)
        {
            var index = FindCursorIndex(eventKey);
            if (index != -1)
            {
                _cursors.RemoveAt(index);
                _cursorTopics.RemoveAt(index);
            }
        }
    }

    /// <summary>Rebinds the topic backing an already-tracked event key.</summary>
    public override void SetEventTopic(string eventKey, Topic topic)
    {
        base.SetEventTopic(eventKey, topic);

        lock (_cursors)
        {
            // O(n), but small n and it's not common
            var index = FindCursorIndex(eventKey);
            if (index != -1)
            {
                _cursorTopics[index] = topic;
            }
        }
    }

    /// <summary>Serializes the current cursor positions to <paramref name="textWriter"/>.</summary>
    public override void WriteCursor(TextWriter textWriter)
    {
        lock (_cursors)
        {
            Cursor.WriteCursors(textWriter, _cursors, _defaultCursorPrefix);
        }
    }

    [SuppressMessage("Microsoft.Design", "CA1062:Validate arguments of public methods", MessageId = "0", Justification = "It is called from the base class")]
    protected override void PerformWork(IList<ArraySegment<Message>> items, out int totalCount, out object state)
    {
        totalCount = 0;

        lock (_cursors)
        {
            // perf sensitive: re-use cursors array to minimize allocations
            if ((_cursorsState == null) || (_cursorsState.Length != _cursors.Count))
            {
                _cursorsState = new ulong[_cursors.Count];
            }

            for (int i = 0; i < _cursors.Count; i++)
            {
                MessageStoreResult<Message> storeResult = _cursorTopics[i].Store.GetMessages(_cursors[i].Id, MaxMessages);
                // Next id for this key = first id fetched + number fetched.
                _cursorsState[i] = storeResult.FirstMessageId + (ulong)storeResult.Messages.Count;

                if (storeResult.Messages.Count > 0)
                {
                    items.Add(storeResult.Messages);
                    totalCount += storeResult.Messages.Count;
                }
            }

            // Return the state as a list of cursors
            state = _cursorsState;
        }
    }

    // Commits the cursor ids computed by PerformWork before the callback runs.
    protected override void BeforeInvoke(object state)
    {
        lock (_cursors)
        {
            // Update the list of cursors before invoking anything
            var nextCursors = (ulong[])state;
            for (int i = 0; i < _cursors.Count; i++)
            {
                _cursors[i].Id = nextCursors[i];
            }
        }
    }

    // Sets the cursor for |key| to |id|; returns false if the key is unknown.
    private bool UpdateCursor(string key, ulong id)
    {
        lock (_cursors)
        {
            // O(n), but small n and it's not common
            var index = FindCursorIndex(key);
            if (index != -1)
            {
                _cursors[index].Id = id;
                return true;
            }
            return false;
        }
    }

    // perf: avoid List<T>.FindIndex which uses stateless predicate which requires closure
    private int FindCursorIndex(string eventKey)
    {
        for (int i = 0; i < _cursors.Count; i++)
        {
            if (_cursors[i].Key == eventKey)
            {
                return i;
            }
        }
        return -1;
    }

    // Builds fresh cursors positioned at each topic's current message count.
    private List<Cursor> GetCursorsFromEventKeys(IList<string> eventKeys, TopicLookup topics)
    {
        var list = new List<Cursor>(eventKeys.Count);
        foreach (var eventKey in eventKeys)
        {
            var cursor = new Cursor(eventKey, GetMessageId(topics, eventKey), _stringMinifier.Minify(eventKey));
            list.Add(cursor);
        }
        return list;
    }

    // Random "d-<hex>-" prefix generated once per process.
    private static string GetCursorPrefix()
    {
        using (var rng = RandomNumberGenerator.Create())
        {
            var data = new byte[4];
            rng.GetBytes(data);
            using (var writer = new StringWriter(CultureInfo.InvariantCulture))
            {
                var randomValue = (ulong)BitConverter.ToUInt32(data, 0);
                Cursor.WriteUlongAsHexToBuffer(randomValue, writer);
                return "d-" + writer.ToString() + "-";
            }
        }
    }

    // Message id for |key|'s topic, or 0 when the topic is unknown.
    private static ulong GetMessageId(TopicLookup topics, string key)
    {
        Topic topic;
        if (topics.TryGetValue(key, out topic))
        {
            return GetMessageId(topic);
        }
        return 0;
    }

    private static ulong GetMessageId(Topic topic)
    {
        if (topic == null)
        {
            return 0;
        }
        return topic.Store.GetMessageCount();
    }
}
} | {
"content_hash": "a8f29eee9de58e6d7db2466f432dd04a",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 169,
"avg_line_length": 33.15686274509804,
"alnum_prop": 0.4807806031933767,
"repo_name": "BuildMaestro/BuildMaestro",
"id": "4f62322bd7293bf39e1b4ffacb96dfa0e30030ef",
"size": "8624",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Microsoft.AspNet.SignalR.Messaging/DefaultSubscription.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "818894"
},
{
"name": "CSS",
"bytes": "7141"
},
{
"name": "HTML",
"bytes": "1387"
},
{
"name": "JavaScript",
"bytes": "104488"
},
{
"name": "TypeScript",
"bytes": "18026"
}
],
"symlink_target": ""
} |
/**
* @providesModule OptionsListView
*
*/
'use strict';
import React, {
Component,
} from 'react';
import {
AsyncStorage,
InteractionManager,
ScrollView,
StyleSheet,
Text,
View,
} from 'react-native';
import WatchListView from 'WatchListView';
import LoadingReloadView from 'LoadingReloadView';
import OptionsListRowView from 'OptionsListRowView';
const AsyncStorageKeys = require('AsyncStorageKeys');
const AppStorageActions = require('AppStorageActions');
const WatchListStorageActions = require('WatchListStorageActions');
// StyleSheet for OptionsListView: portfolio summary rows plus the
// colored profit/loss "change" badge (its background is overridden at
// render time based on the sign of portfolioProfits).
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: '#F5F5F5',
  },
  scrollContainer: {},
  investmentContainer: {
    marginLeft: 14,
    marginRight: 14,
    paddingTop: 14,
    paddingBottom: 14,
    alignItems: 'center',
    justifyContent: 'flex-start',
    flexDirection: 'row',
    borderColor: 'rgba(0,0,0,0.12)',
    borderBottomWidth: 1,
  },
  investmentTitleContainer: {
    alignItems: 'center',
    justifyContent: 'center',
  },
  investmentTitleText: {
    fontSize: 22,
    fontWeight: '300',
  },
  investmentValueInfoContainer: {
    flex: 1,
    alignItems: 'center',
    justifyContent: 'flex-end',
    flexDirection: 'row',
  },
  investmentValueContainer: {
    paddingRight: 0,
  },
  investmentValueText: {
    fontSize: 22,
    fontWeight: '300',
  },
  investmentValueBoldContainer: {
    paddingRight: 14,
  },
  investmentValueBoldText: {
    fontSize: 22,
    fontWeight: '600',
  },
  changeContainer: {
    paddingTop: 4, paddingBottom: 4,
    paddingLeft: 7, paddingRight: 7,
    borderRadius: 6,
  },
  changeText: {
    fontSize: 16,
    fontWeight: '400',
    color: '#F5F5F5',
  },
});
// Main options menu screen: shows the portfolio balance and profit summary
// computed from the persisted watch-list data, plus navigation rows to the
// other screens. Listens for watch-list reload events and recomputes totals.
export default class OptionsListView extends Component {
  constructor(props) {
    super(props);
    this.state = {
      refreshing: false,
      reload: false,
      reloadFunction: () => {
      }, // function used to reload correct function
      reloadData: null, // optional reloadData option to load with reloadFunction
      loadingView: true,
      loading: false,
      data: null,                       // parsed watch-list entries from AsyncStorage
      portfolioBalance: 0,
      portfolioBalanceFormatted: 0,
      portfolioProfits: 0,
      portfolioProfitsFormatted: 0,
      runAfterTransitionTimeout: 0,
      closingView: false,               // guards setState after unmount
      // refs
      refListView: null,
      // emitters
      watchListLoadDate: Date.now(),
      watchListLoadDateListener: null,
      // requests
      addNewStockSymbolRequestObject: null,
    };
  }

  componentWillMount() {
    // Reload portfolio totals whenever the watch list is re-persisted.
    // NOTE(review): this.state is mutated directly here (no setState) —
    // presumably intentional to avoid a re-render for bookkeeping fields.
    this.state.watchListLoadDateListener = WatchListStorageActions.emitter.addListener('watchListLoadDate', (loadDate) => {
      if (this.state.watchListLoadDate !== loadDate) {
        this.state.watchListLoadDate = loadDate;
        this._getWatchListData();
      }
    });
    this._getWatchListData().then(() => {
      // run after scene transition - set timeout to make sure function is called if animations hang
      let called = false;
      this.state.runAfterTransitionTimeout = setTimeout(() => {
        called = true;
        this._runAfterTransition();
      }, 500);
      InteractionManager.runAfterInteractions(() => {
        if (called) return;
        clearTimeout(this.state.runAfterTransitionTimeout);
        this._runAfterTransition();
      });
    });
  }

  componentWillUnmount() {
    this.state.closingView = true;
    clearTimeout(this.state.runAfterTransitionTimeout);
    // remove emitter listeners
    if (this.state.watchListLoadDateListener) {
      this.state.watchListLoadDateListener.remove();
    }
    // abort active requests
    if (this.state.addNewStockSymbolRequestObject) {
      this.state.addNewStockSymbolRequestObject.abort();
    }
  }

  render() {
    // Green for gains, red for losses; applied to both the bold profit
    // figure and the percentage badge background.
    let investmentProfitsTextColor = {
        color: '#4CAF50'
      },
      investmentProfitsBackgroundColor = {
        backgroundColor: '#4CAF50'
      };
    if (this.state.portfolioProfits < 0) {
      investmentProfitsTextColor = {
        color: '#F44336'
      };
      investmentProfitsBackgroundColor = {
        backgroundColor: '#F44336'
      };
    }
    return (
      <View style={styles.container}>
        <ScrollView
          style={styles.scrollContainer}
        >
          <View style={styles.investmentContainer}>
            <View style={styles.investmentTitleContainer}>
              <Text style={styles.investmentTitleText}>
                {'Portfolio Balance'}
              </Text>
            </View>
            <View style={styles.investmentValueInfoContainer}>
              <View style={styles.investmentValueContainer}>
                <Text style={[styles.investmentValueText]}>
                  {this.state.portfolioBalanceFormatted}
                </Text>
              </View>
            </View>
          </View>
          <View style={styles.investmentContainer}>
            <View style={styles.investmentTitleContainer}>
              <Text style={styles.investmentTitleText}>
                {'Profits'}
              </Text>
            </View>
            <View style={styles.investmentValueInfoContainer}>
              <View style={styles.investmentValueBoldContainer}>
                <Text style={[styles.investmentValueBoldText, investmentProfitsTextColor]}>
                  {this.state.portfolioProfitsFormatted}
                </Text>
              </View>
              <View style={[styles.changeContainer, investmentProfitsBackgroundColor]}>
                <Text style={[styles.changeText]}>
                  {this.state.portfolioProfitsChange}
                </Text>
              </View>
            </View>
          </View>
          <OptionsListRowView
            buttonText={"Watch List"}
            onButtonPress={this._setWatchListView.bind(this)}
          />
          <OptionsListRowView
            buttonText={"Trades"}
            onButtonPress={this._setTradesView.bind(this)}
          />
          <OptionsListRowView
            buttonText={"Stock Data"}
          />
          <OptionsListRowView
            buttonText={"Charts"}
          />
          <OptionsListRowView
            buttonText={"Notes"}
          />
          <OptionsListRowView
            buttonText={"Other"}
          />
        </ScrollView>
      </View>
    );
  }

  // Clears the loading flags once the scene transition has settled.
  // NOTE(review): both branches are currently identical; the data !== null
  // split looks like a placeholder for divergent behavior — confirm intent.
  _runAfterTransition() {
    if (this.state.closingView === false) {
      if (this.state.data !== null) {
        this.setState({
          loading: false,
          loadingView: false,
        });
      }
      // no items have been added to watch list
      else {
        this.setState({
          loading: false,
          loadingView: false,
        });
      }
    }
  }

  // Loads persisted watch-list entries from AsyncStorage and recomputes
  // portfolio balance (buy-in * shares) and profits ((last - buy-in) * shares)
  // across all entries that have buyInPrice, shares, and a last trade price.
  // Always resolves; storage errors leave state untouched.
  _getWatchListData() {
    return new Promise((resolve, reject) => {
      try {
        let keys = [
          AsyncStorageKeys.WatchListData,
        ];
        AsyncStorage.multiGet(keys, (err, stores) => {
          let nextState = {
            data: null,
          };
          stores.map((result, i, store) => {
            // get at each store's key/value so you can work with it
            let key = store[i][0];
            let value = store[i][1];
            let parseValue = JSON.parse(value);
            if (value !== null) {
              switch (key) {
                case AsyncStorageKeys.WatchListData:
                  nextState.data = parseValue;
                  break;
              }
            }
          });
          let portfolioBalance = 0,
            portfolioProfits = 0,
            portfolioProfitsChange = 0
          ;
          if (nextState.data) {
            // check to see if stock was already added to list before inserting
            for (let i = 0; i < nextState.data.length; i++) {
              let obj = nextState.data[i];
              let objLiveData = obj.liveData;
              let objUserData = obj.userData;
              if (objUserData.buyInPrice && objUserData.shares && objLiveData.lastTradePriceOnly) {
                let currentPrice = parseFloat(objLiveData.lastTradePriceOnly),
                  buyInPrice = parseFloat(objUserData.buyInPrice),
                  shares = parseFloat(objUserData.shares);
                portfolioBalance = portfolioBalance + (buyInPrice * shares);
                portfolioProfits = portfolioProfits + ((currentPrice - buyInPrice) * shares);
                // NOTE(review): this sums per-position percentages rather than
                // averaging or weighting them — confirm the intended metric.
                portfolioProfitsChange = portfolioProfitsChange + (((currentPrice - buyInPrice) / buyInPrice) * 100);
              }
            }
          }
          nextState.portfolioBalance = portfolioBalance;
          nextState.portfolioBalanceFormatted = '$' + portfolioBalance.toLocaleString(
              'en-US', // use a string like 'en-US' to override browser locale
              {minimumFractionDigits: 2}
            );
          nextState.portfolioProfits = portfolioProfits;
          nextState.portfolioProfitsFormatted = '$' + portfolioProfits.toLocaleString(
              'en-US', // use a string like 'en-US' to override browser locale
              {minimumFractionDigits: 2}
            );
          nextState.portfolioProfitsChange = (Math.round(portfolioProfitsChange * 100) / 100) + '%';
          this.setState(nextState);
          return resolve();
        });
      } catch (error) {
        // do nothing if error exists on load, since data will be null and a fetchData will be loaded
        return resolve();
      }
    });
  }

  // Navigation handlers for the menu rows.
  _setWatchListView() {
    this.props.navigation.navigate('WatchListView');
  }

  _setTradesView() {
    this.props.navigation.navigate('TradesView');
  }
} | {
"content_hash": "2643b26ae4701bc81e940a7407b33087",
"timestamp": "",
"source": "github",
"line_count": 342,
"max_line_length": 133,
"avg_line_length": 34.13742690058479,
"alnum_prop": 0.4760599571734475,
"repo_name": "TimeTechFinancial/TimeTechFinancial",
"id": "76df3702373912d4b99784fddb0fbe8f39ec609b",
"size": "11675",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "assets/components/views/OptionsList/View.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1344"
},
{
"name": "JavaScript",
"bytes": "161997"
},
{
"name": "Objective-C",
"bytes": "4438"
},
{
"name": "Python",
"bytes": "1744"
}
],
"symlink_target": ""
} |
package org.apache.pulsar.tests;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import io.netty.util.concurrent.FastThreadLocal;
import io.netty.util.concurrent.FastThreadLocalThread;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import org.testng.annotations.Test;
/**
 * Tests for {@code FastThreadLocalStateCleaner}: verifies that cleanup
 * resets matching {@link FastThreadLocal} values back to their initial
 * value, both in the current thread and in other (plain and
 * {@link FastThreadLocalThread}) threads.
 */
public class FastThreadLocalStateCleanerTest {
    // Wrapper type used as the cleaner's match predicate target, so only
    // this test's thread-local values are cleaned.
    private static final class MagicNumberWrapper {
        private final int value;

        private MagicNumberWrapper(int value) {
            this.value = value;
        }

        public int intValue() {
            return value;
        }
    }

    // Thread-local under test; initial value is 42 per thread.
    final FastThreadLocal<MagicNumberWrapper> magicNumberThreadLocal = new FastThreadLocal<MagicNumberWrapper>() {
        @Override
        protected MagicNumberWrapper initialValue() throws Exception {
            return new MagicNumberWrapper(42);
        }
    };

    // Cleaner restricted to values of type MagicNumberWrapper.
    final FastThreadLocalStateCleaner cleaner = new FastThreadLocalStateCleaner(object ->
            object.getClass() == MagicNumberWrapper.class);

    /** Cleanup in the current thread restores the initial value (42). */
    @Test
    public void testThreadLocalStateCleanupInCurrentThread() {
        magicNumberThreadLocal.set(new MagicNumberWrapper(44));
        assertEquals(magicNumberThreadLocal.get().intValue(), 44);
        cleaner.cleanupAllFastThreadLocals(Thread.currentThread(), ((thread, o) -> {
            System.out.println("Cleaning up " + thread + " value " + o);
        }));
        assertEquals(magicNumberThreadLocal.get().intValue(), 42);
    }

    /**
     * Cleanup resets the value in both the current thread and a second
     * plain thread; latches order set-before-clean and clean-before-read.
     */
    @Test
    public void testThreadLocalStateCleanupInCurrentAndOtherThread() throws InterruptedException, ExecutionException {
        magicNumberThreadLocal.set(new MagicNumberWrapper(44));
        assertEquals(magicNumberThreadLocal.get().intValue(), 44);
        CountDownLatch numberHasBeenSet = new CountDownLatch(1);
        CountDownLatch shutdownLatch = new CountDownLatch(1);
        CompletableFuture<MagicNumberWrapper> valueAfterReset = new CompletableFuture<>();
        Thread thread = new Thread(() -> {
            try {
                magicNumberThreadLocal.set(new MagicNumberWrapper(45));
                assertEquals(magicNumberThreadLocal.get().intValue(), 45);
                numberHasBeenSet.countDown();
                shutdownLatch.await();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            } finally {
                // Read back after the main thread has run the cleaner.
                valueAfterReset.complete(magicNumberThreadLocal.get());
            }
        });
        thread.start();
        numberHasBeenSet.await();
        Set<Thread> cleanedThreads = new HashSet<>();
        cleaner.cleanupAllFastThreadLocals((t, currentValue) -> {
            cleanedThreads.add(t);
        });
        shutdownLatch.countDown();
        assertEquals(magicNumberThreadLocal.get().intValue(), 42);
        assertEquals(valueAfterReset.get().intValue(), 42);
        assertEquals(cleanedThreads.size(), 2);
        assertTrue(cleanedThreads.contains(thread));
        assertTrue(cleanedThreads.contains(Thread.currentThread()));
    }

    /** Same as above, but the other thread is a FastThreadLocalThread. */
    @Test
    public void testThreadLocalStateCleanupInFastThreadLocalThread() throws InterruptedException, ExecutionException {
        CountDownLatch numberHasBeenSet = new CountDownLatch(1);
        CountDownLatch shutdownLatch = new CountDownLatch(1);
        CompletableFuture<MagicNumberWrapper> valueAfterReset = new CompletableFuture<>();
        Thread thread = new FastThreadLocalThread(() -> {
            try {
                magicNumberThreadLocal.set(new MagicNumberWrapper(45));
                assertEquals(magicNumberThreadLocal.get().intValue(), 45);
                numberHasBeenSet.countDown();
                shutdownLatch.await();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            } finally {
                valueAfterReset.complete(magicNumberThreadLocal.get());
            }
        });
        thread.start();
        numberHasBeenSet.await();
        Set<Thread> cleanedThreads = new HashSet<>();
        cleaner.cleanupAllFastThreadLocals((t, currentValue) -> {
            cleanedThreads.add(t);
        });
        shutdownLatch.countDown();
        assertEquals(valueAfterReset.get().intValue(), 42);
        assertTrue(cleanedThreads.contains(thread));
    }
} | {
"content_hash": "d9f3ee2018cfff625ff1222bed26da7e",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 118,
"avg_line_length": 40.792792792792795,
"alnum_prop": 0.6592314487632509,
"repo_name": "yahoo/pulsar",
"id": "2fd8c4bbbe2c71cf944c65dfd83351231bf29967",
"size": "5336",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "buildtools/src/test/java/org/apache/pulsar/tests/FastThreadLocalStateCleanerTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "77960"
},
{
"name": "C++",
"bytes": "736937"
},
{
"name": "CMake",
"bytes": "9092"
},
{
"name": "HTML",
"bytes": "29382"
},
{
"name": "Java",
"bytes": "5371040"
},
{
"name": "Protocol Buffer",
"bytes": "15090"
},
{
"name": "Python",
"bytes": "91802"
},
{
"name": "Shell",
"bytes": "47717"
}
],
"symlink_target": ""
} |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <vector>
#include "base/files/scoped_temp_dir.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/weak_ptr.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/run_loop.h"
#include "content/browser/fileapi/mock_file_change_observer.h"
#include "content/browser/quota/mock_quota_manager.h"
#include "content/public/test/mock_blob_url_request_context.h"
#include "content/public/test/test_file_system_backend.h"
#include "content/public/test/test_file_system_context.h"
#include "net/url_request/url_request.h"
#include "net/url_request/url_request_context.h"
#include "net/url_request/url_request_job.h"
#include "net/url_request/url_request_job_factory_impl.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "url/gurl.h"
#include "webkit/browser/blob/blob_storage_context.h"
#include "webkit/browser/blob/blob_url_request_job.h"
#include "webkit/browser/fileapi/file_system_context.h"
#include "webkit/browser/fileapi/file_system_file_util.h"
#include "webkit/browser/fileapi/file_system_operation_context.h"
#include "webkit/browser/fileapi/file_system_operation_runner.h"
#include "webkit/browser/fileapi/local_file_util.h"
#include "webkit/common/blob/blob_data.h"
#include "webkit/common/fileapi/file_system_util.h"
using fileapi::FileSystemOperation;
using fileapi::FileSystemOperationRunner;
using fileapi::FileSystemURL;
using content::MockBlobURLRequestContext;
using content::ScopedTextBlob;
namespace content {
namespace {

// Origin and file system type shared by every test case below.
const GURL kOrigin("http://example.com");
const fileapi::FileSystemType kFileSystemType = fileapi::kFileSystemTypeTest;

// Gtest helper asserting a file operation finished with |expected|; bound
// as a StatusCallback for setup operations whose result must be checked.
void AssertStatusEq(base::File::Error expected,
                    base::File::Error actual) {
  ASSERT_EQ(expected, actual);
}

} // namespace
// Fixture for FileSystemOperationImpl write tests. Creates a temp dir, a
// mock quota manager, and a test file system context with an empty file at
// |virtual_path_|; records the outcome of Write/Cancel via callbacks and
// observes file-change notifications through |change_observer_|.
class FileSystemOperationImplWriteTest
    : public testing::Test {
 public:
  FileSystemOperationImplWriteTest()
      : status_(base::File::FILE_OK),
        cancel_status_(base::File::FILE_ERROR_FAILED),
        bytes_written_(0),
        complete_(false),
        weak_factory_(this) {
    change_observers_ = fileapi::MockFileChangeObserver::CreateList(
        &change_observer_);
  }

  virtual void SetUp() {
    ASSERT_TRUE(dir_.CreateUniqueTempDir());
    quota_manager_ =
        new MockQuotaManager(false /* is_incognito */,
                             dir_.path(),
                             base::MessageLoopProxy::current().get(),
                             base::MessageLoopProxy::current().get(),
                             NULL /* special storage policy */);
    virtual_path_ = base::FilePath(FILE_PATH_LITERAL("temporary file"));
    file_system_context_ = CreateFileSystemContextForTesting(
        quota_manager_->proxy(), dir_.path());
    url_request_context_.reset(
        new MockBlobURLRequestContext(file_system_context_.get()));
    // Pre-create the target file so write tests operate on an existing file.
    file_system_context_->operation_runner()->CreateFile(
        URLForPath(virtual_path_), true /* exclusive */,
        base::Bind(&AssertStatusEq, base::File::FILE_OK));
    static_cast<TestFileSystemBackend*>(
        file_system_context_->GetFileSystemBackend(kFileSystemType))
        ->AddFileChangeObserver(change_observer());
  }

  virtual void TearDown() {
    quota_manager_ = NULL;
    file_system_context_ = NULL;
    base::RunLoop().RunUntilIdle();
  }

  base::File::Error status() const { return status_; }
  base::File::Error cancel_status() const { return cancel_status_; }
  // Accumulates write progress; |complete| may only transition once.
  void add_bytes_written(int64 bytes, bool complete) {
    bytes_written_ += bytes;
    EXPECT_FALSE(complete_);
    complete_ = complete;
  }
  int64 bytes_written() const { return bytes_written_; }
  bool complete() const { return complete_; }

 protected:
  const fileapi::ChangeObserverList& change_observers() const {
    return change_observers_;
  }

  fileapi::MockFileChangeObserver* change_observer() {
    return &change_observer_;
  }

  FileSystemURL URLForPath(const base::FilePath& path) const {
    return file_system_context_->CreateCrackedFileSystemURL(
        kOrigin, kFileSystemType, path);
  }

  // Callback function for recording test results.
  FileSystemOperation::WriteCallback RecordWriteCallback() {
    return base::Bind(&FileSystemOperationImplWriteTest::DidWrite,
                      weak_factory_.GetWeakPtr());
  }

  FileSystemOperation::StatusCallback RecordCancelCallback() {
    return base::Bind(&FileSystemOperationImplWriteTest::DidCancel,
                      weak_factory_.GetWeakPtr());
  }

  // Records progress on success; on error records the first failure status
  // and quits the message loop so the test body can resume.
  void DidWrite(base::File::Error status, int64 bytes, bool complete) {
    if (status == base::File::FILE_OK) {
      add_bytes_written(bytes, complete);
      if (complete)
        base::MessageLoop::current()->Quit();
    } else {
      EXPECT_FALSE(complete_);
      EXPECT_EQ(status_, base::File::FILE_OK);
      complete_ = true;
      status_ = status;
      if (base::MessageLoop::current()->is_running())
        base::MessageLoop::current()->Quit();
    }
  }

  void DidCancel(base::File::Error status) {
    cancel_status_ = status;
  }

  const MockBlobURLRequestContext& url_request_context() const {
    return *url_request_context_;
  }

  scoped_refptr<fileapi::FileSystemContext> file_system_context_;
  scoped_refptr<MockQuotaManager> quota_manager_;

  base::MessageLoopForIO loop_;

  base::ScopedTempDir dir_;
  base::FilePath virtual_path_;

  // For post-operation status.
  base::File::Error status_;
  base::File::Error cancel_status_;
  int64 bytes_written_;
  bool complete_;

  scoped_ptr<MockBlobURLRequestContext> url_request_context_;

  fileapi::MockFileChangeObserver change_observer_;
  fileapi::ChangeObserverList change_observers_;

  base::WeakPtrFactory<FileSystemOperationImplWriteTest> weak_factory_;

  DISALLOW_COPY_AND_ASSIGN(FileSystemOperationImplWriteTest);
};
// A 14-byte blob written at offset 0 succeeds, reports all bytes, completes,
// and fires exactly one file-modification notification.
TEST_F(FileSystemOperationImplWriteTest, TestWriteSuccess) {
  ScopedTextBlob blob(url_request_context(),
                      "blob-id:success",
                      "Hello, world!\n");
  file_system_context_->operation_runner()->Write(
      &url_request_context(), URLForPath(virtual_path_),
      blob.GetBlobDataHandle(),
      0, RecordWriteCallback());
  base::MessageLoop::current()->Run();
  EXPECT_EQ(14, bytes_written());
  EXPECT_EQ(base::File::FILE_OK, status());
  EXPECT_TRUE(complete());

  EXPECT_EQ(1, change_observer()->get_and_reset_modify_file_count());
}
// Writing an empty blob succeeds with zero bytes written but still counts
// as a modification.
TEST_F(FileSystemOperationImplWriteTest, TestWriteZero) {
  ScopedTextBlob blob(url_request_context(), "blob_id:zero", "");
  file_system_context_->operation_runner()->Write(
      &url_request_context(), URLForPath(virtual_path_),
      blob.GetBlobDataHandle(), 0, RecordWriteCallback());
  base::MessageLoop::current()->Run();
  EXPECT_EQ(0, bytes_written());
  EXPECT_EQ(base::File::FILE_OK, status());
  EXPECT_TRUE(complete());

  EXPECT_EQ(1, change_observer()->get_and_reset_modify_file_count());
}
// Writing with a null blob handle fails with FILE_ERROR_FAILED and fires no
// modification notification.
TEST_F(FileSystemOperationImplWriteTest, TestWriteInvalidBlobUrl) {
  scoped_ptr<webkit_blob::BlobDataHandle> null_handle;
  file_system_context_->operation_runner()->Write(
      &url_request_context(), URLForPath(virtual_path_),
      null_handle.Pass(), 0, RecordWriteCallback());
  base::MessageLoop::current()->Run();

  EXPECT_EQ(0, bytes_written());
  EXPECT_EQ(base::File::FILE_ERROR_FAILED, status());
  EXPECT_TRUE(complete());

  EXPECT_EQ(0, change_observer()->get_and_reset_modify_file_count());
}
// Writing to a nonexistent path fails with FILE_ERROR_NOT_FOUND and writes
// no bytes. (The modify-file count is still 1 per current behavior.)
TEST_F(FileSystemOperationImplWriteTest, TestWriteInvalidFile) {
  ScopedTextBlob blob(url_request_context(), "blob_id:writeinvalidfile",
                      "It\'ll not be written.");
  file_system_context_->operation_runner()->Write(
      &url_request_context(),
      URLForPath(base::FilePath(FILE_PATH_LITERAL("nonexist"))),
      blob.GetBlobDataHandle(), 0, RecordWriteCallback());
  base::MessageLoop::current()->Run();
  EXPECT_EQ(0, bytes_written());
  EXPECT_EQ(base::File::FILE_ERROR_NOT_FOUND, status());
  EXPECT_TRUE(complete());

  EXPECT_EQ(1, change_observer()->get_and_reset_modify_file_count());
}
// Writing to a directory fails; the exact error code is platform- and
// fileutil-dependent, so several codes are accepted.
TEST_F(FileSystemOperationImplWriteTest, TestWriteDir) {
  base::FilePath virtual_dir_path(FILE_PATH_LITERAL("d"));
  file_system_context_->operation_runner()->CreateDirectory(
      URLForPath(virtual_dir_path),
      true /* exclusive */, false /* recursive */,
      base::Bind(&AssertStatusEq, base::File::FILE_OK));

  ScopedTextBlob blob(url_request_context(), "blob:writedir",
                      "It\'ll not be written, too.");
  file_system_context_->operation_runner()->Write(
      &url_request_context(), URLForPath(virtual_dir_path),
      blob.GetBlobDataHandle(), 0, RecordWriteCallback());
  base::MessageLoop::current()->Run();
  EXPECT_EQ(0, bytes_written());
  // TODO(kinuko): This error code is platform- or fileutil- dependent
  // right now. Make it return File::FILE_ERROR_NOT_A_FILE in every case.
  EXPECT_TRUE(status() == base::File::FILE_ERROR_NOT_A_FILE ||
              status() == base::File::FILE_ERROR_ACCESS_DENIED ||
              status() == base::File::FILE_ERROR_FAILED);
  EXPECT_TRUE(complete());

  EXPECT_EQ(1, change_observer()->get_and_reset_modify_file_count());
}
// With quota capped at 10 bytes, writing a 14-byte blob ("Hello, world!\n")
// must stop at exactly 10 bytes and report FILE_ERROR_NO_SPACE.
TEST_F(FileSystemOperationImplWriteTest, TestWriteFailureByQuota) {
  ScopedTextBlob blob(url_request_context(), "blob:success",
                      "Hello, world!\n");
  quota_manager_->SetQuota(
      kOrigin, FileSystemTypeToQuotaStorageType(kFileSystemType), 10);
  file_system_context_->operation_runner()->Write(
      &url_request_context(), URLForPath(virtual_path_),
      blob.GetBlobDataHandle(), 0, RecordWriteCallback());
  base::MessageLoop::current()->Run();
  // The partial write up to the quota limit did happen.
  EXPECT_EQ(10, bytes_written());
  EXPECT_EQ(base::File::FILE_ERROR_NO_SPACE, status());
  EXPECT_TRUE(complete());
  EXPECT_EQ(1, change_observer()->get_and_reset_modify_file_count());
}
// Cancelling immediately after issuing a write that would otherwise succeed:
// the write must be aborted before any bytes are written.
TEST_F(FileSystemOperationImplWriteTest, TestImmediateCancelSuccessfulWrite) {
  ScopedTextBlob blob(url_request_context(), "blob:success",
                      "Hello, world!\n");
  FileSystemOperationRunner::OperationID id =
      file_system_context_->operation_runner()->Write(
          &url_request_context(), URLForPath(virtual_path_),
          blob.GetBlobDataHandle(), 0, RecordWriteCallback());
  file_system_context_->operation_runner()->Cancel(id, RecordCancelCallback());
  // We use RunUntilIdle() instead of Run() here, because we won't dispatch
  // callbacks after Cancel() is issued (so no chance to Quit) nor do we need
  // to run another write cycle.
  base::RunLoop().RunUntilIdle();

  // Issued Cancel() before receiving any response from Write(),
  // so nothing should have happened.
  EXPECT_EQ(0, bytes_written());
  EXPECT_EQ(base::File::FILE_ERROR_ABORT, status());
  EXPECT_EQ(base::File::FILE_OK, cancel_status());
  EXPECT_TRUE(complete());
  EXPECT_EQ(0, change_observer()->get_and_reset_modify_file_count());
}
// Cancelling immediately after issuing a write that would have failed
// (nonexistent target): the cancel must still win, yielding FILE_ERROR_ABORT
// rather than the write's own error.
TEST_F(FileSystemOperationImplWriteTest, TestImmediateCancelFailingWrite) {
  ScopedTextBlob blob(url_request_context(), "blob:writeinvalidfile",
                      "It\'ll not be written.");
  FileSystemOperationRunner::OperationID id =
      file_system_context_->operation_runner()->Write(
          &url_request_context(),
          URLForPath(base::FilePath(FILE_PATH_LITERAL("nonexist"))),
          blob.GetBlobDataHandle(), 0, RecordWriteCallback());
  file_system_context_->operation_runner()->Cancel(id, RecordCancelCallback());
  // We use RunUntilIdle() instead of Run() here, because we won't dispatch
  // callbacks after Cancel() is issued (so no chance to Quit) nor do we need
  // to run another write cycle.
  base::RunLoop().RunUntilIdle();

  // Issued Cancel() before receiving any response from Write(),
  // so nothing should have happened.
  EXPECT_EQ(0, bytes_written());
  EXPECT_EQ(base::File::FILE_ERROR_ABORT, status());
  EXPECT_EQ(base::File::FILE_OK, cancel_status());
  EXPECT_TRUE(complete());
  EXPECT_EQ(0, change_observer()->get_and_reset_modify_file_count());
}
// TODO(ericu,dmikurube,kinuko): Add more tests for cancel cases.
} // namespace content
| {
"content_hash": "2f728c2cd520d2d3e04566c0f79de0a0",
"timestamp": "",
"source": "github",
"line_count": 332,
"max_line_length": 79,
"avg_line_length": 36.78313253012048,
"alnum_prop": 0.6870291516541107,
"repo_name": "TeamEOS/external_chromium_org",
"id": "47b0e2d122ba21c3e6c6c04d9e8d4e6d357ead1c",
"size": "12212",
"binary": false,
"copies": "15",
"ref": "refs/heads/lp5.0",
"path": "content/browser/fileapi/file_system_operation_impl_write_unittest.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "8242"
},
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Assembly",
"bytes": "24741"
},
{
"name": "C",
"bytes": "3548503"
},
{
"name": "C++",
"bytes": "200795068"
},
{
"name": "CSS",
"bytes": "941169"
},
{
"name": "HTML",
"bytes": "18201975"
},
{
"name": "Java",
"bytes": "5192594"
},
{
"name": "JavaScript",
"bytes": "11001749"
},
{
"name": "Makefile",
"bytes": "20865646"
},
{
"name": "Objective-C",
"bytes": "1171732"
},
{
"name": "Objective-C++",
"bytes": "7082902"
},
{
"name": "PHP",
"bytes": "61320"
},
{
"name": "Perl",
"bytes": "69392"
},
{
"name": "Protocol Buffer",
"bytes": "360984"
},
{
"name": "Python",
"bytes": "6310657"
},
{
"name": "Rebol",
"bytes": "262"
},
{
"name": "Shell",
"bytes": "477288"
},
{
"name": "Standard ML",
"bytes": "1589"
},
{
"name": "XML",
"bytes": "344021"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "15206"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "730c43afc297fc77be0867793d1cd295",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "2b6ca69b15427be98e405814dd75b35d820c48dc",
"size": "211",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Myrtales/Melastomataceae/Sarcopyramis/Sarcopyramis bodinieri/ Syn. Sarcopyramis bodinieri delicata/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""Ops and optimizations for using BLAS calls
BLAS = Basic Linear Algebra Subroutines
Learn more about BLAS here:
http://www.netlib.org/blas/blast-forum/
The standard BLAS libraries implement what is called "legacy BLAS" in that
document.
This documentation describes Theano's BLAS optimization pipeline.
Where there is a discrepancy between how things do work and how they *should*
work, both aspects should be documented.
There are four kinds of BLAS Ops in Theano:
- Python implementations (this file)
- SciPy-based (blas_scipy)
- C-based (blas_c)
- CUDA-based (theano.sandbox.cuda.blas)
Notes
-----
Unfortunately (because it's confusing) this file currently contains Ops
that contain both Python and C versions. I think it would be better to
move the C implementations to blas_c so that this file is pure Python.
-JB
Ops
===
GEMM: Dot22, Dot22Scalar, GemmRelated, Gemm
-------------------------------------------
The BLAS GEMM operation implements Z <- a X Y + b Z,
where Z, X and Y are matrices, and a and b are scalars.
Dot22 is a GEMM where a=1, b=0, and Z is allocated every time.
Dot22Scalar is a GEMM where b=0 and Z is allocated every time.
Gemm is a GEMM in all its generality.
In the future we can refactor the GemmRelated, Gemm, Dot22 and
Dot22Scalar Ops into a single Op. That new Op (Gemm2) is basically a
normal Gemm, but with an additional configuration variable that says
to ignore the input Z. Setting that configuration variable to True
would make Gemm2 equivalent to the current Dot22 and Dot22Scalar.
This would make the file a lot easier to read, and save a few hundred
lines of library, to say nothing of testing and documentation.
GEMV: Gemv
----------
The BLAS GEMV operation implements Z <- a X Y + b Z,
where X is a matrix, Y, and Z are vectors, and a and b are scalars.
GER: Ger
--------
The BLAS GER operation implements Z <- a X' Y + Z,
where X and Y are vectors, and matrix Z gets a rank-1 update.
Other Notable BLAS-related Ops
------------------------------
SYRK is another useful special case of GEMM. Particularly SYRK preserves
symmetry in the matrix that it updates. See how the linear-algebra module uses
symmetry hints before implementing this Op, so that this Op is compatible with
that system.
Optimizations
=============
The optimization pipeline works something like this:
1. identify dot22 from dot
2. identify gemm from dot22
3. identify dot22scalar from dot22 that are not gemm
4. specialize gemm to gemv where applicable
5. specialize gemm to ger where applicable
6. specialize dot22 -> gemv or ger where applicable
:note: GEMM is the most canonical BLAS signature that we deal with so far, it
would be good to turn most things into GEMM (dot, inner, outer, dot22,
dot22scalar), and then to specialize from gemm to the various other L2 and
L3 operations.
Identify Dot22
--------------
Numpy's dot supports arguments that are of any rank, and we should support that
too (just for compatibility). The BLAS optimizations work with Dot Ops whose
inputs are each either vector or matrix. So the first part of the optimization
pipeline is to transform qualifying Dot Ops to Dot22 Ops. Dot22 Ops may be
transformed further, but they will get implemented by a BLAS call.
More precisely, Dot nodes whose inputs are all vectors or matrices and whose
inputs both have the same dtype, and whose dtype is float or complex, become
Dot22. This is implemented in `local_dot_to_dot22`.
Identify Gemm from Dot22
------------------------
This is complicated, done in GemmOptimizer.
Identify Dot22Scalar from Dot22
-------------------------------
Dot22 Ops that remain after the GemmOptimizer is done have not
qualified as GEMM Ops. Still they might be scaled by a factor, in
which case we use Dot22Scalar which is like Gemm, but without the b
and the Z. In the future it would be good to merge this into the
GemmOptimizer.
Specialize Gemm to Gemv
-----------------------
If arguments to GEMM are dimshuffled vectors, then we can use GEMV
instead. This optimization is `local_gemm_to_gemv`.
"""
from __future__ import print_function
import copy
import logging
import os
import time
import numpy
import numpy.distutils
try:
import numpy.distutils.__config__
except ImportError:
pass
from six import iteritems
from six.moves import reduce, xrange
from theano import config
from theano.gof import (utils, Op, view_roots,
local_optimizer, Optimizer,
InconsistencyError, toolbox, SequenceDB,
EquilibriumOptimizer, Apply,
ReplacementDidntRemovedError)
from theano.printing import pprint, FunctionPrinter, debugprint
from theano.compile.mode import optdb
import theano.scalar
from theano.tensor import basic as T
from theano.tensor.blas_headers import blas_header_text
from theano.tensor.blas_headers import blas_header_version
from theano.tensor.opt import in2out, local_dimshuffle_lift
_logger = logging.getLogger('theano.tensor.blas')
try:
import scipy.linalg.blas
have_fblas = True
try:
fblas = scipy.linalg.blas.fblas
except AttributeError:
# A change merged in Scipy development version on 2012-12-02 replaced
# `scipy.linalg.blas.fblas` with `scipy.linalg.blas`.
# See http://github.com/scipy/scipy/pull/358
fblas = scipy.linalg.blas
_blas_gemv_fns = {numpy.dtype('float32'): fblas.sgemv,
numpy.dtype('float64'): fblas.dgemv,
numpy.dtype('complex64'): fblas.cgemv,
numpy.dtype('complex128'): fblas.zgemv}
except ImportError as e:
have_fblas = False
# This is used in Gemv and ScipyGer. We use CGemv and CGer
# when theano.config.blas.ldflags is defined. So we don't need a
# warning in that case.
if not config.blas.ldflags:
_logger.warning('Failed to import scipy.linalg.blas, and '
'Theano flag blas.ldflags is empty. '
'Falling back on slower implementations for '
'dot(matrix, vector), dot(vector, matrix) and '
'dot(vector, vector) (%s)',
str(e))
# If check_init_y() == True we need to initialize y when beta == 0.
def check_init_y():
    """Return True if the BLAS gemv reads ``y`` even when ``beta == 0``.

    Some BLAS implementations propagate garbage (e.g. NaN) from an
    uninitialized ``y`` when ``beta == 0``; callers must then zero it
    first.  The answer is probed once with a tiny NaN-filled vector and
    cached on the function object (``check_init_y._result``).

    Returns
    -------
    bool
        True if ``y`` must be initialized before calling gemv with
        ``beta == 0``.
    """
    if check_init_y._result is None:
        if not have_fblas:
            # No scipy BLAS bindings: the numpy fallback in Gemv.perform
            # never reads y when beta == 0, and _blas_gemv_fns does not
            # exist, so we must not fall through to the probe below.
            check_init_y._result = False
        else:
            y = float('NaN') * numpy.ones((2,))
            x = numpy.ones((2,))
            A = numpy.ones((2, 2))
            gemv = _blas_gemv_fns[y.dtype]
            # beta == 0: if NaNs survive in y, the BLAS read y's
            # (uninitialized) contents.
            gemv(1.0, A.T, x, 0.0, y, overwrite_y=True, trans=True)
            check_init_y._result = numpy.isnan(y).any()

    return check_init_y._result

check_init_y._result = None
class Gemv(Op):
    """BLAS-style matrix-vector product with accumulation.

    Computes ``beta * y + alpha * dot(A, x)`` where

    - ``A`` is a matrix,
    - ``x`` and ``y`` are vectors,
    - ``alpha`` and ``beta`` are scalars.

    The output is a vector; when ``inplace`` is True it reuses
    (destroys) the storage of ``y``.
    """

    __props__ = ("inplace",)

    def __init__(self, inplace):
        # inplace: if True, output 0 overwrites y (input 0).
        self.inplace = inplace
        if inplace:
            self.destroy_map = {0: [0]}

    def __str__(self):
        if self.inplace:
            return '%s{inplace}' % self.__class__.__name__
        else:
            return '%s{no_inplace}' % self.__class__.__name__

    def make_node(self, y, alpha, A, x, beta):
        """Validate ranks and dtypes, then build the Apply node.

        Raises
        ------
        TypeError
            If y, A, x dtypes differ, A is not a matrix, or x/y are not
            vectors.
        """
        y = T.as_tensor_variable(y)
        x = T.as_tensor_variable(x)
        A = T.as_tensor_variable(A)
        alpha = T.as_tensor_variable(alpha)
        beta = T.as_tensor_variable(beta)
        if y.dtype != A.dtype or y.dtype != x.dtype:
            raise TypeError('Gemv requires matching dtypes',
                            (y.dtype, A.dtype, x.dtype))
        if A.ndim != 2:
            raise TypeError('gemv requires matrix for A', A.type)
        if x.ndim != 1:
            raise TypeError('gemv requires vector for x', x.type)
        if y.ndim != 1:
            raise TypeError('gemv requires vector for y', y.type)
        # Output has y's type (same dtype/broadcastable pattern).
        return Apply(self, [y, alpha, A, x, beta], [y.type()])

    def perform(self, node, inputs, out_storage):
        y, alpha, A, x, beta = inputs
        # Fast path: scipy's BLAS gemv, when available and the operands
        # are non-empty with a supported float/complex dtype.
        if (have_fblas and y.shape[0] != 0 and x.shape[0] != 0 and
                y.dtype in _blas_gemv_fns):
            gemv = _blas_gemv_fns[y.dtype]

            if (A.shape[0] != y.shape[0] or A.shape[1] != x.shape[0]):
                raise ValueError(
                    'Incompatible shapes for gemv '
                    '(beta * y + alpha * dot(A, x)). y: %s, A: %s, x: %s '
                    % (y.shape, A.shape, x.shape))

            # Some BLAS implementations read y even when beta == 0; make
            # sure y does not contain garbage in that case.
            if beta == 0 and check_init_y():
                y.fill(0)

            # Here I suppose that A is in c order. If we don't make it
            # explicitly as fortran order, scipy 0.7.2 seems to create
            # a copy in fortran order instead of just reshaping it
            # and using the trans flag.
            # If A is already in fortran order, making it c order and using
            # the trans flag doesn't seem to cause slowdown.
            # out_storage[0][0] = gemv(alpha, A, x, beta, y,
            #                          overwrite_y=self.inplace)
            out_storage[0][0] = gemv(alpha, A.T, x, beta, y,
                                     overwrite_y=self.inplace, trans=True)
        else:
            # Pure numpy fallback; the alpha/beta special cases skip
            # needless multiplications for the neutral values 1 and 0.
            out = numpy.dot(A, x)
            if alpha != 1:
                out *= alpha
            if beta != 0:
                if beta != 1:
                    out += beta * y
                else:
                    out += y
            out_storage[0][0] = numpy.asarray(out, dtype=y.dtype)

    def infer_shape(self, node, input_shapes):
        # Output shape equals y's shape (input 0).
        return [input_shapes[0]]
# Canonical instances; graph construction should use these rather than
# building new Gemv objects.
gemv_no_inplace = Gemv(inplace=False)
gemv_inplace = Gemv(inplace=True)
# For the user interface. Opt will make them inplace later
gemv = gemv_no_inplace
class Ger(Op):
    """BLAS general rank-1 update: ``A <- A + alpha * outer(x, y)``.

    ``A`` is a matrix, ``alpha`` a scalar, ``x`` and ``y`` are vectors.

    With ``destructive=True`` the update is applied in place on ``A``;
    otherwise ``A`` is copied first, leaving the input untouched.

    :TODO: Create better classes ScipyGer and CGer that inherit from this
        class and override the make_thunk() method to use Scipy and C
        respectively.
    """

    __props__ = ("destructive",)

    def __init__(self, destructive):
        self.destructive = destructive
        if destructive:
            # Output 0 overwrites A (input 0).
            self.destroy_map = {0: [0]}

    def __str__(self):
        variant = 'destructive' if self.destructive else 'non-destructive'
        return '%s{%s}' % (self.__class__.__name__, variant)

    def make_node(self, A, alpha, x, y):
        """Validate ranks/dtypes and build the Apply node."""
        A = T.as_tensor_variable(A)
        alpha = T.as_tensor_variable(alpha)
        x = T.as_tensor_variable(x)
        y = T.as_tensor_variable(y)

        # All four inputs must share a single dtype.
        if len({A.dtype, alpha.dtype, x.dtype, y.dtype}) != 1:
            raise TypeError('ger requires matching dtypes',
                            (A.dtype, alpha.dtype, x.dtype, y.dtype))
        if alpha.ndim != 0:
            raise TypeError('ger requires scalar alpha', alpha.type)
        if A.ndim != 2:
            raise TypeError('ger requires matrix for A', A.type)
        if x.ndim != 1:
            raise TypeError('ger requires vector for x', x.type)
        if y.ndim != 1:
            raise TypeError('ger requires vector for y', y.type)
        if x.dtype not in ('float32', 'float64', 'complex64', 'complex128'):
            raise TypeError('only float and complex types supported', x.dtype)
        return Apply(self, [A, alpha, x, y], [A.type()])

    def perform(self, node, inp, out):
        A, alpha, x, y = inp
        out_storage, = out
        # Work directly on A only in destructive mode.
        target = A if self.destructive else A.copy()
        update = numpy.outer(x, y)
        if alpha != 1:
            target += alpha * update
        else:
            target += update
        out_storage[0] = target

    def infer_shape(self, node, input_shapes):
        # The result has A's shape (input 0).
        return [input_shapes[0]]
# Canonical instances; the optimizer swaps in the destructive variant later.
ger = Ger(destructive=False)
ger_destructive = Ger(destructive=True)
def ldflags(libs=True, flags=False, libs_dir=False, include_dir=False):
    """Extract a list of compilation flags from config.blas.ldflags.

    Depending on the options, different types of flags will be kept.
    The result is a list of libraries against which an Op's object file
    should be linked to benefit from a BLAS implementation.

    Parameters
    ----------
    libs : bool, optional
        Extract flags starting with "-l" (the default is True).
    libs_dir : bool, optional
        Extract flags starting with "-L" (the default is False).
    include_dir : bool, optional
        Extract flags starting with "-I" (the default is False).
    flags: bool, optional
        Extract all the other flags (the default is False).

    Returns
    -------
    list of strings
        Extracted flags.
    """
    # Delegate to the memoized parser, keyed on the current flag string.
    return _ldflags(ldflags_str=theano.config.blas.ldflags,
                    libs=libs,
                    flags=flags,
                    libs_dir=libs_dir,
                    include_dir=include_dir)
@utils.memoize
def _ldflags(ldflags_str, libs, flags, libs_dir, include_dir):
    """Extract a list of compilation flags from a string.

    Depending on the options, different types of flags will be kept.

    Parameters
    ----------
    ldflags_str : string
        The string to process. Typically, this will be the content of
        `theano.config.blas.ldflags`.
    libs : bool
        Extract flags starting with "-l".
    flags: bool
        Extract all the other flags.
    libs_dir: bool
        Extract flags starting with "-L".
    include_dir: bool
        Extract flags starting with "-I".

    Returns
    -------
    list of strings
        Extracted flags.

    Raises
    ------
    ValueError
        If a token does not start with '-', or if `include_dir` matches a
        "-I" token (include dirs are rejected: they can hide other headers).
    """
    rval = []
    if libs_dir:
        # Sanity check: warn if none of the -L directories contains a
        # dynamic library matching one of the requested -l libraries.
        found_dyn = False
        dirs = [x[2:] for x in ldflags_str.split()
                if x.startswith('-L')]
        lib_names = _ldflags(ldflags_str=ldflags_str, libs=True,
                             flags=False, libs_dir=False, include_dir=False)
        for d in dirs:
            if not os.path.exists(d):
                # A bogus -L entry should not make flag extraction crash;
                # the compiler will complain about it later anyway.
                continue
            for f in os.listdir(d):
                if f.endswith(('.so', '.dylib', '.dll')):
                    if any(ll in f for ll in lib_names):
                        found_dyn = True
        if not found_dyn and dirs:
            _logger.warning(
                "We did not found a dynamic library into the "
                "library_dir of the library we use for blas. If you use "
                "ATLAS, make sure to compile it with dynamics library.")

    for t in ldflags_str.split():
        # Remove extra quote.
        if t.startswith("'") or t.startswith('"'):
            t = t[1:]
        if t.endswith("'") or t.endswith('"'):
            t = t[:-1]

        try:
            t0, t1, t2 = t[0:3]
            assert t0 == '-'
        except Exception:
            raise ValueError('invalid token "%s" in ldflags_str: "%s"'
                             % (t, ldflags_str))
        if libs_dir and t1 == 'L':
            rval.append(t[2:])
        elif include_dir and t1 == 'I':
            # (dead `rval.append` that used to follow this raise was removed:
            # it was unreachable)
            raise ValueError('Include dirs are not used for blas. We disable'
                             ' this as this can hide other headers and this'
                             ' is not wanted.', t)
        elif libs and t1 == 'l':  # example -lmkl
            rval.append(t[2:])
        elif flags and t1 not in ['L', 'I', 'l']:  # example -openmp
            rval.append(t)
        elif flags and t1 == 'L':
            # Embed an rpath so the library is found at load time even when
            # the user's environment is not well configured.
            rval.append('-Wl,-rpath,' + t[2:])
    return rval
class GemmRelated(Op):
    """Base class for Gemm and Dot22.

    This class provides a kind of templated gemm Op: subclasses assemble
    their C implementation from the C-code snippet class attributes below
    via build_gemm_call().
    """

    __props__ = ()

    def c_support_code(self):
        # return cblas_header_text()
        # MOD is an alias for C's '%' so the snippets survive Python's
        # %-substitution; time_time() is kept for the commented-out timing
        # fprintf calls in the gemm snippets.
        mod_str = """
        #ifndef MOD
        #define MOD %
        #endif
        static double time_time() // a time function like time.time()
        {
            struct timeval tv;
            gettimeofday(&tv, 0);
            return (double) tv.tv_sec + (double) tv.tv_usec / 1000000.0;
        }
        """
        return blas_header_text() + mod_str

    def c_headers(self):
        # std.cout doesn't require the '%' symbol to print stuff...
        # so it works much better with python's string-substitution stuff.
        return ['<iostream>', '<time.h>', '<sys/time.h>']

    def c_libraries(self):
        return ldflags()

    # code_cache_version is built by subclasses from
    # build_gemm_version

    def c_compile_args(self):
        return ldflags(libs=False, flags=True)

    def c_lib_dirs(self):
        return ldflags(libs=False, libs_dir=True)

    def c_header_dirs(self):
        return ldflags(libs=False, include_dir=True)

    # Declares the local C variables shared by all snippets below
    # (dims Nx/Ny/Nz, strides Sx/Sy/Sz, dtype info, and the `unit`
    # stride-layout code).
    declare_NS = """
        int unit = 0;
        int type_num = PyArray_DESCR(%(_x)s)->type_num;
        int type_size = PyArray_DESCR(%(_x)s)->elsize; // in bytes
        npy_intp* Nx = PyArray_DIMS(%(_x)s);
        npy_intp* Ny = PyArray_DIMS(%(_y)s);
        npy_intp* Nz = 0; //PyArray_DIMS(%(_zout)s);
        npy_intp* Sx = PyArray_STRIDES(%(_x)s);
        npy_intp* Sy = PyArray_STRIDES(%(_y)s);
        npy_intp* Sz = 0; //PyArray_STRIDES(%(_zout)s);
        //strides for x, y, z in dimensions 0, 1
        int sx_0, sx_1, sy_0, sy_1, sz_0, sz_1;
        """

    # setup_z_Nz_Sz = None
    # (provided by subclasses: fills Nz/Sz and prepares the output array)

    # Rank checks: x, y and (if allocated) z must all be matrices.
    check_xyz_rank2 = """
        if (PyArray_NDIM(%(_x)s) != 2) {
            PyErr_Format(PyExc_NotImplementedError,
                         "rank(x) != 2. rank(x) is %%d.",
                         PyArray_NDIM(%(_x)s));
            %(fail)s;
        }
        if (PyArray_NDIM(%(_y)s) != 2) {
            PyErr_Format(PyExc_NotImplementedError,
                         "rank(y) != 2. rank(y) is %%d.", PyArray_NDIM(%(_y)s));
            %(fail)s;
        }
        if (%(_zout)s && PyArray_NDIM(%(_zout)s) != 2) {
            PyErr_Format(PyExc_NotImplementedError,
                         "rank(z) != 2. rank(z) is %%d.", PyArray_NDIM(%(_zout)s));
            %(fail)s;
        }
        """

    # dtype checks: x, y, z must all be float32 or float64, and identical.
    check_xyz_double_or_float = """
        if ((PyArray_DESCR(%(_x)s)->type_num != NPY_DOUBLE)
            && (PyArray_DESCR(%(_x)s)->type_num != NPY_FLOAT))
        {PyErr_SetString(PyExc_NotImplementedError, "type(x) is not double or float"); %(fail)s;}

        if ((PyArray_DESCR(%(_y)s)->type_num != NPY_DOUBLE)
            && (PyArray_DESCR(%(_y)s)->type_num != NPY_FLOAT))
        {PyErr_SetString(PyExc_NotImplementedError, "type(y) is not double or float"); %(fail)s;}

        if ((PyArray_DESCR(%(_zout)s)->type_num != NPY_DOUBLE)
            && (PyArray_DESCR(%(_zout)s)->type_num != NPY_FLOAT))
        {PyErr_SetString(PyExc_NotImplementedError, "type(z) is not double or float"); %(fail)s;}

        if ((PyArray_DESCR(%(_x)s)->type_num != PyArray_DESCR(%(_y)s)->type_num)
            ||(PyArray_DESCR(%(_x)s)->type_num != PyArray_DESCR(%(_zout)s)->type_num))
        { PyErr_SetString(PyExc_NotImplementedError, "type(x), type(y), type(z) are not all the same"); %(fail)s; }
        """

    # it is not necessary that a or b have the same type as x,y,z
    check_ab_double_or_float = """
        if ((PyArray_DESCR(%(_a)s)->type_num != NPY_DOUBLE)
            && (PyArray_DESCR(%(_a)s)->type_num != NPY_FLOAT))
        {PyErr_SetString(PyExc_NotImplementedError, "type(a) is not double or float"); %(fail)s;}

        if ((PyArray_DESCR(%(_b)s)->type_num != NPY_DOUBLE)
            && (PyArray_DESCR(%(_b)s)->type_num != NPY_FLOAT))
        {PyErr_SetString(PyExc_NotImplementedError, "type(b) is not double or float"); %(fail)s;}
        """

    # Conformability checks for z = dot(x, y)-shaped accumulation.
    check_dims = """
        if (Nx[0] != Nz[0])
        {
            PyErr_Format(PyExc_ValueError,
                "Shape mismatch: x has %%ld rows but z has %%ld rows",
                (long int)Nx[0], (long int)Nz[0]);
            %(fail)s;
        }
        if (Nx[1] != Ny[0])
        {
            PyErr_Format(PyExc_ValueError,
                "Shape mismatch: x has %%ld cols (and %%ld rows) but y has %%ld rows (and %%ld cols)",
                (long int)Nx[1], (long int)Nx[0], (long int)Ny[0], (long int)Ny[1]);
            %(fail)s;
        }
        if (Ny[1] != Nz[1])
        {
            PyErr_Format(PyExc_ValueError,
                "Shape mismatch: y has %%ld cols but z has %%ld cols",
                (long int)Ny[1], (long int)Nz[1]);
            %(fail)s;
        }

        // We must not raise an error when Nx[1] == 0. This would disable cases
        // that numpy.dot accept.
        """

    check_strides = """
    /*
    If some matrices are not contiguous on either dimensions,
    or have invalid strides, copy their content into a contiguous one
    */
    if ((Sx[0] < 1) || (Sx[1] < 1) || (Sx[0] MOD type_size) || (Sx[1] MOD type_size)
        || ((Sx[0] != type_size) && (Sx[1] != type_size)))
    {
        PyArrayObject * _x_copy = (PyArrayObject *) PyArray_Copy(%(_x)s);
        if (!_x_copy)
            %(fail)s
        Py_XDECREF(%(_x)s);
        %(_x)s = _x_copy;
        Sx = PyArray_STRIDES(%(_x)s);
    }
    if ((Sy[0] < 1) || (Sy[1] < 1) || (Sy[0] MOD type_size) || (Sy[1] MOD type_size)
        || ((Sy[0] != type_size) && (Sy[1] != type_size)))
    {
        PyArrayObject * _y_copy = (PyArrayObject *) PyArray_Copy(%(_y)s);
        if (!_y_copy)
            %(fail)s
        Py_XDECREF(%(_y)s);
        %(_y)s = _y_copy;
        Sy = PyArray_STRIDES(%(_y)s);
    }
    if ((Sz[0] < 1) || (Sz[1] < 1) || (Sz[0] MOD type_size) || (Sz[1] MOD type_size)
        || ((Sz[0] != type_size) && (Sz[1] != type_size)))
    {
        PyArrayObject * _z_copy = (PyArrayObject *) PyArray_Copy(%(_zout)s);
        if (!_z_copy)
            %(fail)s
        Py_XDECREF(%(_zout)s);
        %(_zout)s = _z_copy;
        Sz = PyArray_STRIDES(%(_zout)s);
    }
    """

    # Pack the C/Fortran layout of x, y, z into one integer so the gemm
    # snippets can switch() on the 8 possible transpose combinations.
    encode_strides_in_unit = """
        /*
        encode the stride structure of _x,_y,_zout into a single integer
        */
        unit |= ((Sx[1] == type_size || Nx[1]==1) ? 0x0 : (Sx[0] == type_size || Nx[0]==1) ? 0x1 : 0x2) << 8;
        unit |= ((Sy[1] == type_size || Ny[1]==1) ? 0x0 : (Sy[0] == type_size || Ny[0]==1) ? 0x1 : 0x2) << 4;
        unit |= ((Sz[1] == type_size || Nz[1]==1) ? 0x0 : (Sz[0] == type_size || Nz[0]==1) ? 0x1 : 0x2) << 0;
        """

    compute_strides = """
        /* create appropriate strides for malformed matrices that are row or column
         * vectors, or empty matrices.
         * In that case, the value of the stride does not really matter, but
         * some versions of BLAS insist that:
         *  - they are not smaller than the number of elements in the array,
         *  - they are not 0.
         */
        sx_0 = (Nx[0] > 1) ? Sx[0]/type_size : (Nx[1] + 1);
        sx_1 = (Nx[1] > 1) ? Sx[1]/type_size : (Nx[0] + 1);
        sy_0 = (Ny[0] > 1) ? Sy[0]/type_size : (Ny[1] + 1);
        sy_1 = (Ny[1] > 1) ? Sy[1]/type_size : (Ny[0] + 1);
        sz_0 = (Nz[0] > 1) ? Sz[0]/type_size : (Nz[1] + 1);
        sz_1 = (Nz[1] > 1) ? Sz[1]/type_size : (Nz[0] + 1);
        """

    begin_switch_typenum = """
        switch (type_num)
        {
        """

    case_float = """
            case NPY_FLOAT:
            {
        """

    # case_float_ab_constants = None
    # (provided by subclasses: defines the C variables `a` and `b`)

    case_float_gemm = """
                float* x = (float*)PyArray_DATA(%(_x)s);
                float* y = (float*)PyArray_DATA(%(_y)s);
                float* z = (float*)PyArray_DATA(%(_zout)s);
                char N = 'N';
                char T = 'T';
                int Nz0 = Nz[0], Nz1 = Nz[1], Nx1 = Nx[1];
                //std::cerr << (unit/256) MOD 16 << (unit / 16) MOD 16 << unit MOD 16<< '\\n';
                //double t0 = time_time();
                switch(unit)
                {
                    case 0x000: sgemm_(&N, &N, &Nz1, &Nz0, &Nx1, &a, y, &sy_0, x, &sx_0, &b, z, &sz_0); break;
                    case 0x100: sgemm_(&N, &T, &Nz1, &Nz0, &Nx1, &a, y, &sy_0, x, &sx_1, &b, z, &sz_0); break;
                    case 0x010: sgemm_(&T, &N, &Nz1, &Nz0, &Nx1, &a, y, &sy_1, x, &sx_0, &b, z, &sz_0); break;
                    case 0x110: sgemm_(&T, &T, &Nz1, &Nz0, &Nx1, &a, y, &sy_1, x, &sx_1, &b, z, &sz_0); break;
                    case 0x001: sgemm_(&T, &T, &Nz0, &Nz1, &Nx1, &a, x, &sx_0, y, &sy_0, &b, z, &sz_1); break;
                    case 0x101: sgemm_(&N, &T, &Nz0, &Nz1, &Nx1, &a, x, &sx_1, y, &sy_0, &b, z, &sz_1); break;
                    case 0x011: sgemm_(&T, &N, &Nz0, &Nz1, &Nx1, &a, x, &sx_0, y, &sy_1, &b, z, &sz_1); break;
                    case 0x111: sgemm_(&N, &N, &Nz0, &Nz1, &Nx1, &a, x, &sx_1, y, &sy_1, &b, z, &sz_1); break;
                    default: PyErr_SetString(PyExc_ValueError, "some matrix has no unit stride"); %(fail)s;
                };
                //fprintf(stderr, "Calling sgemm %%i %%i %%i %%i took %%f\\n", unit, Nz1, Nz0, Nx1, time_time() - t0);
        """

    case_double = """
            }
            break;
            case NPY_DOUBLE:
            {
        """

    # case_double_ab_constants = None
    # (provided by subclasses: defines the C variables `a` and `b`)

    case_double_gemm = """
                double* x = (double*)PyArray_DATA(%(_x)s);
                double* y = (double*)PyArray_DATA(%(_y)s);
                double* z = (double*)PyArray_DATA(%(_zout)s);
                char N = 'N';
                char T = 'T';
                int Nz0 = Nz[0], Nz1 = Nz[1], Nx1 = Nx[1];
                //std::cerr << (unit/256) MOD 16 << (unit / 16) MOD 16 << unit MOD 16<< '\\n';
                //double t0 = time_time();
                //fprintf(stderr, "unit=%%x N= %%i %%i %%i S = %%i %%i %%i %%i %%i %%i\\n", unit,
                //Nz1, Nz0, Nx1,
                //sy_0, sy_1,
                //sx_0, sx_1,
                //sz_0, sz_1
                //);
                switch(unit)
                {
                    case 0x000: dgemm_(&N, &N, &Nz1, &Nz0, &Nx1, &a, y,
                                       &sy_0, x, &sx_0, &b, z, &sz_0); break;
                    case 0x100: dgemm_(&N, &T, &Nz1, &Nz0, &Nx1, &a, y,
                                       &sy_0, x, &sx_1, &b, z, &sz_0); break;
                    case 0x010: dgemm_(&T, &N, &Nz1, &Nz0, &Nx1, &a, y,
                                       &sy_1, x, &sx_0, &b, z, &sz_0); break;
                    case 0x110: dgemm_(&T, &T, &Nz1, &Nz0, &Nx1, &a, y,
                                       &sy_1, x, &sx_1, &b, z, &sz_0); break;
                    case 0x001: dgemm_(&T, &T, &Nz0, &Nz1, &Nx1, &a, x,
                                       &sx_0, y, &sy_0, &b, z, &sz_1); break;
                    case 0x101: dgemm_(&N, &T, &Nz0, &Nz1, &Nx1, &a, x,
                                       &sx_1, y, &sy_0, &b, z, &sz_1); break;
                    case 0x011: dgemm_(&T, &N, &Nz0, &Nz1, &Nx1, &a, x,
                                       &sx_0, y, &sy_1, &b, z, &sz_1); break;
                    case 0x111: dgemm_(&N, &N, &Nz0, &Nz1, &Nx1, &a, x,
                                       &sx_1, y, &sy_1, &b, z, &sz_1); break;
                    default: PyErr_SetString(PyExc_ValueError,
                                             "some matrix has no unit stride");
                             %(fail)s;
                };
                //fprintf(stderr, "Calling dgemm %%i %%i %%i %%i took %%f\\n",
                //        unit, Nz1, Nz0, Nx1, time_time()- t0);
        """

    end_switch_typenum = """
            }
            break;
        }
        """

    def build_gemm_call(self):
        # Concatenate the snippets (including the subclass-provided
        # setup_z_Nz_Sz and *_ab_constants) into the full C body.
        return reduce(str.__add__, (
            self.declare_NS,
            self.check_xyz_rank2,
            self.setup_z_Nz_Sz,
            self.check_xyz_double_or_float,
            self.check_ab_double_or_float,
            self.check_dims,
            self.check_strides,
            self.encode_strides_in_unit,
            self.compute_strides,
            self.begin_switch_typenum,
            self.case_float,
            self.case_float_ab_constants,
            self.case_float_gemm,
            self.case_double,
            self.case_double_ab_constants,
            self.case_double_gemm,
            self.end_switch_typenum), '')

    def build_gemm_version(self):
        # Bump the first number when the C snippets above change, so cached
        # compiled modules are invalidated.
        return (13, blas_header_version())
class Gemm(GemmRelated):
    """In-place version of matrix-matrix multiplication (with accumulation).

    When a and b are scalars and x, y, and z are matrices, then

        gemm(z, a, x, y, b)

    is similar to

        b*z + a*dot(x, y)

    The difference between the two is that the top form is destructive
    on z, whereas the bottom form is not.  Gemm works in-place on the
    storage associated with z, and the L{Variable} returned by Gemm
    has a storage that will be aliased to the storage of the z
    argument. Because of this in-place computation, an L{Apply} of
    this op will destroy the L{Variable} z on which it operates.  (See
    L{DestructiveOps} for an explanation of what destroying means in
    the context of theano graphs. See L{BlasLapackSupport} for more
    optimized linear algebra operations.)
    """

    # Error messages reused by make_node's validation checks.
    E_rank = 'gemm only works for rank 2'
    E_scalar = 'gemm requires scalar argument'
    E_z_uniq = 'argument z aliased to x or y'  # TODO: justify / delete this
    E_mixed = 'gemm requires matching dtypes'
    E_float = 'gemm requires floating-point dtypes'

    __props__ = ('inplace',)
def __init__(self, inplace):
self.inplace = inplace
if self.inplace:
self.destroy_map = {0: [0]}
self.setup_z_Nz_Sz = self.setup_z_Nz_Sz_inplace
else:
self.setup_z_Nz_Sz = self.setup_z_Nz_Sz_outplace
def __str__(self):
if self.inplace:
inplace_str = 'inplace'
else:
inplace_str = 'no_inplace'
return '%s{%s}' % (self.__class__.__name__, inplace_str)
    def __setstate__(self, dct):
        """Restore pickled state and re-derive what was not serialized.

        `setup_z_Nz_Sz` is chosen from `inplace` (see __getstate__, which
        drops it), so it is recomputed here.
        """
        self.__dict__.update(dct)
        if self.inplace:
            self.setup_z_Nz_Sz = self.setup_z_Nz_Sz_inplace
        else:
            self.setup_z_Nz_Sz = self.setup_z_Nz_Sz_outplace

        # Correctly reload older pickles where _op_use_c_code and
        # destroy_map were not saved
        if '_op_use_c_code' not in self.__dict__:
            self._op_use_c_code = theano.config.cxx
        if 'destroy_map' not in self.__dict__ and self.inplace:
            self.destroy_map = {0: [0]}
def __getstate__(self):
rval = self.__dict__.copy()
# Do not serialize the setup code, it will be restored in __setstate__
# depending on the value of 'inplace'
rval.pop('setup_z_Nz_Sz')
return rval
    def make_node(self, *inputs):
        """Validate the 5 inputs (z, a, x, y, b) and build the Apply node.

        Raises
        ------
        TypeError
            On wrong arity, wrong ranks (z, x, y must be rank 2 and a, b
            rank 0), mismatched dtypes, or a non-float/complex dtype.
        InconsistencyError
            If z may be a view of x or y: the inplace variant only
            declares destruction of z, so such aliasing would be unsafe.
        """
        inputs = list(map(T.as_tensor_variable, inputs))

        if len(inputs) != 5:
            raise TypeError(
                "Wrong number of inputs for %s (expected 5, got %s)" %
                (self, len(inputs)))
        z, a, x, y, b = inputs

        # For the consistency check we don't want z to be a cached constant.
        if getattr(z, 'cached', False):
            z = copy.copy(z)
        zr, xr, yr = [set(view_roots(i)) for i in (z, x, y)]

        # We want the gemm to be inplace. When this op is inplace, it
        # declares to be inplace only on z. So to make it safe, we
        # raise an error if z can be a view on x or y.

        # I don't know if Theano currently can support that case. As
        # this case doesn't happen in our code, I won't spend time
        # investigating this. So the assert is for safety. I also
        # think there is another mechanism that would prevent this,
        # but I don't want to modify old code and risk breaking
        # something.
        if zr.intersection(xr):
            raise InconsistencyError(Gemm.E_z_uniq, (z, x))
        if zr.intersection(yr):
            raise InconsistencyError(Gemm.E_z_uniq, (z, y))

        if z.ndim != 2:
            raise TypeError(Gemm.E_rank, z)
        if a.ndim != 0:
            raise TypeError(Gemm.E_scalar, a)
        if x.ndim != 2:
            raise TypeError(Gemm.E_rank, x)
        if y.ndim != 2:
            raise TypeError(Gemm.E_rank, y)
        if b.ndim != 0:
            raise TypeError(Gemm.E_scalar, b)

        if not (z.dtype == a.dtype == x.dtype == y.dtype == b.dtype):
            raise TypeError(Gemm.E_mixed,
                            (z.dtype, a.dtype, x.dtype, y.dtype, b.dtype))
        if (not z.dtype.startswith('float') and
                not z.dtype.startswith('complex')):
            raise TypeError(Gemm.E_float, (z.dtype))

        # Output shares z's type; inplace aliasing is declared in __init__.
        output = z.type()
        return Apply(self, inputs, [output])
def perform(self, node, inp, out):
z, a, x, y, b = inp
zout, = out
assert a.shape == ()
assert b.shape == ()
if not self.inplace:
z = z.copy() # the original z will not be changed
if z.shape == ():
z.itemset(z * a + b * numpy.dot(x, y))
zout[0] = z
else:
if b == 0.0:
if a == 1.0:
z[:] = numpy.dot(x, y)
elif a == -1.0:
z[:] = -numpy.dot(x, y)
else:
z[:] = a * numpy.dot(x, y)
elif b == 1.0:
if a == 1.0:
z += numpy.dot(x, y)
elif a == -1.0:
z -= numpy.dot(x, y)
else:
z += a * numpy.dot(x, y)
else:
z *= b
z += a * numpy.dot(x, y)
zout[0] = z
def infer_shape(self, node, input_shapes):
return [input_shapes[0]]
setup_z_Nz_Sz_inplace = """
if (%(_zout)s != %(_z)s)
{
if (%(_zout)s)
{
Py_DECREF(%(_zout)s);
}
%(_zout)s = %(_z)s;
Py_INCREF(%(_zout)s);
}
Nz = PyArray_DIMS(%(_z)s);
Sz = PyArray_STRIDES(%(_z)s);
"""
setup_z_Nz_Sz_outplace = """
if ((NULL == %(_zout)s)
|| (PyArray_DIMS(%(_zout)s)[0] != PyArray_DIMS(%(_z)s)[0])
|| (PyArray_DIMS(%(_zout)s)[1] != PyArray_DIMS(%(_z)s)[1])
|| (PyArray_STRIDES(%(_zout)s)[0] <= 0)
|| (PyArray_STRIDES(%(_zout)s)[1] <= 0)
|| (PyArray_STRIDES(%(_zout)s)[0] MOD type_size)
|| (PyArray_STRIDES(%(_zout)s)[1] MOD type_size)
|| ((PyArray_STRIDES(%(_zout)s)[0] != type_size)
&& (PyArray_STRIDES(%(_zout)s)[1] != type_size)))
{
Py_XDECREF(%(_zout)s);
npy_intp dims[2];
dims[0] = PyArray_DIMS(%(_z)s)[0];
dims[1] = PyArray_DIMS(%(_z)s)[1];
%(_zout)s = (PyArrayObject*)PyArray_SimpleNew(2, dims,
PyArray_TYPE(%(_z)s));
//fprintf(stderr, "Gemm Allocating %%i %%i\\n", dims[0], dims[1]);
if(!%(_zout)s) {
PyErr_SetString(PyExc_MemoryError,
"failed to alloc gemm_no_inplace output");
%(fail)s
}
}
Nz = PyArray_DIMS(%(_zout)s);
Sz = PyArray_STRIDES(%(_zout)s);
if (PyArray_DESCR(%(_zout)s)->type_num == NPY_FLOAT)
{
float * zoutdata = (float*)PyArray_DATA(%(_zout)s);
int zoi = Sz[0] / sizeof(float);
int zoj = Sz[1] / sizeof(float);
const float * zdata = (float*)PyArray_DATA(%(_z)s);
int zi = PyArray_STRIDES(%(_z)s)[0]/sizeof(float);
int zj = PyArray_STRIDES(%(_z)s)[1]/sizeof(float);
for (int i = 0; i < Nz[0]; ++i)
{
for (int j = 0; j < Nz[1]; ++j)
{
zoutdata[zoi*i + zoj*j] = zdata[zi*i + zj*j];
}
}
}
else if (PyArray_DESCR(%(_zout)s)->type_num == NPY_DOUBLE)
{
double * zoutdata = (double*) PyArray_DATA(%(_zout)s);
int zoi = Sz[0] / sizeof(double);
int zoj = Sz[1] / sizeof(double);
const double * zdata = (double*)PyArray_DATA(%(_z)s);
int zi = PyArray_STRIDES(%(_z)s)[0]/sizeof(double);
int zj = PyArray_STRIDES(%(_z)s)[1]/sizeof(double);
for (int i = 0; i < Nz[0]; ++i)
{
for (int j = 0; j < Nz[1]; ++j)
{
zoutdata[zoi*i + zoj*j] = zdata[zi*i + zj*j];
}
}
}
else
{
PyErr_SetString(PyExc_AssertionError,
"neither float nor double dtype");
%(fail)s
}
"""
case_float_ab_constants = """
#define REAL float
float a = (PyArray_DESCR(%(_a)s)->type_num == NPY_FLOAT)
? (REAL)(((float*)PyArray_DATA(%(_a)s))[0])
: (REAL)(((double*)PyArray_DATA(%(_a)s))[0]);
float b = (PyArray_DESCR(%(_b)s)->type_num == NPY_FLOAT) ?
(REAL)(((float*)PyArray_DATA(%(_b)s))[0])
: (REAL)(((double*)PyArray_DATA(%(_b)s))[0]);
#undef REAL
"""
case_double_ab_constants = """
#define REAL double
double a = (PyArray_DESCR(%(_a)s)->type_num == NPY_FLOAT)
? (REAL)(((float*)PyArray_DATA(%(_a)s))[0])
: (REAL)(((double*)PyArray_DATA(%(_a)s))[0]);
double b = (PyArray_DESCR(%(_b)s)->type_num == NPY_FLOAT) ?
(REAL)(((float*)PyArray_DATA(%(_b)s))[0])
: (REAL)(((double*)PyArray_DATA(%(_b)s))[0]);
#undef REAL
"""
    def c_code(self, node, name, inp, out, sub):
        """Generate the C implementation for this gemm node.

        Raises MethodNotDefined for complex dtypes (Theano then falls
        back to perform()); uses the generic GemmRelated C code when no
        BLAS link flags are configured.
        """
        _z, _a, _x, _y, _b = inp
        _zout, = out
        # No BLAS C implementation for complex dtypes.
        if node.inputs[0].type.dtype.startswith('complex'):
            raise utils.MethodNotDefined('%s.c_code'
                                         % self.__class__.__name__)
        # Without ldflags we cannot link against a BLAS gemm; fall back to
        # the base-class C code.
        if not config.blas.ldflags:
            return super(Gemm, self).c_code(node, name,
                                            (_z, _a, _x, _y, _b), (_zout, ),
                                            sub)
        # build_gemm_call() stitches the class-level template fragments
        # together; locals() supplies the %(_z)s/%(_a)s/... variable names.
        full_code = self.build_gemm_call() % dict(locals(), **sub)
        return full_code
def c_code_cache_version(self):
gv = self.build_gemm_version()
if gv:
return (5,) + gv
else:
return gv
# Singleton Gemm instances; the destructive variant is only introduced by
# the inplace optimizations registered later in this file.
gemm_inplace = Gemm(inplace=True)
gemm_no_inplace = Gemm(inplace=False)
# For the user interface. Theano optimization will make them inplace
gemm = gemm_no_inplace
# Pretty-printing names for debugprint/pprint output.
pprint.assign(gemm_inplace, FunctionPrinter('gemm_inplace'))
pprint.assign(gemm_no_inplace, FunctionPrinter('gemm_no_inplace'))
def res_is_a(node, op, maxclients=None):
    """Truthy iff *node* was produced by *op*.

    When *maxclients* is given, additionally require that the node has at
    most that many clients.  Mirrors the original short-circuit: the
    result is ``node.owner`` itself when that is falsy.
    """
    under_limit = maxclients is None or len(node.clients) <= maxclients
    return (node.owner and
            node.owner.op == op and
            under_limit)
def _as_scalar(res, dtype=None):
    """Return None or a TensorVariable whose type is in T.float_scalar_types

    Strips DimShuffle wrappers from an all-broadcastable variable so it
    can be used as a gemm/dot22scalar coefficient.  Returns None (falls
    through implicitly) when *res* is not all-broadcastable, or when an
    integer scalar cannot be upcast to *dtype* without changing it.
    """
    if dtype is None:
        dtype = config.floatX
    if numpy.all(res.type.broadcastable):
        while res.owner and isinstance(res.owner.op, T.DimShuffle):
            res = res.owner.inputs[0]
        # may still have some number of True's
        if res.type.broadcastable:
            rval = res.dimshuffle()
        else:
            rval = res
        if rval.type.dtype[:3] in ('int', 'uin'):
            # We check that the upcast of res and dtype won't change dtype.
            # If dtype is float64, we will cast int64 to float64.
            # This is valid when res is a scalar used as input to a dot22
            # as the cast of the scalar can be done before or after the dot22
            # and this will give the same result.
            if theano.scalar.upcast(res.dtype, dtype) == dtype:
                return T.cast(rval, dtype)
            else:
                return None
        return rval
def _is_real_matrix(res):
return (res.type.dtype in ('float32', 'float64') and
res.type.ndim == 2 and
res.type.broadcastable[0] is False and
res.type.broadcastable[1] is False) # cope with tuple vs. list
def _is_real_vector(res):
return (res.type.dtype in ('float32', 'float64') and
res.type.ndim == 1 and
res.type.broadcastable[0] is False)
def _beta_L_plus_alpha_M(beta, L, alpha, M, recurse_flip=True):
    """Try to rewrite ``beta * L + alpha * M`` as a single gemm.

    On success returns ``(replacement_list, old_dot22_node)``; on failure
    returns ``(False, False)``.  When M is not dot-shaped and
    *recurse_flip* is set, the two terms are swapped and retried once.
    """
    # print 'BETA L + ALPHA M', beta, L, alpha, M, recurse_flip
    # EXPRESSION: (beta * L) + (alpha * M)
    # we've already checked the client counts, now just make the type check.
    # if res_is_a(M, _dot22, 1):
    if M.owner and M.owner.op == _dot22:
        Ml, Mr = M.owner.inputs
        rval = [gemm_no_inplace(L, alpha, Ml, Mr, beta)]
        # print 'GEMM 0', rval, beta, L, alpha, M
        return rval, M
    # it also might be the case that there is a dimshuffle between the +
    # and the dot22. local_dot_to_dot22 in particular will put in such things.
    if (M.owner and isinstance(M.owner.op, T.DimShuffle) and
            M.owner.inputs[0].owner and
            isinstance(M.owner.inputs[0].owner.op, Dot22)):
        MM = M.owner.inputs[0]
        if M.owner.op.new_order == (0,):
            # it is making a column MM into a vector
            MMl, MMr = MM.owner.inputs
            g = gemm_no_inplace(L.dimshuffle(0, 'x'),
                                alpha, MMl, MMr, beta)
            rval = [g.dimshuffle(0)]
            return rval, MM
        if M.owner.op.new_order == (1,):
            # it is making a row MM into a vector
            MMl, MMr = MM.owner.inputs
            g = gemm_no_inplace(L.dimshuffle('x', 0),
                                alpha, MMl, MMr, beta)
            rval = [g.dimshuffle(1)]
            return rval, MM
        if len(M.owner.op.new_order) == 0:
            # it is making a row MM into a vector
            MMl, MMr = MM.owner.inputs
            g = gemm_no_inplace(L.dimshuffle('x', 'x'),
                                alpha, MMl, MMr, beta)
            rval = [g.dimshuffle()]
            return rval, MM
    # this is False'd out because of inadequate testing.
    # TODO see ticket #237
    # NOTE(review): this dead branch returns a bare list, not the
    # (rval, old_dot22) pair the callers expect — re-check before enabling.
    if False and res_is_a(M, gemm_no_inplace, 1):
        # EXPRESSION: (beta * L) + (alpha * (gemm_no_inplace(G, a, u, v, b)))
        # EXPRESSION: (beta * L) + alpha * (b * G) + alpha * a * dot(u, v)
        G, a, u, v, b = M.owner.inputs
        # print 'GEMM', G, L
        if res_is_a(G, _dot22, 1):
            # EXPRESSION: (beta * L) +
            #            (alpha * (gemm_no_inplace(dot(x,y), a, u, v, b)))
            x, y = G.owner.inputs
            # EXPRESSION: (beta * L) + (alpha * ((b*dot(x,y) +
            #            (a * dot(u, v)))))
            # EXPRESSION: (beta * L) + (alpha*b*dot(x,y)) +
            #            (alpha * a * dot(u, v))
            rval = [gemm_no_inplace(gemm_no_inplace(L, alpha * b, x, y, beta),
                                    alpha * a, u, v, 1.0)]
            return rval
        if (G is L):
            # EXPRESSION: (beta * L) + (alpha*b*L) + (alpha * a * dot(u, v))
            rval = [gemm_no_inplace(L, alpha * a, u, v, alpha * b + beta)]
            return rval
        if (1.0 != alpha):
            # at the very least, move the alpha inside the gemm_no_inplace
            rval = [beta * L + gemm_no_inplace(G, alpha * a, u, v, alpha * b)]
            return rval
    if recurse_flip:
        return _beta_L_plus_alpha_M(alpha, M, beta, L, recurse_flip=False)
    else:
        return False, False
def _gemm_canonicalize(r, scale, rval, maxclients):
    """Flatten *r* into *rval* as a sum of scaled terms.

    Recursively walks add/sub/neg/mul nodes, accumulating entries into
    the *rval* list.  Float/complex 1-d or 2-d terms are appended as
    ``(scale, variable)`` pairs; terms of other ndim/dtype are appended
    pre-scaled (as bare variables).  Returns *rval* (or None when *r*
    has no ``type.broadcastable``).
    """
    # Tries to interpret node as a sum of scalars * (vectors or matrices)
    def scaled(thing):
        # Fold the current scale into an expression, avoiding a mul for
        # the common +/-1 cases.
        if scale == 1:
            return thing
        if scale == -1:
            return -thing
        else:
            return scale * thing
    try:
        r.type.broadcastable
    except Exception:
        return None

    if ((r.type.ndim not in (1, 2)) or
            r.type.dtype not in ('float32', 'float64',
                                 'complex64', 'complex128')):
        rval.append(scaled(r))
        return rval
    if maxclients and len(getattr(r, 'clients', [])) > maxclients:
        # Too widely shared to fold into a gemm; keep the term opaque.
        rval.append((scale, r))
        return rval

    if r.owner and r.owner.op == T.sub:
        _gemm_canonicalize(r.owner.inputs[0], scale, rval, 1)
        _gemm_canonicalize(r.owner.inputs[1], -scale, rval, 1)
    elif r.owner and r.owner.op == T.add:
        for i in r.owner.inputs:
            _gemm_canonicalize(i, scale, rval, 1)
    elif r.owner and r.owner.op == T.neg:
        _gemm_canonicalize(r.owner.inputs[0], -scale, rval, 1)
    elif r.owner and r.owner.op == T.mul:
        # Partition the mul's inputs into scalars, vectors and matrices;
        # bail out (opaque term) on anything else.
        scalars = []
        vectors = []
        matrices = []
        for i in r.owner.inputs:
            if numpy.all(i.type.broadcastable):
                # Strip DimShuffle wrappers around an all-broadcastable
                # (scalar-like) input.
                while i.owner and isinstance(i.owner.op, T.DimShuffle):
                    i = i.owner.inputs[0]
                if i.type.broadcastable:
                    scalars.append(i.dimshuffle())
                else:
                    scalars.append(i)
            elif _is_real_vector(i):
                vectors.append(i)
            elif _is_real_matrix(i):
                matrices.append(i)
            else:
                # just put the original arguments as in the base case
                rval.append((scale, r))
                return rval
        if len(matrices) == 1:
            assert len(vectors) == 0
            m = matrices[0]
            if len(scalars) == 0:
                _gemm_canonicalize(m, scale, rval, 1)
            elif len(scalars) == 1:
                _gemm_canonicalize(m, scaled(scalars[0]), rval, 1)
            else:
                _gemm_canonicalize(m, T.mul(scaled(scalars[0]), *scalars[1:]),
                                   rval, 1)
        elif len(vectors) == 1:
            assert len(matrices) == 0
            v = vectors[0]
            if len(scalars) == 0:
                _gemm_canonicalize(v, scale, rval, 1)
            elif len(scalars) == 1:
                _gemm_canonicalize(v, scaled(scalars[0]), rval, 1)
            else:
                _gemm_canonicalize(v, T.mul(scaled(scalars[0]),
                                            *scalars[1:]), rval, 1)
        else:  # lets not open this up
            rval.append((scale, r))
    else:
        rval.append((scale, r))
    return rval
def _factor_canonicalized(lst):
# remove duplicates from canonicalized list
# we only delete out of the right end of the list,
# once i has touched a list element, it is permantent
lst = list(lst)
# print 'FACTOR', lst
# for t in lst:
# if not isinstance(t, (list, tuple)):
# t = (t,)
# for e in t:
# try:
# theano.printing.debugprint(e)
# except TypeError:
# print e, type(e)
i = 0
while i < len(lst) - 1:
try:
s_i, M_i = lst[i]
except Exception:
i += 1
continue
j = i + 1
while j < len(lst):
try:
s_j, M_j = lst[j]
except Exception:
j += 1
continue
if M_i is M_j:
s_i = s_i + s_j
lst[i] = (s_i, M_i)
del lst[j]
else:
j += 1
i += 1
return lst
def _gemm_from_factored_list(lst):
    """
    Returns None, or a list to replace node.outputs.

    Scans every pair of (scale, matrix) terms in *lst* looking for one
    that _beta_L_plus_alpha_M can turn into a gemm; on success returns
    ``(replacement_outputs, old_dot22_node)``.  Falls through (implicit
    None) when no pair qualifies.
    """
    lst2 = []
    # Remove the tuple that can't be cast correctly.
    # This can happen when we try to cast a complex to a real
    for sM in lst:
        # Make every pair in list have matching dtypes
        # sM can be a tuple of 2 elements or a theano variable.
        if isinstance(sM, tuple):
            sm0, sm1 = sM
            sm0 = T.as_tensor_variable(sm0)
            if theano.scalar.upcast(sm0.dtype, sm1.dtype) == sm1.dtype:
                lst2.append((T.cast(sm0, sm1.dtype), sM[1]))
    lst = lst2

    def item_to_var(t):
        # Turn a (scale, M) pair back into a single variable expression;
        # non-pairs pass through unchanged.
        try:
            s, M = t
        except Exception:
            return t
        if s == 1:
            return M
        if s == -1:
            return -M
        return s * M

    # Try every pair in the sM_list, trying to turn it into a gemm operation
    for i in xrange(len(lst) - 1):
        s_i, M_i = lst[i]

        for j in xrange(i + 1, len(lst)):
            s_j, M_j = lst[j]

            if M_i.type != M_j.type:
                continue

            # print 'TRYING', (s_i, M_i, s_j, M_j)

            gemm_of_sM_list, old_dot22 = _beta_L_plus_alpha_M(s_i, M_i,
                                                              s_j, M_j)
            # print 'GOT IT', gemm_of_sM_list
            if gemm_of_sM_list:
                assert len(gemm_of_sM_list) == 1
                # Re-attach all the terms that were not part of the pair
                # we just fused.
                add_inputs = [item_to_var(input)
                              for k, input in enumerate(lst) if k not in (i, j)]
                add_inputs.extend(gemm_of_sM_list)
                if len(add_inputs) > 1:
                    rval = [T.add(*add_inputs)]
                else:
                    rval = add_inputs
                # print "RETURNING GEMM THIGN", rval
                return rval, old_dot22
def _gemm_from_node2(node):
    """
    Try to express *node* (an add/sub/mul/neg graph) as a single Gemm.

    Returns a 4-tuple ``(replacement, canonicalize_time, factor_time,
    from_list_time)`` where *replacement* is None when no Gemm could be
    built.

    :todo: In many expressions, there are many ways to turn it into a
        gemm. For example dot(a,b) + c + d. This function should
        return all of them, so that if one version of gemm causes a
        cycle in the graph, then another application of gemm can be
        tried.
    """
    terms = []
    tic = time.time()
    _gemm_canonicalize(node.outputs[0], 1.0, terms, 0)
    canon_time = time.time() - tic
    if len(terms) <= 1:
        return None, canon_time, 0, 0
    tic = time.time()
    terms = _factor_canonicalized(terms)
    factor_time = time.time() - tic
    tic = time.time()
    replacement = _gemm_from_factored_list(terms)
    from_list_time = time.time() - tic
    # It can happen that _factor_canonicalized and
    # _gemm_from_factored_list return a node with an incorrect
    # type.  This happens in particular when one of the scalar
    # factors forces the upcast of the whole expression.  In that
    # case, we simply skip that candidate for Gemm.  This was
    # discussed in
    # http://groups.google.com/group/theano-dev/browse_thread/thread/a3096c82856e3ad5,
    # but never made it into a trac ticket.
    if replacement and (replacement[0][0].type == node.outputs[0].type):
        return replacement, canon_time, factor_time, from_list_time
    return None, canon_time, 0, 0
class GemmOptimizer(Optimizer):
    """Graph optimizer for inserting Gemm operations.

    Repeatedly sweeps the graph (in reverse toposort order) looking for
    elemwise add/sub/neg/mul nodes that _gemm_from_node2 can fuse into a
    Gemm, until a sweep makes no replacement.  Returns a profiling tuple
    from apply().
    """
    def __init__(self):
        Optimizer.__init__(self)
        self.warned = False

    def add_requirements(self, fgraph):
        # Needed for fgraph.replace_all_validate_remove below.
        fgraph.attach_feature(toolbox.ReplaceValidate())

    def apply(self, fgraph):
        did_something = True
        nb_iter = 0
        nb_replacement = 0
        nb_replacement_didn_t_remove = 0
        nb_inconsistency_make = 0
        nb_inconsistency_replace = 0
        time_canonicalize = 0
        time_factor_can = 0
        time_factor_list = 0
        time_toposort = 0
        if fgraph.profile:
            validate_before = fgraph.profile.validate_time
            callbacks_before = fgraph.execute_callbacks_times.copy()
            callback_before = fgraph.execute_callbacks_time

        def on_import(new_node):
            # Closure over the loop variables below: nodes created by a
            # replacement are appended so the current sweep revisits them.
            if new_node is not node:
                nodelist.append(new_node)
        u = theano.gof.opt.Updater(on_import, None, None)
        fgraph.attach_feature(u)
        while did_something:
            nb_iter += 1
            t0 = time.time()
            nodelist = theano.gof.graph.io_toposort(fgraph.inputs, fgraph.outputs)
            time_toposort += time.time() - t0
            did_something = False
            nodelist.reverse()
            for node in nodelist:
                if not (isinstance(node.op, T.Elemwise) and
                        isinstance(node.op.scalar_op,
                                   (theano.scalar.Add, theano.scalar.Sub,
                                    theano.scalar.Neg, theano.scalar.Mul))):
                    continue
                if node not in fgraph.apply_nodes:
                    # This mean that we already removed this node from
                    # the graph
                    continue
                try:
                    new_outputs, time1, time2, time3 = _gemm_from_node2(node)
                    time_canonicalize += time1
                    time_factor_can += time2
                    time_factor_list += time3
                except InconsistencyError:
                    nb_inconsistency_make += 1
                    continue
                if new_outputs:
                    new_outputs, old_dot22 = new_outputs
                    assert len(new_outputs) == len(node.outputs)
                    try:
                        fgraph.replace_all_validate_remove(
                            list(zip(node.outputs, new_outputs)),
                            [old_dot22],
                            reason='GemmOptimizer',
                            # For now we disable the warning as we know case
                            # that we need to fix.
                            warn=False,  # warn=not self.warned
                        )
                        did_something = True
                        nb_replacement += 1
                    except InconsistencyError:
                        # TODO: retry other applications of gemm (see comment
                        # in _gemm_from_node)
                        nb_inconsistency_replace += 1
                    except ReplacementDidntRemovedError:
                        nb_replacement_didn_t_remove += 1
                        self.warned = True
        fgraph.remove_feature(u)
        if fgraph.profile:
            validate_time = fgraph.profile.validate_time - validate_before
            callback_time = fgraph.execute_callbacks_time - callback_before
            callbacks_time = {}
            for k, v in iteritems(fgraph.execute_callbacks_times):
                if k in callbacks_before:
                    callbacks_time[k] = v - callbacks_before[k]
                else:
                    callbacks_time[k] = v
        else:
            validate_time = None
            callback_time = None
            callbacks_time = {}
        return (self, nb_iter, nb_replacement, nb_replacement_didn_t_remove,
                nb_inconsistency_make, nb_inconsistency_replace,
                time_canonicalize, time_factor_can,
                time_factor_list, time_toposort,
                validate_time, callback_time, callbacks_time,)

    @staticmethod
    def print_profile(stream, prof, level=0):
        # prof is the tuple returned by apply(); indices documented there.
        blanc = ('    ' * level)
        print(blanc, "GemmOptimizer", file=stream)
        print(blanc, " nb_iter", prof[1], file=stream)
        print(blanc, " nb_replacement", prof[2], file=stream)
        print(blanc, " nb_replacement_didn_t_remove", prof[3], file=stream)
        print(blanc, " nb_inconsistency_make", prof[4], file=stream)
        print(blanc, " nb_inconsistency_replace", prof[5], file=stream)
        print(blanc, " time_canonicalize", prof[6], file=stream)
        print(blanc, " time_factor_can", prof[7], file=stream)
        print(blanc, " time_factor_list", prof[8], file=stream)
        print(blanc, " time_toposort", prof[9], file=stream)
        print(blanc, " validate_time", prof[10], file=stream)
        print(blanc, " callback_time", prof[11], file=stream)
        # NOTE(review): prof[11] may be None when profiling was off —
        # presumably print_profile is only called with profiling on.
        if prof[11] > 1:
            print(blanc, " callbacks_time", file=stream)
            for i in sorted(iteritems(prof[12]), key=lambda a: a[1]):
                if i[1] > 0:
                    # Fixed: previously written to stdout instead of the
                    # caller-supplied stream like every other line here.
                    print(i, file=stream)
class Dot22(GemmRelated):
    """Compute a matrix-matrix product.
    This is a specialization of the more general Dot().
    Both inputs must be 2-d and share one float/complex dtype; the C
    implementation reuses the gemm machinery with a = 1 and b = 0.
    """
    def make_node(self, x, y):
        dtypes = ('float32', 'float64', 'complex64', 'complex128')
        if x.type.ndim != 2 or x.type.dtype not in dtypes:
            raise TypeError(x)
        if y.type.ndim != 2 or y.type.dtype not in dtypes:
            raise TypeError(y)
        if y.type.dtype != x.type.dtype:
            raise TypeError('dtype mismatch to Dot22')
        # Output rows broadcast like x's rows, columns like y's columns.
        bz = (x.type.broadcastable[0], y.type.broadcastable[1])
        outputs = [T.tensor(x.type.dtype, bz)]
        return Apply(self, [x, y], outputs)

    def perform(self, node, inp, out):
        # Pure-numpy fallback implementation.
        x, y = inp
        z, = out
        try:
            z[0] = numpy.asarray(numpy.dot(x, y))
        except ValueError as e:
            # The error raised by numpy has no shape information, we mean to
            # add that
            e.args = e.args + (x.shape, y.shape)
            raise

    def infer_shape(self, node, input_shapes):
        # (n, k) dot (k, m) -> (n, m)
        return [[input_shapes[0][0], input_shapes[1][1]]]

    # C fragment: (re)allocate the output when its shape doesn't match
    # (x.shape[0], y.shape[1]); no copy of prior contents is needed since
    # b == 0.
    setup_z_Nz_Sz = """
        if ((NULL == %(_zout)s)
            || (PyArray_DIMS(%(_zout)s)[0] != PyArray_DIMS(%(_x)s)[0])
            || (PyArray_DIMS(%(_zout)s)[1] != PyArray_DIMS(%(_y)s)[1]))
        {
            if (NULL != %(_zout)s) Py_XDECREF(%(_zout)s);
            npy_intp dims[2];
            dims[0] = PyArray_DIMS(%(_x)s)[0];
            dims[1] = PyArray_DIMS(%(_y)s)[1];
            %(_zout)s = (PyArrayObject*)PyArray_SimpleNew(2, dims,
                            PyArray_TYPE(%(_x)s));
            //fprintf(stderr, "Dot Allocating %%i %%i\\n", dims[0], dims[1]);
            if(!%(_zout)s) {
                PyErr_SetString(PyExc_MemoryError,
                                "failed to alloc dot22 output");
                %(fail)s
            }
        }
        Nz = PyArray_DIMS(%(_zout)s);
        Sz = PyArray_STRIDES(%(_zout)s);

        """
    # No scalar inputs to validate for a plain dot22.
    check_ab_double_or_float = ""
    # Fixed coefficients: out = 1 * dot(x, y) + 0 * out.
    case_float_ab_constants = """
                float a = 1.0;
                float b = 0.0;
        """
    case_double_ab_constants = """
                double a = 1.0;
                double b = 0.0;
        """
    def c_code(self, node, name, inp, out, sub):  # DEBUG
        _x, _y = inp
        _zout, = out
        # Complex dtypes fall back to perform().
        if node.inputs[0].type.dtype.startswith('complex'):
            raise utils.MethodNotDefined('%s.c_code'
                                         % self.__class__.__name__)
        # NOTE(review): this guard uses c_libraries() while Gemm.c_code
        # checks config.blas.ldflags — presumably equivalent; confirm.
        if len(self.c_libraries()) <= 0:
            return super(Dot22, self).c_code(node, name, (_x, _y),
                                             (_zout, ), sub)
        full_code = self.build_gemm_call() % dict(locals(), **sub)
        return full_code

    def c_code_cache_version(self):
        gv = self.build_gemm_version()
        if gv:
            return (2,) + gv
        else:
            return gv
# Singleton instance used by the local optimizers below.
_dot22 = Dot22()
@local_optimizer([T.Dot])
def local_dot_to_dot22(node):
    """Lower tensor.Dot to the BLAS-friendly _dot22, padding vector
    operands with broadcastable dimensions as needed.

    This works for tensor.outer too because basic.outer is a macro that
    produces a dot(dimshuffle,dimshuffle) of the vector-vector form below.
    """
    if not isinstance(node.op, T.Dot):
        return
    x, y = node.inputs
    if y.type.dtype != x.type.dtype:
        # TODO: upcast one so the types match
        _logger.info('Not optimizing dot with inputs %s %s %s %s',
                     x, y, x.type, y.type)
        return
    if y.type.dtype in ('float32', 'float64', 'complex64', 'complex128'):
        dims = (x.ndim, y.ndim)
        if dims == (2, 2):
            # matrix @ matrix: a direct dot22
            return [_dot22(x, y)]
        if dims == (2, 1):
            # matrix @ vector: make y a column, then drop the extra dim
            return [_dot22(x, y.dimshuffle(0, 'x')).dimshuffle(0)]
        if dims == (1, 2):
            # vector @ matrix: make x a row, then drop the extra dim
            return [_dot22(x.dimshuffle('x', 0), y).dimshuffle(1)]
        if dims == (1, 1):
            # vector @ vector: 1x1 result squeezed down to a scalar
            return [_dot22(x.dimshuffle('x', 0),
                           y.dimshuffle(0, 'x')).dimshuffle()]
    _logger.info('Not optimizing dot with inputs %s %s %s %s',
                 x, y, x.type, y.type)
@local_optimizer([gemm_no_inplace], inplace=True)
def local_inplace_gemm(node):
    """Swap a gemm_no_inplace node for its destructive counterpart."""
    if node.op != gemm_no_inplace:
        return
    return [gemm_inplace(*node.inputs)]
@local_optimizer([gemv_no_inplace], inplace=True)
def local_inplace_gemv(node):
    """Swap a gemv_no_inplace node for its destructive counterpart."""
    if node.op != gemv_no_inplace:
        return
    return [gemv_inplace(*node.inputs)]
@local_optimizer([ger], inplace=True)
def local_inplace_ger(node):
    """Swap a ger node for its destructive counterpart."""
    if node.op != ger:
        return
    return [ger_destructive(*node.inputs)]
@local_optimizer([gemm_no_inplace])
def local_gemm_to_gemv(node):
    """GEMM acting on row or column matrices -> GEMV."""
    if node.op != gemm_no_inplace:
        return
    z, a, x, y, b = node.inputs
    if z.broadcastable == x.broadcastable == (True, False):
        # z and x are single-row matrices: transpose the product into a
        # matrix-vector multiply, then restore the row shape.
        gv = gemv_no_inplace(z.dimshuffle(1), a, y.T, x.dimshuffle(1), b)
        return [gv.dimshuffle('x', 0)]
    if z.broadcastable == y.broadcastable == (False, True):
        # z and y are single-column matrices: a direct matrix-vector
        # multiply, reshaped back into a column.
        gv = gemv_no_inplace(z.dimshuffle(0), a, x, y.dimshuffle(0), b)
        return [gv.dimshuffle(0, 'x')]
@local_optimizer([gemm_no_inplace])
def local_gemm_to_ger(node):
    """GEMM computing an outer-product -> GER."""
    if node.op != gemm_no_inplace:
        return
    z, a, x, y, b = node.inputs
    if not (x.broadcastable[1] and y.broadcastable[0]):
        return
    # x is a column and y is a row, so the product is an outer product.
    col = x.dimshuffle(0)
    row = y.dimshuffle(1)
    try:
        b_value = T.get_scalar_constant_value(b)
    except T.NotScalarConstantError:
        # b isn't a constant, GEMM is doing useful pre-scaling
        return
    if b_value == 1:
        # best case: a natural GER accumulating into z
        return [ger(z, a, col, row)]
    if b_value == 0:
        # GER on zeros_like should be faster than GEMM
        empty = T.zeros([x.shape[0], y.shape[1]], x.dtype)
        return [ger(empty, a, col, row)]
    # if b is another constant, then z is being usefully
    # pre-scaled and GER isn't really the right tool for the job.
    return
# TODO: delete this optimization when we have the proper dot->gemm->ger pipeline
#       working
@local_optimizer([_dot22])
def local_dot22_to_ger_or_gemv(node):
    """dot22 computing an outer-product -> GER (or a GEMV when one
    operand is effectively a vector)."""
    if node.op != _dot22:
        return
    x, y = node.inputs
    x_bcast = x.broadcastable
    y_bcast = y.broadcastable
    one = T.as_tensor_variable(numpy.asarray(1, dtype=x.dtype))
    zero = T.as_tensor_variable(numpy.asarray(0, dtype=x.dtype))
    if x_bcast[1] and y_bcast[0]:
        # column times row: an outer product, i.e. a GER update of zeros
        col = x.dimshuffle(0)
        row = y.dimshuffle(1)
        out = ger(T.zeros([x.shape[0], y.shape[1]], dtype=x.dtype),
                  one, col, row)
        return [out]
    if x_bcast[0] and y_bcast[1]:
        # row times column: a dot product; Theano has no sdot/ddot op,
        # but gemv is better than _dot22 here
        vec = x.dimshuffle(1)
        out = gemv_no_inplace(T.AllocEmpty(x.dtype)(1), one, y.T, vec, zero)
        return [out.dimshuffle('x', 0)]
    if x_bcast[0] and not y_bcast[0] and not y_bcast[1]:
        # x is a row vector, y a matrix: a gemv against y.T
        vec = x.dimshuffle(1)
        out = gemv_no_inplace(T.AllocEmpty(x.dtype)(y.shape[1]),
                              one, y.T, vec, zero)
        return [out.dimshuffle('x', 0)]
    if not x_bcast[0] and not x_bcast[1] and y_bcast[1]:
        # x is a matrix, y a column vector: a plain gemv
        vec = y.dimshuffle(0)
        out = gemv_no_inplace(T.AllocEmpty(x.dtype)(x.shape[0]),
                              one, x, vec, zero)
        return [out.dimshuffle(0, 'x')]
#################################
#
# Set up the BlasOpt optimizer
#
#################################
blas_optdb = SequenceDB()
# run after numerical stability optimizations (1.5)
optdb.register('BlasOpt', blas_optdb, 1.7, 'fast_run', 'fast_compile')
# run before specialize (2.0) because specialize is basically a
# free-for-all that makes the graph crazy.
# fast_compile is needed to have GpuDot22 created.
blas_optdb.register('local_dot_to_dot22',
                    in2out(local_dot_to_dot22),
                    0, 'fast_run', 'fast_compile')
blas_optdb.register('gemm_optimizer',
                    GemmOptimizer(),
                    10, 'fast_run')
# Cleanup pass: demote gemm/dot22 nodes to gemv/ger where a cheaper BLAS
# call applies; local_dimshuffle_lift helps expose those patterns.
blas_optdb.register('local_gemm_to_gemv',
                    EquilibriumOptimizer([local_gemm_to_gemv,
                                          local_gemm_to_ger,
                                          local_dot22_to_ger_or_gemv,
                                          local_dimshuffle_lift],
                                         max_use_ratio=5,
                                         ignore_newtrees=False),
                    15, 'fast_run')
# After destroyhandler(49.5) but before we try to make elemwise things
# inplace (75)
blas_opt_inplace = in2out(local_inplace_gemm,
                          local_inplace_gemv,
                          local_inplace_ger,
                          name="blas_opt_inplace")
optdb.register('InplaceBlasOpt',
               blas_opt_inplace,
               70.0, 'fast_run', 'inplace', 'blas_opt_inplace')
class Dot22Scalar(GemmRelated):
    """Compute a matrix-matrix product.
    This is a specialization of the more general Dot()
    Used to call optimized gemm implementation.
    Also used to generate a gemm later.
    compute scalar*dot(x,y).
    """
    def make_node(self, x, y, a):
        # a is the scalar multiplier; x and y are the 2-d operands.
        if a.ndim != 0:
            raise TypeError(Gemm.E_scalar, a)
        if x.ndim != 2:
            raise TypeError(Gemm.E_rank, x)
        if y.ndim != 2:
            raise TypeError(Gemm.E_rank, y)

        if not (a.dtype == x.dtype == y.dtype):
            raise TypeError('Dot22Scalar requires matching dtypes',
                            (a.dtype, x.dtype, y.dtype))

        if (not a.dtype.startswith('float') and
                not a.dtype.startswith('complex')):
            raise TypeError('Dot22Scalar requires float or complex args',
                            a.dtype)

        # Output rows broadcast like x's rows, columns like y's columns.
        bz = [x.type.broadcastable[0], y.type.broadcastable[1]]
        outputs = [T.tensor(x.type.dtype, bz)]
        return Apply(self, [x, y, a], outputs)

    def perform(self, node, inp, out):
        # Pure-numpy fallback implementation.
        x, y, scalar = inp
        z, = out
        try:
            z[0] = numpy.asarray(scalar * numpy.dot(x, y))
        except ValueError as e:
            # The error raised by numpy has no shape information, we
            # mean to add that
            e.args = e.args + (x.shape, y.shape)
            raise

    def infer_shape(self, node, input_shapes):
        # (n, k) dot (k, m) -> (n, m); the scalar doesn't affect shape.
        return [[input_shapes[0][0], input_shapes[1][1]]]

    # Output allocation is identical to the plain dot22 case.
    setup_z_Nz_Sz = Dot22.setup_z_Nz_Sz

    # C fragment: the scalar must be stored as float32 or float64.
    check_ab_double_or_float = """
        if ((PyArray_DESCR(%(_a)s)->type_num != NPY_DOUBLE)
            && (PyArray_DESCR(%(_a)s)->type_num != NPY_FLOAT))
        {PyErr_SetString(PyExc_NotImplementedError,
                         "type(a) is not double or float"); %(fail)s;}

        """
    # Read a from its array storage; b is fixed to 0 (no accumulation).
    case_float_ab_constants = """
        #define REAL float
        float a = (PyArray_DESCR(%(_a)s)->type_num == NPY_FLOAT)
        ? (REAL)(((float*)PyArray_DATA(%(_a)s))[0])
        : (REAL)(((double*)PyArray_DATA(%(_a)s))[0]);
        #undef REAL
        float b = 0.0;
        """

    case_double_ab_constants = """
        #define REAL double
        double a = (PyArray_DESCR(%(_a)s)->type_num == NPY_FLOAT)
        ? (REAL)(((float*)PyArray_DATA(%(_a)s))[0])
        : (REAL)(((double*)PyArray_DATA(%(_a)s))[0]);
        #undef REAL
        double b = 0.0;
        """
    def c_code(self, node, name, inp, out, sub):
        _x, _y, _a = inp
        _zout, = out
        # Complex dtypes fall back to perform().
        if node.inputs[0].type.dtype.startswith('complex'):
            raise utils.MethodNotDefined('%s.c_code'
                                         % self.__class__.__name__)
        if len(self.c_libraries()) <= 0:
            return super(Dot22Scalar, self).c_code(node, name, (_x, _y),
                                                   (_zout, ), sub)
        full_code = self.build_gemm_call() % dict(locals(), **sub)
        return full_code

    def c_code_cache_version(self):
        gv = self.build_gemm_version()
        if gv:
            return (2,) + gv
        else:
            return gv
# Singleton instance used by the optimization below.
_dot22scalar = Dot22Scalar()
@local_optimizer([T.mul])
def local_dot22_to_dot22scalar(node):
    """Fold a scalar factor of a mul into a dot22, giving dot22scalar.

    Notes
    -----
    Previous attempts to alter this optimization to replace dot22 with
    gemm instead of dot22scalar resulted in some Scan nodes being
    duplicated and the ScanSaveMem optimization never running on them,
    resulting in highly increased memory usage. Until this issue is
    resolved, this optimization should keep using dot22scalar instead of
    gemm.
    We upcast the scalar if after the multiplication with the dot this give
    the same type.
    We execute this optimizer after the gemm optimizer. This
    allow to give more priority to gemm that give more speed up
    then this optimizer, but allow the gemm optimizer to ignore
    this op.
    TODO: support when we can reorder the mul to generate a
    dot22scalar or fix the canonizer to merge them(1 mul with multiple
    inputs)
    """
    if node.op != T.mul:
        return False
    i_dot22 = [x.owner and x.owner.op == _dot22 for x in node.inputs]
    if not any(i_dot22):
        return False  # no dot22
    if i_dot22.count(True) > 1:
        # TODO: try each of them.
        pass
        # return False #TODO fix
    dot22_idx = i_dot22.index(True)
    d = node.inputs[dot22_idx]
    i_scalar = [_as_scalar(x, dtype=d.dtype) for x in node.inputs]
    if not any(i_scalar):
        # Check if we can reorder the graph as this mul have a mul in inputs.
        # We support only 1 additional level of mul.
        # The canonizer should have merged those mul together.
        i_mul = [x.owner and x.owner.op == T.mul and
                 any([_as_scalar(x_i, dtype=d.dtype)
                      for x_i in x.owner.inputs])
                 for x in node.inputs]
        if not any(i_mul):
            # no scalar in input and no multiplication
            # if there was a multiplication we could reorder the graph
            # by the associativity of multiplication.
            return False

        mul_idx = i_mul.index(True)  # The first one should always work
        m = node.inputs[mul_idx]

        # Find a scalar factor of the inner mul that can be upcast to the
        # matrix dtype without changing it.
        scalar_idx = -1
        for i, x in enumerate(m.owner.inputs):
            if _as_scalar(x, dtype=d.dtype) and (theano.scalar.upcast(
                    x.type.dtype, d.type.dtype) == d.type.dtype):
                scalar_idx = i
                break

        if scalar_idx < 0:
            _logger.info('Not optimizing dot22 with inputs %s %s, as the'
                         ' type of the scalar cannot be upcasted to the'
                         ' matrix type',
                         node.inputs, [x.type for x in node.inputs])
            return False
        a = T.cast(_as_scalar(m.owner.inputs[scalar_idx],
                              dtype=d.dtype), d.type.dtype)
        assert not a.type.ndim
        dot = _dot22scalar(d.owner.inputs[0], d.owner.inputs[1], a)

        # The other inputs to the original node that were
        # neither part of the dot22 or this mul should be
        # factors in the returned "mul" node.
        assert dot22_idx != mul_idx
        other_factors = [inpt
                         for i, inpt in enumerate(node.inputs)
                         if i not in (dot22_idx, mul_idx)]
        other_m_inputs = [inpt
                          for i, inpt in enumerate(m.owner.inputs)
                          if i != scalar_idx]

        return [T.mul(dot, *(other_factors + other_m_inputs))]

    # Direct case: one of the mul's own inputs is a usable scalar.
    scalar_idx = -1
    for i, x in enumerate(node.inputs):
        if (i != dot22_idx and i_scalar[i] is not None and
            (theano.scalar.upcast(x.type.dtype, d.type.dtype) ==
             d.type.dtype)):
            scalar_idx = i
            break
    if scalar_idx < 0:
        _logger.info('Not optimizing dot22 with inputs %s %s, as the type '
                     'of the scalar cannot be upcasted to the matrix type',
                     node.inputs, [x.type for x in node.inputs])
        return False
    assert scalar_idx < len(node.inputs)
    s = node.inputs[scalar_idx]
    o = copy.copy(node.inputs)
    o.remove(d)
    o.remove(s)

    a = T.cast(i_scalar[scalar_idx], d.type.dtype)
    assert not a.type.ndim
    if len(o) == 0:
        return [_dot22scalar(d.owner.inputs[0], d.owner.inputs[1], a)]
    else:
        # Keep the remaining factors in an outer mul.
        return [T.mul(_dot22scalar(d.owner.inputs[0],
                                   d.owner.inputs[1], a), *o)]
# must happen after gemm as the gemm optimizer doesn't understand
# dot22scalar, and gemm gives more speed up than dot22scalar
blas_optdb.register('local_dot22_to_dot22scalar',
                    in2out(local_dot22_to_dot22scalar),
                    11, 'fast_run')
class BatchedDot(Op):
"""
Computes the batched dot product of two variables:
batched_dot(a, b)[i] = dot(a[i], b[i])
"""
__props__ = ()
def make_node(self, *inputs):
inputs = list(map(T.as_tensor_variable, inputs))
if len(inputs) != 2:
raise TypeError("theano.tensor.blas.BatchedDot: 2 arguments"
" required, %d given " % len(inputs))
if inputs[0].ndim not in (2, 3):
raise TypeError("theano.tensor.blas.BatchedDot: input 0 (0-indexed)"
" must have ndim of 2 or 3, %d given. Consider"
" calling theano.tensor.batched_dot instead."
% inputs[0].ndim)
if inputs[1].ndim not in (2, 3):
raise TypeError("theano.tensor.blas.BatchedDot: input 1 (0-indexed)"
" must have ndim of 2 or 3, %d given. Consider"
" calling theano.tensor.batched_dot instead."
% inputs[1].ndim)
dtype = theano.scalar.upcast(*[input.type.dtype for input in inputs])
# upcast inputs to common dtype if needed
upcasted_inputs = [T.cast(input, dtype) for input in inputs]
broadcastable = ((inputs[0].type.broadcastable[0] or
inputs[1].type.broadcastable[0],) +
inputs[0].type.broadcastable[1:-1] +
inputs[1].type.broadcastable[2:])
return Apply(self, upcasted_inputs, [T.tensor(dtype, broadcastable)])
def perform(self, node, inp, out):
x, y = inp
z, = out
if x.shape[0] != y.shape[0]:
raise TypeError(
"theano.tensor.blas.BatchedDot: inputs [%s] must have the"
" same size in axis 0, but have sizes [%s]." %
(", ".join(map(str, inp)),
", ".join([str(i.shape[0]) for i in inp])))
shape = self.infer_shape(node, [i.shape for i in inp])[0]
dtype = node.outputs[0].dtype
z0 = z[0] = numpy.empty(shape, dtype=dtype)
for i in xrange(z0.shape[0]):
z0[i] = numpy.dot(x[i], y[i])
    def c_support_code(self):
        """Return C support code defining a templated batch_gemm driver.

        The generated ``batch_gemm`` loops over the batch axis of three
        tensor3 arrays and dispatches one BLAS ``gemm`` call per batch
        element, choosing transpose flags from the stride layout.
        """
        batch_gemm_defn = """
        template<typename dtype, typename function>
        bool batch_gemm(function gemm, int type_size,
                        PyArrayObject* xs, PyArrayObject* ys,
                        PyArrayObject* zs) {
            npy_intp *Nx = PyArray_DIMS(xs), *Sx = PyArray_STRIDES(xs);
            npy_intp *Ny = PyArray_DIMS(ys), *Sy = PyArray_STRIDES(ys);
            npy_intp *Nz = PyArray_DIMS(zs), *Sz = PyArray_STRIDES(zs);

            if (Nx[0] != Ny[0]) {
                PyErr_Format(PyExc_ValueError,
                             "Shape mismatch: batch sizes unequal."
                             " x.shape is (%d, %d, %d),"
                             " y.shape is (%d, %d, %d).",
                             Nx[0], Nx[1], Nx[2],
                             Ny[0], Ny[1], Ny[2]);
                return 1;
            }

            if (Nx[2] != Ny[1]) {
                PyErr_Format(PyExc_ValueError,
                             "Shape mismatch: summation axis sizes unequal."
                             " x.shape is (%d, %d, %d),"
                             " y.shape is (%d, %d, %d).",
                             Nx[0], Nx[1], Nx[2],
                             Ny[0], Ny[1], Ny[2]);
                return 1;
            }

            /* encode the stride structure of _x,_y,_z into a single integer. */
            int unit = 0;
            unit |= ((Sx[2] == type_size || Nx[2] == 1) ? 0x0 : (Sx[1] == type_size || Nx[1]==1) ? 0x1 : 0x2) << 8;
            unit |= ((Sy[2] == type_size || Ny[2] == 1) ? 0x0 : (Sy[1] == type_size || Ny[1]==1) ? 0x1 : 0x2) << 4;
            unit |= ((Sz[2] == type_size || Nz[2] == 1) ? 0x0 : (Sz[1] == type_size || Nz[1]==1) ? 0x1 : 0x2) << 0;

            /* create appropriate strides for malformed matrices that are row or column
             * vectors, or empty matrices.
             * In that case, the value of the stride does not really matter, but
             * some versions of BLAS insist that:
             *  - they are not smaller than the number of elements in the array,
             *  - they are not 0.
             */
            int sx_1 = (Nx[1] > 1) ? Sx[1]/type_size : (Nx[2] + 1);
            int sx_2 = (Nx[2] > 1) ? Sx[2]/type_size : (Nx[1] + 1);
            int sy_1 = (Ny[1] > 1) ? Sy[1]/type_size : (Ny[2] + 1);
            int sy_2 = (Ny[2] > 1) ? Sy[2]/type_size : (Ny[1] + 1);
            int sz_1 = (Nz[1] > 1) ? Sz[1]/type_size : (Nz[2] + 1);
            int sz_2 = (Nz[2] > 1) ? Sz[2]/type_size : (Nz[1] + 1);

            dtype* x = (dtype*)PyArray_DATA(xs);
            dtype* y = (dtype*)PyArray_DATA(ys);
            dtype* z = (dtype*)PyArray_DATA(zs);

            dtype a = 1.0;
            dtype b = 0.0;
            char N = 'N';
            char T = 'T';
            int Nz1 = Nz[1], Nz2 = Nz[2], Nx2 = Nx[2];

            // loop over batch axis
            for (int i = 0; i < Nz[0]; i++) {
                switch(unit)
                {
                    case 0x000: gemm(&N, &N, &Nz2, &Nz1, &Nx2, &a, y, &sy_1, x, &sx_1, &b, z, &sz_1); break;
                    case 0x100: gemm(&N, &T, &Nz2, &Nz1, &Nx2, &a, y, &sy_1, x, &sx_2, &b, z, &sz_1); break;
                    case 0x010: gemm(&T, &N, &Nz2, &Nz1, &Nx2, &a, y, &sy_2, x, &sx_1, &b, z, &sz_1); break;
                    case 0x110: gemm(&T, &T, &Nz2, &Nz1, &Nx2, &a, y, &sy_2, x, &sx_2, &b, z, &sz_1); break;
                    case 0x001: gemm(&T, &T, &Nz1, &Nz2, &Nx2, &a, x, &sx_1, y, &sy_1, &b, z, &sz_2); break;
                    case 0x101: gemm(&N, &T, &Nz1, &Nz2, &Nx2, &a, x, &sx_2, y, &sy_1, &b, z, &sz_2); break;
                    case 0x011: gemm(&T, &N, &Nz1, &Nz2, &Nx2, &a, x, &sx_1, y, &sy_2, &b, z, &sz_2); break;
                    case 0x111: gemm(&N, &N, &Nz1, &Nz2, &Nx2, &a, x, &sx_2, y, &sy_2, &b, z, &sz_2); break;
                    default: PyErr_SetString(PyExc_ValueError, "some matrix has no unit stride"); return 1;
                };
                x += Sx[0] / type_size;
                y += Sy[0] / type_size;
                z += Sz[0] / type_size;
            }

            return 0;
        }
        """
        # Prepend the BLAS header declarations (sgemm_/dgemm_ prototypes)
        # so the template above can call the Fortran BLAS entry points.
        return blas_header_text() + batch_gemm_defn
def c_libraries(self):
return ldflags()
def c_compile_args(self):
return ldflags(libs=False, flags=True)
def c_lib_dirs(self):
return ldflags(libs=False, libs_dir=True)
def c_header_dirs(self):
return ldflags(libs=False, include_dir=True)
    def c_code_cleanup(self, node, name, inputs, outputs, sub):
        """Release the tensor3 views (xs/ys/zs) created by c_code.

        c_code unconditionally INCREFs (or freshly creates) these views, so
        they can all be DECREFed here indiscriminately.
        """
        return """
        // clean up views
        Py_XDECREF(xs); xs = 0;
        Py_XDECREF(ys); ys = 0;
        Py_XDECREF(zs); zs = 0;
        """
    def c_code(self, node, name, inp, out, sub):
        """Generate C code computing the batched dot via per-batch gemm calls.

        Strategy: validate ranks, allocate/reuse the output, copy any
        non-contiguous input, reshape all operands into tensor3 views, then
        call the templated ``batch_gemm`` from c_support_code.
        """
        _x, _y = inp
        _z, = out
        fail = sub["fail"]

        # Without usable BLAS link flags, fall back to the generic
        # (Python `perform`) implementation provided by the base class.
        if not config.blas.ldflags:
            return super(BatchedDot, self).c_code(node, name,
                                                  inp, out, sub)

        # generate contiguity condition
        def contiguous(var, ndim):
            # C boolean expression: all strides positive multiples of the
            # element size, and at least one axis has unit stride.
            strides = "PyArray_STRIDES(%s)" % var
            return " && ".join([
                " && ".join("{strides}[{i}] > 0 && {strides}[{i}] % type_size == 0"
                            .format(strides=strides, i=i) for i in range(ndim)),
                "(%s)" % " || ".join("{strides}[{i}] == type_size"
                                     .format(strides=strides, i=i) for i in range(ndim)),
            ])

        x_ndim, y_ndim, z_ndim = node.inputs[0].ndim, node.inputs[1].ndim, node.outputs[0].ndim

        # generate code to allocate output based on runtime input shapes
        z_dims = ["PyArray_DIMS(%s)[0]" % _x]
        if x_ndim == 3:
            z_dims.append("PyArray_DIMS(%s)[1]" % _x)
        if y_ndim == 3:
            z_dims.append("PyArray_DIMS(%s)[2]" % _y)
        assert len(z_dims) == z_ndim

        z_shape_correct = " && ".join("PyArray_DIMS(%s)[%i] == %s"
                                      % (_z, i, dim) for i, dim in enumerate(z_dims))
        z_shape = ", ".join(z_dims)
        z_contiguous = contiguous(_z, z_ndim)
        # (Re)allocate the output only if it is missing, mis-shaped, or
        # non-contiguous; otherwise reuse the existing buffer.
        allocate = """
        if (NULL == %(_z)s || !(%(z_shape_correct)s) || !(%(z_contiguous)s))
        {
            npy_intp dims[%(z_ndim)s] = {%(z_shape)s};
            Py_XDECREF(%(_z)s);
            %(_z)s = (PyArrayObject*)PyArray_SimpleNew(
                %(z_ndim)s, dims, PyArray_TYPE(%(_x)s));
            if(!%(_z)s) {
                PyErr_SetString(PyExc_MemoryError,
                                "failed to alloc BatchedDot output");
                %(fail)s
            }
        }
        """ % locals()

        # code to reallocate inputs contiguously if necessary
        contiguate = []
        for var, ndim in [(_x, x_ndim), (_y, y_ndim)]:
            _contiguous = contiguous(var, ndim)
            contiguate.append("""
            if (!(%(_contiguous)s)) {
                PyArrayObject * _copy = (PyArrayObject *) PyArray_Copy(%(var)s);
                if (!_copy)
                    %(fail)s
                Py_XDECREF(%(var)s);
                %(var)s = _copy;
            }
            """ % locals())
        contiguate = "\n".join(contiguate)

        def c_dimshuffle(newname, oldname, shape):
            # Emit C code creating a reshaped *view* (no copy) of `oldname`
            # with broadcast (size-1) axes inserted where `shape` has None.
            _fail = fail
            _shape = ", ".join("1" if axis is None else "PyArray_DIMS(%s)[%i]" % (oldname, axis)
                               for axis in shape)
            return """{
                npy_intp dims[3] = {%(_shape)s};
                PyArray_Dims newshape = {dims, 3};
                %(newname)s = (PyArrayObject*)PyArray_Newshape(%(oldname)s, &newshape, NPY_ANYORDER);
                if (!%(newname)s)
                    %(_fail)s
                // make sure we didn't accidentally copy
                assert(PyArray_DATA(%(oldname)s) == PyArray_DATA(%(newname)s));
            }""" % locals()

        # create tensor3 views for any of x, y, z that are not tensor3, so that
        # we only need to implement the tensor3-tensor3 batched dot product.
        # xs, ys and zs will point to these views, or to the original array if
        # it was already tensor3.
        # in the latter case, we artificially increase the reference count of
        # the original array so that the c_code_cleanup method can decref them
        # all indiscriminately.
        upcast = []
        if x_ndim == 3:
            upcast.append("xs = %(_x)s; Py_XINCREF(xs);")
        elif x_ndim == 2:
            upcast.append(c_dimshuffle("xs", _x, (0, None, 1)))
        if y_ndim == 3:
            upcast.append("ys = %(_y)s; Py_XINCREF(ys);")
        elif y_ndim == 2:
            upcast.append(c_dimshuffle("ys", _y, (0, 1, None)))
        if z_ndim == 3:
            upcast.append("zs = %(_z)s; Py_XINCREF(zs);")
        else:
            upcast.append(c_dimshuffle(
                "zs", _z, (0,
                           None if x_ndim == 2 else 1,
                           None if y_ndim == 2 else 1)))
        upcast = "\n".join(upcast) % locals()

        return """
        int type_num = PyArray_DESCR(%(_x)s)->type_num;
        int type_size = PyArray_DESCR(%(_x)s)->elsize; // in bytes

        // xs, ys, zs will point to views onto %(_x)s, %(_y)s, %(_z)s
        PyArrayObject *xs = 0, *ys = 0, *zs = 0;

        if (PyArray_NDIM(%(_x)s) != %(x_ndim)s) {
            PyErr_Format(PyExc_NotImplementedError,
                         "rank(x) != %(x_ndim)s. rank(x) is %%d.",
                         PyArray_NDIM(%(_x)s));
            %(fail)s;
        }
        if (PyArray_NDIM(%(_y)s) != %(y_ndim)s) {
            PyErr_Format(PyExc_NotImplementedError,
                         "rank(y) != %(y_ndim)s. rank(y) is %%d.",
                         PyArray_NDIM(%(_y)s));
            %(fail)s;
        }
        if (%(_z)s && PyArray_NDIM(%(_z)s) != %(z_ndim)s) {
            PyErr_Format(PyExc_NotImplementedError,
                         "rank(z) != %(z_ndim)s. rank(z) is %%d.",
                         PyArray_NDIM(%(_z)s));
            %(fail)s;
        }

        // allocate output
        %(allocate)s
        // reallocate any noncontiguous arrays or arrays with invalid strides
        %(contiguate)s
        // add dims to make sure everything is tensor3
        %(upcast)s

        // from here on, use xs, ys and zs as they are tensor3 and share memory
        // with the original %(_x)s, %(_y)s and %(_z)s arrays.

        if ((PyArray_DESCR(xs)->type_num != NPY_DOUBLE)
            && (PyArray_DESCR(xs)->type_num != NPY_FLOAT))
        {PyErr_SetString(PyExc_NotImplementedError, "type(x) is not double or float"); %(fail)s;}

        if ((PyArray_DESCR(ys)->type_num != NPY_DOUBLE)
            && (PyArray_DESCR(ys)->type_num != NPY_FLOAT))
        {PyErr_SetString(PyExc_NotImplementedError, "type(y) is not double or float"); %(fail)s;}

        if ((PyArray_DESCR(zs)->type_num != NPY_DOUBLE)
            && (PyArray_DESCR(zs)->type_num != NPY_FLOAT))
        {PyErr_SetString(PyExc_NotImplementedError, "type(z) is not double or float"); %(fail)s;}

        if ((PyArray_DESCR(xs)->type_num != PyArray_DESCR(ys)->type_num)
            ||(PyArray_DESCR(xs)->type_num != PyArray_DESCR(zs)->type_num))
        { PyErr_SetString(PyExc_NotImplementedError, "type(x), type(y), type(z) are not all the same"); %(fail)s; }

        switch (type_num)
        {
            case NPY_FLOAT:
            if (batch_gemm<float>(sgemm_, type_size, xs, ys, zs)) {
                %(fail)s;
            }
            break;
            case NPY_DOUBLE:
            if (batch_gemm<double>(dgemm_, type_size, xs, ys, zs)) {
                %(fail)s;
            }
            break;
        }
        """ % locals()
    def c_code_cache_version(self):
        """Cache version tuple; includes the BLAS header version so cached
        compiled modules are invalidated when either changes."""
        from theano.tensor.blas_headers import blas_header_version
        return (1, blas_header_version())
    def grad(self, inp, grads):
        """Symbolic gradients of batched_dot w.r.t. both inputs.

        There are four rank combinations; each is handled separately so the
        gradients can be expressed with batched_dot / broadcasting only.
        """
        x, y = inp
        gz, = grads
        xdim, ydim, gdim = x.type.ndim, y.type.ndim, gz.type.ndim

        # grad is a vector, so x is a matrix and y is a matrix
        if gdim == 1:
            xgrad = gz.dimshuffle(0, 'x') * y
            ygrad = gz.dimshuffle(0, 'x') * x

        # x is a matrix, y is a tensor3, grad is a matrix
        elif xdim == 2 and ydim == 3:
            xgrad = T.batched_dot(gz, y.dimshuffle(0, 2, 1))
            ygrad = x.dimshuffle(0, 1, 'x') * gz.dimshuffle(0, 'x', 1)

        # x is a tensor3, y is a matrix, grad is a matrix
        elif xdim == 3 and ydim == 2:
            xgrad = gz.dimshuffle(0, 1, 'x') * y.dimshuffle(0, 'x', 1)
            ygrad = T.batched_dot(x.dimshuffle(0, 2, 1), gz)

        # x is a tensor3, y is a tensor3, grad is a tensor3
        elif xdim == ydim == 3:
            xgrad = T.batched_dot(gz, y.dimshuffle(0, 2, 1))
            ygrad = T.batched_dot(x.dimshuffle(0, 2, 1), gz)

        # If x or y contain broadcastable dimensions but only one of
        # them know that a matching dimensions is broadcastable, the
        # above code don't always return the right broadcast pattern.
        # This cause problem down the road. See gh-1461.
        if xgrad.broadcastable != x.broadcastable:
            xgrad = T.patternbroadcast(xgrad, x.broadcastable)
        if ygrad.broadcastable != y.broadcastable:
            ygrad = T.patternbroadcast(ygrad, y.broadcastable)

        return xgrad, ygrad
    def R_op(self, inputs, eval_points):
        # R_op for batched_dot(a, b) evaluated at c for a and d for b is
        # simply batched_dot(c, b) + batched_dot(a, d)
        assert len(inputs) == 2
        assert len(eval_points) == 2
        if eval_points[0] is None and eval_points[1] is None:
            return [None]

        # When compute_test_value is enabled, cross-check that each eval
        # point's test value has the same shape as the matching input.
        debugger_available = config.compute_test_value != 'off'

        if debugger_available:
            try:
                iv0 = theano.gof.op.get_test_value(inputs[0])
            except AttributeError:
                theano.gof.op.missing_test_message(
                    'first input passed to BatchedDot.R_op has no test value')
                debugger_available = False

            try:
                iv1 = theano.gof.op.get_test_value(inputs[1])
            except AttributeError:
                theano.gof.op.missing_test_message(
                    'second input passed to BatchedDot.R_op has no test value')
                debugger_available = False

            # NOTE(review): `if eval_points[i]:` relies on truth-testing a
            # symbolic variable (or None) as a None-check — confirm that
            # Variable.__bool__ permits this for the types passed here.
            if eval_points[0]:
                try:
                    ev0 = theano.gof.op.get_test_value(eval_points[0])
                except AttributeError:
                    theano.gof.op.missing_test_message(
                        'first eval point passed to BatchedDot.R_op '
                        'has no test value')
                    debugger_available = False
            if eval_points[1]:
                try:
                    ev1 = theano.gof.op.get_test_value(eval_points[1])
                except AttributeError:
                    theano.gof.op.missing_test_message(
                        'second eval point passed to BatchedDot.R_op '
                        'has no test value')
                    debugger_available = False

        if debugger_available:
            input_values = [iv0, iv1]
            eval_point_values = [ev0, ev1]

            for i in xrange(2):
                if eval_point_values[i] is not None and \
                   input_values[i].shape != eval_point_values[i].shape:
                    raise ValueError(
                        'input ' + str(i) + ' and eval_point ' + str(i) +
                        ' to BatchedDot.R_op should have the same shape, but '
                        'their shapes are %s and %s, respectively' % (
                            str(input_values[i].shape),
                            str(eval_point_values[i].shape)))

        # Linearity: d/dt batched_dot(a + t*c, b + t*d) = bdot(c, b) + bdot(a, d)
        if eval_points[0]:
            t1 = self(eval_points[0], inputs[1])
        if eval_points[1]:
            t2 = self(inputs[0], eval_points[1])

        if eval_points[0] and eval_points[1]:
            return [t1 + t2]
        elif eval_points[0]:
            return [t1]
        else:
            return [t2]
def infer_shape(self, node, shapes):
for shape_ in shapes:
if len(shape_) not in (2, 3):
raise NotImplementedError()
xshp, yshp = shapes
return [xshp[:-1] + yshp[2:]]
# from opt import register_specialize, register_canonicalize
# @register_specialize
@local_optimizer([T.sub, T.add])
def local_print_as_we_go_along(node):
    """Debugging 'optimization': print every add/sub node encountered.

    Never returns a replacement, so the graph is left unchanged.
    """
    if node.op in (T.sub, T.add):
        debugprint(node)
| {
"content_hash": "6e57160e5f3a110fe682cbef2e44d17b",
"timestamp": "",
"source": "github",
"line_count": 2443,
"max_line_length": 118,
"avg_line_length": 37.79410560785919,
"alnum_prop": 0.5099587354193066,
"repo_name": "surgebiswas/poker",
"id": "c63bf6dd68e35c05b4b8bd12de2b1fb88c6410e2",
"size": "92331",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "PokerBots_2017/Johnny/theano/tensor/blas.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "20"
},
{
"name": "C",
"bytes": "569372"
},
{
"name": "C++",
"bytes": "3604944"
},
{
"name": "CSS",
"bytes": "1750"
},
{
"name": "Cuda",
"bytes": "232079"
},
{
"name": "Fortran",
"bytes": "13029"
},
{
"name": "HTML",
"bytes": "127417"
},
{
"name": "Jupyter Notebook",
"bytes": "97929"
},
{
"name": "Makefile",
"bytes": "76699"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Perl",
"bytes": "25163"
},
{
"name": "Python",
"bytes": "26314770"
},
{
"name": "Shell",
"bytes": "1082"
}
],
"symlink_target": ""
} |
package cn.mutils.app.ui.web;
/**
 * Result states that a web message of the framework can report.
 */
public enum WebMessageState {

    /** Invalid state of web message result */
    invalid,

    /** Complete state of web message result */
    complete,

    /** Error state of web message result */
    error,

    /** OK state of web message result */
    ok,

    /** Cancel state of web message result */
    cancel,

    /** Success state of web message result */
    success,

    /** Fail state of web message result */
    fail,

    /** Yes state of web message result */
    yes,

    /** No state of web message result */
    no

}
| {
"content_hash": "911a28b65af918c4a49fa2b60d0bcb30",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 44,
"avg_line_length": 16.257142857142856,
"alnum_prop": 0.6590509666080844,
"repo_name": "wavinsun/OApp",
"id": "e8484f98518fa56e0892a0a5914649752018dd22",
"size": "569",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "MUtils/src/main/java/cn/mutils/app/ui/web/WebMessageState.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "518"
},
{
"name": "HTML",
"bytes": "2818"
},
{
"name": "Java",
"bytes": "1144965"
},
{
"name": "JavaScript",
"bytes": "3518"
}
],
"symlink_target": ""
} |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote.http;
import static com.google.common.base.Preconditions.checkState;
import com.google.auth.Credentials;
import com.google.common.collect.ImmutableList;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.HttpContent;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpObject;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.timeout.ReadTimeoutException;
import io.netty.util.internal.StringUtil;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map.Entry;
/**
 * ChannelHandler for downloads.
 *
 * <p>Writes a GET request for a cache entry, streams a 200 response body into the caller-supplied
 * {@link OutputStream}, and for non-200 responses buffers the body so it can be reported inside an
 * {@link HttpException}.
 */
final class HttpDownloadHandler extends AbstractHttpHandler<HttpObject> {

  // Destination for the response body. For a failed download this field is swapped for a
  // ByteArrayOutputStream that buffers the error body for the exception message.
  private OutputStream out;
  // Whether the connection may be kept open after this exchange.
  private boolean keepAlive = HttpVersion.HTTP_1_1.isKeepAliveDefault();
  // True once a 200 status line has been seen for the in-flight request.
  private boolean downloadSucceeded;
  // Response headers of the in-flight exchange; null until the HttpResponse arrives.
  private HttpResponse response;
  // Content bytes received so far; reported in DownloadTimeoutException.
  private long bytesReceived;
  // Expected content length, or -1 when unknown (chunked transfer encoding).
  private long contentLength = -1;
  /** the path header in the http request */
  private String path;

  public HttpDownloadHandler(
      Credentials credentials, ImmutableList<Entry<String, String>> extraHttpHeaders) {
    super(credentials, extraHttpHeaders);
  }

  /**
   * Consumes response objects: validates the HttpResponse headers, then streams HttpContent chunks
   * into {@code out}, completing or failing the user promise on LastHttpContent.
   */
  @Override
  protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) throws Exception {
    if (!msg.decoderResult().isSuccess()) {
      failAndClose(new IOException("Failed to parse the HTTP response."), ctx);
      return;
    }
    if (!(msg instanceof HttpResponse) && !(msg instanceof HttpContent)) {
      failAndClose(
          new IllegalArgumentException(
              "Unsupported message type: " + StringUtil.simpleClassName(msg)),
          ctx);
      return;
    }
    checkState(userPromise != null, "response before request");

    if (msg instanceof HttpResponse) {
      response = (HttpResponse) msg;
      if (!response.protocolVersion().equals(HttpVersion.HTTP_1_1)) {
        HttpException error =
            new HttpException(
                response, "HTTP version 1.1 is required, was: " + response.protocolVersion(), null);
        failAndClose(error, ctx);
        return;
      }
      // Without a length or chunked encoding we could not detect truncated downloads.
      boolean contentLengthSet = HttpUtil.isContentLengthSet(response);
      if (!contentLengthSet && !HttpUtil.isTransferEncodingChunked(response)) {
        HttpException error =
            new HttpException(
                response, "Missing 'Content-Length' or 'Transfer-Encoding: chunked' header", null);
        failAndClose(error, ctx);
        return;
      }
      if (contentLengthSet) {
        contentLength = HttpUtil.getContentLength(response);
      }
      downloadSucceeded = response.status().equals(HttpResponseStatus.OK);
      if (!downloadSucceeded) {
        // Buffer the error body instead of the caller's stream so it can be reported.
        out = new ByteArrayOutputStream();
      }
      keepAlive = HttpUtil.isKeepAlive((HttpResponse) msg);
    }

    if (msg instanceof HttpContent) {
      checkState(response != null, "content before headers");
      ByteBuf content = ((HttpContent) msg).content();
      int readableBytes = content.readableBytes();
      content.readBytes(out, readableBytes);
      bytesReceived += readableBytes;
      if (msg instanceof LastHttpContent) {
        if (downloadSucceeded) {
          succeedAndReset(ctx);
        } else {
          // Non-200: turn the buffered body into an HttpException message.
          String errorMsg = response.status() + "\n";
          errorMsg +=
              new String(
                  ((ByteArrayOutputStream) out).toByteArray(), HttpUtil.getCharset(response));
          out.close();
          HttpException error = new HttpException(response, errorMsg, null);
          failAndReset(error, ctx);
        }
      }
    }
  }

  /**
   * Accepts a {@link DownloadCommand}, records its output stream, and writes the GET request
   * downstream; any other message type fails the promise immediately.
   */
  @Override
  public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
      throws Exception {
    checkState(userPromise == null, "handler can't be shared between pipelines.");
    userPromise = promise;
    if (!(msg instanceof DownloadCommand)) {
      failAndResetUserPromise(
          new IllegalArgumentException(
              "Unsupported message type: " + StringUtil.simpleClassName(msg)));
      return;
    }
    DownloadCommand cmd = (DownloadCommand) msg;
    out = cmd.out();
    path = constructPath(cmd.uri(), cmd.digest().getHash(), cmd.casDownload());
    HttpRequest request = buildRequest(path, constructHost(cmd.uri()));
    addCredentialHeaders(request, cmd.uri());
    addExtraRemoteHeaders(request);
    addUserAgentHeader(request);
    ctx.writeAndFlush(request)
        .addListener(
            (f) -> {
              if (!f.isSuccess()) {
                failAndClose(f.cause(), ctx);
              }
            });
  }

  /** Translates read timeouts into a DownloadTimeoutException carrying progress information. */
  @Override
  public void exceptionCaught(ChannelHandlerContext ctx, Throwable t) {
    if (t instanceof ReadTimeoutException) {
      super.exceptionCaught(ctx, new DownloadTimeoutException(path, bytesReceived, contentLength));
    } else {
      super.exceptionCaught(ctx, t);
    }
  }

  /** Builds the HTTP/1.1 GET request for {@code path}, accepting gzip-encoded responses. */
  private HttpRequest buildRequest(String path, String host) {
    HttpRequest httpRequest =
        new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, path);
    httpRequest.headers().set(HttpHeaderNames.HOST, host);
    httpRequest.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE);
    httpRequest.headers().set(HttpHeaderNames.ACCEPT, "*/*");
    httpRequest.headers().set(HttpHeaderNames.ACCEPT_ENCODING, HttpHeaderValues.GZIP);
    return httpRequest;
  }

  /** Completes the user promise successfully and resets per-download state. */
  private void succeedAndReset(ChannelHandlerContext ctx) {
    try {
      succeedAndResetUserPromise();
    } finally {
      reset(ctx);
    }
  }

  /** Fails the user promise and closes the channel (unrecoverable protocol errors). */
  @SuppressWarnings("FutureReturnValueIgnored")
  private void failAndClose(Throwable t, ChannelHandlerContext ctx) {
    try {
      failAndResetUserPromise(t);
    } finally {
      ctx.close();
    }
  }

  /** Fails the user promise but keeps the channel reusable where keep-alive allows. */
  private void failAndReset(Throwable t, ChannelHandlerContext ctx) {
    try {
      failAndResetUserPromise(t);
    } finally {
      reset(ctx);
    }
  }

  /** Clears per-download state; closes the channel when keep-alive is not in effect. */
  @SuppressWarnings("FutureReturnValueIgnored")
  private void reset(ChannelHandlerContext ctx) {
    try {
      if (!keepAlive) {
        ctx.close();
      }
    } finally {
      out = null;
      keepAlive = HttpVersion.HTTP_1_1.isKeepAliveDefault();
      downloadSucceeded = false;
      response = null;
    }
  }
}
| {
"content_hash": "8f3ebf2fc61b6c3d3725cef8806cf3a3",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 100,
"avg_line_length": 34.99523809523809,
"alnum_prop": 0.6916587290787862,
"repo_name": "ulfjack/bazel",
"id": "37012aa62af51109e4cc50c02824b24b8003e48a",
"size": "7349",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/java/com/google/devtools/build/lib/remote/http/HttpDownloadHandler.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1588"
},
{
"name": "C",
"bytes": "25393"
},
{
"name": "C++",
"bytes": "1525276"
},
{
"name": "Dockerfile",
"bytes": "839"
},
{
"name": "HTML",
"bytes": "21431"
},
{
"name": "Java",
"bytes": "35781133"
},
{
"name": "Makefile",
"bytes": "248"
},
{
"name": "Objective-C",
"bytes": "10369"
},
{
"name": "Objective-C++",
"bytes": "1043"
},
{
"name": "PowerShell",
"bytes": "15431"
},
{
"name": "Python",
"bytes": "2555679"
},
{
"name": "Ruby",
"bytes": "639"
},
{
"name": "Shell",
"bytes": "2022858"
},
{
"name": "Smarty",
"bytes": "18683"
}
],
"symlink_target": ""
} |
package org.dstadler.commons.util;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Annotation to suppress forbidden-apis errors inside a whole class, a method, or a field.
 *
 * <p>See https://github.com/policeman-tools/forbidden-apis for details and file
 * 'forbidden.signatures.txt' for our local rules as well as 'APMjavaProjects.gradle' for the
 * configuration of the API checks.
 */
// CLASS retention keeps the annotation visible to the bytecode-level checker
// without imposing any runtime overhead.
@Retention(RetentionPolicy.CLASS)
@Target({ ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE })
public @interface SuppressForbidden {

    /** Human-readable justification for why the forbidden API use is acceptable here. */
    String reason();
}
| {
"content_hash": "888195ed974d2f16fb09dcde628b35ce",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 104,
"avg_line_length": 36.73684210526316,
"alnum_prop": 0.7836676217765043,
"repo_name": "centic9/commons-dost",
"id": "22763918062db09f87e76019a57bbb7c35501d22",
"size": "698",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/dstadler/commons/util/SuppressForbidden.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "556260"
}
],
"symlink_target": ""
} |
conway-curses
============
Conway's game of life implemented in Python with Curses
### Usage:
./conway_curses.py [tick_count]
Example:
./conway_curses.py 20
### Alter Initial State:
The initial state is defined in `initial_state.txt` as a pattern of asterisks and spaces. To alter the initial pattern, edit `initial_state.txt` and run again.
### Sample Glider:

| {
"content_hash": "65a9ffec0e9b7bef7d595156e910673f",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 159,
"avg_line_length": 23.2,
"alnum_prop": 0.7262931034482759,
"repo_name": "cdated/conwayCurses",
"id": "390dab13724c62dc301fe0a3dad5306d8ceac977",
"size": "464",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8341"
}
],
"symlink_target": ""
} |
/**
* Router setup
*
*/
var express = require('express'),
routes = require('../routes/'),
api = require('../routes/api'),
manage = require('../routes/manage'),
mongoose = require('mongoose');
// Gate middleware: only let the request through when the session holds a
// valid Pass id; otherwise bounce the user to the login page.
function ensureAuthenticated(req, res, next) {
  var userId = req.session.user;
  if (!userId) {
    return res.redirect('/0');
  }
  mongoose.model('Pass').findById(userId, function (err, pass) {
    if (err) console.log(err);
    if (!pass) {
      return res.redirect('/0');
    }
    next();
  });
}
module.exports = function (app) {
var pageRouter = express.Router();
pageRouter
// Basic functionality
.get('/', routes.index)
.get('/about', routes.about)
// Games
.get('/game/:name', routes.game)
// Categories
.get('/category', routes.categories)
.get('/category/:name', routes.category)
// Login
.get( '/0', routes.loginPage)
.post('/0', routes.login);
var manageRouter = express.Router();
manageRouter
// Manage
.use(ensureAuthenticated)
.get('/', manage.page)
.post('/add', manage.addLink);
var apiRouter = express.Router();
apiRouter
/**
* API + XHR requests
*/
.use(function (req, res, next) {
//if(!req.xhr) return res.send('Only xhr requests');
next();
})
.get( '/game', api.getRandom)
.get( '/game/:name', api.getGame)
.post('/add/all', api.addAll);
app
.use('/', pageRouter)
.use('/1', manageRouter)
.use('/api', apiRouter);
};
| {
"content_hash": "d373ad0ac95169f7cca59778b2c59f0b",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 76,
"avg_line_length": 20.43421052631579,
"alnum_prop": 0.54539600772698,
"repo_name": "zaynetro/choose-game",
"id": "0a3f65db72e959662335513b7f6ec65135034d3a",
"size": "1553",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/router.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6919"
},
{
"name": "JavaScript",
"bytes": "33420"
}
],
"symlink_target": ""
} |
import $ from "jquery";
import marked from "../third/marked/lib/marked";
import * as channel from "./channel";
import * as common from "./common";
import * as dark_theme from "./dark_theme";
import * as feedback_widget from "./feedback_widget";
import {$t} from "./i18n";
import * as scroll_bar from "./scroll_bar";
/*
What in the heck is a zcommand?
A zcommand is basically a specific type of slash
command where the client does almost no work and
the server just does something pretty simple like
flip a setting.
The first zcommand we wrote is for "/ping", and
the server just responds with a 200 for that.
Not all slash commands use zcommand under the hood.
For more exotic things like /poll see submessage.js
and widgetize.js
*/
export function send(opts) {
    // POST the zcommand to the server; invoke the caller's on_success
    // callback (if any) with the response payload.
    const payload = {
        command: opts.command,
    };

    channel.post({
        url: "/json/zcommand",
        data: payload,
        success: (data) => {
            if (opts.on_success) {
                opts.on_success(data);
            }
        },
        error: () => {
            tell_user("server did not respond");
        },
    });
}
export function tell_user(msg) {
    // Show a short notice in the compose status area. This is a bit hacky,
    // but there is no nicer general-purpose "tell the user" API yet.
    const $status_area = $("#compose-send-status");
    $status_area.removeClass(common.status_classes);
    $status_area.addClass("alert-error");
    $status_area.stop(true);
    $status_area.fadeTo(0, 1);

    $("#compose-error-msg").text(msg);
}
export function switch_to_light_theme() {
    // Ask the server to switch the theme, then flip the client-side theme
    // and offer an undo affordance via the feedback widget.
    const handle_success = (data) => {
        dark_theme.disable();
        feedback_widget.show({
            populate($container) {
                $container.html(marked(data.msg).trim());
            },
            on_undo() {
                send({command: "/night"});
            },
            title_text: $t({defaultMessage: "Light theme"}),
            undo_button_text: $t({defaultMessage: "Dark theme"}),
        });
    };

    send({command: "/day", on_success: handle_success});
}
export function switch_to_dark_theme() {
    // Ask the server to switch the theme, then flip the client-side theme
    // and offer an undo affordance via the feedback widget.
    const handle_success = (data) => {
        dark_theme.enable();
        feedback_widget.show({
            populate($container) {
                $container.html(marked(data.msg).trim());
            },
            on_undo() {
                send({command: "/day"});
            },
            title_text: $t({defaultMessage: "Dark theme"}),
            undo_button_text: $t({defaultMessage: "Light theme"}),
        });
    };

    send({command: "/night", on_success: handle_success});
}
export function enter_fluid_mode() {
    // Persist fluid-width mode on the server, apply it locally, and offer
    // an undo affordance via the feedback widget.
    const handle_success = (data) => {
        scroll_bar.set_layout_width();
        feedback_widget.show({
            populate($container) {
                $container.html(marked(data.msg).trim());
            },
            on_undo() {
                send({command: "/fixed-width"});
            },
            title_text: $t({defaultMessage: "Fluid width mode"}),
            undo_button_text: $t({defaultMessage: "Fixed width"}),
        });
    };

    send({command: "/fluid-width", on_success: handle_success});
}
export function enter_fixed_mode() {
    // Persist fixed-width mode on the server, apply it locally, and offer
    // an undo affordance via the feedback widget.
    const handle_success = (data) => {
        scroll_bar.set_layout_width();
        feedback_widget.show({
            populate($container) {
                $container.html(marked(data.msg).trim());
            },
            on_undo() {
                send({command: "/fluid-width"});
            },
            title_text: $t({defaultMessage: "Fixed width mode"}),
            undo_button_text: $t({defaultMessage: "Fluid width"}),
        });
    };

    send({command: "/fixed-width", on_success: handle_success});
}
export function process(message_content) {
    // Dispatch content that names a client-implemented slash command.
    // It is incredibly important to return false for anything else, so
    // that compose.js treats the text as a normal message.
    const content = message_content.trim();

    switch (content) {
        case "/ping": {
            const start_time = new Date();
            send({
                command: content,
                on_success() {
                    const end_time = new Date();
                    let diff = end_time - start_time;
                    diff = Math.round(diff);
                    const msg = "ping time: " + diff + "ms";
                    tell_user(msg);
                },
            });
            return true;
        }
        case "/day":
        case "/light":
            switch_to_light_theme();
            return true;
        case "/night":
        case "/dark":
            switch_to_dark_theme();
            return true;
        case "/fluid-width":
            enter_fluid_mode();
            return true;
        case "/fixed-width":
            enter_fixed_mode();
            return true;
        default:
            return false;
    }
}
| {
"content_hash": "1527de226ba85f174e35d771d14278af",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 70,
"avg_line_length": 27.656410256410258,
"alnum_prop": 0.482662710921565,
"repo_name": "kou/zulip",
"id": "c5e4ec931cfe7e9f25e87e76ac27d07d56fa36b6",
"size": "5393",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "static/js/zcommand.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "433376"
},
{
"name": "Dockerfile",
"bytes": "2941"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "635452"
},
{
"name": "Handlebars",
"bytes": "235334"
},
{
"name": "JavaScript",
"bytes": "3361648"
},
{
"name": "Perl",
"bytes": "8594"
},
{
"name": "Puppet",
"bytes": "79932"
},
{
"name": "Python",
"bytes": "8142846"
},
{
"name": "Ruby",
"bytes": "8480"
},
{
"name": "Shell",
"bytes": "134587"
},
{
"name": "TypeScript",
"bytes": "20233"
}
],
"symlink_target": ""
} |
<?php
/**
* Exception for 416 Requested Range Not Satisfiable responses
*
* @package Requests
*/
/**
 * Exception for 416 Requested Range Not Satisfiable responses
 *
 * Raised when the server cannot serve the byte range requested via the
 * Range header (RFC 7233, section 4.4).
 *
 * @package Requests
 */
class Requests_Exception_HTTP_416 extends Requests_Exception_HTTP {
	/**
	 * HTTP status code
	 *
	 * @var integer
	 */
	protected $code = 416;

	/**
	 * Reason phrase reported alongside the status code
	 *
	 * @var string
	 */
	protected $reason = 'Requested Range Not Satisfiable';
}
"content_hash": "e51b8131a917a961863b382ac8b4c32c",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 67,
"avg_line_length": 17.51851851851852,
"alnum_prop": 0.6300211416490487,
"repo_name": "mwendakith/eid_dashboard",
"id": "7c81443d3273acdd47e1a9342547f21a2664a5dd",
"size": "473",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "application/libraries/requests/library/Requests/Exception/HTTP/416.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "95311"
},
{
"name": "HTML",
"bytes": "8177944"
},
{
"name": "Hack",
"bytes": "758"
},
{
"name": "JavaScript",
"bytes": "5135927"
},
{
"name": "PHP",
"bytes": "6151777"
},
{
"name": "PLpgSQL",
"bytes": "47573"
},
{
"name": "SQLPL",
"bytes": "446896"
}
],
"symlink_target": ""
} |
package foodtruck.server.dashboard;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.io.ByteStreams;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import foodtruck.dao.MenuDAO;
import foodtruck.dao.TruckDAO;
import foodtruck.model.Menu;
import foodtruck.model.Truck;
import foodtruck.server.CodedServletException;
import foodtruck.server.GuiceHackRequestWrapper;
import foodtruck.util.Link;
/**
 * Admin dashboard servlet for viewing (GET) and saving (POST) a truck's menu.
 *
 * @author aviolette
 * @since 8/21/16
 */
@Singleton
public class MenuServlet extends HttpServlet {
  private static final Logger log = Logger.getLogger(
      foodtruck.server.vendor.MenuServlet.class.getName());
  // JSP that renders the menu editor page.
  private static final String JSP = "/WEB-INF/jsp/dashboard/menu.jsp";
  private final MenuDAO menuDAO;
  private final TruckDAO truckDAO;

  @Inject
  public MenuServlet(MenuDAO menuDAO, TruckDAO truckDAO) {
    this.menuDAO = menuDAO;
    this.truckDAO = truckDAO;
  }

  /**
   * Renders the menu editor for the truck identified in the request URI,
   * or sends a 404 when the truck id is unknown.
   */
  @Override
  protected void doGet(HttpServletRequest req, HttpServletResponse resp)
      throws ServletException, IOException {
    // Wrapper works around Guice servlet dispatch issues with JSP forwarding.
    req = new GuiceHackRequestWrapper(req, JSP);
    req.setAttribute("tab", "truck");
    String truckId = truckId(req);
    Truck truck = truckDAO.findByIdOpt(truckId)
        .orElseThrow(() -> new CodedServletException(404, "Invalid truck Id: " + truckId));
    req.setAttribute("endpoint", req.getRequestURI());
    req.setAttribute("menu", menuDAO.findByTruck(truckId));
    req.setAttribute("truck", truck);
    req.setAttribute("breadcrumbs", ImmutableList.of(new Link("Trucks", "/admin/trucks"),
        new Link(truck.getName(), "/admin/trucks/" + truckId),
        new Link("Edit", "/admin/trucks/" + truckId + "/configuration")));
    req.getRequestDispatcher(JSP)
        .forward(req, resp);
  }

  // Extracts the truck id from the request URI by slicing off a 14-character
  // prefix and a 5-character suffix.
  // NOTE(review): this assumes URIs of the form /admin/trucks/{id}/menu
  // ("/admin/trucks/" is 14 chars, "/menu" is 5) — confirm against the
  // servlet mapping; any other mapping silently yields a wrong id.
  private String truckId(HttpServletRequest req) {
    final String requestURI = req.getRequestURI();
    String truckId = requestURI.substring(14);
    truckId = truckId.substring(0, truckId.length() - 5);
    return truckId;
  }

  /**
   * Saves the posted JSON body as the truck's menu payload; responds with
   * 400 when the body is not valid JSON.
   */
  @Override
  protected void doPost(HttpServletRequest req, HttpServletResponse resp)
      throws ServletException, IOException {
    try {
      JSONObject jsonPayload = new JSONObject(
          new String(ByteStreams.toByteArray(req.getInputStream())));
      String truckId = truckId(req);
      Menu menu = Menu.builder(menuDAO.findByTruck(truckId))
          .payload(jsonPayload.toString())
          .truckId(truckId)
          .build();
      menuDAO.save(menu);
    } catch (JSONException je) {
      log.log(Level.SEVERE, je.getMessage(), je);
      resp.sendError(400);
    }
  }
}
| {
"content_hash": "6bc1977c36ce13a4238e2f8bd5eac267",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 91,
"avg_line_length": 32.582417582417584,
"alnum_prop": 0.7254637436762226,
"repo_name": "aviolette/foodtrucklocator",
"id": "962a5e50d443ea9af63dc9222f3f77cf03059f69",
"size": "2965",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main/src/main/java/foodtruck/server/dashboard/MenuServlet.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "25724"
},
{
"name": "HTML",
"bytes": "2330"
},
{
"name": "Java",
"bytes": "1463864"
},
{
"name": "JavaScript",
"bytes": "212726"
},
{
"name": "Shell",
"bytes": "1177"
}
],
"symlink_target": ""
} |
package com.hbsx.purordermanage.utils;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.helper.ItemTouchHelper;
import android.util.Log;
import android.widget.Toast;
/**
* Created by Administrator on 2017/1/19 0019.
*/
public class SimpleItemTouchHelperCallback extends ItemTouchHelper.Callback {
    // Receiver notified when an item is swiped away.
    private onMoveAndSwipedListener swipedListener;
    public SimpleItemTouchHelperCallback(onMoveAndSwipedListener listener){
        swipedListener = listener;
    }
    @Override
    public int getMovementFlags(RecyclerView recyclerView, RecyclerView.ViewHolder viewHolder) {
        // List-style RecyclerView (LinearLayoutManager)
        if (recyclerView.getLayoutManager() instanceof LinearLayoutManager){
            // Dragging is not supported in list mode
            final int dragFlags = 0;
            // NOTE(review): the original (Chinese) comment claimed swiping in
            // both directions, but only START (end-to-start) is enabled here.
            final int swipeFlags = ItemTouchHelper.START;
            // Combine the direction flags
            return makeMovementFlags(dragFlags,swipeFlags);
        }else{// Grid-style RecyclerView
            // Allow dragging up/down/left/right
            final int dragFlags = ItemTouchHelper.UP|ItemTouchHelper.DOWN|
                    ItemTouchHelper.LEFT|ItemTouchHelper.RIGHT;
            // Swiping is not supported in grid mode
            final int swipeFlags = 0;
            return makeMovementFlags(dragFlags,swipeFlags);
        }
    }
    @Override
    public boolean onMove(RecyclerView recyclerView, RecyclerView.ViewHolder viewHolder, RecyclerView.ViewHolder target) {
        // Drag reordering is not handled by this callback.
        return false;
    }
    @Override
    public void onSwiped(RecyclerView.ViewHolder viewHolder, int direction) {
        // Forward the swipe as a dismissal of the item at its adapter position.
        swipedListener.onItemDismiss(viewHolder.getAdapterPosition());
    }
    /**
     * Called when the drag/swipe state of an item changes.
     *
     * @param viewHolder the holder being interacted with (may be null when idle)
     * @param actionState one of the ItemTouchHelper.ACTION_STATE_* constants
     */
    @Override
    public void onSelectedChanged(RecyclerView.ViewHolder viewHolder, int actionState) {
        // A non-idle state means the item is currently being dragged or swiped.
        if(actionState != ItemTouchHelper.ACTION_STATE_IDLE){
            if(viewHolder instanceof onStateChangedListener){
                ((onStateChangedListener) viewHolder).onItemSelected();
            }
        }
        super.onSelectedChanged(viewHolder, actionState);
    }
    /**
     * Called when the user finishes dragging or swiping an item; used to
     * clear any visual state applied during the interaction.
     *
     * @param recyclerView the hosting RecyclerView
     * @param viewHolder the holder whose interaction just ended
     */
    @Override
    public void clearView(RecyclerView recyclerView, RecyclerView.ViewHolder viewHolder) {
        super.clearView(recyclerView, viewHolder);
        if(viewHolder instanceof onStateChangedListener){
            ((onStateChangedListener) viewHolder).onItemClear();
        }
    }
}
| {
"content_hash": "8dfb44e77b4bbb5ae8bfd0ce883f1ab8",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 122,
"avg_line_length": 33.36708860759494,
"alnum_prop": 0.6889226100151745,
"repo_name": "huochexia/purchasemanager",
"id": "e7832429b0e7b3a59da6acfbe6d44497d5eb18fb",
"size": "2912",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/hbsx/purordermanage/utils/SimpleItemTouchHelperCallback.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "462950"
}
],
"symlink_target": ""
} |
<template name="NewTenant">
<!-- Top menu -->
<nav class="navbar navbar-inverse" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#top-navbar-1">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="index.html">Bulkpay Business Registration</a>
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse" id="top-navbar-1">
<ul class="nav navbar-nav navbar-right">
<li>
<span class="li-text">
                            Bulkpay — your reliable payment solution
</span>
<a href="#"><strong>links</strong></a>
<span class="li-text">
Follow us on Social Media:
</span>
<span class="li-social">
<a href="#"><i class="fa fa-facebook"></i></a>
<a href="#"><i class="fa fa-twitter"></i></a>
<a href="#"><i class="fa fa-envelope"></i></a>
<a href="#"><i class="fa fa-skype"></i></a>
</span>
</li>
</ul>
</div>
</div>
</nav>
<!-- Top content -->
<div class="top-content">
<div class="inner-bg">
<div class="container">
<div class="row center">
<div class="col-sm-8 col-sm-offset-2 text">
<h1><strong>Business</strong> Registration </h1>
<div class="description">
<p>
                                Register your business or enterprise on our platform in a few steps
</p>
</div>
</div>
</div>
<div class="row">
<div class="col-sm-6 col-sm-offset-3 form-box">
<form role="form" class="registration-form">
<fieldset>
<div class="form-top">
<div class="form-top-left">
<h3>Step 1 / 3</h3>
<p>Tell us who you are:</p>
</div>
<div class="form-top-right">
<i class="fa fa-user"></i>
</div>
</div>
<div class="form-bottom">
<div class="form-group">
<label class="sr-only" for="business-name">Business name</label>
<input type="text" name="business-name" placeholder="Business Name" class="form-first-name form-control" id="form-first-name">
</div>
<div class="form-group">
<label class="sr-only" for="industry">Industry</label>
<input type="text" name="industry" placeholder="Industry" class="form-last-name form-control" id="form-last-name">
</div>
<div class="form-group">
<label class="sr-only" for="address">Address</label>
<textarea name="address" placeholder="Address"
class="form-about-yourself form-control" id="address"></textarea>
</div>
<button type="button" class="btn btn-next">Next</button>
</div>
</fieldset>
<fieldset>
<div class="form-top">
<div class="form-top-left">
<h3>Step 2 / 3</h3>
<p>Set up root account:</p>
</div>
<div class="form-top-right">
<i class="fa fa-key"></i>
</div>
</div>
<div class="form-bottom">
<div class="form-group">
<label class="sr-only" for="form-email">Email</label>
<input type="text" name="form-email" placeholder="Email..." class="form-email form-control" id="form-email">
</div>
<div class="form-group">
<label class="sr-only" for="form-password">Password</label>
<input type="password" name="form-password" placeholder="Password..." class="form-password form-control" id="form-password">
</div>
<div class="form-group">
<label class="sr-only" for="form-repeat-password">Repeat password</label>
<input type="password" name="form-repeat-password" placeholder="Repeat password..."
class="form-repeat-password form-control" id="form-repeat-password">
</div>
<button type="button" class="btn btn-previous">Previous</button>
<button type="button" class="btn btn-next">Next</button>
</div>
</fieldset>
<fieldset>
<div class="form-top">
<div class="form-top-left">
<h3>Step 3 / 3</h3>
<p>Setup Additional Details:</p>
</div>
<div class="form-top-right">
<i class="fa fa-home"></i>
</div>
</div>
<div class="form-bottom">
<div class="form-group">
<label class="sr-only" for="country">country</label>
<select class="form-control selectpicker" data-live-search="true" name="country">
{{#each countries}}
<option value="{{ccode}}">{{cname}}</option>
{{/each}}
</select>
</div>
<div class="form-group">
<label class="sr-only" for="currency">currency</label>
<select class="form-control selectpicker" data-live-search="true" name="currency">
{{#each currencies}}
<option value="{{this}}">{{this}}</option>
{{/each}}
</select>
</div>
<div class="form-group">
<label class="sr-only" for="domain">Phone Number</label>
<input type="text" name="phone-number" placeholder="Enter Default Phone number" class="form-control" id="country">
</div>
<button type="button" class="btn btn-previous">Previous</button>
<button type="submit" class="btn" id="signup"> Sign me up!</button>
</div>
</fieldset>
</form>
                    <a href="{{pathFor 'home'}}" class="pull-right">&laquo; login</a>
</div>
</div>
</div>
</div>
</div>
</template>
| {
"content_hash": "68113f4be31dda4b350fc5375171ba4a",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 166,
"avg_line_length": 55.3375,
"alnum_prop": 0.3607409080641518,
"repo_name": "c2gconsulting/bp-core",
"id": "a0f0d01d4cc152084dbbab37506b2382e80a747c",
"size": "8854",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "main/app/client/templates/tenants/registration/tenants.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1288615"
},
{
"name": "CoffeeScript",
"bytes": "35038"
},
{
"name": "Dockerfile",
"bytes": "4717"
},
{
"name": "HTML",
"bytes": "2132876"
},
{
"name": "JavaScript",
"bytes": "27306597"
},
{
"name": "Less",
"bytes": "17754"
},
{
"name": "Makefile",
"bytes": "502"
},
{
"name": "PowerShell",
"bytes": "471"
},
{
"name": "Ruby",
"bytes": "127"
},
{
"name": "SCSS",
"bytes": "39676"
},
{
"name": "Sass",
"bytes": "19972"
},
{
"name": "Shell",
"bytes": "13078"
},
{
"name": "TypeScript",
"bytes": "53771"
}
],
"symlink_target": ""
} |
{-# LANGUAGE OverloadedStrings #-}
module Euler.Pipeline where
import Data.ByteString (ByteString)
import Euler.Assets (publishAssets)
import Euler.Chintz (expandElements, getDependencies', elementMustachePath)
import Euler.Control.Monad.Extra
import Euler.Manifest (publishManifest)
import Euler.Parser
import Euler.Component hiding (name)
-- | Parse a configuration blob and build every component it declares.
-- Aborts via 'error' when the configuration cannot be parsed.
build :: ByteString -> IO [Component]
build input = do
  case parseConfiguration input of
    Left err -> error $ "Invalid Configuration: " ++ err
    Right config -> mapM processComponent (componentNames config)
-- | Names of all components listed in a configuration.
componentNames :: Configuration -> [String]
componentNames = (map name) . components
-- | Assemble a single component: expand its element graph, resolve and
-- publish its JS\/CSS assets, and publish its manifest.
processComponent :: String -> IO Component
processComponent component = do
  let componentsPath = "components"
  expandedElements <- uniqConcatMapM (expandElements componentsPath) [component]
  template <- elementMustachePath componentsPath component
  -- Dependency lookup is partially applied over the expanded element set.
  let getDeps = getDependencies' componentsPath expandedElements
  jsDeps <- getDeps "js"
  cssDeps <- getDeps "css"
  jsAssets <- publishAssets componentsPath jsDeps
  cssAssets <- publishAssets componentsPath cssDeps
  let assets = [("js", jsAssets), ("css", cssAssets)]
  manifest <- publishManifest component template assets
  return $ Component component template assets manifest
| {
"content_hash": "9b11199707402151fd3cea05c32743b8",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 82,
"avg_line_length": 30.045454545454547,
"alnum_prop": 0.7549167927382754,
"repo_name": "wildlyinaccurate/euler",
"id": "1600681e8100af2bffb246293f3bf94c3a58b8f5",
"size": "1322",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Euler/Pipeline.hs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "290"
},
{
"name": "HTML",
"bytes": "682"
},
{
"name": "Haskell",
"bytes": "7286"
},
{
"name": "JavaScript",
"bytes": "427"
},
{
"name": "Makefile",
"bytes": "479"
}
],
"symlink_target": ""
} |
"""
Testing for pipeline_grid_search module.
"""
from __future__ import print_function
from __future__ import division
import time
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin, ClassifierMixin
from sklearn.cross_validation import StratifiedKFold
from sklearn.datasets import make_classification
from sklearn.decomposition import PCA
from sklearn.grid_search import GridSearchCV
from sklearn.pipeline import Pipeline, FeatureUnion
from sklearn.preprocessing import Normalizer
from sklearn.svm import SVC
from nose.tools import assert_equal
from pipeline_grid_search import PipelineGridSearchCV
# Globals for counting estimator calls
n_transform_calls = 0
n_fit_calls = 0
# http://stackoverflow.com/a/27005560/4963543
def make_init_body(classname, parameters):
    """Return the source text of an ``__init__`` for a generated mock class.

    ``parameters`` is a list of ``(name, default)`` pairs; each becomes a
    keyword argument and a ``self.<name> = <name>`` assignment.
    """
    # Calling super does not work for some reason, but it does not matter
    # here: BaseEstimator, TransformerMixin and ClassifierMixin all have an
    # empty __init__, so a plain ``pass`` body suffices.
    arg_parts = []
    assign_parts = []
    for key, val in parameters:
        arg_parts.append(', {}={}'.format(key, val))
        assign_parts.append('\n        self.{}={}'.format(key, key))
    body = "    pass" + ''.join(assign_parts)
    return "    def __init__(self{}):\n{}".format(''.join(arg_parts), body)
def create_mock_estimator(classname,parameters,is_classifier=False):
    """Dynamically define and instantiate a mock sklearn estimator.

    The generated class increments the module-level globals ``n_fit_calls``
    and ``n_transform_calls`` so tests can compare how many estimator calls
    each grid-search implementation makes.
    """
    # parameters is a list of (key,val) pairs.
    init_body = make_init_body(classname,parameters)
    main_body = """
    def fit(self, X, y=None):
        global n_fit_calls
        n_fit_calls += 1
        return self
    """
    if is_classifier:
        bases = "(BaseEstimator, TransformerMixin, ClassifierMixin)"
        main_body += """
    def predict(self, X):
        return np.arange(X.shape[0])
    """
    else:
        bases = "(BaseEstimator, TransformerMixin)"
        # transform applies each param alternately as subtract/multiply so the
        # output depends deterministically on the chosen parameter values.
        main_body += """
    def transform(self, X):
        global n_transform_calls
        n_transform_calls += 1
        odd = False
        for k,v in self.get_params().items():
            if odd:
                X = X*v
            else:
                X = X-v
            odd = not odd
        return X
    """
    body = "class {}{}:\n{}\n{}".format(classname,bases,init_body,main_body)
    print(body)
    # NOTE(review): relies on the CPython detail that exec() writes new names
    # into the dict returned by locals(); the class is pulled back out by name.
    exec(body)
    newclassobj = locals()[classname]()
    return newclassobj
def create_mock_classifier(classname, parameters):
    """Shorthand for ``create_mock_estimator`` with ``is_classifier=True``."""
    return create_mock_estimator(classname, parameters, is_classifier=True)
def nfits(nparams):
    """Optimal number of ``fit`` calls when evaluating a pipeline in DFS order.

    Equivalently, the number of nodes in the pipeline's parameter tree minus
    one (the root). ``nparams[i]`` is the number of parameter settings of the
    i-th pipeline step.
    """
    nodes = 1
    for count in reversed(nparams):
        if count > 1:
            nodes = 1 + count * nodes
        else:
            nodes = nodes + 1
    return nodes - 1
def calc_n_ideal_fit_calls(parts, cv_params, n_folds):
    """Expected total ``fit`` calls for an optimal DFS grid search.

    Matches each cv_params entry to a pipeline step by comparing the prefix
    before the first ``__`` with the step's class name — so parameters of
    estimators nested inside a FeatureUnion are NOT counted (tests using
    FeatureUnion disable the call-count assertions for this reason).
    """
    pipe_length = len(parts)
    nparams = []
    for p in parts:
        param_count = 1
        for (name,vals) in cv_params:
            est_name,_ = name.split("__",1)
            if est_name == p.__class__.__name__:
                param_count *= len(vals)
        nparams.append(param_count)
    print(nparams)
    n_ideal_calls = nfits(nparams)
    n_ideal_calls *= n_folds # We repeat the above number of fit calls for each fold
    n_ideal_calls += pipe_length # plus the fits for fitting on the whole X last
    return n_ideal_calls
def calc_n_ideal_transform_calls(parts, cv_params, n_folds):
    """Expected total ``transform`` calls for an optimal DFS grid search.

    Same name-matching caveat as ``calc_n_ideal_fit_calls``: only top-level
    step names (prefix before the first ``__``) are matched.
    """
    pipe_length = len(parts)
    nparams = []
    for p in parts[:-1]: # Do not include the last part of the pipeline; it is a classifier (without transform)
        param_count = 1
        for (name,vals) in cv_params:
            est_name,_ = name.split("__",1)
            if est_name == p.__class__.__name__:
                param_count *= len(vals)
        nparams.append(param_count)
    n_ideal_calls = nfits(nparams)
    n_ideal_calls *= n_folds*2 # We repeat the above number of fit calls for each fold (and for both the train and development set)
    n_ideal_calls += pipe_length-1 # plus the fits for fitting on the whole X last (minus the classifier at the end)
    return n_ideal_calls
def test_pipeline_grid_search1():
    # Test that the estimator call counts match the ideal DFS counts
    # (and undercut those of the regular GridSearchCV).
    parts = [
            create_mock_estimator("f0",[]),
            create_mock_estimator("f1", [("p1",0),("p2",2)]),
            create_mock_estimator("f2",[]),
            create_mock_estimator("f3",[("c",0),("d",0)]),
            create_mock_estimator("f4",[]),
            create_mock_estimator("f5",[]),
            create_mock_classifier("f6",[("c",0)]),
            ]
    cv_params = [
            ('f1__p1', [10,20]),
            ('f3__c', [10,20,30]),
            ('f3__d', [10,20,30,40]),
            ('f6__c', [10,20,30,40]),
            ]
    perform_pipeline_case(parts, cv_params)
def test_pipeline_grid_search2():
    # Same as test 1 but with a longer pipeline of parameterless steps.
    parts = [
            create_mock_estimator("f0",[]),
            create_mock_estimator("f1", [("p1",0),("p2",2)]),
            create_mock_estimator("f2",[]),
            create_mock_estimator("f3",[("c",0),("d",0)]),
            create_mock_estimator("f4",[]),
            create_mock_estimator("f5",[]),
            create_mock_estimator("f40",[]),
            create_mock_estimator("f50",[]),
            create_mock_estimator("f41",[]),
            create_mock_estimator("f51",[]),
            create_mock_estimator("f42",[]),
            create_mock_estimator("f52",[]),
            create_mock_classifier("f6",[("c",0)]),
            ]
    cv_params = [
            ('f1__p1', [10,20]),
            ('f3__c', [10,20,30]),
            ('f3__d', [10,20,30,40]),
            ('f6__c', [10,20,30,40]),
            ]
    perform_pipeline_case(parts, cv_params)
def test_pipeline_grid_search3():
    # Minimal case: a single classifier with one searched parameter.
    parts = [
            create_mock_classifier("f1", [("p1",0)]),
            ]
    cv_params = [
            ('f1__p1', [10,20]),
            ]
    perform_pipeline_case(parts, cv_params)
def test_pipeline_grid_search4():
    # Degenerate case: a single classifier and an empty parameter grid.
    parts = [
            create_mock_classifier("f1", []),
            ]
    cv_params = [
            ]
    perform_pipeline_case(parts, cv_params)
def test_pipeline_grid_search5():
    # Long tail of parameterless steps after the parameterized ones.
    parts = [
            create_mock_estimator("f0",[]),
            create_mock_estimator("f1", [("p1",0),("p2",2)]),
            create_mock_estimator("f2",[]),
            create_mock_estimator("f3",[("c",0),("d",0)]),
            create_mock_estimator("f4",[]),
            create_mock_estimator("f5",[]),
            create_mock_estimator("f6",[]),
            create_mock_estimator("f7",[]),
            create_mock_estimator("f8",[]),
            create_mock_estimator("f9",[]),
            create_mock_estimator("f10",[]),
            create_mock_classifier("f11",[]),
            ]
    cv_params = [
            ('f1__p1', [10,20]),
            ('f3__c', [10,20,30]),
            ('f3__d', [10,20,30,40]),
            ]
    perform_pipeline_case(parts, cv_params)
def test_pipeline_grid_search6():
    # Test that the number of estimator calls is less than the ones for regular GridSearchCV
    parts = [
            create_mock_estimator("f0",[]),
            create_mock_estimator("f1", [("p1",0),("p2",2)]),
            create_mock_estimator("f2",[]),
            create_mock_estimator("f3",[("c",0),("d",0)]),
            create_mock_estimator("f4",[]),
            create_mock_estimator("f5",[]),
            SVC()
            ]
    cv_params = [
            ('f1__p1', [10,20]),
            ('f3__c', [10,20,30]),
            ('f3__d', [10,20,30,40]),
            ('SVC__C', [1.,10.,100.,1000.]),
            ('SVC__kernel', ['linear']),
            ]
    # Set assert_n_calls_equal to False, as we need to implement our custom counting of function calls in order to measure the call tests.
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False)
def test_pipeline_grid_search7():
    # Test that _DFSGridSearchCVPipeline gives the same selected parameters as the normal GridSearchCV
    parts = [
            PCA(),
            Normalizer(),
            SVC()
            ]
    cv_params = [
            ('PCA__n_components', [3,5,7]),
            ('Normalizer__norm', ['l2']),
            ('SVC__C', [1.,10.,100.,1000.]),
            ('SVC__kernel', ['linear']),
            ]
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False)
def test_pipeline_grid_search8():
    # Test using a FeatureUnion with embedded Pipelines.
    # NOTE(review): the final classifier is also named "f11", colliding with
    # the estimator inside feat1 — presumably unintentional; verify.
    parts = [
            create_mock_estimator("f0",[]),
            FeatureUnion([
                ('feat1', Pipeline([
                    ('f11', create_mock_estimator("f11", [("p1",0),("p2",2)])),
                    ])),
                ('feat2', Pipeline([
                    ('f12', create_mock_estimator("f12", [("a",0)])),
                    ])),
                ]),
            create_mock_estimator("f1", [("p1",0),("p2",2)]),
            create_mock_estimator("f2",[]),
            create_mock_estimator("f3",[("c",0),("d",0)]),
            create_mock_estimator("f4",[]),
            create_mock_estimator("f5",[]),
            create_mock_classifier("f11",[]),
            ]
    cv_params = [
            ('FeatureUnion__feat1__f11__p1', [10,20]),
            ('FeatureUnion__feat2__f12__a', [10,20,30]),
            ('f1__p1', [10,20]),
            ('f3__c', [10,20,30]),
            ('f3__d', [10,20,30,40]),
            ]
    # Set assert_n_calls_equal to False, as we need to implement our custom counting of function calls in order to measure the call tests.
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False)
    # TODO: Update assert_n_calls_equal logic to work correctly with pipelines embedded in FeatureUnions.
def test_pipeline_grid_search9():
    # Test using a FeatureUnion with embedded Pipelines.
    parts = [
            create_mock_estimator("f0",[]),
            FeatureUnion([
                ('feat1', Pipeline([
                    ('f11', create_mock_estimator("f11", [("p1",0),("p2",2)])),
                    ('f111', create_mock_estimator("f111", [("p1",0),("p2",2)])),
                    ('f112', create_mock_estimator("f112", [("p1",0),("p2",2)])),
                    ])),
                ('feat2', Pipeline([
                    ('f12', create_mock_estimator("f12", [("a",0)])),
                    ('f121', create_mock_estimator("f121", [("a",0)])),
                    ('f122', create_mock_estimator("f122", [("a",0)])),
                    ])),
                ]),
            create_mock_estimator("f1", [("p1",0),("p2",2)]),
            create_mock_estimator("f2",[]),
            create_mock_estimator("f3",[("c",0),("d",0)]),
            create_mock_estimator("f4",[]),
            create_mock_estimator("f5",[]),
            create_mock_classifier("f11",[]),
            ]
    cv_params = [
            ('FeatureUnion__feat1__f11__p1', [10,20]),
            #('FeatureUnion__feat1__f111__p1', [10,20]),
            ('FeatureUnion__feat1__f112__p1', [10,20]),
            #('FeatureUnion__feat2__f12__a', [10,20,30]),
            #('FeatureUnion__feat2__f121__a', [10,20,30]),
            ('FeatureUnion__feat2__f122__a', [10,20,30]),
            ('f1__p1', [10,20]),
            ('f3__c', [10,20,30]),
            ('f3__d', [10,20,30,40]),
            ]
    # Set assert_n_calls_equal to False, as we need to implement our custom counting of function calls in order to measure the call tests.
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False, mode='file', cachedir='file_cache', datasetname='make_class')
def test_pipeline_grid_search10():
    # Test if _DFSGridSearchCVPipeline works with submerged pipelines.
    parts = [
            create_mock_estimator("f0",[]),
            FeatureUnion([
                ('feat1', Pipeline([
                    ('f11', create_mock_estimator("f11", [("p1",0),("p2",2)])),
                    ('f111', create_mock_estimator("f111", [("p1",0),("p2",2)])),
                    ('f112', create_mock_estimator("f112", [("p1",0),("p2",2)])),
                    ])),
                ('feat2', Pipeline([
                    ('f12', create_mock_estimator("f12", [("a",0)])),
                    ('f121', create_mock_estimator("f121", [("a",0)])),
                    ('f122', create_mock_estimator("f122", [("a",0)])),
                    ])),
                ]),
            PCA(),
            Normalizer(),
            SVC(),
            ]
    cv_params = [
            ('FeatureUnion__feat1__f11__p1', [10,20]),
            #('FeatureUnion__feat1__f111__p1', [10,20]),
            ('FeatureUnion__feat1__f112__p1', [10,20]),
            #('FeatureUnion__feat2__f12__a', [10,20,30]),
            #('FeatureUnion__feat2__f121__a', [10,20,30]),
            ('FeatureUnion__feat2__f122__a', [10,20,30]),
            ('PCA__n_components', [3,5,7]),
            ('Normalizer__norm', ['l2']),
            ('SVC__C', [1.,10.,100.,1000.]),
            ('SVC__kernel', ['linear']),
            ]
    # Set assert_n_calls_equal to False, as we need to implement our custom counting of function calls in order to measure the call tests.
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False, mode='dfs', cachedir='file_cache', datasetname='make_class')
def test_pipeline_grid_search11():
    # Test if _CacheGridSearchCVPipeline works with submerged pipelines.
    parts = [
            create_mock_estimator("f0",[]),
            FeatureUnion([
                ('feat1', Pipeline([
                    ('f11', create_mock_estimator("f11", [("p1",0),("p2",2)])),
                    ('f111', create_mock_estimator("f111", [("p1",0),("p2",2)])),
                    ('f112', create_mock_estimator("f112", [("p1",0),("p2",2)])),
                    ])),
                ('feat2', Pipeline([
                    ('f12', create_mock_estimator("f12", [("a",0)])),
                    ('f121', create_mock_estimator("f121", [("a",0)])),
                    ('f122', create_mock_estimator("f122", [("a",0)])),
                    ])),
                ]),
            PCA(),
            Normalizer(),
            SVC(),
            ]
    cv_params = [
            ('FeatureUnion__feat1__f11__p1', [10,20]),
            #('FeatureUnion__feat1__f111__p1', [10,20]),
            ('FeatureUnion__feat1__f112__p1', [10,20]),
            #('FeatureUnion__feat2__f12__a', [10,20,30]),
            #('FeatureUnion__feat2__f121__a', [10,20,30]),
            ('FeatureUnion__feat2__f122__a', [10,20,30]),
            ('PCA__n_components', [3,5,7]),
            ('Normalizer__norm', ['l2']),
            ('SVC__C', [1.,10.,100.,1000.]),
            ('SVC__kernel', ['linear']),
            ]
    # Set assert_n_calls_equal to False, as we need to implement our custom counting of function calls in order to measure the call tests.
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False, mode='file', cachedir='file_cache', datasetname='make_class')
def test_pipeline_grid_search12():
    # Test that _DFSGridSearchCVPipeline gives the same selected parameters as the normal GridSearchCV
    parts = [
            PCA(),
            Normalizer(),
            SVC()
            ]
    cv_params = [
            ('PCA__n_components', [3,5,7]),
            ('Normalizer__norm', ['l1','l2']),
            ('SVC__C', [1.,10.,100.,1000.]),
            ('SVC__kernel', ['linear']),
            ]
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False, mode='file', cachedir='file_cache', datasetname='make_class')
def test_pipeline_grid_search13():
    # Single-step pipeline through the file-cache path.
    parts = [
            SVC()
            ]
    cv_params = [
            ('SVC__C', [1.,10.,100.,1000.]),
            ('SVC__kernel', ['linear']),
            ]
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False, mode='file', cachedir='file_cache', datasetname='make_class')
def test_pipeline_grid_search14():
    # Smaller grid over the same PCA/Normalizer/SVC pipeline, file-cache mode.
    parts = [
            PCA(),
            Normalizer(),
            SVC()
            ]
    cv_params = [
            ('PCA__n_components', [3,5]),
            ('Normalizer__norm', ['l2']),
            ('SVC__C', [1.,10.]),
            ('SVC__kernel', ['linear']),
            ]
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False, mode='file', cachedir='file_cache', datasetname='make_class')
def test_pipeline_grid_search15():
    # Test if _CacheGridSearchCVPipeline works with submerged pipelines.
    parts = [
            create_mock_estimator("f0",[("p1",0)]),
            FeatureUnion([
                ('feat1', Pipeline([
                    ('f11', create_mock_estimator("f11", [("p1",0)])),
                    ('f12', create_mock_estimator("f12", [("p1",0)])),
                    ])),
                ('feat2', Pipeline([
                    ('f21', create_mock_estimator("f21", [("p1",0)])),
                    ('f22', create_mock_estimator("f22", [("p1",0)])),
                    ])),
                ]),
            PCA(),
            Normalizer(),
            SVC(),
            ]
    cv_params = [
            ('f0__p1', [10,20]),
            ('FeatureUnion__feat1__f11__p1', [30,40]),
            ('FeatureUnion__feat1__f12__p1', [50,60]),
            ('FeatureUnion__feat2__f21__p1', [100,200,300]),
            ('FeatureUnion__feat2__f22__p1', [400,500,600]),
            ('PCA__n_components', [3,5]),
            ('Normalizer__norm', ['l2']),
            ('SVC__C', [1.,10.]),
            ('SVC__kernel', ['linear']),
            ]
    # Set assert_n_calls_equal to False, as we need to implement our custom counting of function calls in order to measure the call tests.
    perform_pipeline_case(parts, cv_params, assert_n_calls_equal=False, mode='file', cachedir='file_cache', datasetname='make_class')
def perform_pipeline_case(parts, cv_params, assert_n_calls_equal=True, **pipelinegridsearchcv_kwargs):
    """Run PipelineGridSearchCV and plain GridSearchCV over the same pipeline.

    Builds a Pipeline from ``parts``, fits both search implementations on a
    synthetic classification problem, optionally asserts the exact number of
    fit/transform calls each made (via the module-level counters incremented
    by the mock estimators), and asserts both searches select the same best
    score and parameters.

    :param parts: estimator instances; step names are their class names.
    :param cv_params: list of (param_name, values) pairs for the grid.
    :param assert_n_calls_equal: when True, assert exact call counts.
    :param pipelinegridsearchcv_kwargs: extra kwargs for PipelineGridSearchCV.
    """
    pipe = Pipeline([ (p.__class__.__name__, p) for p in parts ])
    print(pipe)
    X, y = make_classification(n_samples=100, n_features=20)
    n_folds = 5
    n_jobs = 1
    verbose = 1
    random_seed = 0
    # mock.MagicMock cannot be used since GridSearchCV resets each estimator
    # using clone() before each call to fit, so global counters incremented
    # inside the mock estimators are used instead.
    global n_transform_calls, n_fit_calls
    # --- PipelineGridSearchCV run ---
    n_transform_calls = 0
    n_fit_calls = 0
    ideal_cv_time = time.time()
    model = PipelineGridSearchCV(pipe, dict(cv_params), cv=StratifiedKFold(y, n_folds, random_state=random_seed), verbose=verbose, n_jobs=n_jobs, **pipelinegridsearchcv_kwargs)
    model.fit(X,y)
    ideal_cv_time = time.time() - ideal_cv_time
    print("model.best_estimator_: {}".format(model.best_estimator_))
    print("Counts (PipelineGridSearchCV)")
    print("n_fit_calls:",n_fit_calls)
    print("n_transform_calls:",n_transform_calls)
    print("time to do grid search:",ideal_cv_time)
    n_ideal_fit_calls = calc_n_ideal_fit_calls(parts,cv_params,n_folds)
    n_ideal_transform_calls = calc_n_ideal_transform_calls(parts,cv_params,n_folds)
    if assert_n_calls_equal:
        # Make sure that PipelineGridSearchCV only called fit the optimal number of times.
        assert_equal(n_fit_calls, n_ideal_fit_calls)
        assert_equal(n_transform_calls, n_ideal_transform_calls)
    # --- plain GridSearchCV baseline ---
    n_transform_calls = 0
    n_fit_calls = 0
    naive_cv_time = time.time()
    model_naive = GridSearchCV(pipe, dict(cv_params), cv=StratifiedKFold(y, n_folds, random_state=random_seed), verbose=verbose, n_jobs=n_jobs)
    model_naive.fit(X,y)
    naive_cv_time = time.time() - naive_cv_time
    print("Counts (GridSearchCV)")
    print("n_fit_calls:",n_fit_calls)
    print("n_transform_calls:",n_transform_calls)
    print("time to do grid search:",naive_cv_time)
    # FIX: np.prod(map(...)) only worked on Python 2 where map returns a list;
    # on Python 3 np.prod would receive the map object itself. Build a list.
    n_param_combs = np.prod([len(vals) for _, vals in cv_params])
    n_naive_fit_calls = n_param_combs * len(parts) * n_folds + len(parts)
    n_naive_transform_calls = n_param_combs * (len(parts)-1) * n_folds * 2 + (len(parts)-1) # The 2 is for running on both the train and dev. set
    if assert_n_calls_equal:
        assert_equal(n_fit_calls, n_naive_fit_calls)
        assert_equal(n_transform_calls, n_naive_transform_calls)
    # Make sure that PipelineGridSearchCV and GridSearchCV return the same result.
    print("[pipeline_grid_search] best_params_:",model.best_params_)
    print("[pipeline_grid_search] best_score_:",model.best_score_)
    print("[naive_grid_search] best_params_:",model_naive.best_params_)
    print("[naive_grid_search] best_score_:",model_naive.best_score_)
    assert_equal(model_naive.best_score_, model.best_score_)
    # Note that for equal mean_validation_score, the best params of GridSearchCV will depend
    # on the order that they occur to the classifier, so sometimes this test fails even though
    # PipelineGridSearchCV behaves correctly.
    assert_equal(model_naive.best_params_, model.best_params_)
| {
"content_hash": "f506edad0c8435d84382c80f8a6b272c",
"timestamp": "",
"source": "github",
"line_count": 556,
"max_line_length": 176,
"avg_line_length": 37.17625899280576,
"alnum_prop": 0.5707789066279633,
"repo_name": "tkerola/pipeline_grid_search",
"id": "08f99bf099b509bb037360e36725eb363dadd17f",
"size": "20670",
"binary": false,
"copies": "1",
"ref": "refs/heads/dfs_based",
"path": "tests/test_pipeline_grid_search.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "105"
},
{
"name": "Python",
"bytes": "66513"
}
],
"symlink_target": ""
} |
describe("Ext.form.CheckboxGroup", function() {
var component;
function makeComponent(config) {
config = config || {};
component = new Ext.form.CheckboxGroup(config);
}
afterEach(function() {
if (component) {
component.destroy();
}
component = null;
});
describe("initial value", function() {
it("should set its originalValue to the aggregated value of its sub-checkboxes", function() {
makeComponent({
items: [
{name: 'one', checked: true},
{name: 'two', checked: true, inputValue: 'two-1'},
{name: 'two', checked: false, inputValue: 'two-2'},
{name: 'two', checked: true, inputValue: 'two-3'}
]
});
expect(component.originalValue).toEqual({one:'on', two: ['two-1', 'two-3']});
});
it("should set the values of its sub-checkboxes if the value config is specified", function() {
makeComponent({
items: [
{name: 'one', checked: true},
{name: 'two', checked: true, inputValue: 'two-1'},
{name: 'two', checked: false, inputValue: 'two-2'},
{name: 'two', checked: true, inputValue: 'two-3'}
],
value: {two: ['two-1', 'two-2']}
});
expect(component.originalValue).toEqual({two: ['two-1', 'two-2']});
expect(component.items.getAt(0).getValue()).toBe(false);
expect(component.items.getAt(1).getValue()).toBe(true);
expect(component.items.getAt(2).getValue()).toBe(true);
expect(component.items.getAt(3).getValue()).toBe(false);
});
});
describe("sizing", function() {
it("should respect a configured height", function() {
makeComponent({
renderTo: Ext.getBody(),
height: 100,
width: 300,
vertical: true,
columns: 2,
scrollable: 'y',
items: (function() {
var checkboxes = [],
i;
for (i = 0; i < 50; ++i) {
checkboxes.push({
xtype: 'checkbox'
});
}
return checkboxes;
})()
});
expect(component.getHeight()).toBe(100);
});
})
it("should fire the change event when a sub-checkbox is changed", function() {
makeComponent({
items: [{name: 'foo', checked: true}]
});
var spy = jasmine.createSpy();
component.on('change', spy);
component.items.getAt(0).setValue(false);
expect(spy.calls[0].args).toEqual([component, {}, {foo:'on'}]);
component.items.getAt(0).setValue(true);
expect(spy.calls[1].args).toEqual([component, {foo:'on'}, {}]);
});
describe("getValue", function() {
it("should return an object with keys matching the names of checked items", function() {
makeComponent({
items: [{name: 'one', checked: true}, {name: 'two'}]
});
var val = component.getValue();
expect(val.one).toBeDefined();
expect(val.two).not.toBeDefined();
});
it("should give the inputValue of a single checked item with a given name", function() {
makeComponent({
items: [{name: 'one', checked: true, inputValue: 'foo'}, {name: 'two'}]
});
expect(component.getValue().one).toEqual('foo');
});
it("should give an array of inputValues of multiple checked items with the same name", function() {
makeComponent({
items: [{name: 'one', checked: true, inputValue: '1'}, {name: 'one', checked: true, inputValue: '2'}, {name: 'one'}]
});
expect(component.getValue().one).toEqual(['1', '2']);
});
});
describe("getSubmitData", function() {
it("should return null", function() {
makeComponent({
value: {foo: true},
items: [{name: 'foo', inputValue: 'bar'}]
});
expect(component.getSubmitData()).toBeNull();
});
});
describe("getModelData", function() {
it("should return null", function() {
makeComponent({
value: {foo: true},
items: [{name: 'foo', inputValue: 'bar'}]
});
expect(component.getModelData()).toBeNull();
});
});
describe("reset", function() {
it("should reset each checkbox to its initial checked state", function() {
makeComponent({
items: [{name: 'one', checked: true}, {name: 'two'}, {name: 'three', checked: true}]
});
component.setValue({one: false, two: true});
component.reset();
expect(component.items.getAt(0).getValue()).toBe(true);
expect(component.items.getAt(1).getValue()).toBe(false);
expect(component.items.getAt(2).getValue()).toBe(true);
});
});
describe("allowBlank = false", function() {
it("should return a validation error when no sub-checkboxes are checked", function() {
makeComponent({
allowBlank: false,
items: [{name: 'one'}]
});
expect(component.isValid()).toBe(false);
});
it("should not return an error when a sub-checkbox is checked", function() {
makeComponent({
allowBlank: false,
items: [{name: 'one', checked: true}]
});
expect(component.isValid()).toBe(true);
});
it("should fire the validitychange event with true when checking a box previously undefined", function(){
makeComponent({
allowBlank: false,
items: [{name: 'one'}]
});
var isValid;
component.on('validitychange', function(field, validState){
isValid = validState;
});
component.setValue({
one: true
});
expect(isValid).toBe(true);
});
it("should fire the validitychange event with true when unchecking a box", function(){
makeComponent({
allowBlank: false,
items: [{name: 'one', checked: true}]
});
var isValid;
component.on('validitychange', function(field, validState){
isValid = validState;
});
component.setValue({
one: false
});
expect(isValid).toBe(false);
});
});
describe("setValue", function() {
describe("with a view model", function() {
it("should be able to set the value with inline data", function() {
var vm = new Ext.app.ViewModel({
data: {
theValue: {
foo: true,
baz: true
}
}
});
makeComponent({
renderTo: Ext.getBody(),
items: [{
name: 'foo'
}, {
name: 'bar'
}, {
name: 'baz'
}],
viewModel: vm,
bind: {
value: '{theValue}'
}
});
vm.notify();
expect(component.getValue()).toEqual({
foo: 'on',
baz: 'on'
});
});
it("should be able to set the value with a defined viewmodel", function() {
Ext.define('spec.Bar', {
extend: 'Ext.app.ViewModel',
alias: 'viewmodel.bar',
data: {
theValue: {
foo: true,
baz: true
}
}
});
makeComponent({
renderTo: Ext.getBody(),
items: [{
name: 'foo'
}, {
name: 'bar'
}, {
name: 'baz'
}],
viewModel: {
type: 'bar'
},
bind: {
value: '{theValue}'
}
});
component.getViewModel().notify();
expect(component.getValue()).toEqual({
foo: 'on',
baz: 'on'
});
Ext.undefine('spec.Bar');
Ext.Factory.viewModel.instance.clearCache();
});
});
});
}); | {
"content_hash": "2ce18bfb3f948994d7781a6728b3fe0b",
"timestamp": "",
"source": "github",
"line_count": 267,
"max_line_length": 132,
"avg_line_length": 34.77153558052434,
"alnum_prop": 0.4328953037483843,
"repo_name": "erick-christian/hwt-backend",
"id": "016ff95a01aeeac92cffc22c6916418feddbc47a",
"size": "9284",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "gestion/recurso/framework/ext-5.1.3/test/specs/form/CheckboxGroup.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "8940"
},
{
"name": "ApacheConf",
"bytes": "14"
},
{
"name": "CSS",
"bytes": "75609178"
},
{
"name": "HTML",
"bytes": "4905468"
},
{
"name": "JavaScript",
"bytes": "67293340"
},
{
"name": "PHP",
"bytes": "19336265"
},
{
"name": "Python",
"bytes": "38400"
},
{
"name": "Ruby",
"bytes": "15395"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "6e026f6be77c4d1ab8417a3f04ae6725",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "92ec0a98f2b964d1c91f3c231283cb9e6a5c659c",
"size": "179",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Rosales/Urticaceae/Gesnouinia/Gesnouinia boehmerioides/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:util="http://www.springframework.org/schema/util"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:p="http://www.springframework.org/schema/p"
xmlns:c="http://www.springframework.org/schema/c"
xmlns:mvc="http://www.springframework.org/schema/mvc"
xmlns="http://www.springframework.org/schema/beans"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
http://www.springframework.org/schema/mvc http://www.springframework.org/schema/mvc/spring-mvc.xsd
http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util.xsd">
</beans>
| {
"content_hash": "7b9738180ef7d7f57e4db22e20b044f3",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 131,
"avg_line_length": 67.57142857142857,
"alnum_prop": 0.7251585623678647,
"repo_name": "moghaddam/cas",
"id": "a3809b8ba8087bcad800a513986282c01fb06a43",
"size": "946",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "cas-server-webapp-reports/src/main/resources/META-INF/spring/reports-web-ctx-config.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "185031"
},
{
"name": "HTML",
"bytes": "5805"
},
{
"name": "Java",
"bytes": "2809589"
},
{
"name": "JavaScript",
"bytes": "41487"
},
{
"name": "Shell",
"bytes": "4061"
}
],
"symlink_target": ""
} |
"""
async requests HTTP library
~~~~~~~~~~~~~~~~~~~~~
"""
import logging
__title__ = 'requests-futures'
__version__ = '0.9.7'
__build__ = 0x000000
__author__ = 'Ross McFarland'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Ross McFarland'
# Set default logging handler to avoid "No handler found" warnings.
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
| {
"content_hash": "1fd0f36f4dec862f543a97fe32f537b5",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 67,
"avg_line_length": 21.8,
"alnum_prop": 0.6422018348623854,
"repo_name": "eenchev/idea-note-taking-app",
"id": "9ac9cd31585cf5bded7298b92a726cbed1572432",
"size": "590",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "env/lib/python2.7/site-packages/requests_futures/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "460"
},
{
"name": "Python",
"bytes": "1214"
}
],
"symlink_target": ""
} |
/* ========cloud-zoom============*/
/* This is the moving lens square underneath the mouse pointer. */
.cloud-zoom-lens {
border: 1px solid #888 !important;
/* Set this to minus the border thickness. */
background-color:#f0f0f0;
cursor:move;
}
/* This is for the title text. */
.cloud-zoom-title {
position:absolute !important;
background-color:#888;
color:#fff;
font-family:'Ledger',Arial,Helvetica;
padding:3px;
width:100%;
text-align:center;
font-weight:bold;
font-size:10px;
top:0px;
}
/* This is the zoom window. */
.cloud-zoom-big {
border:1px solid #EBEBEB !important;
overflow:hidden;
margin-left:15px;
box-shadow: 1px 1px 2px 0 rgba(206, 206, 206, 0.4);
-webkit-box-shadow: 1px 1px 2px 0 rgba(206, 206, 206, 0.4);
-moz-box-shadow: 1px 1px 2px 0 rgba(206, 206, 206, 0.4);
-o-box-shadow: 1px 1px 2px 0 rgba(206, 206, 206, 0.4);
border-radius:5px;
-webkit-border-radius:5px;
-moz-border-radius:5px;
-o-border-radius:5px;
}
/* This is the loading message. */
.cloud-zoom-loading {
color:white;
background:#222;
padding:3px;
border:1px solid #000;
}
.product-image .mousetrap {
z-index:999 !important;
}
/**
* Lightbox
*/
/* ColorBox Core Style
-------------------------------------------------------------- */
#colorbox, #cboxOverlay, #cboxWrapper{position:absolute; top:0; left:0; z-index:9999; overflow:hidden;}
#cboxOverlay{position:fixed; width:100%; height:100%;}
#cboxMiddleLeft, #cboxBottomLeft{clear:left;}
#cboxContent{position:relative;}
#cboxLoadedContent{overflow:auto;}
#cboxTitle{margin:0;}
#cboxLoadingOverlay, #cboxLoadingGraphic{position:absolute; top:0; left:0; width:100%; height:100%;}
#cboxPrevious, #cboxNext, #cboxClose, #cboxSlideshow{cursor:pointer;}
.cboxPhoto{float:left; margin:auto; border:0; display:block; max-width:none; }
.cboxIframe{width:100%; height:100%; display:block; border:0;}
#colorbox, #cboxContent, #cboxLoadedContent{box-sizing:content-box;}
/* ColorBox skin.
The following styles are ordered & tabbed
in a way that represents the nesting of the generated HTML.
-------------------------------------------------------------- */
#cboxOverlay{background:url(../images/megnor/overlay.png) repeat 0 0;}
#colorbox{}
#cboxContent{background:#fff; overflow:hidden; border:none;/*10px solid #333*/ padding:10px; }
.cboxIframe{background:#fff;}
#cboxError{padding:50px; border:1px solid #ccc;}
#cboxLoadedContent{margin-bottom:40px; /*10px above buttons*/ }
#cboxTitle{position:absolute; bottom:17px;text-align:center; width:100%; color:#999999;}
#cboxCurrent{position:absolute; bottom:17px; left:80px; color:#999999;}
#cboxSlideshow{position:absolute; bottom:4px; right:30px; color:#0092ef;}
#cboxPrevious,
#cboxNext { position:absolute; background:url(../images/megnor/slider-arrows.png) no-repeat #ddd; width:30px; height:30px; text-indent:-9999px; }
#cboxPrevious:hover,
#cboxNext:hover { background-color:#333; }
#cboxPrevious{bottom:10px; left:10px; background-position:-10px -10px;}
#cboxPrevious:hover{background-position:-10px -60px;}
#cboxNext{bottom:10px; left:40px; background-position:-60px -10px;}
#cboxNext:hover{background-position:-60px -60px;}
#cboxLoadingOverlay{background:url(../images/megnor/loading_background.png) no-repeat center center;}
#cboxLoadingGraphic{background:url(../images/megnor/loading.gif) no-repeat center center;}
#cboxClose{position:absolute; bottom:10px; right:10px; background:url(../images/megnor/close-button.png) -10px -10px no-repeat #ddd; width:30px; height:30px; text-indent:-9999px;}
#cboxClose:hover{background-position:-10px -60px; background-color:#333;}
#cboxNext,
#cboxPrevious,
#cboxClose {
transition: background-color 300ms ease-in-out, background-position 300ms ease-in-out;
-moz-transition: background-color 300ms ease-in-out, background-position 300ms ease-in-out;
-webkit-transition: background-color 300ms ease-in-out, background-position 300ms ease-in-out;
-o-transition: background-color 300ms ease-in-out, background-position 300ms ease-in-out;
}
.zoom-btn-small {
bottom: 0px;
color: #888888;
line-height: 2.6666em;
padding: 0 1em;
position: absolute;
right: 0px;
z-index: 100;
}
| {
"content_hash": "28262d93782888ad222b55c34c36c6d4",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 187,
"avg_line_length": 37.64957264957265,
"alnum_prop": 0.6735527809307605,
"repo_name": "nitin-soni/symfony",
"id": "d72cffa771de9170922b74184ed1f5fca7a7d84b",
"size": "4405",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/Bitcoin/SiteBundle/Resources/public/css/cloud-lightbox-zoom.css",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "532176"
},
{
"name": "Go",
"bytes": "14150"
},
{
"name": "JavaScript",
"bytes": "1635106"
},
{
"name": "PHP",
"bytes": "274718"
},
{
"name": "Perl",
"bytes": "26"
},
{
"name": "Python",
"bytes": "11688"
},
{
"name": "Shell",
"bytes": "126"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<android.support.design.widget.CoordinatorLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:fitsSystemWindows="true"
tools:context="io.pivotal.safenotes.MainActivity">
<android.support.design.widget.AppBarLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:theme="@style/AppTheme.AppBarOverlay">
<android.support.v7.widget.Toolbar
android:id="@+id/toolbar"
android:layout_width="match_parent"
android:layout_height="?attr/actionBarSize"
android:background="?attr/colorPrimary"
app:popupTheme="@style/AppTheme.PopupOverlay" />
</android.support.design.widget.AppBarLayout>
<include layout="@layout/content_main" />
<android.support.design.widget.FloatingActionButton
android:id="@+id/fab"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="bottom|end"
android:layout_margin="@dimen/fab_margin"
android:src="@drawable/ic_note_add_white_24dp" />
</android.support.design.widget.CoordinatorLayout>
| {
"content_hash": "97dfa0f4b09fbd43788c5b39a3ed7877",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 107,
"avg_line_length": 41.088235294117645,
"alnum_prop": 0.6893342877594846,
"repo_name": "xtreme-rafael/safenotes-android",
"id": "10c69c63826434420080628f2955ab055e7b2e84",
"size": "1397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/res/layout/activity_main.xml",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "5941"
}
],
"symlink_target": ""
} |
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using MahApps.Metro.Controls;
using MahApps.Metro.Tests.TestHelpers;
using Xunit;
namespace MahApps.Metro.Tests
{
public class MetroWindowTest : AutomationTestBase
{
[Fact]
public async Task MetroWindowSmokeTest()
{
await TestHost.SwitchToAppThread();
await WindowHelpers.CreateInvisibleWindowAsync<MetroWindow>();
}
[Fact]
public async Task ShowsRightWindowCommandsOnTopByDefault()
{
await TestHost.SwitchToAppThread();
var window = new MetroWindow();
Assert.Equal(WindowCommandsOverlayBehavior.Always, window.RightWindowCommandsOverlayBehavior);
}
[Fact]
public async Task IconShouldBeVisibleByDefault()
{
await TestHost.SwitchToAppThread();
var window = await WindowHelpers.CreateInvisibleWindowAsync<MetroWindow>();
var icon = window.GetPart<ContentControl>("PART_Icon");
Assert.Equal(Visibility.Visible, icon.Visibility);
}
[Fact]
public async Task IconCanOverlayHiddenTitlebar()
{
await TestHost.SwitchToAppThread();
var window = await WindowHelpers.CreateInvisibleWindowAsync<MetroWindow>();
window.IconOverlayBehavior = WindowCommandsOverlayBehavior.HiddenTitleBar;
window.ShowTitleBar = false;
var icon = window.GetPart<ContentControl>("PART_Icon");
Assert.Equal(Visibility.Visible, icon.Visibility);
}
private Button GetButton(MetroWindow window, string buttonName)
{
var windowButtonCommands = window.GetPart<WindowButtonCommands>("PART_WindowButtonCommands");
Assert.NotNull(windowButtonCommands);
var button = windowButtonCommands.Template.FindName(buttonName, windowButtonCommands) as Button;
Assert.NotNull(button);
return button;
}
[Fact]
public async Task MinMaxCloseButtonsShouldBeVisibleByDefault()
{
await TestHost.SwitchToAppThread();
var window = await WindowHelpers.CreateInvisibleWindowAsync<MetroWindow>();
var minButton = GetButton(window, "PART_Min");
var maxButton = GetButton(window, "PART_Max");
var closeButton = GetButton(window, "PART_Close");
// min/max/close should be visible
Assert.True(minButton.IsVisible);
Assert.True(maxButton.IsVisible);
Assert.True(closeButton.IsVisible);
Assert.Equal(ResizeMode.CanResize, window.ResizeMode);
}
[Fact]
public async Task MinMaxButtonsShouldBeHiddenWithNoResizeMode()
{
await TestHost.SwitchToAppThread();
var window = await WindowHelpers.CreateInvisibleWindowAsync<MetroWindow>();
var minButton = GetButton(window, "PART_Min");
var maxButton = GetButton(window, "PART_Max");
// min/max should be visible
Assert.True(minButton.IsVisible);
Assert.True(maxButton.IsVisible);
Assert.Equal(ResizeMode.CanResize, window.ResizeMode);
window.ResizeMode = ResizeMode.NoResize;
// min/max should be hidden
Assert.False(minButton.IsVisible);
Assert.False(maxButton.IsVisible);
Assert.Equal(ResizeMode.NoResize, window.ResizeMode);
}
[Fact]
public async Task MaxButtonShouldBeHiddenWithCanMinimizeResizeMode()
{
await TestHost.SwitchToAppThread();
var window = await WindowHelpers.CreateInvisibleWindowAsync<MetroWindow>();
var minButton = GetButton(window, "PART_Min");
var maxButton = GetButton(window, "PART_Max");
// min/max should be visible
Assert.True(minButton.IsVisible);
Assert.True(maxButton.IsVisible);
Assert.Equal(ResizeMode.CanResize, window.ResizeMode);
window.ResizeMode = ResizeMode.CanMinimize;
// min should be visible, max hidden
Assert.True(minButton.IsVisible);
Assert.False(maxButton.IsVisible);
Assert.Equal(ResizeMode.CanMinimize, window.ResizeMode);
}
[Fact]
public async Task MinMaxButtonsShouldBeToggled()
{
await TestHost.SwitchToAppThread();
var window = await WindowHelpers.CreateInvisibleWindowAsync<MetroWindow>();
var minButton = GetButton(window, "PART_Min");
var maxButton = GetButton(window, "PART_Max");
// min/max should be visible
Assert.True(minButton.IsVisible);
Assert.True(maxButton.IsVisible);
Assert.Equal(ResizeMode.CanResize, window.ResizeMode);
window.ResizeMode = ResizeMode.CanMinimize;
// min should be visible, max hidden
Assert.True(minButton.IsVisible);
Assert.False(maxButton.IsVisible);
Assert.Equal(ResizeMode.CanMinimize, window.ResizeMode);
window.ShowMinButton = false;
// min should be hidden
Assert.False(minButton.IsVisible);
window.ResizeMode = ResizeMode.NoResize;
// min/max should be hidden
Assert.False(minButton.IsVisible);
Assert.False(maxButton.IsVisible);
Assert.Equal(ResizeMode.NoResize, window.ResizeMode);
window.ShowMaxRestoreButton = false;
// max should be hidden
Assert.False(maxButton.IsVisible);
window.ResizeMode = ResizeMode.CanResizeWithGrip;
// min/max should be hidden
Assert.False(minButton.IsVisible);
Assert.False(maxButton.IsVisible);
Assert.Equal(ResizeMode.CanResizeWithGrip, window.ResizeMode);
window.ShowMinButton = true;
window.ShowMaxRestoreButton = true;
// min/max should be visible
Assert.True(minButton.IsVisible);
Assert.True(maxButton.IsVisible);
window.ResizeMode = ResizeMode.NoResize;
// min/max should be hidden
Assert.False(minButton.IsVisible);
Assert.False(maxButton.IsVisible);
Assert.Equal(ResizeMode.NoResize, window.ResizeMode);
}
/// <summary>
/// #1362: ShowMinButton="False" and ShowMaxRestoreButton="False" not working
/// </summary>
[Fact]
public async Task MinMaxCloseButtonsShouldBeHidden()
{
await TestHost.SwitchToAppThread();
var window = await WindowHelpers.CreateInvisibleWindowAsync<HiddenMinMaxCloseButtonsWindow>();
var minButton = GetButton(window, "PART_Min");
var maxButton = GetButton(window, "PART_Max");
var closeButton = GetButton(window, "PART_Close");
// min/max/close should be hidden
Assert.False(minButton.IsVisible);
Assert.False(maxButton.IsVisible);
Assert.False(closeButton.IsVisible);
Assert.Equal(ResizeMode.CanResize, window.ResizeMode);
}
}
}
| {
"content_hash": "ccc73b2f5b843b68b879bc7391313490",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 108,
"avg_line_length": 34.9377990430622,
"alnum_prop": 0.6251711859764448,
"repo_name": "Sergeeeek/MinecraftLauncher",
"id": "bb1491d6352b17c550aba93dbd80cb15231121eb",
"size": "7304",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "MahApps.Metro-master/Mahapps.Metro.Tests/MetroWindowTest.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "1361622"
},
{
"name": "PowerShell",
"bytes": "133"
}
],
"symlink_target": ""
} |
#include <etl/fixed_sized_memory_block_allocator.h>
| {
"content_hash": "9470e28865037454cc19ee968a4a75be",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 51,
"avg_line_length": 18,
"alnum_prop": 0.7592592592592593,
"repo_name": "ETLCPP/etl",
"id": "7643fbc1f3d390bae5b503ac856cb2bc920698eb",
"size": "1370",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/sanity-check/fixed_sized_memory_block_allocator.h.t.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1390"
},
{
"name": "C",
"bytes": "142129"
},
{
"name": "C++",
"bytes": "12234060"
},
{
"name": "CMake",
"bytes": "65410"
},
{
"name": "Meson",
"bytes": "9008"
},
{
"name": "Python",
"bytes": "8272"
},
{
"name": "Shell",
"bytes": "35420"
}
],
"symlink_target": ""
} |
## File type
### Using and extending
Tuna provides two types of files: `File` and `Image`. You can easily add yours by extending `AbstractFile` entity and `AbstractFileType` form.
You can use these types in entity as:
1. required field:
// AppBundle/Entity/Project.php
use TunaCMS\Bundle\FileBundle\Validator\Constraints as FileAssert;
/**
* @var File
*
* @FileAssert\FileNotNull
* @ORM\ManyToOne(targetEntity="TunaCMS\Bundle\FileBundle\Entity\File", cascade={"persist", "remove"})
**/
protected $file;
// AppBundle/Form/ProjectType.php
public function buildForm(FormBuilderInterface $builder, array $options)
{
$builder->add('image', ImageType::class, [
// set it to `false` to disable scaling of image
'image_filter' => 'some_filter_name', // defaults to 'tuna_admin_thumb'
]);
}
2. optional field (you can delete file by setting empty `path`):
// AppBundle/Entity/Project.php
/**
* @ORM\OneToOne(targetEntity="TunaCMS\Bundle\FileBundle\Entity\Image", cascade={"persist", "remove"})
* @ORM\JoinColumn(onDelete="SET NULL")
*/
protected $image;
// AppBundle/Form/ProjectType.php
public function buildForm(FormBuilderInterface $builder, array $options)
{
$builder->add('image', ImageType::class, [
'attr' => [
'deletable' => false
], // defaults to true
// set it to `false` to disable scaling of image
'image_filter' => 'some_filter_name', // defaults to 'tuna_admin_thumb'
];
}
You can change default file location via `tuna_cms_file` config (here's the defaults):
tuna_cms_file:
file_manager:
web_root_dir: '%kernel.root_dir%/../web' # path to symfony's web directory
tmp_path: uploads/tmp
upload_files_path: uploads/files
#### Custom data loader/cache resolver
To avoid conflicts with other bundles using Liip Imagine as image processing engine, Tuna uses custom data loader/cache resolver: `tuna`.
#### Custom filters
To add your custom filters to use with tuna images make sure that you've added `data_loader` option to filter configuration:
liip_imagine:
filter_sets:
person_thumb:
data_loader: tuna # you can drop this if no other bundle override liip_imagine default loader/resolver.
cache: tuna # you can drop this if no other bundle override liip_imagine default loader/resolver.
filters:
.. your filters
#### Rendering
FileBundle provides three twig helpers for easy file rendering:
* `tuna_image(AbstractFile, filter = null)` - generates assets path to image, additionally you can apply imagine filter:
<img src="{{ tuna_image(news.image, 'news_thumb') }}">
* `tuna_file(AbstractFile)` - generates assets path to file:
<a href="{{ tuna_file(attachment.file) }}"></a>
* `tuna_uploadDir(type)` - returns path to upload dir of given file (where type is `tmp_path|upload_files_path`), useful for placeholders:
previewTemplate: theme.tuna_image_preview(tuna_uploadDir('tmp_path')~'/__path__', form.vars.attr.deletable, image_filter)
| {
"content_hash": "d69c880750feb08439d5a27d912beba0",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 142,
"avg_line_length": 37.41304347826087,
"alnum_prop": 0.6147588611272516,
"repo_name": "Tuna-CMS/tuna-bundle",
"id": "4e6294dfa6c1ad576eeacf8bd522a4524ac3271d",
"size": "3442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/files.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "116812"
},
{
"name": "HTML",
"bytes": "76301"
},
{
"name": "JavaScript",
"bytes": "75271"
},
{
"name": "PHP",
"bytes": "361061"
},
{
"name": "Shell",
"bytes": "2042"
}
],
"symlink_target": ""
} |
package eu.inmite.android.lib.validations.form.validators;
import android.support.v4.util.LruCache;
import java.lang.annotation.Annotation;
import java.util.HashMap;
import java.util.Map;
import eu.inmite.android.lib.validations.form.annotations.ValidatorFor;
import eu.inmite.android.lib.validations.form.iface.IValidator;
/**
* @author Tomas Vondracek
*/
public class ValidatorFactory {
private static final int INSTANCE_CACHE_SIZE = 4;
private static final LruCache<Class<? extends IValidator>, IValidator> sCachedValidatorInstances = new LruCache<>(INSTANCE_CACHE_SIZE);
private static final Map<Class<? extends Annotation>, Class<? extends IValidator>> sValidators = new HashMap<>();
static {
// our default validators:
//noinspection unchecked
registerValidatorClasses(
CustomValidator.class,
LengthValidator.class,
NumberValueValidator.class,
LengthValidator.class,
ValueValidator.class,
NumberValueValidator.class,
LengthValidator.class,
ValueValidator.class,
NotEmptyValidator.class,
WeekendDateValidator.class,
FutureDateValidator.class,
RegExpValidator.class,
CheckedValidator.class);
}
public static void registerValidatorClasses(Class<? extends IValidator<?>>... classes) {
if (classes == null || classes.length == 0) {
return;
}
for (Class<? extends IValidator<?>> clazz : classes) {
final Annotation[] annotations = clazz.getAnnotations();
// search for @ValidatorFor annotation and read supported validations
for (Annotation annotation : annotations) {
if (annotation instanceof ValidatorFor) {
Class<? extends Annotation>[] validationAnnotations = ((ValidatorFor) annotation).value();
for (Class<? extends Annotation> validationAnnotation : validationAnnotations) {
sValidators.put(validationAnnotation, clazz);
}
break;
}
}
}
}
public static IValidator getValidator(Annotation annotation) throws IllegalAccessException, InstantiationException {
if (annotation == null) {
return null;
}
final Class<? extends IValidator> clazz = sValidators.get(annotation.annotationType());
IValidator validator = null;
if (clazz != null) {
validator = sCachedValidatorInstances.get(clazz);
if (validator == null) {
validator = clazz.newInstance();
sCachedValidatorInstances.put(clazz, validator);
}
}
return validator;
}
public static void clearCachedValidators() {
sCachedValidatorInstances.evictAll();
}
}
| {
"content_hash": "6953308d5b8de92324dc0f2e7814349e",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 136,
"avg_line_length": 29.773809523809526,
"alnum_prop": 0.7353058776489404,
"repo_name": "douglasjunior/android-validation-komensky",
"id": "b6daec6a5e7af6483f245561f2df7154bb65b976",
"size": "2501",
"binary": false,
"copies": "5",
"ref": "refs/heads/release",
"path": "library/src/main/java/eu/inmite/android/lib/validations/form/validators/ValidatorFactory.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "102267"
}
],
"symlink_target": ""
} |
'use strict';
angular.module('nutritionApp.diariesControllers', []).
controller('DiaryListCtrl', ['$scope', '$routeParams', '$filter', 'localStorageService', 'growl', 'Diary', 'Food', 'Recipe', 'String', function($scope, $routeParams, $filter, localStorageService, growl, Diary, Food, Recipe, String) {
$scope.meals = {1: 'Petit déjeuner', 2: 'Déjeuner', 3: 'Dîner', 4: 'Collation 1', 5: 'Collation 2', 6: 'Collation 3'};
$scope.mealsMobile = {1: 'Petit déj\'', 2: 'Déjeuner', 3: 'Dîner', 4: 'Coll 1', 5: 'Coll 2', 6: 'Coll 3'};
$scope.formAddFood = {1: 'food', 2: 'food', 3: 'food', 4: 'food', 5: 'food', 6: 'food'};
$scope.showAllFoods = {1: false, 2: false, 3: false, 4: false, 5: false, 6: false};
$scope.food = {1: null, 2: null, 3: null, 4: null, 5: null, 6: null};
$scope.recipe = {1: null, 2: null, 3: null, 4: null, 5: null, 6: null};
$scope.quantity = {1: null, 2: null, 3: null, 4: null, 5: null, 6: null};
$scope.editQuantity = {};
$scope.deleteMobileButtons = {};
$scope.manualDiary = {1: {}, 2: {}, 3: {}, 4: {}, 5: {}, 6: {}};
$scope.Math = window.Math;
$scope.lockCopyMeal = false;
$scope.lockAddManual = false;
if (angular.isUndefined($routeParams.date)) {
$scope.date = new Date();
} else {
$scope.date = new Date($routeParams.date);
}
var dateSimple = $filter('date')($scope.date, 'yyyy-MM-dd');
var diariesParams = {date: dateSimple};
Diary.list(diariesParams, function(data) {
$scope.diaries = data.diaries;
$scope.totalMeals = data.totalMeals;
$scope.total = data.total;
$scope.goal = data.goal;
});
Food.listAll(function(data) {
$scope.foodsList = data.foods;
});
Recipe.listAll(function(data) {
$scope.recipes = data.recipes;
});
// Previous and next dates
$scope.previousDate = new Date($scope.date.getTime() - 24 * 60 * 60 * 1000);
$scope.nextDate = new Date($scope.date.getTime() + 24 * 60 * 60 * 1000);
// 7 previous dates
$scope.lastDates = [];
for (var i = 1; i <= 7; i++) {
var tmpDate = new Date($scope.date.getTime() - i * 24 * 60 * 60 * 1000);
$scope.lastDates.push(tmpDate);
}
// Functions
$scope.showEdit = function(event, diary, meal) {
if (event) event.preventDefault();
$scope.editQuantity[diary] = true;
return false;
}
$scope.saveEdit = function(event, diary, meal) {
if (event) event.preventDefault();
angular.forEach($scope.diaries[meal], function(d, k) {
if (d.id == diary) {
d.quantity = d.quantity.replace(',', '.');
Diary.put({id: d.id, quantity: d.quantity}, function(data) {
$scope.diaries[meal][k].quantity = data.diary.quantity;
$scope.diaries[meal][k].calories = data.diary.calories;
$scope.diaries[meal][k].proteins = data.diary.proteins;
$scope.diaries[meal][k].carbohydrates = data.diary.carbohydrates;
$scope.diaries[meal][k].lipids = data.diary.lipids;
$scope.totalMeals[meal].calories = data.totalMeal.calories;
$scope.totalMeals[meal].proteins = data.totalMeal.proteins;
$scope.totalMeals[meal].carbohydrates = data.totalMeal.carbohydrates;
$scope.totalMeals[meal].lipids = data.totalMeal.lipids;
$scope.total.calories = data.total.calories;
$scope.total.proteins = data.total.proteins;
$scope.total.carbohydrates = data.total.carbohydrates;
$scope.total.lipids = data.total.lipids;
$scope.editQuantity[diary] = false;
});
}
});
return false;
};
$scope.delete = function(event, diary, meal) {
if (event) event.preventDefault();
Diary.delete({id: diary}, function(data) {
if (data) {
angular.forEach($scope.diaries[meal], function(d, k) {
if (d.id == diary) {
$scope.diaries[meal].splice(k, 1);
$scope.totalMeals[meal] = data.totalMeal;
$scope.total = data.total;
}
});
} else {
growl.addErrorMessage('Error lors de la suppression');
}
});
return false;
};
$scope.copyMeal = function(event, meal, date) {
if (event) event.preventDefault();
if (!$scope.lockCopyMeal) {
$scope.lockCopyMeal = true;
Diary.copyMeal({meal: meal, from: $filter('date')(date, 'yyyy-MM-dd'), to: $filter('date')($scope.date, 'yyyy-MM-dd')}, function(data) {
$scope.diaries[meal] = data.diaries;
// TODO merge both arrays
$scope.totalMeals[meal] = data.totalMeal;
$scope.total = data.total;
$scope.lockCopyMeal = false;
});
}
return false;
};
$scope.copyDay = function(event, date) {
if (event) event.preventDefault();
Diary.copyDay({from: $filter('date')(date, 'yyyy-MM-dd'), to: $filter('date')($scope.date, 'yyyy-MM-dd')}, function(data) {
$scope.diaries = data.diaries;
$scope.totalMeals = data.totalMeals;
$scope.total = data.total;
});
return false;
};
$scope.changeForm = function(type, mealId, event) {
if (event) event.preventDefault();
$scope.formAddFood[mealId] = type;
$scope.focus = type + mealId;
$scope.quantity[mealId] = null;
$scope.food[mealId] = null;
$scope.recipe[mealId] = null;
$scope.manualDiary[mealId] = null;
};
$scope.onSelectFood = function(meal) {
$scope.focus = 'foodQuantity' + meal;
};
$scope.onSelectRecipe = function(meal) {
$scope.focus = 'recipeQuantity' + meal;
};
$scope.autocompleteComparator = function(actual, expected) {
actual = String.removeAccents(actual).toLowerCase();
expected = String.removeAccents(expected).toLowerCase();
return actual.indexOf(expected) > -1;
};
$scope.addFood = function(event, meal) {
if (event) event.preventDefault();
if (!$scope.lockAddFood && $scope.food[meal] != null && $scope.quantity[meal] != null) {
$scope.lockAddFood = true;
Diary.post({date: $filter('date')($scope.date, 'yyyy-MM-dd'), meal: meal, food: $scope.food[meal].id, quantity: $scope.quantity[meal]}, function(data) {
$scope.diaries[meal].push(data.diary);
$scope.totalMeals[meal] = data.totalMeal;
$scope.total = data.total;
$scope.food[meal] = '';
$scope.quantity[meal] = '';
$scope.lockAddFood = false;
$scope.focus = 'food' + meal;
});
}
return false;
};
$scope.addRecipe = function(event, meal) {
if (event) event.preventDefault();
if ($scope.recipe[meal].id != null && $scope.quantity[meal] != null) {
Diary.post({date: $filter('date')($scope.date, 'yyyy-MM-dd'), meal: meal, recipe: $scope.recipe[meal].id, quantity: $scope.quantity[meal]}, function(data) {
$scope.diaries[meal].push(data.diary);
$scope.totalMeals[meal] = data.totalMeal;
$scope.total = data.total;
$scope.recipe[meal] = '';
$scope.quantity[meal] = '';
$scope.formAddFood[meal] = 'food';
$scope.focus = 'food' + meal;
});
}
return false;
};
$scope.addManual = function(event, meal) {
if (event) event.preventDefault();
if (!$scope.lockAddManual) {
$scope.lockAddManual = true;
if (!angular.isUndefined($scope.manualDiary[meal]) && !angular.isUndefined($scope.manualDiary[meal].name) &&
!angular.isUndefined($scope.manualDiary[meal].quantity) && !angular.isUndefined($scope.manualDiary[meal].calories) &&
!angular.isUndefined($scope.manualDiary[meal].proteins) && !angular.isUndefined($scope.manualDiary[meal].carbohydrates) &&
!angular.isUndefined($scope.manualDiary[meal].lipids)) {
Diary.post({date: $filter('date')($scope.date, 'yyyy-MM-dd'), meal: meal, manual: $scope.manualDiary[meal]}, function(data) {
$scope.diaries[meal].push(data.diary);
$scope.totalMeals[meal] = data.totalMeal;
$scope.total = data.total;
$scope.manualDiary[meal] = {};
$scope.lockAddManual = false;
});
}
}
return false;
};
$scope.copy = function(event, diary, from, to) {
if (event) event.preventDefault();
Diary.copy({id: diary, meal: to}, function(data) {
angular.forEach($scope.diaries[from], function(d, k) {
if (d.id == diary) {
$scope.diaries[to].push($scope.diaries[from][k]);
var data = $scope.diaries[from][k];
$scope.updateTotalMeals(to, data);
$scope.updateTotal(data);
}
});
});
return false;
};
$scope.move = function(event, diary, from, to) {
if (event) event.preventDefault();
Diary.move({id: diary, meal: to}, function(data) {
angular.forEach($scope.diaries[from], function(d, k) {
if (d.id == diary) {
$scope.diaries[to].push($scope.diaries[from][k]);
var data = $scope.diaries[from][k];
$scope.updateTotalMeals(from, data, false);
$scope.diaries[from].splice(k, 1);
$scope.updateTotalMeals(to, data);
}
});
});
return false;
};
$scope.updateTotalMeals = function(meal, data, addition) {
if (addition === undefined) addition = true;
if (addition) {
$scope.totalMeals[meal].calories = Math.round(parseFloat($scope.totalMeals[meal].calories) + parseFloat(data.calories));
$scope.totalMeals[meal].proteins = Math.round(parseFloat($scope.totalMeals[meal].proteins) + parseFloat(data.proteins));
$scope.totalMeals[meal].carbohydrates = Math.round(parseFloat($scope.totalMeals[meal].carbohydrates) + parseFloat(data.carbohydrates));
$scope.totalMeals[meal].lipids = Math.round(parseFloat($scope.totalMeals[meal].lipids) + parseFloat(data.lipids));
} else {
$scope.totalMeals[meal].calories = Math.round(parseFloat($scope.totalMeals[meal].calories) - parseFloat(data.calories));
$scope.totalMeals[meal].proteins = Math.round(parseFloat($scope.totalMeals[meal].proteins) - parseFloat(data.proteins));
$scope.totalMeals[meal].carbohydrates = Math.round(parseFloat($scope.totalMeals[meal].carbohydrates) - parseFloat(data.carbohydrates));
$scope.totalMeals[meal].lipids = Math.round(parseFloat($scope.totalMeals[meal].lipids) - parseFloat(data.lipids));
}
};
$scope.updateTotal = function(data, addition) {
if (addition === undefined) addition = true;
if (addition) {
$scope.total.calories = Math.round(parseFloat($scope.total.calories) + parseFloat(data.calories));
$scope.total.proteins = Math.round(parseFloat($scope.total.proteins) + parseFloat(data.proteins));
$scope.total.carbohydrates = Math.round(parseFloat($scope.total.carbohydrates) + parseFloat(data.carbohydrates));
$scope.total.lipids = Math.round(parseFloat($scope.total.lipids) + parseFloat(data.lipids));
} else {
$scope.total.calories = Math.round(parseFloat($scope.total.calories) - parseFloat(data.calories));
$scope.total.proteins = Math.round(parseFloat($scope.total.proteins) - parseFloat(data.proteins));
$scope.total.carbohydrates = Math.round(parseFloat($scope.total.carbohydrates) - parseFloat(data.carbohydrates));
$scope.total.lipids = Math.round(parseFloat($scope.total.lipids) - parseFloat(data.lipids));
}
};
// Datepickers
$scope.openDTCopyDay = false;
$scope.showDTCopyDay = function(event) {
if (event) {
event.preventDefault();
event.stopPropagation();
}
if (!$scope.openDTCopyDay) {
$scope.openDTCopyDay = true;
} else {
$scope.openDTCopyDay = false;
}
return false;
};
$scope.$watch('dtCopyDay', function() {
if ($scope.dtCopyDay != undefined) {
$('#btn-copy-day').removeClass('open');
$scope.copyDay(event, $filter('date')($scope.dtCopyDay, 'yyyy-MM-dd'));
}
});
}]);
| {
"content_hash": "622247eaee6420b7c7ff5c9b02d3e819",
"timestamp": "",
"source": "github",
"line_count": 346,
"max_line_length": 235,
"avg_line_length": 36.1242774566474,
"alnum_prop": 0.5862068965517241,
"repo_name": "skurty/nutrition",
"id": "c0e3eab514b589bee0be15873fa884fbe5310fc0",
"size": "12505",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/scripts/controllers/diaries.controller.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3320"
},
{
"name": "HTML",
"bytes": "66197"
},
{
"name": "JavaScript",
"bytes": "42446"
},
{
"name": "PHP",
"bytes": "101325"
},
{
"name": "Shell",
"bytes": "2072"
}
],
"symlink_target": ""
} |
package com.bastienleonard.tomate.ui.tasks.fragments;
import android.os.Bundle;
/**
 * {@link TasksFragment} variant displaying the tasks of the "doing" column
 * for a given task list.
 */
public final class DoingTasksFragment extends TasksFragment {
    /**
     * Creates a new instance configured for the given list.
     *
     * @param listId id of the list whose tasks should be displayed
     * @return a fragment with its arguments bundle already populated
     */
    public static DoingTasksFragment newInstance(String listId) {
        Bundle arguments = new Bundle();
        arguments.putString(ARG_LIST_ID, listId);
        DoingTasksFragment fragment = new DoingTasksFragment();
        fragment.setArguments(arguments);
        return fragment;
    }
}
| {
"content_hash": "7609af1161d0924b0e765365fdf09288",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 65,
"avg_line_length": 31.076923076923077,
"alnum_prop": 0.7054455445544554,
"repo_name": "bastienleonard/tomate",
"id": "9da1074ee0b4f153bedd3c8505ee6d6fe7cf3ede",
"size": "404",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/com/bastienleonard/tomate/ui/tasks/fragments/DoingTasksFragment.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "100880"
}
],
"symlink_target": ""
} |
<?php
namespace SpeckCatalog\Mapper;
/**
 * Data mapper for product availability rows (catalog_availability table).
 */
class Availability extends AbstractMapper
{
    protected $tableName = 'catalog_availability';
    protected $model = '\SpeckCatalog\Model\Availability\Relational';
    protected $tableKeyFields = array(
        'product_id',
        'uom_code',
        'quantity',
        'distributor_id'
    );
    protected $tableFields = array(
        'product_id',
        'uom_code',
        'distributor_id',
        'cost',
        'quantity',
        'distributor_uom_code',
        'distributor_item_number'
    );

    /**
     * Fetch all availability records matching a product / unit-of-measure /
     * quantity combination.
     *
     * @param int|string $productId
     * @param string     $uomCode
     * @param int|string $quantity
     * @return mixed result of selectManyModels() — presumably a list of
     *               Availability models (confirm against AbstractMapper)
     */
    public function getByProductUom($productId, $uomCode, $quantity)
    {
        $criteria = array(
            'product_id' => $productId,
            'uom_code'   => $uomCode,
            'quantity'   => $quantity,
        );
        $query = $this->getSelect()->where($criteria);
        return $this->selectManyModels($query);
    }
}
| {
"content_hash": "c2e53161952c1e5593e5b31a70818ced",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 69,
"avg_line_length": 25.02777777777778,
"alnum_prop": 0.5549389567147613,
"repo_name": "Xerkus/SpeckCatalog",
"id": "3e3451b84700905f61abb4bbb86c87fa07756d82",
"size": "901",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/SpeckCatalog/Mapper/Availability.php",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "2326"
},
{
"name": "HTML",
"bytes": "58992"
},
{
"name": "JavaScript",
"bytes": "10140"
},
{
"name": "PHP",
"bytes": "375738"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_79) on Sat Jul 29 21:10:31 PDT 2017 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>com.fasterxml.jackson.jr.retrofit2 Class Hierarchy (jackson-jr-retrofit2 2.9.0 API)</title>
<meta name="date" content="2017-07-29">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="com.fasterxml.jackson.jr.retrofit2 Class Hierarchy (jackson-jr-retrofit2 2.9.0 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../com/fasterxml/jackson/jr/retrofit2/package-summary.html">Package</a></li>
<li>Class</li>
<li>Use</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/fasterxml/jackson/jr/retrofit2/package-tree.html" target="_top">Frames</a></li>
<li><a href="package-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 class="title">Hierarchy For Package com.fasterxml.jackson.jr.retrofit2</h1>
</div>
<div class="contentContainer">
<h2 title="Class Hierarchy">Class Hierarchy</h2>
<ul>
<li type="circle">java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang"><span class="strong">Object</span></a>
<ul>
<li type="circle">retrofit2.Converter.Factory
<ul>
<li type="circle">com.fasterxml.jackson.jr.retrofit2.<a href="../../../../../com/fasterxml/jackson/jr/retrofit2/JacksonJrConverter.html" title="class in com.fasterxml.jackson.jr.retrofit2"><span class="strong">JacksonJrConverter</span></a><T></li>
</ul>
</li>
<li type="circle">com.fasterxml.jackson.jr.retrofit2.<a href="../../../../../com/fasterxml/jackson/jr/retrofit2/JacksonJrRequestBodyConverter.html" title="class in com.fasterxml.jackson.jr.retrofit2"><span class="strong">JacksonJrRequestBodyConverter</span></a><T> (implements retrofit2.Converter<F,T>)</li>
<li type="circle">com.fasterxml.jackson.jr.retrofit2.<a href="../../../../../com/fasterxml/jackson/jr/retrofit2/JacksonJrResponseArrayConverter.html" title="class in com.fasterxml.jackson.jr.retrofit2"><span class="strong">JacksonJrResponseArrayConverter</span></a><T> (implements retrofit2.Converter<F,T>)</li>
<li type="circle">com.fasterxml.jackson.jr.retrofit2.<a href="../../../../../com/fasterxml/jackson/jr/retrofit2/JacksonJrResponseConverter.html" title="class in com.fasterxml.jackson.jr.retrofit2"><span class="strong">JacksonJrResponseConverter</span></a><T> (implements retrofit2.Converter<F,T>)</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../com/fasterxml/jackson/jr/retrofit2/package-summary.html">Package</a></li>
<li>Class</li>
<li>Use</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/fasterxml/jackson/jr/retrofit2/package-tree.html" target="_top">Frames</a></li>
<li><a href="package-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2017 <a href="http://fasterxml.com/">FasterXML</a>. All rights reserved.</small></p>
</body>
</html>
| {
"content_hash": "8a8b1dd1091732912184129ab3224284",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 323,
"avg_line_length": 41.93893129770992,
"alnum_prop": 0.6483436476155806,
"repo_name": "FasterXML/jackson-jr",
"id": "9f3d83c494162fdc6cd3551155f36125295bdee4",
"size": "5494",
"binary": false,
"copies": "1",
"ref": "refs/heads/2.14",
"path": "docs/javadoc/jr-retrofit2/2.9/com/fasterxml/jackson/jr/retrofit2/package-tree.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "562477"
},
{
"name": "Logos",
"bytes": "7741"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using Orleans;
using Orleans.Runtime;
using Orleans.Runtime.Configuration;
using Orleans.Streams;
using Orleans.TestingHost;
using TestExtensions;
using UnitTests.GrainInterfaces;
using UnitTests.Grains;
using Xunit;
using Xunit.Abstractions;
namespace UnitTests.StreamingTests
{
    /// <summary>
    /// Load/limit tests for Orleans streaming: probe how many producers and
    /// consumers a single stream supports, and measure subscription churn rates.
    /// </summary>
    [TestCategory("Streaming"), TestCategory("Limits")]
    public class StreamLimitTests : TestClusterPerTest
    {
        public const string AzureQueueStreamProviderName = StreamTestsConstants.AZURE_QUEUE_STREAM_PROVIDER_NAME;
        public const string SmsStreamProviderName = StreamTestsConstants.SMS_STREAM_PROVIDER_NAME;
        // Upper bound for the FindMax probe loops below.
        private static int MaxExpectedPerStream = 500;
        // Discovered limits, filled in by the FindMax tests and reused by the
        // Max_* tests (static so values survive across test instances).
        private static int MaxConsumersPerStream;
        private static int MaxProducersPerStream;
        private const int MessagePipelineSize = 1000;
        private const int InitPipelineSize = 500;
        private IManagementGrain mgmtGrain;
        // Stream namespace shared by all tests in this class.
        private string StreamNamespace;
        private readonly ITestOutputHelper output;
        // Configures the test cluster: memory + Azure table storage providers,
        // two SMS stream providers (the second with the immutable-data
        // optimization disabled) and two Azure queue stream providers.
        public override TestCluster CreateTestCluster()
        {
            var options = new TestClusterOptions();
            options.ClusterConfiguration.AddMemoryStorageProvider("MemoryStore", numStorageGrains: 1);
            options.ClusterConfiguration.AddAzureTableStorageProvider("AzureStore", deleteOnClear: true);
            options.ClusterConfiguration.AddAzureTableStorageProvider("PubSubStore", deleteOnClear: true, useJsonFormat: false);
            options.ClusterConfiguration.AddSimpleMessageStreamProvider(SmsStreamProviderName, fireAndForgetDelivery: false);
            options.ClusterConfiguration.AddSimpleMessageStreamProvider("SMSProviderDoNotOptimizeForImmutableData", fireAndForgetDelivery: false, optimizeForImmutableData: false);
            options.ClusterConfiguration.AddAzureQueueStreamProvider(AzureQueueStreamProviderName);
            options.ClusterConfiguration.AddAzureQueueStreamProvider("AzureQueueProvider2");
            // Larger batches for the throughput-oriented tests below.
            options.ClusterConfiguration.Globals.MaxMessageBatchingSize = 100;
            return new TestCluster(options);
        }
        // Captures the xunit output helper, picks the shared stream namespace
        // and grabs a management grain reference for cluster queries.
        public StreamLimitTests(ITestOutputHelper output)
        {
            this.output = output;
            StreamNamespace = StreamTestsConstants.StreamLifecycleTestsNamespace;
            mgmtGrain = GrainClient.GrainFactory.GetGrain<IManagementGrain>(0);
        }
        // Probes the maximum number of consumers a single SMS stream supports:
        // attaches consumers one at a time until a call fails (or the
        // MaxExpectedPerStream cap is reached) and records the last good count
        // in the static MaxConsumersPerStream for the Max_Consumers_* tests.
        [Fact]
        public async Task SMS_Limits_FindMax_Consumers()
        {
            // 1 Stream, 1 Producer, X Consumers
            Guid streamId = Guid.NewGuid();
            string streamProviderName = SmsStreamProviderName;
            output.WriteLine("Starting search for MaxConsumersPerStream value using stream {0}", streamId);
            IStreamLifecycleProducerGrain producer = GrainClient.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(Guid.NewGuid());
            await producer.BecomeProducer(streamId, this.StreamNamespace, streamProviderName);
            int loopCount = 0;
            try
            {
                // Loop until something breaks!
                for (loopCount = 1; loopCount <= MaxExpectedPerStream; loopCount++)
                {
                    IStreamLifecycleConsumerGrain consumer = GrainClient.GrainFactory.GetGrain<IStreamLifecycleConsumerGrain>(Guid.NewGuid());
                    await consumer.BecomeConsumer(streamId, this.StreamNamespace, streamProviderName);
                }
            }
            catch (Exception exc)
            {
                // Broad catch is deliberate: any failure marks the limit found.
                output.WriteLine("Stopping loop at loopCount={0} due to exception {1}", loopCount, exc);
            }
            MaxConsumersPerStream = loopCount - 1;
            output.WriteLine("Finished search for MaxConsumersPerStream with value {0}", MaxConsumersPerStream);
            Assert.NotEqual(0, MaxConsumersPerStream);  // "MaxConsumersPerStream should be greater than zero."
            output.WriteLine("MaxConsumersPerStream={0}", MaxConsumersPerStream);
        }
        // Mirror of FindMax_Consumers: probes the maximum number of producers a
        // single SMS stream supports and records it in MaxProducersPerStream.
        [Fact, TestCategory("Functional")]
        public async Task SMS_Limits_FindMax_Producers()
        {
            // 1 Stream, X Producers, 1 Consumer
            Guid streamId = Guid.NewGuid();
            string streamProviderName = SmsStreamProviderName;
            output.WriteLine("Starting search for MaxProducersPerStream value using stream {0}", streamId);
            IStreamLifecycleConsumerGrain consumer = GrainClient.GrainFactory.GetGrain<IStreamLifecycleConsumerGrain>(Guid.NewGuid());
            await consumer.BecomeConsumer(streamId, this.StreamNamespace, streamProviderName);
            int loopCount = 0;
            try
            {
                // Loop until something breaks!
                for (loopCount = 1; loopCount <= MaxExpectedPerStream; loopCount++)
                {
                    IStreamLifecycleProducerGrain producer = GrainClient.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(Guid.NewGuid());
                    await producer.BecomeProducer(streamId, this.StreamNamespace, streamProviderName);
                }
            }
            catch (Exception exc)
            {
                // Broad catch is deliberate: any failure marks the limit found.
                output.WriteLine("Stopping loop at loopCount={0} due to exception {1}", loopCount, exc);
            }
            MaxProducersPerStream = loopCount - 1;
            output.WriteLine("Finished search for MaxProducersPerStream with value {0}", MaxProducersPerStream);
            Assert.NotEqual(0, MaxProducersPerStream);  // "MaxProducersPerStream should be greater than zero."
            output.WriteLine("MaxProducersPerStream={0}", MaxProducersPerStream);
        }
        // Fixed-combination limit tests: each drives Test_Stream_Limits (defined
        // elsewhere in this file, not visible here) with a specific
        // (streams, producers, consumers) shape. Names encode the shape:
        // Px = producers, Cx = consumers, Sx = streams.
        [Fact, TestCategory("Functional")]
        public async Task SMS_Limits_P1_C128_S1()
        {
            // 1 Stream, 1 Producer, 128 Consumers
            await Test_Stream_Limits(
                SmsStreamProviderName,
                1, 1, 128);
        }
        [Fact, TestCategory("Failures")]
        public async Task SMS_Limits_P128_C1_S1()
        {
            // 1 Stream, 128 Producers, 1 Consumer
            await Test_Stream_Limits(
                SmsStreamProviderName,
                1, 128, 1);
        }
        [Fact, TestCategory("Failures")]
        public async Task SMS_Limits_P128_C128_S1()
        {
            // 1 Stream, 128 Producers, 128 Consumers
            await Test_Stream_Limits(
                SmsStreamProviderName,
                1, 128, 128);
        }
        [Fact, TestCategory("Failures")]
        public async Task SMS_Limits_P1_C400_S1()
        {
            // 1 Stream, 1 Producer, 400 Consumers
            int numConsumers = 400;
            await Test_Stream_Limits(
                SmsStreamProviderName,
                1, 1, numConsumers);
        }
        // The Max_* tests reuse the limits discovered by the FindMax probes,
        // running them first if the static values are still zero.
        [Fact, TestCategory("Burst")]
        public async Task SMS_Limits_Max_Producers_Burst()
        {
            if (MaxProducersPerStream == 0) await SMS_Limits_FindMax_Producers();
            output.WriteLine("Using MaxProducersPerStream={0}", MaxProducersPerStream);
            // 1 Stream, Max Producers, 1 Consumer
            await Test_Stream_Limits(
                SmsStreamProviderName,
                1, MaxProducersPerStream, 1, useFanOut: true);
        }
        [Fact, TestCategory("Functional")]
        public async Task SMS_Limits_Max_Producers_NoBurst()
        {
            if (MaxProducersPerStream == 0) await SMS_Limits_FindMax_Producers();
            output.WriteLine("Using MaxProducersPerStream={0}", MaxProducersPerStream);
            // 1 Stream, Max Producers, 1 Consumer
            await Test_Stream_Limits(
                SmsStreamProviderName,
                1, MaxProducersPerStream, 1, useFanOut: false);
        }
        [Fact, TestCategory("Burst")]
        public async Task SMS_Limits_Max_Consumers_Burst()
        {
            if (MaxConsumersPerStream == 0) await SMS_Limits_FindMax_Consumers();
            output.WriteLine("Using MaxConsumersPerStream={0}", MaxConsumersPerStream);
            // 1 Stream, Max Producers, 1 Consumer
            await Test_Stream_Limits(
                SmsStreamProviderName,
                1, 1, MaxConsumersPerStream, useFanOut: true);
        }
        [Fact]
        public async Task SMS_Limits_Max_Consumers_NoBurst()
        {
            if (MaxConsumersPerStream == 0) await SMS_Limits_FindMax_Consumers();
            output.WriteLine("Using MaxConsumersPerStream={0}", MaxConsumersPerStream);
            // 1 Stream, Max Producers, 1 Consumer
            await Test_Stream_Limits(
                SmsStreamProviderName,
                1, 1, MaxConsumersPerStream, useFanOut: false);
        }
        [Fact, TestCategory("Failures"), TestCategory("Burst")]
        public async Task SMS_Limits_P9_C9_S152_Burst()
        {
            // 152 * 9 ~= 1360 target per second
            // 152 Streams, x9 Producers, x9 Consumers
            int numStreams = 152;
            await Test_Stream_Limits(
                SmsStreamProviderName,
                numStreams, 9, 9, useFanOut: true);
        }
        [Fact, TestCategory("Failures")]
        public async Task SMS_Limits_P9_C9_S152_NoBurst()
        {
            // 152 * 9 ~= 1360 target per second
            // 152 Streams, x9 Producers, x9 Consumers
            int numStreams = 152;
            await Test_Stream_Limits(
                SmsStreamProviderName,
                numStreams, 9, 9, useFanOut: false);
        }
        [Fact, TestCategory("Failures"), TestCategory("Burst")]
        public async Task SMS_Limits_P1_C9_S152_Burst()
        {
            // 152 * 9 ~= 1360 target per second
            // 152 Streams, x1 Producer, x9 Consumers
            int numStreams = 152;
            await Test_Stream_Limits(
                SmsStreamProviderName,
                numStreams, 1, 9, useFanOut: true);
        }
        [Fact, TestCategory("Failures")]
        public async Task SMS_Limits_P1_C9_S152_NoBurst()
        {
            // 152 * 9 ~= 1360 target per second
            // 152 Streams, x1 Producer, x9 Consumers
            int numStreams = 152;
            await Test_Stream_Limits(
                SmsStreamProviderName,
                numStreams, 1, 9, useFanOut: false);
        }
        // Subscription-churn performance tests: many streams, subscribers only
        // (numProducers: 0), measuring subscriptions-per-second.
        [Fact(Skip = "Ignore"), TestCategory("Performance"), TestCategory("Burst")]
        public async Task SMS_Churn_Subscribers_P0_C10_ManyStreams()
        {
            int numStreams = 2000;
            int pipelineSize = 10000;
            await Test_Stream_Churn_NumStreams(
                SmsStreamProviderName,
                pipelineSize,
                numStreams,
                numConsumers: 10,
                numProducers: 0
                );
        }
        //[Fact, TestCategory("Performance"), TestCategory("Burst")]
        //public async Task SMS_Churn_Subscribers_P1_C9_ManyStreams_TimePeriod()
        //{
        //    await Test_Stream_Churn_TimePeriod(
        //        StreamReliabilityTests.SMS_STREAM_PROVIDER_NAME,
        //        InitPipelineSize,
        //        TimeSpan.FromSeconds(60),
        //        numProducers: 1
        //        );
        //}
        [Fact, TestCategory("Performance"), TestCategory("Burst")]
        public async Task SMS_Churn_FewPublishers_C9_ManyStreams()
        {
            int numProducers = 0;
            int numStreams = 1000;
            int pipelineSize = 100;
            await Test_Stream_Churn_NumStreams_FewPublishers(
                SmsStreamProviderName,
                pipelineSize,
                numStreams,
                numProducers: numProducers,
                warmUpPubSub: true
                );
        }
        // Same as above but bypassing the normal subscribe path
        // (normalSubscribeCalls: false) to exercise PubSub directly.
        [Fact, TestCategory("Performance"), TestCategory("Burst")]
        public async Task SMS_Churn_FewPublishers_C9_ManyStreams_PubSubDirect()
        {
            int numProducers = 0;
            int numStreams = 1000;
            int pipelineSize = 100;
            await Test_Stream_Churn_NumStreams_FewPublishers(
                SmsStreamProviderName,
                pipelineSize,
                numStreams,
                numProducers: numProducers,
                warmUpPubSub: true,
                normalSubscribeCalls: false
                );
        }
        #region Test execution methods
        // Measures subscription churn when many streams share a small pool of
        // producer grains: optionally warms up PubSub and producers, wires
        // producers and consumers through an AsyncPipeline, then reports
        // subscriptions-per-second.
        // NOTE(review): blocks synchronously (pipeline.Wait / Task.Wait) and
        // returns an already-completed Task.
        private Task Test_Stream_Churn_NumStreams_FewPublishers(
            string streamProviderName,
            int pipelineSize,
            int numStreams,
            int numConsumers = 9,
            int numProducers = 4,
            bool warmUpPubSub = true,
            bool warmUpProducers = false,
            bool normalSubscribeCalls = true)
        {
            output.WriteLine("Testing churn with {0} Streams on {1} Producers with {2} Consumers per Stream",
                numStreams, numProducers, numConsumers);
            AsyncPipeline pipeline = new AsyncPipeline(pipelineSize);
            // Create streamId Guids
            Guid[] streamIds = new Guid[numStreams];
            for (int i = 0; i < numStreams; i++)
            {
                streamIds[i] = Guid.NewGuid();
            }
            // Baseline activation counts (ActiveGrainCount is defined elsewhere
            // in this class).
            int activeConsumerGrains = ActiveGrainCount(typeof(StreamLifecycleConsumerGrain).FullName);
            Assert.Equal(0, activeConsumerGrains); // "Initial Consumer count should be zero"
            int activeProducerGrains = ActiveGrainCount(typeof(StreamLifecycleProducerGrain).FullName);
            Assert.Equal(0, activeProducerGrains); // "Initial Producer count should be zero"
            if (warmUpPubSub)
            {
                WarmUpPubSub(streamProviderName, streamIds, pipeline);
                pipeline.Wait();
                int activePubSubGrains = ActiveGrainCount(typeof(PubSubRendezvousGrain).FullName);
                Assert.Equal(streamIds.Length, activePubSubGrains); // "Initial PubSub count -- should all be warmed up"
            }
            Guid[] producerIds = new Guid[numProducers];
            if (numProducers > 0 && warmUpProducers)
            {
                // Warm up Producers to pre-create grains
                for (int i = 0; i < numProducers; i++)
                {
                    producerIds[i] = Guid.NewGuid();
                    var grain = GrainClient.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(producerIds[i]);
                    Task promise = grain.Ping();
                    pipeline.Add(promise);
                }
                pipeline.Wait();
                int activePublisherGrains = ActiveGrainCount(typeof(StreamLifecycleProducerGrain).FullName);
                Assert.Equal(numProducers, activePublisherGrains); // "Initial Publisher count -- should all be warmed up"
            }
            var promises = new List<Task>();
            Stopwatch sw = Stopwatch.StartNew();
            if (numProducers > 0)
            {
                // Producers
                for (int i = 0; i < numStreams; i++)
                {
                    Guid streamId = streamIds[i];
                    // Round-robin streams over the small producer pool.
                    Guid producerId = producerIds[i % numProducers];
                    var grain = GrainClient.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(producerId);
                    Task promise = grain.BecomeProducer(streamId, this.StreamNamespace, streamProviderName);
                    promises.Add(promise);
                    pipeline.Add(promise);
                }
                pipeline.Wait();
                promises.Clear();
            }
            // Consumers
            for (int i = 0; i < numStreams; i++)
            {
                Guid streamId = streamIds[i];
                Task promise = SetupOneStream(streamId, streamProviderName, pipeline, numConsumers, 0, normalSubscribeCalls);
                promises.Add(promise);
            }
            pipeline.Wait();
            Task.WhenAll(promises).Wait();
            sw.Stop();
            // Verify final activation counts match what we created.
            int consumerCount = ActiveGrainCount(typeof(StreamLifecycleConsumerGrain).FullName);
            Assert.Equal(activeConsumerGrains + (numStreams * numConsumers), consumerCount); // "The right number of Consumer grains are active"
            int producerCount = ActiveGrainCount(typeof(StreamLifecycleProducerGrain).FullName);
            Assert.Equal(activeProducerGrains + (numStreams * numProducers), producerCount); // "The right number of Producer grains are active"
            int pubSubCount = ActiveGrainCount(typeof(PubSubRendezvousGrain).FullName);
            Assert.Equal(streamIds.Length, pubSubCount); // "Final PubSub count -- no more started"
            TimeSpan elapsed = sw.Elapsed;
            int totalSubscriptions = numStreams * numConsumers;
            double rps = totalSubscriptions / elapsed.TotalSeconds;
            output.WriteLine("Subscriptions-per-second = {0} during period {1}", rps, elapsed);
            Assert.NotEqual(0.0, rps); // "RPS greater than zero"
            return TaskDone.Done;
        }
        // Measures subscription churn across many independent streams, each with
        // its own producers/consumers, and reports subscriptions-per-second.
        // NOTE(review): blocks synchronously and returns a completed Task; unlike
        // the FewPublishers variant there is no pipeline.Wait() before WhenAll.
        private Task Test_Stream_Churn_NumStreams(
            string streamProviderName,
            int pipelineSize,
            int numStreams,
            int numConsumers = 9,
            int numProducers = 1,
            bool warmUpPubSub = true,
            bool normalSubscribeCalls = true)
        {
            output.WriteLine("Testing churn with {0} Streams with {1} Consumers and {2} Producers per Stream NormalSubscribe={3}",
                numStreams, numConsumers, numProducers, normalSubscribeCalls);
            AsyncPipeline pipeline = new AsyncPipeline(pipelineSize);
            var promises = new List<Task>();
            // Create streamId Guids
            Guid[] streamIds = new Guid[numStreams];
            for (int i = 0; i < numStreams; i++)
            {
                streamIds[i] = Guid.NewGuid();
            }
            if (warmUpPubSub)
            {
                WarmUpPubSub(streamProviderName, streamIds, pipeline);
                pipeline.Wait();
                int activePubSubGrains = ActiveGrainCount(typeof(PubSubRendezvousGrain).FullName);
                Assert.Equal(streamIds.Length, activePubSubGrains); // "Initial PubSub count -- should all be warmed up"
            }
            int activeConsumerGrains = ActiveGrainCount(typeof(StreamLifecycleConsumerGrain).FullName);
            Assert.Equal(0, activeConsumerGrains); // "Initial Consumer count should be zero"
            Stopwatch sw = Stopwatch.StartNew();
            for (int i = 0; i < numStreams; i++)
            {
                Task promise = SetupOneStream(streamIds[i], streamProviderName, pipeline, numConsumers, numProducers, normalSubscribeCalls);
                promises.Add(promise);
            }
            Task.WhenAll(promises).Wait();
            sw.Stop();
            int consumerCount = ActiveGrainCount(typeof(StreamLifecycleConsumerGrain).FullName);
            Assert.Equal(activeConsumerGrains + (numStreams * numConsumers), consumerCount); // "The correct number of new Consumer grains are active"
            TimeSpan elapsed = sw.Elapsed;
            int totalSubscriptions = numStreams * numConsumers;
            double rps = totalSubscriptions / elapsed.TotalSeconds;
            output.WriteLine("Subscriptions-per-second = {0} during period {1}", rps, elapsed);
            Assert.NotEqual(0.0, rps); // "RPS greater than zero"
            return TaskDone.Done;
        }
//private async Task Test_Stream_Churn_TimePeriod(
// string streamProviderName,
// int pipelineSize,
// TimeSpan duration,
// int numConsumers = 9,
// int numProducers = 1)
//{
// output.WriteLine("Testing Subscription churn for duration {0} with {1} Consumers and {2} Producers per Stream",
// duration, numConsumers, numProducers);
// AsyncPipeline pipeline = new AsyncPipeline(pipelineSize);
// var promises = new List<Task>();
// Stopwatch sw = Stopwatch.StartNew();
// for (int i = 0; sw.Elapsed <= duration; i++)
// {
// Guid streamId = Guid.NewGuid();
// Task promise = SetupOneStream(streamId, streamProviderName, pipeline, numConsumers, numProducers);
// promises.Add(promise);
// }
// await Task.WhenAll(promises);
// sw.Stop();
// TimeSpan elapsed = sw.Elapsed;
// int totalSubscription = numSt* numConsumers);
// double rps = totalSubscription/elapsed.TotalSeconds;
// output.WriteLine("Subscriptions-per-second = {0} during period {1}", rps, elapsed);
// Assert.NotEqual(0.0, rps, "RPS greater than zero");
//}
/// <summary>
/// Pre-activates the PubSub rendezvous grain for every stream id so later
/// subscribe calls do not pay the activation cost. Blocks until all queued
/// Validate calls have completed.
/// </summary>
private void WarmUpPubSub(string streamProviderName, Guid[] streamIds, AsyncPipeline pipeline)
{
    // The extended key is identical for every stream: provider name + namespace.
    string extendedKey = streamProviderName + "_" + StreamNamespace;
    foreach (Guid id in streamIds)
    {
        IPubSubRendezvousGrain rendezvous =
            GrainClient.GrainFactory.GetGrain<IPubSubRendezvousGrain>(id, extendedKey, null);
        pipeline.Add(rendezvous.Validate());
    }
    pipeline.Wait();
}
// When true, SetupOneStream wires up producer grains before consumers; when false, after.
private static bool producersFirst = true;
// Snapshot of per-grain-type activation statistics, refreshed by ActiveGrainCount().
private SimpleGrainStatistic[] grainCounts;
/// <summary>
/// Wires up the producers and consumers for a single stream, honoring the static
/// producersFirst flag for ordering. Returns a task that completes when every
/// subscribe/produce call issued here has completed.
/// </summary>
private Task SetupOneStream(
    Guid streamId, string streamProviderName,
    AsyncPipeline pipeline,
    int numConsumers,
    int numProducers,
    bool normalSubscribeCalls)
{
    var tasks = new List<Task>();
    bool haveProducers = numProducers > 0;

    if (producersFirst && haveProducers)
    {
        // Producers before consumers.
        tasks.AddRange(SetupProducers(streamId, this.StreamNamespace, streamProviderName, pipeline, numProducers));
    }

    if (numConsumers > 0)
    {
        tasks.AddRange(SetupConsumers(streamId, this.StreamNamespace, streamProviderName, pipeline, numConsumers, normalSubscribeCalls));
    }

    if (!producersFirst && haveProducers)
    {
        // Producers after consumers.
        tasks.AddRange(SetupProducers(streamId, this.StreamNamespace, streamProviderName, pipeline, numProducers));
    }

    return Task.WhenAll(tasks);
}
/// <summary>
/// Creates <paramref name="numProducers"/> producer grains for the given stream.
/// The first BecomeProducer call is awaited synchronously so the underlying stream
/// infrastructure is known to be up before the rest are fanned out.
/// </summary>
private static IList<Task> SetupProducers(Guid streamId, string streamNamespace, string streamProviderName, AsyncPipeline pipeline, int numProducers)
{
    var producerGrains = new List<IStreamLifecycleProducerGrain>();
    var tasks = new List<Task>();
    for (int i = 0; i < numProducers; i++)
    {
        var producer = GrainClient.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(Guid.NewGuid());
        producerGrains.Add(producer);

        Task task = producer.BecomeProducer(streamId, streamNamespace, streamProviderName);
        if (i == 0)
        {
            // First call for this stream: block until the infrastructure is set up.
            task.Wait();
        }
        tasks.Add(task);
        pipeline.Add(task);
    }
    return tasks;
}
/// <summary>
/// Creates <paramref name="numConsumers"/> consumer grains for the given stream and
/// subscribes each, either through the regular BecomeConsumer path or the slimmed
/// test-only TestBecomeConsumerSlim path.
/// </summary>
private static IList<Task> SetupConsumers(Guid streamId, string streamNamespace, string streamProviderName, AsyncPipeline pipeline, int numConsumers, bool normalSubscribeCalls)
{
    var consumerGrains = new List<IStreamLifecycleConsumerGrain>();
    var tasks = new List<Task>();
    long consumerIdStart = random.Next(); // value is unused; call kept for RNG-state parity with the original
    for (int i = 0; i < numConsumers; i++)
    {
        var consumer = GrainClient.GrainFactory.GetGrain<IStreamLifecycleConsumerGrain>(Guid.NewGuid());
        consumerGrains.Add(consumer);

        Task task = normalSubscribeCalls
            ? consumer.BecomeConsumer(streamId, streamNamespace, streamProviderName)
            : consumer.TestBecomeConsumerSlim(streamId, streamNamespace, streamProviderName);

        tasks.Add(task);
        pipeline.Add(task);
    }
    return tasks;
}
/// <summary>
/// Load scenario: runs TestOneStream for <paramref name="numStreams"/> streams
/// (concurrently when <paramref name="useFanOut"/> is true, sequentially otherwise)
/// and logs the aggregate messages-per-second.
/// </summary>
/// <param name="streamProviderName">Stream provider to test against.</param>
/// <param name="numStreams">Number of streams to run.</param>
/// <param name="numProducers">Producer grains per stream.</param>
/// <param name="numConsumers">Consumer grains per stream.</param>
/// <param name="numMessages">Messages sent by each producer.</param>
/// <param name="useFanOut">Run streams concurrently instead of one at a time.</param>
private async Task Test_Stream_Limits(
    string streamProviderName,
    int numStreams,
    int numProducers,
    int numConsumers,
    int numMessages = 1,
    bool useFanOut = true)
{
    // Fix: the original passed the literal 1 for the {0} "Streams" placeholder,
    // misreporting the stream count in the log.
    output.WriteLine("Testing {0} Streams x Producers={1} Consumers={2} per stream with {3} messages each",
        numStreams, numProducers, numConsumers, numMessages);

    Stopwatch sw = Stopwatch.StartNew();
    var promises = new List<Task<double>>();

    for (int s = 0; s < numStreams; s++)
    {
        Guid streamId = Guid.NewGuid();
        Task<double> promise = Task.Run(
            () => TestOneStream(streamId, streamProviderName, numProducers, numConsumers, numMessages, useFanOut));
        promises.Add(promise);

        if (!useFanOut)
        {
            // Sequential mode: finish this stream before starting the next.
            await promise;
        }
    }
    if (useFanOut)
    {
        output.WriteLine("Test: Waiting for {0} streams to finish", promises.Count);
    }
    double rps = (await Task.WhenAll(promises)).Sum();
    promises.Clear();
    output.WriteLine("Got total {0} RPS on {1} streams, or {2} RPS per streams",
        rps, numStreams, rps/numStreams);
    sw.Stop();

    int totalMessages = numMessages * numStreams * numProducers;
    output.WriteLine("Sent {0} messages total on {1} Streams from {2} Producers to {3} Consumers in {4} at {5} RPS",
        totalMessages, numStreams, numStreams * numProducers, numStreams * numConsumers,
        sw.Elapsed, totalMessages / sw.Elapsed.TotalSeconds);
}
/// <summary>
/// Exercises one stream end-to-end: builds the producer/consumer topology, has every
/// producer send <paramref name="numMessages"/> items, then asserts PubSub registration
/// counts and per-consumer received counts.
/// </summary>
/// <returns>Messages-per-second achieved for this stream.</returns>
private async Task<double> TestOneStream(Guid streamId, string streamProviderName,
    int numProducers, int numConsumers, int numMessages,
    bool useFanOut = true)
{
    output.WriteLine("Testing Stream {0} with Producers={1} Consumers={2} x {3} messages",
        streamId, numProducers, numConsumers, numMessages);

    Stopwatch sw = Stopwatch.StartNew();
    List<IStreamLifecycleConsumerGrain> consumers = new List<IStreamLifecycleConsumerGrain>();
    List<IStreamLifecycleProducerGrain> producers = new List<IStreamLifecycleProducerGrain>();

    await InitializeTopology(streamId, this.StreamNamespace, streamProviderName,
        numProducers, numConsumers,
        producers, consumers, useFanOut);

    var promises = new List<Task>();

    // Producers send M message each
    int item = 1;
    AsyncPipeline pipeline = new AsyncPipeline(MessagePipelineSize);
    foreach (var grain in producers)
    {
        for (int m = 0; m < numMessages; m++)
        {
            Task promise = grain.SendItem(item++);

            if (useFanOut)
            {
                pipeline.Add(promise);
                promises.Add(promise);
            }
            else
            {
                await promise;
            }
        }
    }
    if (useFanOut)
    {
        //output.WriteLine("Test: Waiting for {0} producers to finish sending {1} messages", producers.Count, promises.Count);
        await Task.WhenAll(promises);
        promises.Clear();
    }

    var pubSub = StreamTestUtils.GetStreamPubSub();

    // Check Consumer counts
    int consumerCount = await pubSub.ConsumerCount(streamId, streamProviderName, StreamNamespace);
    Assert.Equal(numConsumers, consumerCount); // "ConsumerCount for Stream {0}", streamId

    // Check Producer counts
    int producerCount = await pubSub.ProducerCount(streamId, streamProviderName, StreamNamespace);
    Assert.Equal(numProducers, producerCount); // "ProducerCount for Stream {0}", streamId

    // Check message counts received by consumers.
    // NOTE(review): the "+ 1" presumably accounts for one extra item emitted per
    // producer during BecomeProducer -- confirm against the producer grain implementation.
    int totalMessages = (numMessages + 1) * numProducers;
    foreach (var grain in consumers)
    {
        int count = await grain.GetReceivedCount();
        Assert.Equal(totalMessages, count); // "ReceivedCount for Consumer grain {0}", grain.GetPrimaryKey());
    }

    double rps = totalMessages/sw.Elapsed.TotalSeconds;
    //output.WriteLine("Sent {0} messages total from {1} Producers to {2} Consumers in {3} at {4} RPS",
    //    totalMessages, numProducers, numConsumers,
    //    sw.Elapsed, rps);
    return rps;
}
/// <summary>
/// Builds the grain topology for a stream: creates and subscribes the consumer grains
/// first, then the producer grains, filling the caller-supplied lists. With fan-out,
/// calls are queued through an AsyncPipeline and awaited in bulk; otherwise each call
/// is awaited sequentially.
/// </summary>
private static async Task InitializeTopology(Guid streamId, string streamNamespace, string streamProviderName,
    int numProducers, int numConsumers,
    List<IStreamLifecycleProducerGrain> producers, List<IStreamLifecycleConsumerGrain> consumers,
    bool useFanOut)
{
    long nextGrainId = random.Next();

    //var promises = new List<Task>();
    AsyncPipeline pipeline = new AsyncPipeline(InitPipelineSize);

    // Consumers
    long consumerIdStart = nextGrainId; // unused; grains are keyed by fresh Guids below
    for (int loopCount = 0; loopCount < numConsumers; loopCount++)
    {
        var grain = GrainClient.GrainFactory.GetGrain<IStreamLifecycleConsumerGrain>(Guid.NewGuid());
        consumers.Add(grain);
        Task promise = grain.BecomeConsumer(streamId, streamNamespace, streamProviderName);
        if (useFanOut)
        {
            pipeline.Add(promise);
            //promises.Add(promise);
            //if (loopCount%WaitBatchSize == 0)
            //{
            //    output.WriteLine("InitializeTopology: Waiting for {0} consumers to initialize", promises.Count);
            //    await Task.WhenAll(promises);
            //    promises.Clear();
            //}
        }
        else
        {
            await promise;
        }
    }
    if (useFanOut)
    {
        //output.WriteLine("InitializeTopology: Waiting for {0} consumers to initialize", promises.Count);
        //await Task.WhenAll(promises);
        //promises.Clear();
        //output.WriteLine("InitializeTopology: Waiting for {0} consumers to initialize", pipeline.Count);
        // Block until all consumer subscriptions have completed before starting producers.
        pipeline.Wait();
    }
    nextGrainId += numConsumers;

    // Producers
    long producerIdStart = nextGrainId; // unused; kept from an earlier id-based keying scheme
    pipeline = new AsyncPipeline(InitPipelineSize);
    for (int loopCount = 0; loopCount < numProducers; loopCount++)
    {
        var grain = GrainClient.GrainFactory.GetGrain<IStreamLifecycleProducerGrain>(Guid.NewGuid());
        producers.Add(grain);
        Task promise = grain.BecomeProducer(streamId, streamNamespace, streamProviderName);
        if (useFanOut)
        {
            pipeline.Add(promise);
            //promises.Add(promise);
        }
        else
        {
            await promise;
        }
    }
    if (useFanOut)
    {
        //output.WriteLine("InitializeTopology: Waiting for {0} producers to initialize", promises.Count);
        //await Task.WhenAll(promises);
        //promises.Clear();
        //output.WriteLine("InitializeTopology: Waiting for {0} producers to initialize", pipeline.Count);
        pipeline.Wait();
    }
    //nextGrainId += numProducers;
}
/// <summary>
/// Returns the current number of activations of the given grain type, refreshing
/// the cached statistics snapshot via a blocking management-grain call.
/// </summary>
private int ActiveGrainCount(string grainTypeName)
{
    grainCounts = mgmtGrain.GetSimpleGrainStatistics().Result; // Blocking Wait
    return grainCounts
        .Where(stat => stat.GrainType == grainTypeName)
        .Sum(stat => stat.ActivationCount);
}
#endregion
}
} | {
"content_hash": "40c3c2de2d1ed429efd8f1d4e8bf4ddb",
"timestamp": "",
"source": "github",
"line_count": 836,
"max_line_length": 184,
"avg_line_length": 40.211722488038276,
"alnum_prop": 0.5747389713537794,
"repo_name": "rrector/orleans",
"id": "e0d6229071a28f5b34efb9b9b44c2538005f8676",
"size": "33619",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "test/TesterInternal/StreamingTests/StreamLimitTests.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "30075"
},
{
"name": "C#",
"bytes": "7191839"
},
{
"name": "F#",
"bytes": "3772"
},
{
"name": "GLSL",
"bytes": "74"
},
{
"name": "Groovy",
"bytes": "1226"
},
{
"name": "HTML",
"bytes": "234868"
},
{
"name": "PLpgSQL",
"bytes": "53084"
},
{
"name": "PowerShell",
"bytes": "112032"
},
{
"name": "Protocol Buffer",
"bytes": "1683"
},
{
"name": "Smalltalk",
"bytes": "1584"
},
{
"name": "Visual Basic",
"bytes": "25531"
}
],
"symlink_target": ""
} |
import time, parallel, threading, Tkinter
from optparse import OptionParser
class parallelBuffer(threading.Thread):
    """Daemon thread that bit-bangs software PWM on the parallel port.

    Each PWM cycle writes ``dataOn`` to the port for the "on" fraction of
    the cycle and ``dataOff`` for the remainder, via the module-global
    parallel-port handle ``p``.
    """

    def __init__(self, pwmHertz = 60.0):
        """
        Initialization.
        @param pwmHertz Optional parameter to set PWM frequency.
        """
        threading.Thread.__init__(self, name='ParallelBufferThread')
        self.pwmTotalCycleTime = 1.0 / pwmHertz
        self.daemon = True
        self.running = False
        # Default to a fully-on duty cycle until setPWMLevel() is called.
        self.onTime = self.pwmTotalCycleTime
        self.offTime = 0
        # Fix: give the output bytes safe defaults so run() cannot raise
        # AttributeError if the thread is started before setDataOn/setDataOff.
        self.dataOn = 0
        self.dataOff = 0

    def run(self):
        """
        Starts the buffer: loops until stop(), toggling the port between
        dataOn and dataOff with the configured duty cycle.
        """
        self.running = True
        while(self.running):
            p.setData(self.dataOn)
            time.sleep(self.onTime)
            p.setData(self.dataOff)
            time.sleep(self.offTime)

    def setDataOn(self, data):
        """
        Sets the data byte written during the PWM "on" phase.
        """
        self.dataOn = data

    def setDataOff(self, data):
        """
        Sets the data byte written during the PWM "off" phase.
        """
        self.dataOff = data

    def setPWMLevel(self, data):
        """
        Sets the duty cycle.
        @param data Percentage (0-100) of each cycle spent in the "on" phase.
        """
        self.onTime = self.pwmTotalCycleTime * float(data)/100.0
        self.offTime = self.pwmTotalCycleTime - self.onTime

    def stop(self):
        """
        Stops the buffer loop (the thread exits after the current cycle).
        """
        self.running = False
class newLightTimer(threading.Thread):
    """Daemon thread that PWMs one parallel-port line at ~80 Hz, tracking a slider.

    The duty cycle follows ``Slider.getLevel()`` and is recomputed whenever
    the slider value changes.
    """

    def __init__(self, Slider, portCode=1):
        threading.Thread.__init__(self, name='LightingThread')
        self.portCode = portCode
        self.slider = Slider
        self.daemon = True

    def internalRunLights(self):
        # Recompute the on/off split only when the slider moves.
        cycle = 1.0 / 80.0
        level = self.slider.getLevel()
        high = cycle * (float(level) / 100.0)
        low = cycle - high
        while self.running:
            current = self.slider.getLevel()
            if current != level:
                level = current
                high = cycle * (float(level) / 100.0)
                low = cycle - high
            p.setData(self.portCode)
            time.sleep(high)
            p.setData(0)
            time.sleep(low)

    def run(self):
        self.running = True
        self.internalRunLights()

    def stop(self):
        self.running = False
class newLightSlider(object):
    """Wraps a vertical Tkinter scale (100 at top, 0 at bottom) and exposes its value."""

    def __init__(self, TkWindow, callback, startValue=0, title=None):
        """
        @param TkWindow   Parent Tk widget.
        @param callback   Called by Tk with the new value whenever the scale moves.
        @param startValue Initial slider value.
        @param title      Optional label shown above the scale.
        """
        self.levelVar = Tkinter.IntVar(value=startValue)
        scale = Tkinter.Scale(TkWindow, command = callback, variable = self.levelVar, label=title, from_=100, to=0)
        scale.pack(side=Tkinter.RIGHT)

    def getLevel(self):
        """Return the current slider value, or 0 if the Tk variable cannot be
        read (e.g. the widget or interpreter has been destroyed)."""
        try:
            return self.levelVar.get()
        except Exception:
            # Fix: narrowed from a bare except so KeyboardInterrupt/SystemExit
            # are no longer swallowed; Tcl errors still fall back to 0.
            return 0
def GUItest():
    """Interactive Tk GUI backed by a parallelBuffer PWM thread.

    Wiring implied by the data bytes below: bit 0 is the relay (held
    constantly high when the checkbox is on), bit 1 is the PWM'd light line.
    """
    # Init: drive all port lines low before starting.
    p.setData(000)
    pB = parallelBuffer(80.0)
    # Start with relay off: PWM bit 1 only (on=0b10, off=0b00).
    pB.setDataOn(002)
    pB.setDataOff(000)
    # GUI Init.
    window = Tkinter.Tk()
    window.title("LED")
    relayStatus = Tkinter.IntVar()
    def checkRelayStatus():
        # Relay off: bit 0 clear in both PWM phases.
        if not relayStatus.get():
            pB.setDataOn(002)
            pB.setDataOff(000)
        # Relay on: bit 0 set in both phases (on=0b11, off=0b01) so it stays high.
        elif relayStatus.get():
            pB.setDataOn(003)
            pB.setDataOff(001)
    radio = Tkinter.Checkbutton(window, variable = relayStatus, command = checkRelayStatus, text = "Relay")
    radio.pack(side = Tkinter.TOP)
    # Slider feeds the PWM duty cycle directly (0-100).
    slide1 = Tkinter.Scale(window, command = pB.setPWMLevel, label = "Lights", from_ = 100, to = 0)
    slide1.pack(side = Tkinter.TOP)
    headColor = '#3C3B37'
    window.configure(background=headColor)
    radio.configure(background=headColor, highlightbackground=headColor)
    slide1.configure(background=headColor, highlightbackground=headColor)
    #timer1 = newLightTimer(slide1, 2)
    #slide2 = newLightSlider(window, 100, 'Light 2')
    #timer2 = newLightTimer(slide1, 2)
    #timer1.start()
    #timer2.start()
    # Start buffer then GUI; mainloop blocks until the window is closed.
    pB.start()
    window.mainloop()
    window.quit()
    return
def verbtoseTest():
    """Step the lights from 10% to 100% power in 10% steps, 5 seconds each,
    printing the level before each step. (Name typo kept: callers use it.)"""
    print('10%')
    runLights(5, 10, 001)
    print('20%')
    runLights(5, 20, 001)
    print('30%')
    runLights(5, 30, 001)
    print('40%')
    runLights(5, 40, 001)
    print('50%')
    runLights(5, 50, 001)
    print('60%')
    runLights(5, 60, 001)
    print('70%')
    runLights(5, 70, 001)
    print('80%')
    runLights(5, 80, 001)
    print('90%')
    runLights(5, 90, 001)
    print('100%')
    runLights(5, 100, 001)
    print('Finished')
    return
def quickTest(var):
    """Ramp the lights from 1% up to var% in 1% steps, 0.1 s per step.

    @param var Highest power percentage to reach (inclusive).
    """
    print('Started')
    # Renamed loop variable: lowercase-L reads like the digit 1.
    for level in range(1, var + 1):
        # 001 rewritten as plain 1 (same value; the old octal form is Python-2-only syntax).
        runLights(0.1, level, 1)
    print('Finished')  # fixed typo: was 'Finshed'
    return
def runLights(runningTime, powerPercent, portCode):
    """
    Drive the given port code as an ~80 Hz software PWM signal at
    powerPercent duty cycle for roughly runningTime seconds, via the
    module-global parallel-port handle ``p``.
    """
    cycle = 1.0 / 80.0
    high = cycle * (float(powerPercent) / 100.0)
    low = cycle - high
    for _ in range(int(runningTime * 80)):
        p.setData(portCode)
        time.sleep(high)
        p.setData(0)
        time.sleep(low)
    return
def userInterface():
    """Prompt for a test mode, run it, and report whether to keep looping.

    Returns 'continue' after running a test, 'exit' for any other input.
    """
    choice = raw_input("[V]erbtose test, [Q]uick test, G[U]I, or [E]xit: ")
    if choice in ('V', 'v'):
        verbtoseTest()
        return 'continue'
    if choice in ('Q', 'q'):
        quickTest(100)
        return 'continue'
    if choice in ('U', 'u'):
        GUItest()
        return 'continue'
    return 'exit'
print('Welcome to parallel control tester!')
# Open the parallel port; 'p' is the module-wide handle used by every helper above.
p = parallel.Parallel()
# Setup the command line arguments.
optp = OptionParser()
# Output verbosity options.
optp.add_option('-u', '--gui', help='Open GUI',
                action='store_const', dest='gui',
                const=True, default=False)
options, args = optp.parse_args()
if (options.gui):
    # -u/--gui: jump straight into the Tk interface.
    GUItest();
else:
    # Text-mode loop: keep prompting until the user chooses to exit.
    while(True):
        if(userInterface() == 'exit'):
            # Drive all output lines low before quitting.
            p.setData(000)
            break
exit()
| {
"content_hash": "dd663cbcb67ffc2d3864613ece6aed15",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 114,
"avg_line_length": 27.77948717948718,
"alnum_prop": 0.6479601255307366,
"repo_name": "JoshuaJB/pyParallel-PWM",
"id": "697e21924c67c522c725b3dc52d7dd7a67274786",
"size": "5435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "parallelcrl.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5435"
}
],
"symlink_target": ""
} |
package http
import (
"github.com/dvln/testify/mock"
"net/http"
)
// TestRoundTripper DEPRECATED USE net/http/httptest
//
// Mock implementation of http.RoundTripper: record expectations on the
// embedded mock.Mock before installing it as an http.Client Transport.
type TestRoundTripper struct {
	mock.Mock
}
// RoundTrip DEPRECATED USE net/http/httptest
//
// Records the call on the mock and returns the canned (*http.Response, error)
// pair configured for it. The unchecked type assertion panics if return
// value 0 was configured with anything other than an *http.Response.
func (t *TestRoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
	args := t.Called(req)
	return args.Get(0).(*http.Response), args.Error(1)
}
| {
"content_hash": "29e71c93a99aeacc3a87883561f69538",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 81,
"avg_line_length": 21.941176470588236,
"alnum_prop": 0.7345844504021448,
"repo_name": "dvln/testify",
"id": "bcc60ed1ef5af41cca8eac549e04b6ed3284682d",
"size": "373",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "http/test_round_tripper.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "223154"
}
],
"symlink_target": ""
} |
package com.beust.jcommander.validators;
import com.beust.jcommander.IParameterValidator;
import com.beust.jcommander.ParameterException;
/**
* This is the default value of the validateWith attribute.
*
* @author Cedric Beust <cedric@beust.com>
*/
public class NoValidator implements IParameterValidator {

  /**
   * No-op validation: accepts every parameter value and never throws.
   *
   * @param parameterName  name of the parameter being validated (ignored)
   * @param parameterValue raw value supplied on the command line (ignored)
   */
  public void validate(String parameterName, String parameterValue)
      throws ParameterException {
  }
}
| {
"content_hash": "d6584d5a0713f04f3e67e60ace8f7dcb",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 67,
"avg_line_length": 22.31578947368421,
"alnum_prop": 0.7783018867924528,
"repo_name": "jeffoffutt/muJava",
"id": "f1b4df22fcb6dc0800e2d0f1bcde2b0499424232",
"size": "1157",
"binary": false,
"copies": "25",
"ref": "refs/heads/master",
"path": "src/com/beust/jcommander/validators/NoValidator.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1201601"
}
],
"symlink_target": ""
} |
// Bail out when loaded inside a Meteor bundle: this git helper is CLI-only.
// (A top-level `return` is valid inside the CommonJS module wrapper.)
if (typeof Meteor !== 'undefined') return;

var Fs = require('fs');
var Minimist = require('minimist');
var Paths = require('./paths');
var Step = require('./step');
var Utils = require('./utils');

// Shorthand for the shared git helper.
var git = Utils.git;
// Entry point: invoked by git as an editor. Usage:
//   node editor.js <edit|reword> <rebase-or-commit-file> [--message/-m <msg>]
(function () {
  var argv = Minimist(process.argv.slice(2), {
    string: ['message', 'm']
  });

  // argv._[0] is the method to run; argv._[1] is the file path git hands us
  // (either a rebase todo list or a commit-message file).
  var method = argv._[0];
  var rebaseFilePath = argv._[1];
  var message = argv.message || argv.m;

  var rebaseFileContent = Fs.readFileSync(rebaseFilePath, 'utf8');
  var newRebaseFileContent;

  // Dispatch on the requested method; unknown methods leave the file untouched.
  switch (method) {
    case 'edit': newRebaseFileContent = editStep(rebaseFileContent); break;
    case 'reword': newRebaseFileContent = rewordStep(rebaseFileContent, message); break;
  }

  // Only rewrite the file if a handler produced new content.
  if (newRebaseFileContent) {
    Fs.writeFileSync(rebaseFilePath, newRebaseFileContent);
  }
})();
// Edit the last step in the rebase file.
// Handles two inputs: a commit-message file (renumbers the step) or a rebase
// todo list (marks the first commit for editing and interleaves amend commands).
function editStep(rebaseFileContent) {
  var operations = disassemblyOperations(rebaseFileContent);

  // No operations parsed: this is a commit-message file, so just bump the
  // step number in its "Step N: message" line.
  if (!operations) {
    var stepDescriptor = Step.descriptor(rebaseFileContent);
    if (!stepDescriptor) return;

    var nextStep = Step.next(1);
    return 'Step ' + nextStep + ': ' + stepDescriptor.message;
  }

  // If rebasing, edit the first commit
  operations[0].method = 'edit';

  // Iterate a clone of the operations array, since we splice into the live
  // array as we go; `offset` tracks how many exec lines have been inserted so
  // each new one lands right after its corresponding original operation.
  operations.slice().reduce(function (offset, operation, index) {
    // After every operation, re-run this editor in 'edit' mode while amending.
    operations.splice(index + ++offset, 0, {
      method: 'exec',
      command: [
        'GIT_EDITOR="node ' + Paths.git.helpers.editor + ' edit"',
        'git commit --amend',
      ].join(' ')
    });

    return offset;
  }, 0);

  return assemblyOperations(operations);
}
// Reword the last step in the rebase file.
// Accepts either a commit-message file (message swapped in place) or a rebase
// todo list (an exec line is injected to amend the first commit's message).
function rewordStep(rebaseFileContent, message) {
  var ops = disassemblyOperations(rebaseFileContent);

  // Commit-message file: keep the original step number, replace the message.
  if (!ops) {
    var descriptor = Step.descriptor(rebaseFileContent);
    if (!descriptor) return;

    return 'Step ' + descriptor.number + ': ' + message;
  }

  // Rebase todo list: right after the first operation, re-run this editor in
  // 'reword' mode while amending the commit.
  var editorCmd = 'GIT_EDITOR="node ' + Paths.git.helpers.editor +
    ' reword --message=\'' + message + '\'"' + ' ' + 'git commit --amend';

  ops.splice(1, 0, { method: 'exec', command: editorCmd });

  return assemblyOperations(ops);
}
// Convert rebase file content to an operations array. Each matching line
// ("<method> <7-char hash> <message>") becomes {method, hash, message};
// returns undefined when no operation lines are present.
function disassemblyOperations(rebaseFileContent) {
  var matched = rebaseFileContent.match(/^[a-z]+\s.{7}.*$/mg);
  if (!matched) return;

  var operations = [];
  matched.forEach(function (line) {
    var tokens = line.split(' ');
    operations.push({
      method: tokens[0],
      hash: tokens[1],
      message: tokens.slice(2).join(' ')
    });
  });
  return operations;
}
// Convert an operations array back into rebase file content: one line per
// operation, joining the operation's own property values (in insertion order)
// with spaces, and a trailing newline at the end.
function assemblyOperations(operations) {
  var lines = [];
  for (var i = 0; i < operations.length; i++) {
    var operation = operations[i];
    var values = [];
    for (var key in operation) {
      if (Object.prototype.hasOwnProperty.call(operation, key)) {
        values.push(operation[key]);
      }
    }
    lines.push(values.join(' '));
  }
  return lines.join('\n') + '\n';
}
"content_hash": "df31d8d0d12c7ba390b3a38ca6d77dbd",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 96,
"avg_line_length": 27.650793650793652,
"alnum_prop": 0.6624569460390356,
"repo_name": "michaelb-01/pipe",
"id": "608c7148917aee3ab1da226fb3b850190ec50e88",
"size": "3484",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "git-helpers/editor.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "68729"
},
{
"name": "HTML",
"bytes": "76417"
},
{
"name": "JavaScript",
"bytes": "16138"
},
{
"name": "Python",
"bytes": "1603833"
},
{
"name": "TypeScript",
"bytes": "132915"
}
],
"symlink_target": ""
} |
<?php
namespace Drupal\Tests\migrate\Kernel;
use Drupal\migrate\MigrateExecutable;
use Drupal\taxonomy\Entity\Term;
use Drupal\taxonomy\Entity\Vocabulary;
/**
* Tests setting of bundles on content entity migrations.
*
* @group migrate
*/
class MigrateBundleTest extends MigrateTestBase {

  /**
   * Modules to enable.
   *
   * @var array
   */
  public static $modules = ['taxonomy', 'text', 'user'];

  /**
   * {@inheritdoc}
   */
  protected function setUp() {
    parent::setUp();
    $this->installEntitySchema('user');
    $this->installEntitySchema('taxonomy_vocabulary');
    $this->installEntitySchema('taxonomy_term');
    $this->installConfig(['taxonomy']);
    // Set up two vocabularies (taxonomy bundles).
    // NOTE(review): the Vocabulary entities are created but not ->save()d here;
    // confirm the migration machinery does not require persisted bundles.
    Vocabulary::create(['vid' => 'tags', 'name' => 'Tags']);
    Vocabulary::create(['vid' => 'categories', 'name' => 'Categories']);
  }

  /**
   * Tests setting the bundle in the destination.
   */
  public function testDestinationBundle() {
    // Single source row carrying no bundle information of its own.
    $term_data_rows = [
      ['id' => 1, 'name' => 'Category 1'],
    ];
    $ids = ['id' => ['type' => 'integer']];
    // The bundle comes solely from the destination plugin's 'default_bundle'.
    $definition = [
      'id' => 'terms',
      'migration_tags' => ['Bundle test'],
      'source' => [
        'plugin' => 'embedded_data',
        'data_rows' => $term_data_rows,
        'ids' => $ids,
      ],
      'process' => [
        'tid' => 'id',
        'name' => 'name',
      ],
      'destination' => [
        'plugin' => 'entity:taxonomy_term',
        'default_bundle' => 'categories',
      ],
      'migration_dependencies' => [],
    ];

    $term_migration = \Drupal::service('plugin.manager.migration')->createStubMigration($definition);

    // Import and validate the term entity was created with the correct bundle.
    $term_executable = new MigrateExecutable($term_migration, $this);
    $term_executable->import();
    /** @var \Drupal\taxonomy\Entity\Term $term */
    $term = Term::load(1);
    $this->assertEquals($term->bundle(), 'categories');
  }

  /**
   * Tests setting the bundle in the process pipeline.
   */
  public function testProcessBundle() {
    // Each row names its own vocabulary; the process pipeline maps it to 'vid'.
    $term_data_rows = [
      ['id' => 1, 'vocab' => 'categories', 'name' => 'Category 1'],
      ['id' => 2, 'vocab' => 'tags', 'name' => 'Tag 1'],
    ];
    $ids = ['id' => ['type' => 'integer']];
    $definition = [
      'id' => 'terms',
      'migration_tags' => ['Bundle test'],
      'source' => [
        'plugin' => 'embedded_data',
        'data_rows' => $term_data_rows,
        'ids' => $ids,
      ],
      'process' => [
        'tid' => 'id',
        'vid' => 'vocab',
        'name' => 'name',
      ],
      'destination' => [
        'plugin' => 'entity:taxonomy_term',
      ],
      'migration_dependencies' => [],
    ];

    $term_migration = \Drupal::service('plugin.manager.migration')->createStubMigration($definition);

    // Import and validate the term entities were created with the correct bundle.
    $term_executable = new MigrateExecutable($term_migration, $this);
    $term_executable->import();
    /** @var \Drupal\taxonomy\Entity\Term $term */
    $term = Term::load(1);
    $this->assertEquals($term->bundle(), 'categories');
    $term = Term::load(2);
    $this->assertEquals($term->bundle(), 'tags');
  }

  /**
   * Tests setting bundles both in process and destination.
   */
  public function testMixedBundles() {
    // Row 1 names its vocabulary; row 2 relies on the destination default.
    $term_data_rows = [
      ['id' => 1, 'vocab' => 'categories', 'name' => 'Category 1'],
      ['id' => 2, 'name' => 'Tag 1'],
    ];
    $ids = ['id' => ['type' => 'integer']];
    $definition = [
      'id' => 'terms',
      'migration_tags' => ['Bundle test'],
      'source' => [
        'plugin' => 'embedded_data',
        'data_rows' => $term_data_rows,
        'ids' => $ids,
      ],
      'process' => [
        'tid' => 'id',
        'vid' => 'vocab',
        'name' => 'name',
      ],
      'destination' => [
        'plugin' => 'entity:taxonomy_term',
        // When no vocab is provided, the destination bundle is applied.
        'default_bundle' => 'tags',
      ],
      'migration_dependencies' => [],
    ];

    $term_migration = \Drupal::service('plugin.manager.migration')->createStubMigration($definition);

    // Import and validate the term entities were created with the correct bundle.
    $term_executable = new MigrateExecutable($term_migration, $this);
    $term_executable->import();
    /** @var \Drupal\taxonomy\Entity\Term $term */
    $term = Term::load(1);
    $this->assertEquals($term->bundle(), 'categories');
    $term = Term::load(2);
    $this->assertEquals($term->bundle(), 'tags');
  }

}
| {
"content_hash": "4981be484f4aaff0985ff91418053730",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 101,
"avg_line_length": 29.371794871794872,
"alnum_prop": 0.5547795722391968,
"repo_name": "leorawe/sci-base",
"id": "b6c3051330679d3ae20ed4576eb6d7b37d5d11e0",
"size": "4582",
"binary": false,
"copies": "45",
"ref": "refs/heads/master",
"path": "web/core/modules/migrate/tests/src/Kernel/MigrateBundleTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "891567"
},
{
"name": "Gherkin",
"bytes": "3374"
},
{
"name": "HTML",
"bytes": "809692"
},
{
"name": "JavaScript",
"bytes": "1514397"
},
{
"name": "PHP",
"bytes": "36312357"
},
{
"name": "Ruby",
"bytes": "63696"
},
{
"name": "Shell",
"bytes": "59930"
}
],
"symlink_target": ""
} |
def is_preprocessed_formdata(valuelist):
    """Return True iff *valuelist* holds exactly one already-parsed value
    (a ``dict`` or ``list``) rather than raw form-data strings."""
    single_value = len(valuelist) == 1
    return single_value and isinstance(valuelist[0], (dict, list))
| {
"content_hash": "58e4a826b9f77fecab5b8ee614b8d759",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 42,
"avg_line_length": 31.6,
"alnum_prop": 0.6582278481012658,
"repo_name": "ThiefMaster/indico",
"id": "487ac47216c8a71c020282c58ffdaf187dcf5909",
"size": "372",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "indico/web/forms/fields/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "34704"
},
{
"name": "HTML",
"bytes": "1411006"
},
{
"name": "JavaScript",
"bytes": "2083786"
},
{
"name": "Mako",
"bytes": "1527"
},
{
"name": "Python",
"bytes": "5133951"
},
{
"name": "SCSS",
"bytes": "476568"
},
{
"name": "Shell",
"bytes": "3877"
},
{
"name": "TeX",
"bytes": "23327"
},
{
"name": "XSLT",
"bytes": "1504"
}
],
"symlink_target": ""
} |
<?php
namespace SlimBoobooWhoops;
use Exception\BooBoo;
use Whoops\Run;
use Whoops\Handler\PrettyPageHandler;
use Whoops\Handler\JsonResponseHandler;
class Middleware {

    // Slim application instance this middleware is attached to.
    protected $app;
    // NOTE(review): never assigned or read in this class -- candidate for removal.
    protected $lastAction;
    // Optional PSR-3 logger; when present, exceptions are logged via a Whoops handler.
    protected $logger;

    /**
     * @param \Slim\App (presumed) $app the Slim application -- TODO confirm type.
     * @param array|null $defaultPaths map of response format => error template path,
     *        registered globally with BooBoo before any request is handled.
     * @param \Psr\Log\LoggerInterface|null $logger receives exception messages.
     */
    public function __construct($app, array $defaultPaths = null, \Psr\Log\LoggerInterface $logger = null) {
        $this->app = $app;
        $this->logger = $logger;

        if( ! is_null($defaultPaths)) {
            foreach($defaultPaths as $format => $path) {
                BooBoo::defaultErrorPath($format, $path);
            }
        }
    }

    /**
     * Slim middleware entry point: registers Whoops as the global exception
     * handler, overrides the container's 'errorHandler', then delegates to $next.
     */
    public function __invoke(\Psr\Http\Message\ServerRequestInterface $request, \Psr\Http\Message\ResponseInterface $response, $next) {
        $container = $this->app->getContainer();
        $settings = $container['settings']; // NOTE(review): assigned but unused.

        // Enable PrettyPageHandler with editor options
        $prettyPageHandler = new PrettyPageHandler();

        // Enable JsonResponseHandler when request is AJAX
        $jsonResponseHandler = new JsonResponseHandler();
        $jsonResponseHandler->onlyForAjaxRequests(true);

        // Add more information to the PrettyPageHandler
        $prettyPageHandler->addDataTable('Slim Application', [
            'Application Class' => get_class($this->app),
            'Script Name' => $this->app->environment->get('SCRIPT_NAME'),
            'Request URI' => $this->app->environment->get('PATH_INFO') ?: '<none>',
        ]);

        $prettyPageHandler->addDataTable('Slim Application (Request)', array(
            'Accept Charset' => $this->app->request->getHeader('ACCEPT_CHARSET') ?: '<none>',
            'Content Charset' => $this->app->request->getContentCharset() ?: '<none>',
            'Path' => $this->app->request->getUri()->getPath(),
            'Query String' => $this->app->request->getUri()->getQuery() ?: '<none>',
            'HTTP Method' => $this->app->request->getMethod(),
            'Base URL' => (string) $this->app->request->getUri(),
            'Scheme' => $this->app->request->getUri()->getScheme(),
            'Port' => $this->app->request->getUri()->getPort(),
            'Host' => $this->app->request->getUri()->getHost(),
        ));

        // Set Whoops to default exception handler
        $whoops = new \Whoops\Run;
        $whoops->pushHandler($prettyPageHandler);
        $whoops->pushHandler($jsonResponseHandler);

        // Forward exception messages to the injected PSR-3 logger, if any.
        if(!empty($logger = $this->logger)) {
            $whoops->pushHandler(function ($exception, $inspector, $run) use($logger) {
                $logger->error($exception->getMessage());
            });
        }

        $whoops->register();

        // Overwrite the errorHandler
        $container['errorHandler'] = function($c) use ($whoops) {
            return function($request, $response, $exception) use ($whoops) {
                if($exception instanceof BooBoo) {
                    // Store the BooBoo error body response in a buffer
                    ob_start();
                    BooBoo::exceptionHandler($exception);
                    $buffer = ob_get_contents();
                    ob_end_clean();

                    // By creating a new response object, all the headers set by BooBoo get resynced.
                    // NOTE(review): \HTTP\Response is referenced from the global namespace -- confirm it resolves.
                    $response = new \HTTP\Response();
                    return $response->overwrite($buffer);
                }
                else {
                    // Capture Whoops' rendered page and replay it through the PSR-7 response.
                    $handler = \Whoops\Run::EXCEPTION_HANDLER;
                    ob_start();
                    $whoops->$handler($exception);
                    $content = ob_get_clean();
                    // NOTE(review): HttpException is unqualified here -- confirm the intended class is imported.
                    $code = $exception instanceof HttpException ? $exception->getStatusCode() : 500;
                    return $response
                        ->withStatus($code)
                        ->withHeader('Content-type', 'text/html')
                        ->write($content);
                }
            };
        };

        return $next($request, $response);
    }
}
| {
"content_hash": "df410e930a51389678779b05c4444c65",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 132,
"avg_line_length": 34.46153846153846,
"alnum_prop": 0.6051897321428571,
"repo_name": "marcoazn89/slim-booboo-whoops",
"id": "701f61829f2d263aec971635af62050dde23a4f0",
"size": "3584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Middleware.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "4493"
}
],
"symlink_target": ""
} |
package soa;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import javax.xml.namespace.QName;
import javax.xml.ws.Service;
/**
* The class KnowledgeBaseSearcherClient is using the Knowledge Base in order to access the service
*/
public class KnowledgeBaseSearcherClient {

    // Proxy to the remote Searchable web-service port.
    private Searchable searcher;

    public KnowledgeBaseSearcherClient(){
        this("localhost", 9999, "/soa/searcher");
    }

    public KnowledgeBaseSearcherClient(String host){
        this(host, 9999,"/soa/searcher");
    }

    /**
     * Constructor
     * host, port and path will be added to http://localhost:9999/soa/searcher?wsdl
     * @param host - of service
     * @param port - of service
     * @param path - of service
     */
    public KnowledgeBaseSearcherClient(String host, int port,String path){
        // URL to the wsdl File
        URL url = null;
        try {
            url = new URL("http://"+host+":"+port+""+path+"?wsdl");
        } catch (MalformedURLException e) {
            // NOTE(review): swallowed; Service.create(null, ...) will fail later
            // with a less helpful error if the URL is malformed.
        }
        //1st argument service URI, refer to wsdl document above
        //2nd argument is service name, refer to wsdl document above
        QName qname = new QName("http://soa/", "KnowledgeBaseSearcherService");
        // Get Service
        Service service = Service.create(url, qname);
        // Get Searchable Object
        searcher = service.getPort(Searchable.class);
        this.search();
    }

    /**
     * Interactive loop: reads search questions from stdin and prints the
     * service's answers until the user types "stop"/"end" or input ends.
     */
    public void search(){
        BufferedReader buffer = new BufferedReader(new InputStreamReader(System.in));
        System.out.println("To end the Client, write stop or end...\n");
        System.out.println("Please put in the search question... \n ");
        while (true) {
            String line;
            try {
                line = buffer.readLine();
            } catch (IOException e) {
                break;
            }
            // Fix: the original condition (!stop || !end) was a tautology, so the
            // loop could never terminate; also guard against EOF (readLine() == null)
            // which previously caused a NullPointerException.
            if (line == null || line.equalsIgnoreCase("stop") || line.equalsIgnoreCase("end")) {
                break;
            }
            // Only forward non-empty questions to the service.
            if (!line.equalsIgnoreCase("")) {
                System.out.println("\n" + searcher.search(line));
                System.out.println("Please put in a new search question... \n ");
            }
        }
        System.out.println("Bye....");
    }
}
| {
"content_hash": "655dd0c3aeac2ae4cd27f966aacaaeb2",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 99,
"avg_line_length": 31.585365853658537,
"alnum_prop": 0.6108108108108108,
"repo_name": "hsiegel-tgm/SOAWebS",
"id": "0263d2394b1a94eeef2bf1589e74280df2386b38",
"size": "2590",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ABGEBEN/src/soa/KnowledgeBaseSearcherClient.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "100252"
},
{
"name": "JavaScript",
"bytes": "2389"
},
{
"name": "PHP",
"bytes": "5800"
},
{
"name": "TeX",
"bytes": "30922"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- NetBeans project descriptor: marks this directory as a PHP project
     (org.netbeans.modules.php.project) named "php-crypt-helper". -->
<project xmlns="http://www.netbeans.org/ns/project/1">
    <type>org.netbeans.modules.php.project</type>
    <configuration>
        <data xmlns="http://www.netbeans.org/ns/php-project/1">
            <name>php-crypt-helper</name>
        </data>
    </configuration>
</project>
| {
"content_hash": "43de659f415fbb8ab3520d313736b575",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 63,
"avg_line_length": 35.333333333333336,
"alnum_prop": 0.6257861635220126,
"repo_name": "pigochu/php-crypt-helper",
"id": "9fc3c384fa131460b329ef5e3a1a5a8505261e1d",
"size": "318",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nbproject/project.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "19933"
}
],
"symlink_target": ""
} |
#pragma once
#include <aws/sts/STS_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
namespace Aws
{
namespace STS
{
/**
 * Holds the generated endpoint-resolution rule set for the AWS STS service.
 * Both members are static; instances are never required.
 */
class STSEndpointRules
{
public:
    // Returns the rule set as a single string
    // (presumably a textual rendering of Rules - implementation not visible here).
    static Aws::String GetRulesAsString();
    // Raw rule data bytes; definition lives in the generated .cpp file.
    static const Aws::Vector<char> Rules;
};
} // namespace STS
} // namespace Aws
| {
"content_hash": "a1d1e98e711d5a4d232e9678a4960606",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 48,
"avg_line_length": 17.894736842105264,
"alnum_prop": 0.7205882352941176,
"repo_name": "aws/aws-sdk-cpp",
"id": "61e8b73e6c912a91d18068fce3c4c66f6edef17e",
"size": "459",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "aws-cpp-sdk-sts/include/aws/sts/STSEndpointRules.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "309797"
},
{
"name": "C++",
"bytes": "476866144"
},
{
"name": "CMake",
"bytes": "1245180"
},
{
"name": "Dockerfile",
"bytes": "11688"
},
{
"name": "HTML",
"bytes": "8056"
},
{
"name": "Java",
"bytes": "413602"
},
{
"name": "Python",
"bytes": "79245"
},
{
"name": "Shell",
"bytes": "9246"
}
],
"symlink_target": ""
} |
package org.owasp.dependencycheck;
import org.junit.Before;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestName;
import org.owasp.dependencycheck.analyzer.AnalysisPhase;
import org.owasp.dependencycheck.dependency.Dependency;
import org.owasp.dependencycheck.utils.Settings;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Assume;
import org.owasp.dependencycheck.dependency.EvidenceType;
import org.owasp.dependencycheck.utils.FileUtils;
/**
 * Integration tests for the {@code Engine} operating modes: verifies which
 * analysis phases are active, whether the local database is created, and that
 * evidence collected in one mode can be processed in another.
 *
 * @author Mark Rekveld
 */
public class EngineModeIT extends BaseTest {

    // Fresh scratch folder per test; used as the engine's data directory.
    @Rule
    public TemporaryFolder tempDir = new TemporaryFolder();

    @Rule
    public TestName testName = new TestName();

    // Original DATA_DIRECTORY system property value, restored in tearDown().
    private String originalDataDir = null;

    /**
     * Points the engine's data directory at a throw-away temporary folder.
     */
    @Before
    @Override
    public void setUp() throws Exception {
        super.setUp();
        // Have to use System properties as the Settings object pulls from the
        // system properties before configured properties
        originalDataDir = getSettings().getString(Settings.KEYS.DATA_DIRECTORY);
        System.setProperty(Settings.KEYS.DATA_DIRECTORY, tempDir.newFolder().getAbsolutePath());
    }

    /**
     * Deletes the temporary data directory and restores the system properties
     * changed in {@code setUp()}.
     */
    @After
    @Override
    public void tearDown() throws Exception {
        try {
            //delete temp files
            FileUtils.delete(getSettings().getDataDirectory());
            //Reset system property to original value just to be safe for other tests.
            System.setProperty(Settings.KEYS.DATA_DIRECTORY, originalDataDir);
            System.clearProperty(Settings.KEYS.H2_DATA_DIRECTORY);
        } catch (IOException ex) {
            throw new RuntimeException(ex);
        } finally {
            super.tearDown();
        }
    }

    /**
     * Runs evidence collection and evidence processing as two separate engine
     * instances: collection must gather evidence (no database required, no
     * vulnerabilities yet), processing must open the database and attach
     * vulnerabilities to the dependency produced by the first stage.
     */
    @Test
    public void testEvidenceCollectionAndEvidenceProcessingModes() throws Exception {
        Dependency[] dependencies;
        try (Engine engine = new Engine(Engine.Mode.EVIDENCE_COLLECTION, getSettings())) {
            engine.openDatabase(); //does nothing in the current mode
            assertDatabase(false);
            // Collection-phase analyzers are present, processing-phase ones are not.
            for (AnalysisPhase phase : Engine.Mode.EVIDENCE_COLLECTION.getPhases()) {
                assertThat(engine.getAnalyzers(phase), is(notNullValue()));
            }
            for (AnalysisPhase phase : Engine.Mode.EVIDENCE_PROCESSING.getPhases()) {
                assertThat(engine.getAnalyzers(phase), is(nullValue()));
            }
            File file = BaseTest.getResourceAsFile(this, "struts2-core-2.1.2.jar");
            engine.scan(file);
            engine.analyzeDependencies();
            dependencies = engine.getDependencies();
            assertThat(dependencies.length, is(1));
            Dependency dependency = dependencies[0];
            assertTrue(dependency.getEvidence(EvidenceType.VENDOR).toString().toLowerCase().contains("apache"));
            assertTrue(dependency.getVendorWeightings().contains("apache"));
            // No vulnerability analysis has run yet in this mode.
            assertTrue(dependency.getVulnerabilities().isEmpty());
        }
        try (Engine engine = new Engine(Engine.Mode.EVIDENCE_PROCESSING, getSettings())) {
            engine.openDatabase();
            assertDatabase(true);
            // Now the phase availability is reversed.
            for (AnalysisPhase phase : Engine.Mode.EVIDENCE_PROCESSING.getPhases()) {
                assertThat(engine.getAnalyzers(phase), is(notNullValue()));
            }
            for (AnalysisPhase phase : Engine.Mode.EVIDENCE_COLLECTION.getPhases()) {
                assertThat(engine.getAnalyzers(phase), is(nullValue()));
            }
            // Feed the dependency from the collection stage into processing.
            engine.addDependency(dependencies[0]);
            engine.analyzeDependencies();
            Dependency dependency = dependencies[0];
            assertFalse(dependency.getVulnerabilities().isEmpty());
        }
    }

    /**
     * Runs the default STANDALONE mode end to end: all phases active, database
     * available, and the scanned jar ends up with vulnerabilities attached.
     */
    @Test
    public void testStandaloneMode() throws Exception {
        try (Engine engine = new Engine(Engine.Mode.STANDALONE, getSettings())) {
            engine.openDatabase();
            assertDatabase(true);
            for (AnalysisPhase phase : Engine.Mode.STANDALONE.getPhases()) {
                assertThat(engine.getAnalyzers(phase), is(notNullValue()));
            }
            File file = BaseTest.getResourceAsFile(this, "struts2-core-2.1.2.jar");
            engine.scan(file);
            engine.analyzeDependencies();
            Dependency[] dependencies = engine.getDependencies();
            assertThat(dependencies.length, is(1));
            Dependency dependency = dependencies[0];
            assertTrue(dependency.getEvidence(EvidenceType.VENDOR).toString().toLowerCase().contains("apache"));
            assertTrue(dependency.getVendorWeightings().contains("apache"));
            assertFalse(dependency.getVulnerabilities().isEmpty());
        }
    }

    /**
     * Asserts the presence (or absence) of the database file inside the data
     * directory; the check is skipped (JUnit Assume) when the configured
     * driver is not H2.
     *
     * @param exists whether the database file is expected to exist
     * @throws Exception on error loading settings or resolving the directory
     */
    private void assertDatabase(boolean exists) throws Exception {
        Assume.assumeThat(getSettings().getString(Settings.KEYS.DB_DRIVER_NAME), is("org.h2.Driver"));
        Path directory = getSettings().getDataDirectory().toPath();
        assertThat(Files.exists(directory), is(true));
        assertThat(Files.isDirectory(directory), is(true));
        Path database = directory.resolve(getSettings().getString(Settings.KEYS.DB_FILE_NAME));
        assertThat(Files.exists(database), is(exists));
    }
}
| {
"content_hash": "1ed8e12b4015675a33a785dfcafcbfc0",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 112,
"avg_line_length": 42.21052631578947,
"alnum_prop": 0.6683291770573566,
"repo_name": "awhitford/DependencyCheck",
"id": "a7122b22fc07afe990e0eff0e44a035f87de9a3f",
"size": "5614",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/src/test/java/org/owasp/dependencycheck/EngineModeIT.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "287"
},
{
"name": "C",
"bytes": "3939"
},
{
"name": "C++",
"bytes": "2713"
},
{
"name": "CMake",
"bytes": "427624"
},
{
"name": "Cuda",
"bytes": "384"
},
{
"name": "Dockerfile",
"bytes": "1223"
},
{
"name": "Groovy",
"bytes": "16657"
},
{
"name": "Java",
"bytes": "2199600"
},
{
"name": "JavaScript",
"bytes": "1149733"
},
{
"name": "M4",
"bytes": "66845"
},
{
"name": "PLSQL",
"bytes": "2981"
},
{
"name": "Python",
"bytes": "7490"
},
{
"name": "Ruby",
"bytes": "25706"
},
{
"name": "SQLPL",
"bytes": "2470"
},
{
"name": "Shell",
"bytes": "7274"
},
{
"name": "Swift",
"bytes": "1221"
}
],
"symlink_target": ""
} |
title: "Visualising multidimensional data: Cooperation & conflict patterns over space and time in R"
date: 2017-11-04
lastmod: 2022-07-22
draft: false
tags: ["R", "ggplot2", "gganimate"]
categories: ["data visualisation"]
featuredImage: "images/antique-world-map.jpg"
---
<br/>
For this post, I've managed to find some extremely interesting historical event data offered by the [Cline Center](http://www.clinecenter.illinois.edu/) on [this page](http://www.clinecenter.illinois.edu/data/event/phoenix/ ). As you will see, this dataset can be quite challenging because of the sheer number of dimensions you could look at. With so many options, it becomes tricky to create visualisations with the 'right' level of granularity: not so high-level that any interesting patterns are obscured, but not too detailed and overcrowded either.
To quote the Cline Center's own description of this data, it:
> [...] covers the period 1945-2015 and includes several million events extracted from 14 million news stories. This data was produced using [...] content from the **New York Times** (1945-2005), the **BBC**'s Monitoring Summary of World Broadcasts (1979-2015) and the **CIA**’s Foreign Broadcast Information Service (1995-2004). It documents the agents, locations, and issues at stake in a wide variety of conflict, cooperation and communicative events [...].
Of the three sources, below we'll explore the BBC dataset ("BBC Summary of World Broadcasts "), since it spans a fairly large period (1979 - 2015), and is also the largest among the three datasets offered. It can be downloaded [here](https://uofi.box.com/s/zp4mppzcpdvgs82rzwpme13xt6z4hq6j), and also comes with some metadata (in .csv format) presented [here](https://uofi.box.com/s/1ftwk1rt743ynl31voz37bmv23y6nrva). Finally, you can also check out the variable codebook [here](https://uofi.box.com/s/bmh9i39m6bf0vhnuebtf3ak3j6uxy2le).
<br/>
## Overview of the data
Before we dive in, it's important to mention a few things about the structure of this BBC dataset. Here is a window onto how the original data looked, before I implemented any edits of my own:
<img src="images/FullBBCEventDataView.png" alt="Full BBC Event Data View" width="100%">
One thing you may notice, for instance, is that some characteristics of this dataset go against the set of data guidelines I've [previously recommended](http://datapowered.io/post/2017-10-18-post-data-guidelines/): e.g., the `source` and `target` variables do not contain atomic values, but rather string together multiple values, as well as some punctuation characters. This and various other issues had to be addressed before actually starting to look at the data. If you're interested, you can see my `R` code for this [at the bottom of this page](#DataCleaningAndManipulations).
<br/>
## Finding a suitable level of granularity
First things first: picking outcome variable(s). Of all those present, the `goldstein` variable appears most appropriate: it is continuous and measures [conflict and cooperation](http://web.pdx.edu/~kinsella/jgscale.html) in world events. We've also got a secondary measure for 'event importance': `total_sources` (or the number of media sources which picked up a given event).
Next, let's decide what these variables could vary across. Because various options are available, it's difficult to choose what to show in graphs - or equally, how to show as much as you can, but in the cleanest, simplest way. One idea would be to average the Goldstein scale by year, within countries, and within country groups (labeled somewhat confusingly here as `continent`). Alternatively, we could pool all the data, and only look at country-wide averages. With 227 country / location names present in the data (!), measured across several decades each, it's perhaps not immediately obvious what the best way forward is...
So we'll use a trick to get the best of both worlds: we'll create _yearly_ averages for each country, plus _country-wide_ averages for both our outcomes (`goldstein` and `total_sources`). We'll actually be needing both levels of granularity within the same plot: a (pseudo-)caterpillar plot coming up soon.
{{< highlight r "linenos=table, linenostart=1" >}}
# Averaging Goldstein ratings over country and by year:
goldstein_country_in_continent_by_year <-
data.table( aggregate( cbind( goldstein, total_sources ) ~
year + countryname + continent,
FUN = mean,
data = full_BBC_events_with_meta ) )
goldstein_country_in_continent <-
data.table( aggregate( cbind( goldstein, total_sources ) ~
countryname + continent,
FUN = mean,
data = full_BBC_events_with_meta ) )
# Now sorting the levels of the countryname variable within "continents" (zones):
goldstein_country_in_continent <- goldstein_country_in_continent[ order( continent, goldstein ), ]
correct_order <- unique( goldstein_country_in_continent$countryname )
setnames( goldstein_country_in_continent,
c( "goldstein", "total_sources" ),
c( "country_goldstein", "country_total_sources" ) )
two_level_aggregates <- join( goldstein_country_in_continent_by_year,
goldstein_country_in_continent )
two_level_aggregates[ , countryname := ordered( countryname, levels = correct_order ) ]
{{< / highlight >}}
<br/>
## Plotting the full data
So I've gotten a bit creative here in trying to include all the countries available, within all the geographical zones. Because this graph would become completely illegible if I had included yearly datapoints too, I've instead resorted to showing the value _ranges_ for each country (connecting the min and max rating achieved by each country between 1977-2015).
{{< highlight r "linenos=table, linenostart=1" >}}
# Here we split the graph by area ("continent") - and show each one separately within its own panel:
ggplot( two_level_aggregates[ year > 1977, ],
aes( x = goldstein, y = countryname,
group = countryname, color = continent ) ) +
geom_vline( xintercept = 0, color = "black", lty = "dashed", size = 0.3 ) +
geom_vline( xintercept = -5, color = "black", lty = "dashed", size = 0.3 ) +
geom_vline( xintercept = 5, color = "black", lty = "dashed", size = 0.3 ) +
geom_line( stat = "identity" ) +
geom_point( aes( x = country_goldstein, y = countryname, color = continent ) ) +
facet_wrap( ~ continent, ncol = 4, scales = "free" ) +
xlim( -10, 10 ) +
theme( axis.text.x = element_text( angle = 90, hjust = 1, size = 12 ),
text = element_text( size = 22 ) ) +
guides( color = FALSE ) +
labs( color = "Area", x = "Average Goldstein rating", y = "Country" )
{{< / highlight >}}
And the caterpillar plot below is the result - with countries ordered within each panel by the grand mean (country-level average) on the Goldstein scale, across the whole period assessed, and with lines representing the range of values.
<img src="images/CaterpillarPlotByContinent.png" alt="Caterpillar Plot By Continent" width="100%">
<br/>
## Plotting slices of data
After looking at the big picture above (quite literally, too...), we might be interested to explore one specific area in more detail. This would also help with reducing the amount of information conveyed in a single plot. Say, for instance, we want to look at the types of Goldstein ratings for Central Europe only. We could slice the data accordingly, and visualize whereabouts on this scale the values cluster more for each country. So let's create a violin plot:
{{< highlight r "linenos=table, linenostart=1" >}}
violin <- ggplot( two_level_aggregates[ year > 1977 & continent == "Central Europe", ],
aes( y = goldstein, x = countryname,
group = countryname, fill = countryname ) ) +
geom_hline( yintercept = 0, color = "black", lty = "dashed", size = 0.3 ) +
geom_hline( yintercept = -5, color = "black", lty = "dashed", size = 0.3 ) +
geom_hline( yintercept = 5, color = "black", lty = "dashed", size = 0.3 ) +
geom_violin( position = "dodge",
draw_quantiles = c( 0.25, 0.50, 0.75),
trim = TRUE ) +
geom_text( aes( x = countryname, y = 8.5, label = countryname ), angle = 90, size = 6 ) +
ylim( -10, 10 ) +
guides( fill = FALSE ) +
theme( axis.text.y = element_text( size = 15 ),
axis.title.y = element_text( size = 25 ),
axis.title.x = element_blank(),
axis.text.x = element_blank(),
axis.ticks.x = element_blank() ) +
labs( y = "Average Goldstein rating" )
png( "ViolinCentralEurope.png", width = 800, height = 800 )
print( violin )
dev.off()
{{< / highlight >}}
This is the result. It's interesting to see that of the bunch, and according to this data, Turkey spends the most time on the fringes between cooperation and conflict, whereas e.g., Romania situates itself slightly above the midpoint of the scale the whole time (each violin shows a country's data between 1977-2015).
<img src="images/ViolinCentralEurope.png" alt="Violin Central Europe" width="100%">
<br/>
## Using animations to map extra dimensions
Ok, so if you know the usual ways to simplify plots (e.g., those discussed by Stephen Few in [Solutions to the Problem of Over-Plotting in Graphs, 2008](http://mail.perceptualedge.com/articles/visual_business_intelligence/over-plotting_in_graphs.pdf)), here is another trick: using animations. In `R`, you can do this with package `gganimate`, and get something like this:
<img src="images/Caribbean.gif" alt="Caribbean trends" width="100%">
Not only does this solve the problem of including the yearly data in a way that's easier to take in, but we've also managed to map the 'event importance' measure onto the size of points! And all this without things looking ridiculously overcrowded.
Furthermore, the plot also reveals something interesting about this dataset: the way the Cline Center quantified text data from newspapers does present at least some degree of validity. As an example, you will see a data point for Haiti flash towards the negative end of the Goldstein scale in 1985. A quick [search on Google](https://nvdatabase.swarthmore.edu/content/haitians-overthrow-regime-1984-1986) reveals that this is related to a period of violent protests, and ultimately to president Duvalier being overthrown in Haiti:
> In July of 1985, a referendum increased Duvalier’s power, angering much of the populace. In November 1985, opposition held protests in cities around the country. Law enforcement killed and arrested many protesters across the country.
But, returning to our topic here, you can create the .gif above with:
{{< highlight r "linenos=table, linenostart=1" >}}
p <- ggplot( goldstein_country_in_continent_by_year[ continent == "Caribbean", ],
aes( x = goldstein, y = countryname,
group = countryname,
color = countryname,
size = total_sources,
frame = year,
cumulative = FALSE ) ) +
geom_vline( xintercept = 0, color = "black", lty = "dashed", size = 0.3 ) +
geom_vline( xintercept = -5, color = "black", lty = "dashed", size = 0.3 ) +
geom_vline( xintercept = 5, color = "black", lty = "dashed", size = 0.3 ) +
geom_point( ) + guides( color = FALSE ) +
xlim( -10, 10 ) +
theme( text = element_text( size = 16 ) ) +
labs( size = "Event\n importance", x = "Average Goldstein rating", y = "Country" ) +
facet_wrap( ~ continent, ncol = 2, scales = "free" )
gganimate( p, filename = "~/Desktop/Caribbean.gif", ani.width = 1400, ani.height = 500 )
{{< / highlight >}}
You can also extend the plot to contain multiple panels, but that gets really hard to follow, really fast - so it's probably something to avoid if possible... But if you want to try, here is how:
{{< highlight r "linenos=table, linenostart=1" >}}
# We're also using a trick to substitute the dataset from before with a new one
# but all of this without repeating any of the ggplot2 syntax above:
p2 <- p %+% goldstein_country_in_continent_by_year[ continent == "Western Africa" | continent == "Eastern Africa", ]
gganimate( p2, filename = "~/Desktop/EasternVsWesternAfrica.gif", ani.width = 1200, ani.height = 500 )
{{< / highlight >}}
And you will get:
<img src="images/EasternVsWesternAfrica.gif" alt="Eastern vs Western Africa trends" width="100%">
Ok, these are my current thoughts on how to tackle data with nested (years within countries) and crossed (countries between zones) variables. If you've been following up to this point, any comments and suggestions are welcome. By the way, all the work described here is [on Github](https://github.com/TheDataLabScotland/Data_Team_Blog_Posts/tree/master/HistoricalEventsData) too.
<br/>
## Going further: Data cleaning & manipulations
See below for all the preliminary work that had to be carried out before actually plotting the data. Some of the things I've done include replacing unusual values with a missing code, or actually excluding missing data, as well as stripping unnecessary punctuation characters.
I also brought in external data from the `rworldmaps` package, and replaced country `ISO3` codes with the full country names, besides adding in a grouping variable for countries (i.e., the Stern report country classification, which groups countries into 24 categories). I've done the same for the so-called CAMEO `code` variable, i.e., I got the CAMEO labels from the Cline codebook, and substituted them for the numeric codes in the data, to get more legible output.
I also split up any concatenated values I saw in order to get atomic measures. For instance, I split the `source` and `target` variables across multiple new columns. And I've done the same for the `eid` variable (event ID), which originally concatenates both the source and the event identifier (the latter being a numeric code).
In addition, I created measures for how many sources or targets an event is associated with, and used this as a proxy for **'event importance'**: presumably, the bigger the event, the more sources it's picked up by, and the more targets it involves.
Finally, I also merged the actual BBC event data with the its metadata (article publication dates and so on). Here is the code for all this:
***
{{< highlight r "linenos=table, linenostart=1" >}}
# Here are the packages we will need:
library( data.table )
library( ggplot2 )
library( stringi )
library( stringr )
library( plyr )
library( rworldmap )
library( devtools )
# install_github( "dgrtwo/gganimate")
library( gganimate )
setwd( "your/path/goes/here" )
# Get data ----------------------------------------------------------------
BBC_events <- fread( "BBC_Summary_of_World_Broadcasts_1979_2015_.csv", na.strings = "" )
BBC_meta <- fread( "BBC_Summary_of_World_Broadcasts_1979_2015_MetaData.csv", na.strings = "" )
# Understanding and tidying the data --------------------------------------
# In keeping with a common data management recommendation, values within columns should represent atomic values
# i.e., each cell should contain just one value, instead of two or more. So we shall try to fix this for events so that event *sources* and *ids* are kept separate:
BBC_events[ , EIDSource := gsub( "[[:digit:]]","", BBC_events$eid ) ]
BBC_events[ , EIDEvent := gsub( "[[:alpha:]]","", BBC_events$eid ) ]
# Exclude events that have no date, or no lat and long, just in case we want to create a map of these data points ourselves later:
BBC_events <- BBC_events[ ! is.na( story_date ), ]
BBC_events[ , story_date := as.Date( story_date, format = "%m/%d/%Y" ) ]
BBC_events <- BBC_events[ ! is.na( lat ) & ! is.na( lon ), ]
# These source and target codes don't mean much in themselves.
# They seem to just be ISO3 country codes.
# So I'm going to match them up to the full country names, extracted from the rworldmap package.
data( countryRegions, envir = environment(), package = "rworldmap" )
# Can also get continents / Stern report area classifications, from the very same rworldmap package:
BBC_events[ , continent := mapvalues( BBC_events$countryname,
from = countryRegions$ISO3,
to = countryRegions$GEO3 ) ]
# Get proper country names from ISO3 codes:
BBC_events[ , countryname := mapvalues( BBC_events$countryname,
from = countryRegions$ISO3,
to = countryRegions$ADMIN ) ]
# There are a few codes in this BBC dataset that are unrecognized, and which should be replaced with missing values:
unrecognized_iso <- unique( BBC_events$countryname )[ nchar( unique( BBC_events$countryname ) ) < 4 ]
BBC_events[ , countryname := ifelse( countryname %in% unrecognized_iso, NA, countryname ) ]
BBC_events[ , continent := ifelse( continent %in% unrecognized_iso, NA, continent ) ]
# Cleaning source / target variables:
# Remove punctuation from these character vars:
BBC_events[ , target := str_trim( str_extract( target, "[[:alpha:]]+" ), side = "both" ) ]
BBC_events[ , source := str_trim( str_extract( source, "[[:alpha:]]+" ), side = "both" ) ]
table( nchar( BBC_events$target ) )
table( nchar( BBC_events$source ) )
# Gotta split strings into groups of 3 ...
targets_list <- stri_extract_all_regex( BBC_events$target, '.{1,3}' )
targets_data_matrix <- plyr::ldply( targets_list, rbind )
setnames( targets_data_matrix, paste( "target", 1:9, sep = "_" ) )
sources_list <- stri_extract_all_regex( BBC_events$source, '.{1,3}' )
sources_data_matrix <- plyr::ldply( sources_list, rbind )
setnames( sources_data_matrix, paste( "source", 1:9, sep = "_" ) )
# Get a measure of how many sources and or targets an entry has.
# Presumably, the bigger the event, the more sources it's picked up by, and the more targets it involves.
BBC_events[ , total_sources := unlist( lapply( sources_list, length ) ) ]
BBC_events[ , total_targets := unlist( lapply( targets_list, length ) ) ]
# Replace each code by its actual meaning to help with deciphering dataset:
source_target_dict <- fread( "SourceOrTargetCodes_ClineBBCData.csv" )
targets_data_matrix <- data.table( mapvalues( as.matrix( targets_data_matrix ),
from = source_target_dict$Code,
to = source_target_dict$`Source/Target` ) )
sources_data_matrix <- data.table( mapvalues( as.matrix( sources_data_matrix ),
from = source_target_dict$Code,
to = source_target_dict$`Source/Target` ) )
full_BBC_events <- data.table( BBC_events, sources_data_matrix, targets_data_matrix )
# Joining meta data with main data:
setnames( BBC_meta, "pubdate", "story_date" )
BBC_meta[ , story_date := as.Date( story_date, format = "%m/%d/%Y" ) ]
full_BBC_events_with_meta <- join( full_BBC_events, BBC_meta, by = c( "aid", "story_date" ) )
# Remove unnecessary columns for simplicity:
full_BBC_events_with_meta$original_source <- NULL
full_BBC_events_with_meta$process <- NULL
# Trying to understand the structure of this dataset:
table( table( full_BBC_events_with_meta$eid ) ) # Seems like events are unique
table( table( full_BBC_events_with_meta$aid ) ) # Seems like articles repeat themselves, confusingly
table( table( full_BBC_events_with_meta$code ) ) # An event's code for: Conflict and Mediation Event Observation (CAMEO code)
table( table( full_BBC_events_with_meta$root_code ) ) # Super-ordinate CAMEO code, with following dictionary:
# Replacing CAMEO codes with their labels, for more meaningful output to be possible later:
CAMEO_root_code <- 1 : 20
CAMEO_label <- c( "Make public statement", "Appeal", "Express intent to cooperate",
"Consult", "Engage in diplomatic cooperation",
"Engage in diplomatic cooperation", "Provide aid", "Yield",
"Investigate", "Demand", "Disapprove", "Reject", "Threaten",
"Protest", "Exhibit force posture", "Reduce relations", "Coerce",
"Assault", "Fight", "Use unconventional mass violence")
full_BBC_events_with_meta[ , root_code := mapvalues( root_code, CAMEO_root_code, CAMEO_label ) ]
{{< / highlight >}}
> This content was first published on [The Data Team @ The Data Lab blog](https://thedatateam.silvrback.com/analysing-historical-event-data-in-r).
| {
"content_hash": "8a084223ee5374cea806fef0f26838d9",
"timestamp": "",
"source": "github",
"line_count": 350,
"max_line_length": 629,
"avg_line_length": 58.92857142857143,
"alnum_prop": 0.6975030303030303,
"repo_name": "DataPowered/DataPowered.io_site",
"id": "a17d6d5785c5935602509b0573951e0cc0cdf400",
"size": "20633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "content/posts/2017-10-19-analysing-historical-event-data/index.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "149662"
},
{
"name": "R",
"bytes": "5580"
}
],
"symlink_target": ""
} |
"""This example gets all custom targeting keys and the values. To create custom
targeting keys and values, run create_custom_targeting_keys_and_values.py."""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
custom_targeting_service = client.GetService(
'CustomTargetingService', version='v201208')
filter_statement = {'query': 'LIMIT 500'}
# Get custom targeting keys by statement.
response = custom_targeting_service.GetCustomTargetingKeysByStatement(
filter_statement)[0]
keys = []
if 'results' in response:
keys = response['results']
# Display results.
if keys:
key_ids = [key['id'] for key in keys]
filter_statement = {'query': ('WHERE customTargetingKeyId IN (%s)'
% ', '.join(key_ids))}
# Get custom targeting values by statement.
response = custom_targeting_service.GetCustomTargetingValuesByStatement(
filter_statement)[0]
values = []
if 'results' in response:
values = response['results']
# Create map of custom targeting key id to custom targeting values.
key_value_map = {}
for key in keys:
for value in values:
if key['id'] == value['customTargetingKeyId']:
if key['id'] not in key_value_map.keys():
key_value_map[key['id']] = []
key_value_map[key['id']].append(value)
break
# Display results.
for key in keys:
print ('Custom targeting key with id \'%s\', name \'%s\', display name '
'\'%s\', and type \'%s\' was found.'
%(key['id'], key['name'], key['displayName'], key['type']))
if key['id'] in key_value_map.keys():
for value in key_value_map[key['id']]:
print ('\tCustom targeting value with id \'%s\', name \'%s\', and '
'display name \'%s\' was found.'
% (value['id'], value['name'], value['displayName']))
else:
print 'No keys were found.'
| {
"content_hash": "41d65748b71e4d7223c8be7db40986ee",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 80,
"avg_line_length": 34.53030303030303,
"alnum_prop": 0.6375603334795963,
"repo_name": "caioserra/apiAdwords",
"id": "dccaed6457e2771b5545ddbc81c6e60b6258adf4",
"size": "2897",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "examples/adspygoogle/dfp/v201208/get_all_custom_targeting_keys_and_values.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "47375"
},
{
"name": "Python",
"bytes": "3481410"
},
{
"name": "Shell",
"bytes": "14782"
}
],
"symlink_target": ""
} |
//
// format.h
// fast_science
//
//
// Created by Manuel on 20.06.15.
//
//
#ifndef file_formats_format_h
#define file_formats_format_h
# include <string>
namespace fs {
/**
 * Base class for all file formats.
 *
 * NOTE: There is currently no real functionality here; the class exists as a
 * common base so that future format types can share a single interface.
 */
class Format {
public:
  /**
   * Default constructor.
   */
  Format();
  /**
   * Copy constructor.
   * @param original The instance to copy from.
   */
  Format(Format const & original);
  /**
   * Copy-assignment operator.
   * @param original The instance to assign from.
   * @return A reference to *this.
   */
  Format& operator= (Format const & original);
  /**
   * Virtual destructor so derived formats are destroyed correctly
   * when deleted through a Format pointer.
   */
  virtual ~Format();
};
}
#endif
| {
"content_hash": "caa5d765d08c4c8c55466a49265d2031",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 80,
"avg_line_length": 15.333333333333334,
"alnum_prop": 0.5448369565217391,
"repo_name": "manuSrep/fast_science",
"id": "a1d40edbf2aee0b82c119751760dd1e14a6bf2fa",
"size": "736",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scr/generic/file_formats/format.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "134238"
},
{
"name": "Makefile",
"bytes": "4499"
}
],
"symlink_target": ""
} |
import unittest
import tempfile
import os
import shutil
from arena.local_arena import LocalIOArena
from arena.hand_log import HandLog
from pokeher.actions import GameAction
class PyArenaTest(unittest.TestCase):
    """Behavioral checks for the local poker arena."""

    def test_arena_methods(self):
        """The arena exposes its full public API."""
        sandbox = LocalIOArena()
        for api_member in (sandbox.run, sandbox.load_bot,
                           sandbox.bot_count, sandbox.play_match):
            self.assertTrue(api_member)

    def test_load_bots(self):
        """A bot file can be loaded and receives a starting stack."""
        with LocalIOArena() as sandbox:
            sandbox.load_bot("pokeher/theaigame_bot.py")
            self.assertEqual(1, sandbox.bot_count())
            stacks = sandbox.bot_stacks()
            self.assertEqual(1000, stacks['bot_0'])
            self.assertEqual(['bot_0'], stacks.keys())

    def test_load_bad_filename(self):
        """Loading a nonexistent bot file is a harmless no-op."""
        sandbox = LocalIOArena()
        sandbox.load_bot("asdlfj23u90dj")
        self.assertTrue(sandbox)
        self.assertEqual(0, sandbox.bot_count())

    def test_pot_splitting(self):
        """An even pot is divided into equal shares."""
        winnings = LocalIOArena().split_pot(pot=16, num_winners=2)
        self.assertEqual(2, len(winnings))
        self.assertEqual([8, 8], list(winnings))

    def test_uneven_pot_splitting(self):
        """An odd pot is split into near-equal shares."""
        winnings = LocalIOArena().split_pot(pot=15, num_winners=2)
        self.assertEqual(2, len(winnings))
        self.assertIn(7, winnings)
        self.assertIn(8, winnings)

    def test_hand_log_writing(self):
        """Hand logs are serialized to JSON under the arena's key."""
        sandbox = LocalIOArena()
        sandbox.key = "fake-uuid-woo-boom"
        scratch_dir = tempfile.mkdtemp()
        sandbox.output_directory = scratch_dir
        sandbox.current_round = 0
        log = HandLog({})
        log.unix_epoch_s = lambda: 10  # pin the timestamp for a stable diff
        log.action("bot_1", GameAction(GameAction.FOLD))
        sandbox.write_hand_log(log)
        log_path = os.path.join(scratch_dir, sandbox.key, "hand_0.json")
        log_handle = open(log_path, 'r')
        contents = log_handle.read()
        self.assertEqual(contents, '{"initial_stacks": {}, "actions": [{"player": "bot_1", "data": 0, "event": "Fold", "ts": 10}]}')
        shutil.rmtree(scratch_dir)
| {
"content_hash": "0099edcae5a3a19ecb5cb843f38863b1",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 133,
"avg_line_length": 33.92537313432836,
"alnum_prop": 0.6159260888693356,
"repo_name": "gnmerritt/poker",
"id": "e9c6308bc1ce08ad03912fb1895c18c098ed2594",
"size": "2273",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_arena.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "668"
},
{
"name": "Python",
"bytes": "174467"
},
{
"name": "Shell",
"bytes": "113"
}
],
"symlink_target": ""
} |
<?php
namespace gossi\trixionary\responder\json\model;
use gossi\trixionary\model\Position;
use gossi\trixionary\model\Skill;
use gossi\trixionary\model\Sport;
use keeko\framework\domain\payload\Found;
use keeko\framework\domain\payload\NotFound;
use keeko\framework\foundation\AbstractPayloadResponder;
use Symfony\Component\HttpFoundation\JsonResponse;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\Routing\Exception\ResourceNotFoundException;
use Tobscure\JsonApi\Document;
use Tobscure\JsonApi\Parameters;
use Tobscure\JsonApi\Resource;
/**
 * JSON responder that serializes a single position read.
 *
 * @author Thomas Gossmann
 */
class PositionReadJsonResponder extends AbstractPayloadResponder {

	/**
	 * Renders a found position as a JSON-API document, honoring the
	 * request's sparse-fieldset and include parameters.
	 *
	 * @param Request $request
	 * @param Found $payload
	 */
	public function found(Request $request, Found $payload) {
		$queryParams = new Parameters($request->query->all());

		$resource = new Resource($payload->getModel(), Position::getSerializer());
		$resource = $resource->with($queryParams->getInclude(['sport', 'skill']));
		$resource = $resource->fields($queryParams->getFields([
			'position' => Position::getSerializer()->getFields(),
			'sport' => Sport::getSerializer()->getFields(),
			'skill' => Skill::getSerializer()->getFields()
		]));

		$document = new Document($resource);
		return new JsonResponse($document->toArray(), 200);
	}

	/**
	 * Translates a missing position into a routing 404.
	 *
	 * @param Request $request
	 * @param NotFound $payload
	 */
	public function notFound(Request $request, NotFound $payload) {
		throw new ResourceNotFoundException($payload->getMessage());
	}

	/**
	 * Maps payload classes to the responder methods above.
	 */
	protected function getPayloadMethods() {
		return [
			'keeko\framework\domain\payload\Found' => 'found',
			'keeko\framework\domain\payload\NotFound' => 'notFound'
		];
	}
}
| {
"content_hash": "be9ca41a43194e4b6c3a958a13840f83",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 71,
"avg_line_length": 30.305084745762713,
"alnum_prop": 0.7354586129753915,
"repo_name": "gossi/trixionary",
"id": "0cded7778e6ed61050ec38a00a749f6c180a5ea6",
"size": "1788",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/responder/json/model/PositionReadJsonResponder.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "3872582"
}
],
"symlink_target": ""
} |
//
// SPPersonalViewController.m
// SportsPage
//
// Created by absolute on 2016/10/18.
// Copyright © 2016年 Absolute. All rights reserved.
//
#import "SPPersonalViewController.h"
//个人信息ViewController
#import "SPPersonalInfoViewController.h"
//系统设置ViewController
#import "SPPersonalSysSettingViewController.h"
//请求数据
#import "SPUserBusinessUnit.h"
#import "SPPersonalInfoModel.h"
//Flag请求
#import "SPAuthBusinessUnit.h"
//HeaderView
#import "SPPersonalMainHeaderView.h"
//表单Cell
#import "SPPersonalMainTableViewCell.h"
//我的运动页ViewController
#import "SPPersonalMainSportsPageViewController.h"
//创建运动页ViewController
#import "SPCreateSportsViewController.h"
//进行中,待结算,待评价,全部记录ViewController
#import "SPPersonalEventStatusViewController.h"
//我的钱包ViewController
#import "SPPersonalAccountViewController.h"
//我的俱乐部ViewController
#import "SPPersonalClubViewController.h"
//实名认证ViewController
#import "SPPersonalInputViewController.h"
//意见反馈ViewController
#import "SPPersonalFeedbackViewController.h"
//关于我们ViewController
#import "SPPersonalAboutUsViewController.h"
//透明指示器View
#import "MBProgressHUD.h"
//分享
#import "SPSportsPageShareView.h"
#import "WXApi.h"
#import <TencentOpenAPI/TencentOAuth.h>
#import <TencentOpenAPI/QQApiInterface.h>
// Private class extension: delegate conformances and mutable view state for
// the personal ("Mine") tab controller.
@interface SPPersonalViewController () <UIGestureRecognizerDelegate,UITableViewDelegate,UITableViewDataSource,personalMainHeaderProtocol,SPSportsPageShareViewProtocol,TencentSessionDelegate> {
    SPPersonalInfoModel *_personalInfoModel;   // Profile/account data from getMineInfoWithUserId:
    BOOL _flag;                                // Global-config switch set by -checkFlag (call is currently commented out)
    SPPersonalMainHeaderView *_mainHeaderView; // Header with avatar, name and event-status shortcuts
    UIImageView *_windowImageViewBG;           // Dimming backdrop shown behind the share sheet
    SPSportsPageShareView *_shareView;         // Slide-up share sheet (WeChat / QQ / Qzone)
}
@property (weak, nonatomic) IBOutlet UITableView *tableView;     // Grouped list below the header
@property (weak, nonatomic) IBOutlet UIButton *sysSettingButton; // Pushes the system-settings screen
@end
@implementation SPPersonalViewController

// Build the static UI once the view hierarchy exists.
- (void)viewDidLoad {
    [super viewDidLoad];
    //[self checkFlag];
    [self setUp];
}

// Refresh the user's info every time the page becomes visible.
- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    [self networkRequset];
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

- (void)dealloc {
    NSLog(@"%s",__func__);
}

// Fetch the global on/off switch and reload the table when it is "on".
// NOTE(review): unused at the moment — the call in viewDidLoad is commented out.
- (void)checkFlag {
    _flag = false;
    [[SPAuthBusinessUnit shareInstance] getGlobalConfSuccessful:^(NSString *successsfulString) {
        if ([successsfulString isEqualToString:@"on"]) {
            [UIView animateWithDuration:0.3 animations:^{
                _flag = true;
                [_tableView reloadData];
            }];
        } else {
            NSLog(@"网络请求失败");
        }
    } failure:^(NSString *errorString) {
        NSLog(@"网络请求失败");
    }];
}

// Request the signed-in user's profile/account info, update the header and
// reload the table; on failure, show a HUD message after a short delay.
// NOTE(review): the method name misspells "Request"; the completion blocks
// capture self implicitly through ivar access — confirm no retain cycle.
- (void)networkRequset {
//    [MBProgressHUD showHUDAddedTo:self.view animated:true];
    NSString *userId = [[NSUserDefaults standardUserDefaults] stringForKey:@"userId"];
    [[SPUserBusinessUnit shareInstance] getMineInfoWithUserId:userId successful:^(NSString *successsfulString, JSONModel *model) {
        if ([successsfulString isEqualToString:@"successful"]) {
            _personalInfoModel = (SPPersonalInfoModel *)model;
            [_mainHeaderView setUpHeaderImageView:_personalInfoModel.user.portrait userName:_personalInfoModel.user.nick];
            NSLog(@"Mine页面刷新数据");
            [_tableView reloadData];
//            [MBProgressHUD hideHUDForView:self.view animated:true];
        } else {
            NSLog(@"%@",successsfulString);
//            [MBProgressHUD hideHUDForView:self.view animated:true];
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
                [SPGlobalConfig showTextOfHUD:successsfulString ToView:self.view];
            });
        }
    } failure:^(NSString *errorString) {
        NSLog(@"%@",errorString);
//        [MBProgressHUD hideHUDForView:self.view animated:true];
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
            [SPGlobalConfig showTextOfHUD:@"网络请求失败" ToView:self.view];
        });
    }];
}

#pragma mark - UIGestureRecognizerDelegate
// Disable the interactive pop gesture when this is the root view controller.
- (BOOL)gestureRecognizerShouldBegin:(UIGestureRecognizer *)gestureRecognizer {
    if (self.navigationController && self.navigationController.viewControllers.count == 1) {
        return false;
    }
    return true;
}

#pragma mark -SetUp
// One-time UI setup: navigation gesture, colors, table view and header.
- (void)setUp {
    [self setUpNav];
    [self setUpUI];
    [self setUpTableView];
}

- (void)setUpNav {
    if ([self.navigationController respondsToSelector:@selector(interactivePopGestureRecognizer)]) {
        self.navigationController.interactivePopGestureRecognizer.delegate = self;
    }
}

- (void)setUpUI {
    self.automaticallyAdjustsScrollViewInsets = false;
    self.view.backgroundColor = [SPGlobalConfig anyColorWithRed:239 green:239 blue:243 alpha:1];
    [_sysSettingButton setTitleColor:[UIColor grayColor] forState:UIControlStateHighlighted];
}

- (void)setUpTableView {
    _tableView.delegate = self;
    _tableView.dataSource = self;
    _tableView.separatorStyle = UITableViewCellSeparatorStyleNone;
    _tableView.showsVerticalScrollIndicator = false;
    _tableView.backgroundColor = [SPGlobalConfig anyColorWithRed:239 green:239 blue:243 alpha:1];
    [self setUpHeader];
}

// Load the XIB-backed header (avatar, name, event shortcuts) and install it
// as the table header view.
- (void)setUpHeader {
    _mainHeaderView = [[[NSBundle mainBundle] loadNibNamed:@"SPPersonalMainHeaderView" owner:nil options:nil] lastObject];
    _mainHeaderView.frame = CGRectMake(0, 0, SCREEN_WIDTH, SCREEN_WIDTH*362/375);
    _mainHeaderView.delegate = self;
    UIView *header = [[UIView alloc] initWithFrame:CGRectMake(0, 0, SCREEN_WIDTH, SCREEN_WIDTH*362/375)];
    [header addSubview:_mainHeaderView];
    _tableView.tableHeaderView = header;
}

#pragma mark - Action
// Push the system-settings screen.
- (IBAction)systemSettingAction:(UIButton *)sender {
    SPPersonalSysSettingViewController *sysSettingViewController = [[SPPersonalSysSettingViewController alloc] init];
    sysSettingViewController.hidesBottomBarWhenPushed = true;
    [self.navigationController pushViewController:sysSettingViewController animated:true];
}

#pragma mark - UITableViewDelegate
- (CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath {
    return SCREEN_WIDTH*2/15;
}

// Sections: 0 wallet, 1 club, 2 verification/feedback/about, 3 share.
- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
    return 4;
}

- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
    if (section == 2) {
        return 3;
    } else {
        return 1;
    }
}

// Thin spacer under every section except the last.
- (CGFloat)tableView:(UITableView *)tableView heightForFooterInSection:(NSInteger)section {
    if (section != 3) {
        return SCREEN_WIDTH*2/125;
    } else {
        return 0;
    }
}

- (UIView *)tableView:(UITableView *)tableView viewForFooterInSection:(NSInteger)section {
    UIView *view = [[UIView alloc] initWithFrame:CGRectMake(0, 0, SCREEN_WIDTH, SCREEN_WIDTH*2/125)];
    view.backgroundColor = [SPGlobalConfig anyColorWithRed:239 green:239 blue:243 alpha:1];
    return view;
}

- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
    SPPersonalMainTableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:@"PersonalMainCell"];
    if (!cell) {
        // Register the XIB lazily on first use, then dequeue again.
        [tableView registerNib:[UINib nibWithNibName:@"SPPersonalMainTableViewCell" bundle:[NSBundle mainBundle]] forCellReuseIdentifier:@"PersonalMainCell"];
        cell = [tableView dequeueReusableCellWithIdentifier:@"PersonalMainCell"];
    }
    cell.selectionStyle = UITableViewCellSelectionStyleNone;
    return cell;
}

// Configure each row's icon/title; the verification row also reflects the
// user's real-name verification state (valid: 1 = none, 2 = pending, 3 = done).
- (void)tableView:(UITableView *)tableView willDisplayCell:(UITableViewCell *)cell forRowAtIndexPath:(NSIndexPath *)indexPath {
    if (indexPath.section == 0) {
        [(SPPersonalMainTableViewCell *)cell setUpWithImageName:@"Mine_main_wallet" title:@"我的钱包"];
        [(SPPersonalMainTableViewCell *)cell setUpWithContent:[NSString stringWithFormat:@"余额%@",_personalInfoModel.account.balance]];
    } else if (indexPath.section == 1) {
        [(SPPersonalMainTableViewCell *)cell setUpWithImageName:@"Mine_main_club" title:@"我的俱乐部"];
    } else if (indexPath.section == 2) {
        if (indexPath.row == 0) {
            [(SPPersonalMainTableViewCell *)cell setUpWithImageName:@"Mine_main_nameConfirm" title:@"实名认证"];
            if ([_personalInfoModel.user.valid isEqualToString:@"1"]) {
                [(SPPersonalMainTableViewCell *)cell setUpWithContent:@"未认证"];
                [(SPPersonalMainTableViewCell *)cell setUpMoreImage:@"Mine_More"];
                //[(SPPersonalMainTableViewCell *)cell setUpMoreImageHidden:false];
                cell.userInteractionEnabled = true;
            } else if ([_personalInfoModel.user.valid isEqualToString:@"2"]) {
                [(SPPersonalMainTableViewCell *)cell setUpWithContent:@"认证中"];
                [(SPPersonalMainTableViewCell *)cell setUpMoreImage:@"Mine_personal_nameConfirm_ing"];
                //[(SPPersonalMainTableViewCell *)cell setUpMoreImageHidden:true];
                cell.userInteractionEnabled = false;
            } else if ([_personalInfoModel.user.valid isEqualToString:@"3"]) {
                [(SPPersonalMainTableViewCell *)cell setUpWithContent:@"已认证"];
                [(SPPersonalMainTableViewCell *)cell setUpMoreImage:@"Mine_personal_nameConfirm_successful"];
                //[(SPPersonalMainTableViewCell *)cell setUpMoreImageHidden:true];
                cell.userInteractionEnabled = false;
            }
        } else if (indexPath.row == 1) {
            [(SPPersonalMainTableViewCell *)cell setUpWithImageName:@"Mine_main_feedback" title:@"意见反馈"];
        } else if (indexPath.row == 2) {
            [(SPPersonalMainTableViewCell *)cell setUpWithImageName:@"Mine_main_aboutus" title:@"关于我们"];
        }
    } else if (indexPath.section == 3) {
        [(SPPersonalMainTableViewCell *)cell setUpWithImageName:@"Mine_main_share" title:@"分享App"];
    }
}

// Route each row tap: push the matching screen, or (section 3) present the
// share sheet over a dimming backdrop.
- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
    [tableView deselectRowAtIndexPath:indexPath animated:true];
    if (indexPath.section == 0) {
        SPPersonalAccountViewController *accountViewController = [[SPPersonalAccountViewController alloc] init];
        if (_personalInfoModel.account.balance.length != 0) {
            accountViewController.balanceStr = _personalInfoModel.account.balance;
        } else {
            accountViewController.balanceStr = @"";
        }
        accountViewController.hidesBottomBarWhenPushed = true;
        [self.navigationController pushViewController:accountViewController animated:true];
    } else if (indexPath.section == 1) {
        SPPersonalClubViewController *personalClubViewController = [[SPPersonalClubViewController alloc] init];
        personalClubViewController.hidesBottomBarWhenPushed = true;
        [self.navigationController pushViewController:personalClubViewController animated:true];
    } else if (indexPath.section == 2) {
        if (indexPath.row == 0) {
            SPPersonalInputViewController *inputViewController = [[SPPersonalInputViewController alloc] init];
            inputViewController.navTitleStr = @"实名认证";
            inputViewController.personalMainCell = (SPPersonalMainTableViewCell *)[tableView cellForRowAtIndexPath:indexPath];
            inputViewController.hidesBottomBarWhenPushed = true;
            [self.navigationController pushViewController:inputViewController animated:true];
        } else if (indexPath.row == 1) {
            SPPersonalFeedbackViewController *feedbackViewController = [[SPPersonalFeedbackViewController alloc] init];
            feedbackViewController.hidesBottomBarWhenPushed = true;
            [self.navigationController pushViewController:feedbackViewController animated:true];
        } else if (indexPath.row == 2) {
            SPPersonalAboutUsViewController *aboutUsViewController = [[SPPersonalAboutUsViewController alloc] init];
            aboutUsViewController.hidesBottomBarWhenPushed = true;
            [self.navigationController pushViewController:aboutUsViewController animated:true];
        }
    } else if (indexPath.section == 3) {
        // Lazily create the dimming backdrop and share sheet, then animate
        // the sheet up while fading out the tab bar.
        if (!_windowImageViewBG) {
            _windowImageViewBG = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT)];
            _windowImageViewBG.image = [UIImage imageNamed:@"Sports_create_windowBG"];
            _windowImageViewBG.alpha = 0;
        }
        [self.view addSubview:_windowImageViewBG];
        if (!_shareView) {
            _shareView = [[[NSBundle mainBundle] loadNibNamed:@"SPSportsPageShareView" owner:nil options:nil] lastObject];
            [_shareView isNotNeedToSportsPageShareType];
            _shareView.frame = CGRectMake(0, SCREEN_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT);
            _shareView.delegate = self;
        }
        [self.view addSubview:_shareView];
        [UIView animateWithDuration:0.3 animations:^{
            _windowImageViewBG.alpha = 1;
            _shareView.shareView.alpha = 1;
            _shareView.frame = CGRectMake(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT);
            self.tabBarController.tabBar.alpha = 0;
        }];
    }
}

#pragma mark - PersonalMainHeaderProtocol
// Header shortcuts dispatch by title string: avatar -> profile; my/create
// sports page; event lists filtered by status (selectIndex 1-3) or unfiltered.
- (void)sendActionWithTitle:(NSString *)title {
    if ([title isEqualToString:@"头像"]) {
        SPPersonalInfoViewController *personalInfoViewController = [[SPPersonalInfoViewController alloc] init];
        personalInfoViewController.hidesBottomBarWhenPushed = true;
        personalInfoViewController.userInfoModel = _personalInfoModel.user;
        [self.navigationController pushViewController:personalInfoViewController animated:true];
    } else if ([title isEqualToString:@"我的运动页"]) {
        SPPersonalMainSportsPageViewController *sportsPageViewController = [[SPPersonalMainSportsPageViewController alloc] init];
        sportsPageViewController.hidesBottomBarWhenPushed = true;
        [self.navigationController pushViewController:sportsPageViewController animated:true];
    } else if ([title isEqualToString:@"创建运动页"]) {
        SPCreateSportsViewController *createSportsViewController = [[SPCreateSportsViewController alloc] init];
        createSportsViewController.hidesBottomBarWhenPushed = true;
        [self.navigationController pushViewController:createSportsViewController animated:true];
    } else if ([title isEqualToString:@"进行中"]) {
        SPPersonalEventStatusViewController *eventViewController = [[SPPersonalEventStatusViewController alloc] init];
        eventViewController.selectIndex = 1;
        eventViewController.hidesBottomBarWhenPushed = true;
        [self.navigationController pushViewController:eventViewController animated:true];
    } else if ([title isEqualToString:@"待结算"]) {
        SPPersonalEventStatusViewController *eventViewController = [[SPPersonalEventStatusViewController alloc] init];
        eventViewController.selectIndex = 2;
        eventViewController.hidesBottomBarWhenPushed = true;
        [self.navigationController pushViewController:eventViewController animated:true];
    } else if ([title isEqualToString:@"待评价"]) {
        SPPersonalEventStatusViewController *eventViewController = [[SPPersonalEventStatusViewController alloc] init];
        eventViewController.selectIndex = 3;
        eventViewController.hidesBottomBarWhenPushed = true;
        [self.navigationController pushViewController:eventViewController animated:true];
    } else if ([title isEqualToString:@"全部记录"]) {
        SPPersonalEventStatusViewController *eventViewController = [[SPPersonalEventStatusViewController alloc] init];
        eventViewController.hidesBottomBarWhenPushed = true;
        [self.navigationController pushViewController:eventViewController animated:true];
    }
}

#pragma mark - SPSportsPageShareViewProtocol
// Slide the share sheet back down, restore the tab bar, then detach the
// backdrop and sheet so they can be re-added next time.
- (void)cancelShareView {
    [UIView animateWithDuration:0.5 animations:^{
        _windowImageViewBG.alpha = 0;
        _shareView.shareView.alpha = 0;
        _shareView.frame = CGRectMake(0, SCREEN_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT);
        self.tabBarController.tabBar.alpha = 1;
    } completion:^(BOOL finished) {
        _windowImageViewBG.alpha = 1;
        [_windowImageViewBG removeFromSuperview];
        _shareView.frame = CGRectMake(0, SCREEN_HEIGHT, SCREEN_WIDTH, SCREEN_HEIGHT);
        [_shareView removeFromSuperview];
    }];
}

- (void)finishedShareToSportsPage {

}

// Share the app's download link to a WeChat friend chat.
- (void)finishedShareToWeChatFriends {
    WXWebpageObject *webpageObject = [WXWebpageObject object];
    webpageObject.webpageUrl = @"http://a.app.qq.com/o/simple.jsp?pkgname=com.sportspage";
    WXMediaMessage *message = [WXMediaMessage message];
    message.title = @"运动页";
    message.description = @"您的运动管理平台";
    UIImage *thumbImage = [self imageWithImage:[UIImage imageNamed:@"icon_appIcon"] scaledToSize:CGSizeMake(100, 100)];
    [message setThumbImage:thumbImage];
    message.mediaObject = webpageObject;
    SendMessageToWXReq *req = [[SendMessageToWXReq alloc] init];
    req.bText = false;
    req.message = message;
    req.scene = WXSceneSession;
    if (![WXApi sendReq:req]) {
        [SPGlobalConfig showTextOfHUD:@"分享失败" ToView:self.view];
    }
}

// Share the app's download link to WeChat Moments (timeline).
- (void)finishedShareToWeChatTimeLine {
    WXWebpageObject *webpageObject = [WXWebpageObject object];
    webpageObject.webpageUrl = @"http://a.app.qq.com/o/simple.jsp?pkgname=com.sportspage";
    WXMediaMessage *message = [WXMediaMessage message];
    message.title = @"运动页";
    message.description = @"您的运动管理平台";
    UIImage *thumbImage = [self imageWithImage:[UIImage imageNamed:@"icon_appIcon"] scaledToSize:CGSizeMake(100, 100)];
    [message setThumbImage:thumbImage];
    message.mediaObject = webpageObject;
    SendMessageToWXReq *req = [[SendMessageToWXReq alloc] init];
    req.bText = false;
    req.message = message;
    req.scene = WXSceneTimeline;
    if (![WXApi sendReq:req]) {
        [SPGlobalConfig showTextOfHUD:@"分享失败" ToView:self.view];
    }
}

// Share the app's download link to a QQ chat.
- (void)finishedShareToQQ {
    [[TencentOAuth alloc] initWithAppId:KEY_TENCENT andDelegate:self];
    NSString *url = @"http://a.app.qq.com/o/simple.jsp?pkgname=com.sportspage";
    QQApiNewsObject *newsObject = [QQApiNewsObject objectWithURL:[NSURL URLWithString:url]
                                                           title:@"运动页"
                                                     description:@"您的运动管理平台"
                                                 previewImageURL:[NSURL URLWithString:@"http://www.sportspage.cn/home/Tpl//Index/images/logo.jpg"]];
    SendMessageToQQReq *req = [SendMessageToQQReq reqWithContent:newsObject];
    QQApiSendResultCode sent = [QQApiInterface sendReq:req];
    if (sent == EQQAPISENDSUCESS) {
        NSLog(@"分享成功");
    } else if (sent == EQQAPIVERSIONNEEDUPDATE) {
        NSLog(@"当前QQ版本太低,需要更新至新版本才可以支持");
    } else {
        NSLog(@"分享失败");
    }
}

// Share the app's download link to Qzone.
- (void)finishedShareToQQZone {
    [[TencentOAuth alloc] initWithAppId:KEY_TENCENT andDelegate:self];
    NSString *url = @"http://a.app.qq.com/o/simple.jsp?pkgname=com.sportspage";
    QQApiNewsObject *newsObject = [QQApiNewsObject objectWithURL:[NSURL URLWithString:url]
                                                           title:@"运动页"
                                                     description:@"您的运动管理平台"
                                                 previewImageURL:[NSURL URLWithString:@"http://www.sportspage.cn/home/Tpl//Index/images/logo.jpg"]];
    SendMessageToQQReq *req = [SendMessageToQQReq reqWithContent:newsObject];
    QQApiSendResultCode sent = [QQApiInterface SendReqToQZone:req];
    if (sent == EQQAPISENDSUCESS) {
        NSLog(@"分享成功");
    } else if (sent == EQQAPIVERSIONNEEDUPDATE) {
        NSLog(@"当前QQ版本太低,需要更新至新版本才可以支持");
    } else {
        NSLog(@"分享失败");
    }
}

// Scale an image to newSize and re-encode it as a compressed (0.3 quality)
// JPEG — used to keep share thumbnails under the SDKs' size limits.
- (UIImage *)imageWithImage:(UIImage*)image scaledToSize:(CGSize)newSize {
    UIGraphicsBeginImageContext(newSize);
    [image drawInRect:CGRectMake(0,0,newSize.width,newSize.height)];
    UIImage* newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    NSData *data = UIImageJPEGRepresentation(newImage, 0.3);
    return [UIImage imageWithData:data];
}

#pragma mark - TencentSessionDelegate
// Required TencentSessionDelegate methods; login callbacks are unused here.
- (void)tencentDidLogin {

}

- (void)tencentDidNotLogin:(BOOL)cancelled {

}

- (void)tencentDidNotNetWork {

}

@end
| {
"content_hash": "f905861767e6a462defc7e0bc3a5cfce",
"timestamp": "",
"source": "github",
"line_count": 486,
"max_line_length": 192,
"avg_line_length": 41.8724279835391,
"alnum_prop": 0.6976904176904177,
"repo_name": "zhujunxxxxx/SportsPage",
"id": "c9b5fd1adaa796019a8e15876ae38240bd05c052",
"size": "20877",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "iso/SportsPage/SportsPage/SPPersonalViewController.m",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2598"
},
{
"name": "HTML",
"bytes": "5571"
},
{
"name": "Java",
"bytes": "1001409"
},
{
"name": "Objective-C",
"bytes": "3916052"
},
{
"name": "Ruby",
"bytes": "43"
},
{
"name": "Shell",
"bytes": "8237"
}
],
"symlink_target": ""
} |
// Test double that records which LSBatchDelegate callbacks fired.
@interface MyDelegate : NSObject <LSBatchDelegate>
@property (nonatomic, readwrite) BOOL didFinish; // set when batchDidFinish: is received
@property (nonatomic, readwrite) BOOL didAbort;  // set when batchDidAbort: is received
@property (nonatomic, readwrite) BOOL hasError;  // set when batch:didFailWithError: is received
@property (nonatomic, copy) NSError *err;        // last error passed to the delegate
@end
@implementation MyDelegate

@synthesize didFinish;
@synthesize didAbort;
@synthesize hasError;
@synthesize err;

// Initializer: start with all callback flags cleared.
// FIX: the previous version discarded the result of [super init]; the
// Objective-C idiom is to assign it back to self and skip setup if it is nil.
- (id) init {
  if ((self = [super init])) {
    self.didFinish = NO;
    self.didAbort = NO;
    self.hasError = NO;
    self.err = nil;
  }
  return self;
}

// LSBatchDelegate: all queued workers completed.
- (void) batchDidFinish: (id <LSBatch>) batch {
  self.didFinish = YES;
}

// LSBatchDelegate: a worker reported an error; remember it for assertions.
- (void) batch: (id <LSBatch>) batch didFailWithError: (id <NSObject>) error {
  self.hasError = YES;
  self.err = (NSError *) error;
}

// LSBatchDelegate: the batch was aborted before finishing.
- (void) batchDidAbort: (id <LSBatch>) batch {
  self.didAbort = YES;
}

@end
// Integration test: queue four workers on a concurrency-2 batch and verify
// they all run and the delegate's finish callback fires.
int
main (void) {
  __block MyDelegate *delegate = (MyDelegate.alloc.init);
  LSBatch *batch = [LSBatch new: 2];
  assert(batch);
  assert(2 == batch.concurrency);

  // set delegate
  [batch delegate: delegate];

  // One flag per worker so the end-callback can confirm each one ran.
  __block BOOL worker1 = NO;
  __block BOOL worker2 = NO;
  __block BOOL worker3 = NO;
  __block BOOL worker4 = NO;

  // Each push should grow the queue by one; workers signal completion via next(nil).
  [batch push: ^(LSBatchNextCallback next) {
    worker1 = YES;
    next(nil);
  }];

  assert(1 == batch.length);

  [batch push: ^(LSBatchNextCallback next) {
    worker2 = YES;
    next(nil);
  }];

  assert(2 == batch.length);

  [batch push: ^(LSBatchNextCallback next) {
    worker3 = YES;
    next(nil);
  }];

  assert(3 == batch.length);

  [batch push: ^(LSBatchNextCallback next) {
    worker4 = YES;
    next(nil);
  }];

  assert(4 == batch.length);

  // Runs when the whole batch drains: all workers ran, no error, delegate
  // notified.  exit(0) ends the run loop started below.
  [batch end: ^(NSError *err) {
    assert(nil == err);
    assert(worker1);
    assert(worker2);
    assert(worker3);
    assert(worker4);
    assert(delegate.didFinish);
    exit(0);
  }];

  // Keep the process alive until the end-callback calls exit(0).
  CFRunLoopRun();
  return 0;
}
| {
"content_hash": "80949ef7d2b560cd7821cbbfaaad599b",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 78,
"avg_line_length": 19.148936170212767,
"alnum_prop": 0.6427777777777778,
"repo_name": "littlstar/LSBatch",
"id": "1295aa736b4eb87cb4c697da12f945ba583c2b70",
"size": "1849",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/delegate.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1383"
},
{
"name": "Objective-C",
"bytes": "13287"
},
{
"name": "Ruby",
"bytes": "752"
}
],
"symlink_target": ""
} |
package com.hazelcast.client.impl.protocol.codec;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.Generated;
import com.hazelcast.client.impl.protocol.codec.builtin.*;
import com.hazelcast.client.impl.protocol.codec.custom.*;
import javax.annotation.Nullable;
import static com.hazelcast.client.impl.protocol.ClientMessage.*;
import static com.hazelcast.client.impl.protocol.codec.builtin.FixedSizeTypesCodec.*;
/*
* This file is auto-generated by the Hazelcast Client Protocol Code Generator.
* To change this file, edit the templates or the protocol
* definitions on the https://github.com/hazelcast/hazelcast-client-protocol
* and regenerate it.
*/
/**
 * Returns the number of available permits.
 */
@Generated("4e65fe9993801c72167d91ca80122fa2")
public final class SemaphoreAvailablePermitsCodec {
    //hex: 0x0C0600
    public static final int REQUEST_MESSAGE_TYPE = 787968;
    //hex: 0x0C0601
    public static final int RESPONSE_MESSAGE_TYPE = 787969;
    // Fixed-size field layout offsets within the initial frame.
    private static final int REQUEST_INITIAL_FRAME_SIZE = PARTITION_ID_FIELD_OFFSET + INT_SIZE_IN_BYTES;
    private static final int RESPONSE_RESPONSE_FIELD_OFFSET = RESPONSE_BACKUP_ACKS_FIELD_OFFSET + BYTE_SIZE_IN_BYTES;
    private static final int RESPONSE_INITIAL_FRAME_SIZE = RESPONSE_RESPONSE_FIELD_OFFSET + INT_SIZE_IN_BYTES;

    // Static codec; never instantiated.
    private SemaphoreAvailablePermitsCodec() {
    }

    @edu.umd.cs.findbugs.annotations.SuppressFBWarnings({"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"})
    public static class RequestParameters {

        /**
         * CP group id of this ISemaphore instance
         */
        public com.hazelcast.cp.internal.RaftGroupId groupId;

        /**
         * Name of this ISemaphore instance
         */
        public java.lang.String name;
    }

    // Encodes the availablePermits request: fixed-size initial frame
    // (message type, partition id -1) followed by group id and name frames.
    // Marked retryable because the operation is read-only.
    public static ClientMessage encodeRequest(com.hazelcast.cp.internal.RaftGroupId groupId, java.lang.String name) {
        ClientMessage clientMessage = ClientMessage.createForEncode();
        clientMessage.setRetryable(true);
        clientMessage.setOperationName("Semaphore.AvailablePermits");
        ClientMessage.Frame initialFrame = new ClientMessage.Frame(new byte[REQUEST_INITIAL_FRAME_SIZE], UNFRAGMENTED_MESSAGE);
        encodeInt(initialFrame.content, TYPE_FIELD_OFFSET, REQUEST_MESSAGE_TYPE);
        encodeInt(initialFrame.content, PARTITION_ID_FIELD_OFFSET, -1);
        clientMessage.add(initialFrame);
        RaftGroupIdCodec.encode(clientMessage, groupId);
        StringCodec.encode(clientMessage, name);
        return clientMessage;
    }

    // Decodes a request message back into its parameters; frames must be
    // consumed in the same order encodeRequest wrote them.
    public static SemaphoreAvailablePermitsCodec.RequestParameters decodeRequest(ClientMessage clientMessage) {
        ClientMessage.ForwardFrameIterator iterator = clientMessage.frameIterator();
        RequestParameters request = new RequestParameters();
        //empty initial frame
        iterator.next();
        request.groupId = RaftGroupIdCodec.decode(iterator);
        request.name = StringCodec.decode(iterator);
        return request;
    }

    // Encodes the response: a single fixed-size frame carrying the permit count.
    public static ClientMessage encodeResponse(int response) {
        ClientMessage clientMessage = ClientMessage.createForEncode();
        ClientMessage.Frame initialFrame = new ClientMessage.Frame(new byte[RESPONSE_INITIAL_FRAME_SIZE], UNFRAGMENTED_MESSAGE);
        encodeInt(initialFrame.content, TYPE_FIELD_OFFSET, RESPONSE_MESSAGE_TYPE);
        encodeInt(initialFrame.content, RESPONSE_RESPONSE_FIELD_OFFSET, response);
        clientMessage.add(initialFrame);
        return clientMessage;
    }

    /**
     * number of available permits
     */
    public static int decodeResponse(ClientMessage clientMessage) {
        ClientMessage.ForwardFrameIterator iterator = clientMessage.frameIterator();
        ClientMessage.Frame initialFrame = iterator.next();
        return decodeInt(initialFrame.content, RESPONSE_RESPONSE_FIELD_OFFSET);
    }
}
| {
"content_hash": "612e81e56fb78a8bbebdcffd80d396cc",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 128,
"avg_line_length": 41.42553191489362,
"alnum_prop": 0.7362609142270159,
"repo_name": "emre-aydin/hazelcast",
"id": "bd36c72f93d18b0fae8fce8ba67da28e603b8457",
"size": "4519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hazelcast/src/main/java/com/hazelcast/client/impl/protocol/codec/SemaphoreAvailablePermitsCodec.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1261"
},
{
"name": "C",
"bytes": "353"
},
{
"name": "Java",
"bytes": "39634758"
},
{
"name": "Shell",
"bytes": "29479"
}
],
"symlink_target": ""
} |
<div class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="container-fluid">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-ex1-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/">
<img src="/images/xapbrand-small.png" style="height:21px;" class="pull-left">
</a>
</div>
<div class="collapse navbar-collapse navbar-ex1-collapse">
<ul class="nav navbar-nav">
<li>
<a href="#" class="dropdown-toggle" data-toggle="dropdown">All Versions<b class="caret"></b></a>
<ul class="dropdown-menu">
<li><a href="/xap121.html">12.1</a></li>
<li><a href="/xap120.html">12.0</a></li>
<li><a href="/xap110.html">11.0</a></li>
<li><a href="/xap102.html">10.2</a></li>
<li><a href="/xap101.html">10.1</a></li>
<li><a href="/xap100.html">10.0</a></li>
<li><a href="/xap97">9.7</a></li>
<li><a href="/archive.html">Archive</a></li>
</ul>
</li>
<li>
<a href="#" class="dropdown-toggle" data-toggle="dropdown">Resources <b class="caret"></b></a>
<ul class="dropdown-menu">
<li><a href="/product_overview">Product Overview</a></li>
<li><a href="/early_access">Early Access</a></li>
<li><a href="/faq">Frequently Asked Questions</a></li>
<li><a href="/api_documentation">API Docs</a></li>
<li><a href="/videos">Videos</a></li>
                            <li><a href="/sbp">Solutions &amp; Patterns</a></li>
<li><a href="http://blog.gigaspaces.com" target="_blank">Blog</a></li>
<li><a href="http://ask.gigaspaces.org" target="_blank">Forum</a></li>
</ul>
</li>
</ul>
<ul class="nav navbar-nav navbar-right">
<li>
<button type="button" onclick="location.href='http://www.gigaspaces.com/xap-download'"
class="btn btn-success navbar-btn download-button">Get Latest GA
</button>
<button type="button" onclick="location.href='https://github.com/xap'"
class="btn btn-primary navbar-btn download-button">
<i class="fa fa-github" aria-hidden="true"></i>
                        Fork me on GitHub
</button>
</li>
<form class="navbar-form navbar-left" role="search" action="#" method="get">
<div class="form-group">
<input id="q" type="text" class="form-control"
style="width:220px;border-top-right-radius: 0px;border-bottom-right-radius: 0px;background-color: white;"
placeholder="Search">
</div>
<button id="go-search" type="submit" class="btn btn-default ladda-button search-button"
style="margin-left: -7px;border-top-left-radius: 0px;border-bottom-left-radius: 0px;"
data-style="zoom-in">
<i id="search-icon" class="fa fa-search"></i>
</button>
</form>
</ul>
</div>
</div>
</div>
</div>
<!--script>
$('[target=_blank]').append(" <i class=\"fa fa-external-link\" style=\"font-size:10px\"></i>");
</script-->
| {
"content_hash": "abe2298585cc44adb1b3ac5f9b656bf2",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 136,
"avg_line_length": 47.04597701149425,
"alnum_prop": 0.4576105546054239,
"repo_name": "croffler/documentation",
"id": "aa4599948c5d338a97b874c57820a99d11235ff5",
"size": "4094",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "themes/xap-theme/layouts/partials/navbar.html",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
/**
* @author dgeorge
*
* $Id: AdminEditUserPopulateAction.java,v 1.6 2006-05-15 15:42:34 georgeda Exp $
*
* $Log: not supported by cvs2svn $
* Revision 1.5 2005/11/11 15:38:47 georgeda
* Use constant for action
*
* Revision 1.4 2005/10/28 12:47:26 georgeda
* Added delete functionality
*
* Revision 1.3 2005/10/24 13:28:17 georgeda
* Cleanup changes
*
* Revision 1.2 2005/10/17 16:30:24 georgeda
* Cleanup
*
* Revision 1.1 2005/10/17 13:28:45 georgeda
* Initial revision
*
*
*/
package gov.nih.nci.camod.webapp.action;
import gov.nih.nci.camod.Constants;
import gov.nih.nci.camod.domain.ContactInfo;
import gov.nih.nci.camod.domain.Person;
import gov.nih.nci.camod.service.impl.PersonManagerSingleton;
import gov.nih.nci.camod.webapp.form.EditUserForm;
import java.util.Iterator;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;
/**
*
* Used to populate the form used to assign roles to a user.
*
*/
public class AdminEditUserPopulateAction extends BaseAction
{
/**
* Action used to populate the various admin lists for the curation process
*/
public ActionForward execute(ActionMapping inMapping,
ActionForm inForm,
HttpServletRequest inRequest,
HttpServletResponse inResponse) throws Exception
{
log.trace("Entering execute");
EditUserForm theForm = (EditUserForm) inForm;
String theAction = (String) inRequest.getParameter(Constants.Parameters.ACTION);
String theForward = "next";
// Add a brand new user
if (theAction.equals("Add"))
{
theForm.setLastName(null);
theForm.setFirstName(null);
theForm.setUsername(null);
theForm.setId(null);
theForm.setPrincipalInvestigator(false);
}
// Update an existing user
else
{
try
{
Person thePerson = PersonManagerSingleton.instance().get(theForm.getId());
if (thePerson == null)
{
throw new IllegalArgumentException("Unknown user id: " + theForm.getId());
}
theForm.setLastName(thePerson.getLastName());
theForm.setFirstName(thePerson.getFirstName());
theForm.setUsername(thePerson.getUsername());
Set theContactInfos = thePerson.getContactInfoCollection();
if (theContactInfos != null && theContactInfos.size() > 0)
{
Iterator theIterator = theContactInfos.iterator();
ContactInfo theContactInfo = (ContactInfo) theIterator.next();
theForm.setAffiliation(theContactInfo.getInstitute());
theForm.setPhone(theContactInfo.getPhone());
}
Boolean isPI = thePerson.getIsPrincipalInvestigator();
if (isPI != null)
{
theForm.setPrincipalInvestigator(isPI.booleanValue());
}
else
{
theForm.setPrincipalInvestigator(false);
}
}
catch (Exception e)
{
theForward = "failure";
log.error("Unable to get user: ", e);
// Encountered an error saving the model.
// created a new model successfully
ActionMessages theMsg = new ActionMessages();
theMsg.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage("errors.admin.message"));
saveErrors(inRequest, theMsg);
}
}
log.trace("Exiting execute");
return inMapping.findForward(theForward);
}
} | {
"content_hash": "98e39ac10f53916dd134e41cce6fbf4a",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 101,
"avg_line_length": 31.8,
"alnum_prop": 0.5982099661344944,
"repo_name": "NCIP/camod",
"id": "9803a1f37b03806c1844125884c2c145ca646e0f",
"size": "4315",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "software/camod/src/gov/nih/nci/camod/webapp/action/AdminEditUserPopulateAction.java",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "571"
},
{
"name": "CSS",
"bytes": "763585"
},
{
"name": "Groovy",
"bytes": "2514"
},
{
"name": "HTML",
"bytes": "1773080"
},
{
"name": "Java",
"bytes": "4572314"
},
{
"name": "JavaScript",
"bytes": "1171793"
},
{
"name": "PLSQL",
"bytes": "80403"
},
{
"name": "Shell",
"bytes": "23550"
},
{
"name": "XSLT",
"bytes": "190799"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_25) on Wed Jan 08 11:58:51 EST 2014 -->
<title>Uses of Class cpsr.planning.ertapprox.actionensembles.ActionERTQPlanner</title>
<meta name="date" content="2014-01-08">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class cpsr.planning.ertapprox.actionensembles.ActionERTQPlanner";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../cpsr/planning/ertapprox/actionensembles/ActionERTQPlanner.html" title="class in cpsr.planning.ertapprox.actionensembles">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?cpsr/planning/ertapprox/actionensembles/class-use/ActionERTQPlanner.html" target="_top">Frames</a></li>
<li><a href="ActionERTQPlanner.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class cpsr.planning.ertapprox.actionensembles.ActionERTQPlanner" class="title">Uses of Class<br>cpsr.planning.ertapprox.actionensembles.ActionERTQPlanner</h2>
</div>
<div class="classUseContainer">No usage of cpsr.planning.ertapprox.actionensembles.ActionERTQPlanner</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../cpsr/planning/ertapprox/actionensembles/ActionERTQPlanner.html" title="class in cpsr.planning.ertapprox.actionensembles">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?cpsr/planning/ertapprox/actionensembles/class-use/ActionERTQPlanner.html" target="_top">Frames</a></li>
<li><a href="ActionERTQPlanner.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"content_hash": "7f042f3869776a38e54972b91d4124a3",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 177,
"avg_line_length": 38.017391304347825,
"alnum_prop": 0.6322049405306496,
"repo_name": "williamleif/PSRToolbox",
"id": "60c5d11a0db8ee1b683d2c1ea60688ed6c41fc92",
"size": "4372",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/cpsr/planning/ertapprox/actionensembles/class-use/ActionERTQPlanner.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Groff",
"bytes": "688"
},
{
"name": "Java",
"bytes": "319900"
}
],
"symlink_target": ""
} |
"use strict";
const Result = require("./Result.js");
const ResultType = require("../enums/ResultType.js");
/**
 * Result produced by a command precondition check: either a bare success,
 * or a failure carrying the failing command and a human-readable reason.
 */
class PreconditionResult extends Result {
  /**
   * @param {Object} options - Base result options; non-success results
   *   must include a string `reason`.
   */
  constructor(options) {
    super(options);
    this.reason = options.reason;
    PreconditionResult.validate(this);
  }

  /**
   * @returns {PreconditionResult} A successful precondition result.
   */
  static fromSuccess() {
    const options = {type: ResultType.Success};
    return new PreconditionResult(options);
  }

  /**
   * @param {*} command - The command whose precondition failed.
   * @param {string} reason - Why the precondition failed.
   * @returns {PreconditionResult} A failed precondition result.
   */
  static fromFailure(command, reason) {
    const options = {
      command: command,
      reason: reason,
      type: ResultType.Precondition
    };
    return new PreconditionResult(options);
  }

  /**
   * Ensures every non-success result carries a string reason.
   * @throws {TypeError} When the reason is missing or not a string.
   */
  static validate(res) {
    const succeeded = res.type === ResultType.Success;
    if (!succeeded && typeof res.reason !== "string") {
      throw TypeError("PreconditionResult#reason must be a string.");
    }
  }
}
module.exports = PreconditionResult;
| {
"content_hash": "36e1e2271f9e8f2476c94e23095391a9",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 73,
"avg_line_length": 23.677419354838708,
"alnum_prop": 0.6811989100817438,
"repo_name": "RealBlazeIt/Patron",
"id": "ca1865f30d11cc1d3695175c1d330defdaab5239",
"size": "1515",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/results/PreconditionResult.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "18804"
}
],
"symlink_target": ""
} |
# Snorlax bundles conventional JSON REST behavior (CRUD actions, error
# rendering, pagination and timeframe filtering) into a controller base
# class that Rails API controllers can subclass.
module Snorlax
  class Base < ::ApplicationController
    # Mixes the standard REST actions and rescue handlers into the given
    # controller class via class_eval. Invoked below with Base itself so
    # subclasses inherit everything.
    def self.snorlax_used_rest!(controller)
      controller.class_eval do
        # Translate authorization failures into 403 responses when the
        # corresponding authorization library is loaded.
        if defined? CanCan::AccessDenied
          rescue_from(CanCan::AccessDenied) { |e| respond_with_standard_error e, 403 }
        end
        if defined? Pundit::NotAuthorizedError
          rescue_from(Pundit::NotAuthorizedError) { |e| respond_with_standard_error e, 403 }
        end
        # Map common request/record errors onto their HTTP status codes.
        rescue_from(ActionController::UnpermittedParameters) { |e| respond_with_standard_error e, 400 }
        rescue_from(ActionController::ParameterMissing) { |e| respond_with_standard_error e, 400 }
        rescue_from(ActiveRecord::RecordNotFound) { |e| respond_with_standard_error e, 404 }
        # GET /resources/:id
        def show
          respond_with_resource
        end
        # GET /resources — filtered, paginated collection.
        def index
          instantiate_collection
          respond_with_collection
        end
        # POST /resources
        def create
          instantiate_resource
          create_action
          respond_with_resource
        end
        # Overridable persistence hook for create.
        def create_action
          resource.save
        end
        # PATCH/PUT /resources/:id
        def update
          load_resource
          update_action
          respond_with_resource
        end
        # Overridable persistence hook for update.
        def update_action
          resource.update(resource_params)
        end
        # DELETE /resources/:id
        def destroy
          load_resource
          destroy_action
          destroy_response
        end
        private
        # Reads @<plural resource name> (e.g. @comments).
        def collection
          instance_variable_get :"@#{resource_name.pluralize}"
        end
        # Reads @<resource name> (e.g. @comment).
        def resource
          instance_variable_get :"@#{resource_name}"
        end
        def resource=(value)
          instance_variable_set :"@#{resource_name}", value
        end
        def collection=(value)
          instance_variable_set :"@#{resource_name.pluralize}", value
        end
        def instantiate_resource
          self.resource = resource_class.new(resource_params)
        end
        # Builds the index collection: accessible records, optionally piped
        # through a caller-supplied block, then timeframe-filtered and paged.
        def instantiate_collection(timeframe_collection: true, page_collection: true)
          collection = accessible_records
          collection = yield collection if block_given?
          collection = timeframe_collection collection if timeframe_collection
          collection = page_collection collection if page_collection
          self.collection = collection.to_a
        end
        # Restricts the collection to the since/until window when the model
        # supports timeframes and the params request one.
        def timeframe_collection(collection)
          if resource_class.try(:has_timeframe?) && (params[:since] || params[:until])
            parse_date_parameters # I feel like Rails should do this for me..
            collection.within(params[:since], params[:until], params[:timeframe_for])
          else
            collection
          end
        end
        # Coerces the since/until params from strings into DateTime objects.
        def parse_date_parameters
          %w(since until).each { |field| params[field] = DateTime.parse(params[field].to_s) if params[field] }
        end
        # Offset/limit pagination driven by the from/per params.
        def page_collection(collection)
          collection.offset(params[:from].to_i).limit((params[:per] || default_page_size).to_i)
        end
        # Chooses the record scope based on authentication state.
        def accessible_records
          if current_user.is_logged_in?
            visible_records
          else
            public_records
          end
        end
        # Subclasses must define the scope visible to a logged-in user.
        def visible_records
          raise NotImplementedError.new
        end
        # Subclasses must define the scope visible to anonymous users.
        def public_records
          raise NotImplementedError.new
        end
        def default_page_size
          50
        end
        # Overridable destruction hook.
        def destroy_action
          resource.destroy
        end
        def destroy_response
          render json: {success: 'success'}
        end
        def load_resource
          self.resource = resource_class.find(params[:id])
        end
        # Strong-parameters accessor, e.g. permitted_params.comment.
        def resource_params
          permitted_params.send resource_name
        end
        def resource_symbol
          resource_name.to_sym
        end
        # Derived from the controller name: CommentsController -> "comment".
        def resource_name
          controller_name.singularize
        end
        def resource_class
          resource_name.camelize.constantize
        end
        def resource_serializer
          "#{resource_name}_serializer".camelize.constantize
        end
        # Renders the single resource, or its validation errors as a 422.
        def respond_with_resource(scope: default_scope, serializer: resource_serializer, root: serializer_root)
          if resource.errors.empty?
            respond_with_collection(resources: [resource], scope: scope, serializer: serializer, root: root)
          else
            respond_with_errors
          end
        end
        def respond_with_collection(resources: collection, scope: default_scope, serializer: resource_serializer, root: serializer_root)
          render json: resources, scope: scope, each_serializer: serializer, root: root
        end
        def respond_with_standard_error(error, status)
          render json: {exception: error.to_s}, root: false, status: status
        end
        def respond_with_errors
          render json: {errors: resource.errors.as_json}, root: false, status: 422
        end
        def serializer_root
          controller_name
        end
        def default_scope
          {}
        end
      end
    end
    snorlax_used_rest!(self)
  end
end
| {
"content_hash": "4c93d6b07edf12b08211dbb6ff0e43ac",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 136,
"avg_line_length": 27.717391304347824,
"alnum_prop": 0.5858823529411765,
"repo_name": "loomio/snorlax",
"id": "12190c0f1f0421e22ec29f4e2070208321838821",
"size": "5100",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/controllers/snorlax/base.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "7554"
},
{
"name": "Shell",
"bytes": "115"
}
],
"symlink_target": ""
} |
import BaseHTTPServer
import bisect
import cgi
import cmd
import codecs
import ctypes
import datetime
import disasm
import mmap
import optparse
import os
import re
import sys
import types
import urllib
import urlparse
import v8heapconst
import webbrowser
PORT_NUMBER = 8081
USAGE="""usage: %prog [OPTIONS] [DUMP-FILE]
Minidump analyzer.
Shows the processor state at the point of exception including the
stack of the active thread and the referenced objects in the V8
heap. Code objects are disassembled and the addresses linked from the
stack (e.g. pushed return addresses) are marked with "=>".
Examples:
$ %prog 12345678-1234-1234-1234-123456789abcd-full.dmp"""
DEBUG=False
def DebugPrint(s):
if not DEBUG: return
print s
class Descriptor(object):
  """Descriptor of a structure in a memory.

  Built from a list of (name, type_or_func) pairs. When every entry is a
  plain ctypes type, a fixed ctypes.Structure is precomputed. If any entry
  is a function, the layout is "flexible": each such function receives the
  partially decoded prefix of the structure and returns the concrete ctype
  for that field (or None to omit it), so later fields can depend on
  earlier field values (e.g. variable-length arrays, optional registers).
  """
  def __init__(self, fields):
    # fields: ordered list of (name, ctype) or (name, callable) pairs.
    self.fields = fields
    self.is_flexible = False
    for _, type_or_func in fields:
      if isinstance(type_or_func, types.FunctionType):
        self.is_flexible = True
        break
    if not self.is_flexible:
      # Fixed layout: build the Structure class and its size up front.
      self.ctype = Descriptor._GetCtype(fields)
      self.size = ctypes.sizeof(self.ctype)
  def Read(self, memory, offset):
    """Decode one structure from `memory` at `offset` (zero-copy view)."""
    if self.is_flexible:
      fields_copy = self.fields[:]
      last = 0
      for name, type_or_func in fields_copy:
        if isinstance(type_or_func, types.FunctionType):
          # Decode the prefix resolved so far, then ask the callback for
          # the concrete ctype of this field based on those earlier values.
          partial_ctype = Descriptor._GetCtype(fields_copy[:last])
          partial_object = partial_ctype.from_buffer(memory, offset)
          type = type_or_func(partial_object)  # NOTE: shadows builtin `type`
          if type is not None:
            fields_copy[last] = (name, type)
            last += 1
          # A None result omits this field; later fields compact down.
        else:
          last += 1
      complete_ctype = Descriptor._GetCtype(fields_copy[:last])
    else:
      complete_ctype = self.ctype
    return complete_ctype.from_buffer(memory, offset)
  @staticmethod
  def _GetCtype(fields):
    """Build a packed (no padding, _pack_=1) Structure class for fields."""
    class Raw(ctypes.Structure):
      _fields_ = fields
      _pack_ = 1
      def __str__(self):
        return "{" + ", ".join("%s: %s" % (field, self.__getattribute__(field))
                               for field, _ in Raw._fields_) + "}"
    return Raw
def FullDump(reader, heap):
  """Dump all available memory regions.

  Each region is classified and printed as disassembly, an ASCII/hex dump
  (hd-style), or pointer-sized words annotated with any V8 heap objects
  they reference.
  """
  def dump_region(reader, start, size, location):
    # start/size describe the region in target address space; location is
    # the offset of the same bytes inside the mapped minidump file.
    print
    # Align the region start to a 4-byte boundary before classifying it.
    while start & 3 != 0:
      start += 1
      size -= 1
      location += 1
    is_executable = reader.IsProbableExecutableRegion(location, size)
    is_ascii = reader.IsProbableASCIIRegion(location, size)
    # "is not False" keeps the ambiguous (None) case in both branches.
    if is_executable is not False:
      lines = reader.GetDisasmLines(start, size)
      for line in lines:
        print FormatDisasmLine(start, heap, line)
      print
    if is_ascii is not False:
      # Output in the same format as the Unix hd command
      addr = start
      for slot in xrange(location, location + size, 16):
        hex_line = ""
        asc_line = ""
        for i in xrange(0, 16):
          if slot + i < location + size:
            byte = ctypes.c_uint8.from_buffer(reader.minidump, slot + i).value
            if byte >= 0x20 and byte < 0x7f:
              asc_line += chr(byte)
            else:
              asc_line += "."
            hex_line += " %02x" % (byte)
          else:
            hex_line += "   "
          if i == 7:
            # Extra gap between the two 8-byte halves, hd-style.
            hex_line += " "
        print "%s %s |%s|" % (reader.FormatIntPtr(addr),
                              hex_line,
                              asc_line)
        addr += 16
    if is_executable is not True and is_ascii is not True:
      # Unclassified region: dump it word by word, resolving each word
      # against the V8 heap so object references are printed inline.
      print "%s - %s" % (reader.FormatIntPtr(start),
                         reader.FormatIntPtr(start + size))
      for slot in xrange(start,
                         start + size,
                         reader.PointerSize()):
        maybe_address = reader.ReadUIntPtr(slot)
        heap_object = heap.FindObject(maybe_address)
        print "%s: %s" % (reader.FormatIntPtr(slot),
                          reader.FormatIntPtr(maybe_address))
        if heap_object:
          heap_object.Print(Printer())
          print
  reader.ForEachMemoryRegion(dump_region)
# Heap constants generated by 'make grokdump' in v8heapconst module.
INSTANCE_TYPES = v8heapconst.INSTANCE_TYPES
KNOWN_MAPS = v8heapconst.KNOWN_MAPS
KNOWN_OBJECTS = v8heapconst.KNOWN_OBJECTS
# Set of structures and constants that describe the layout of minidump
# files. Based on MSDN and Google Breakpad.
MINIDUMP_HEADER = Descriptor([
("signature", ctypes.c_uint32),
("version", ctypes.c_uint32),
("stream_count", ctypes.c_uint32),
("stream_directories_rva", ctypes.c_uint32),
("checksum", ctypes.c_uint32),
("time_date_stampt", ctypes.c_uint32),
("flags", ctypes.c_uint64)
])
MINIDUMP_LOCATION_DESCRIPTOR = Descriptor([
("data_size", ctypes.c_uint32),
("rva", ctypes.c_uint32)
])
MINIDUMP_STRING = Descriptor([
("length", ctypes.c_uint32),
("buffer", lambda t: ctypes.c_uint8 * (t.length + 2))
])
MINIDUMP_DIRECTORY = Descriptor([
("stream_type", ctypes.c_uint32),
("location", MINIDUMP_LOCATION_DESCRIPTOR.ctype)
])
MD_EXCEPTION_MAXIMUM_PARAMETERS = 15
MINIDUMP_EXCEPTION = Descriptor([
("code", ctypes.c_uint32),
("flags", ctypes.c_uint32),
("record", ctypes.c_uint64),
("address", ctypes.c_uint64),
("parameter_count", ctypes.c_uint32),
("unused_alignment", ctypes.c_uint32),
("information", ctypes.c_uint64 * MD_EXCEPTION_MAXIMUM_PARAMETERS)
])
MINIDUMP_EXCEPTION_STREAM = Descriptor([
("thread_id", ctypes.c_uint32),
("unused_alignment", ctypes.c_uint32),
("exception", MINIDUMP_EXCEPTION.ctype),
("thread_context", MINIDUMP_LOCATION_DESCRIPTOR.ctype)
])
# Stream types.
MD_UNUSED_STREAM = 0
MD_RESERVED_STREAM_0 = 1
MD_RESERVED_STREAM_1 = 2
MD_THREAD_LIST_STREAM = 3
MD_MODULE_LIST_STREAM = 4
MD_MEMORY_LIST_STREAM = 5
MD_EXCEPTION_STREAM = 6
MD_SYSTEM_INFO_STREAM = 7
MD_THREAD_EX_LIST_STREAM = 8
MD_MEMORY_64_LIST_STREAM = 9
MD_COMMENT_STREAM_A = 10
MD_COMMENT_STREAM_W = 11
MD_HANDLE_DATA_STREAM = 12
MD_FUNCTION_TABLE_STREAM = 13
MD_UNLOADED_MODULE_LIST_STREAM = 14
MD_MISC_INFO_STREAM = 15
MD_MEMORY_INFO_LIST_STREAM = 16
MD_THREAD_INFO_LIST_STREAM = 17
MD_HANDLE_OPERATION_LIST_STREAM = 18
MD_FLOATINGSAVEAREA_X86_REGISTERAREA_SIZE = 80
MINIDUMP_FLOATING_SAVE_AREA_X86 = Descriptor([
("control_word", ctypes.c_uint32),
("status_word", ctypes.c_uint32),
("tag_word", ctypes.c_uint32),
("error_offset", ctypes.c_uint32),
("error_selector", ctypes.c_uint32),
("data_offset", ctypes.c_uint32),
("data_selector", ctypes.c_uint32),
("register_area", ctypes.c_uint8 * MD_FLOATINGSAVEAREA_X86_REGISTERAREA_SIZE),
("cr0_npx_state", ctypes.c_uint32)
])
MD_CONTEXT_X86_EXTENDED_REGISTERS_SIZE = 512
# Context flags.
MD_CONTEXT_X86 = 0x00010000
MD_CONTEXT_X86_CONTROL = (MD_CONTEXT_X86 | 0x00000001)
MD_CONTEXT_X86_INTEGER = (MD_CONTEXT_X86 | 0x00000002)
MD_CONTEXT_X86_SEGMENTS = (MD_CONTEXT_X86 | 0x00000004)
MD_CONTEXT_X86_FLOATING_POINT = (MD_CONTEXT_X86 | 0x00000008)
MD_CONTEXT_X86_DEBUG_REGISTERS = (MD_CONTEXT_X86 | 0x00000010)
MD_CONTEXT_X86_EXTENDED_REGISTERS = (MD_CONTEXT_X86 | 0x00000020)
def EnableOnFlag(type, flag):
  """Return a field-type selector for flexible Descriptors.

  The returned callable inspects a partially decoded context object and
  yields `type` when `flag` is set in its context_flags, otherwise None
  (which makes Descriptor omit the field).
  """
  def select_type(partial):
    if partial.context_flags & flag:
      return type
    return None
  return select_type
MINIDUMP_CONTEXT_X86 = Descriptor([
("context_flags", ctypes.c_uint32),
# MD_CONTEXT_X86_DEBUG_REGISTERS.
("dr0", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_DEBUG_REGISTERS)),
("dr1", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_DEBUG_REGISTERS)),
("dr2", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_DEBUG_REGISTERS)),
("dr3", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_DEBUG_REGISTERS)),
("dr6", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_DEBUG_REGISTERS)),
("dr7", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_DEBUG_REGISTERS)),
# MD_CONTEXT_X86_FLOATING_POINT.
("float_save", EnableOnFlag(MINIDUMP_FLOATING_SAVE_AREA_X86.ctype,
MD_CONTEXT_X86_FLOATING_POINT)),
# MD_CONTEXT_X86_SEGMENTS.
("gs", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_SEGMENTS)),
("fs", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_SEGMENTS)),
("es", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_SEGMENTS)),
("ds", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_SEGMENTS)),
# MD_CONTEXT_X86_INTEGER.
("edi", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_INTEGER)),
("esi", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_INTEGER)),
("ebx", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_INTEGER)),
("edx", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_INTEGER)),
("ecx", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_INTEGER)),
("eax", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_INTEGER)),
# MD_CONTEXT_X86_CONTROL.
("ebp", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_CONTROL)),
("eip", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_CONTROL)),
("cs", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_CONTROL)),
("eflags", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_CONTROL)),
("esp", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_CONTROL)),
("ss", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_X86_CONTROL)),
# MD_CONTEXT_X86_EXTENDED_REGISTERS.
("extended_registers",
EnableOnFlag(ctypes.c_uint8 * MD_CONTEXT_X86_EXTENDED_REGISTERS_SIZE,
MD_CONTEXT_X86_EXTENDED_REGISTERS))
])
MD_CONTEXT_ARM = 0x40000000
MD_CONTEXT_ARM_INTEGER = (MD_CONTEXT_ARM | 0x00000002)
MD_CONTEXT_ARM_FLOATING_POINT = (MD_CONTEXT_ARM | 0x00000004)
MD_FLOATINGSAVEAREA_ARM_FPR_COUNT = 32
MD_FLOATINGSAVEAREA_ARM_FPEXTRA_COUNT = 8
MINIDUMP_FLOATING_SAVE_AREA_ARM = Descriptor([
("fpscr", ctypes.c_uint64),
("regs", ctypes.c_uint64 * MD_FLOATINGSAVEAREA_ARM_FPR_COUNT),
("extra", ctypes.c_uint64 * MD_FLOATINGSAVEAREA_ARM_FPEXTRA_COUNT)
])
MINIDUMP_CONTEXT_ARM = Descriptor([
("context_flags", ctypes.c_uint32),
# MD_CONTEXT_ARM_INTEGER.
("r0", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r1", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r2", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r3", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r4", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r5", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r6", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r7", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r8", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r9", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r10", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r11", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("r12", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("sp", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("lr", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("pc", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_ARM_INTEGER)),
("cpsr", ctypes.c_uint32),
("float_save", EnableOnFlag(MINIDUMP_FLOATING_SAVE_AREA_ARM.ctype,
MD_CONTEXT_ARM_FLOATING_POINT))
])
MD_CONTEXT_AMD64 = 0x00100000
MD_CONTEXT_AMD64_CONTROL = (MD_CONTEXT_AMD64 | 0x00000001)
MD_CONTEXT_AMD64_INTEGER = (MD_CONTEXT_AMD64 | 0x00000002)
MD_CONTEXT_AMD64_SEGMENTS = (MD_CONTEXT_AMD64 | 0x00000004)
MD_CONTEXT_AMD64_FLOATING_POINT = (MD_CONTEXT_AMD64 | 0x00000008)
MD_CONTEXT_AMD64_DEBUG_REGISTERS = (MD_CONTEXT_AMD64 | 0x00000010)
MINIDUMP_CONTEXT_AMD64 = Descriptor([
("p1_home", ctypes.c_uint64),
("p2_home", ctypes.c_uint64),
("p3_home", ctypes.c_uint64),
("p4_home", ctypes.c_uint64),
("p5_home", ctypes.c_uint64),
("p6_home", ctypes.c_uint64),
("context_flags", ctypes.c_uint32),
("mx_csr", ctypes.c_uint32),
# MD_CONTEXT_AMD64_CONTROL.
("cs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_CONTROL)),
# MD_CONTEXT_AMD64_SEGMENTS
("ds", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
("es", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
("fs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
("gs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)),
# MD_CONTEXT_AMD64_CONTROL.
("ss", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_CONTROL)),
("eflags", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_AMD64_CONTROL)),
# MD_CONTEXT_AMD64_DEBUG_REGISTERS.
("dr0", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
("dr1", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
("dr2", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
("dr3", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
("dr6", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
("dr7", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
# MD_CONTEXT_AMD64_INTEGER.
("rax", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("rcx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("rdx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("rbx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
# MD_CONTEXT_AMD64_CONTROL.
("rsp", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_CONTROL)),
# MD_CONTEXT_AMD64_INTEGER.
("rbp", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("rsi", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("rdi", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("r8", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("r9", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("r10", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("r11", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("r12", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("r13", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("r14", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
("r15", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)),
# MD_CONTEXT_AMD64_CONTROL.
("rip", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_CONTROL)),
# MD_CONTEXT_AMD64_FLOATING_POINT
("sse_registers", EnableOnFlag(ctypes.c_uint8 * (16 * 26),
MD_CONTEXT_AMD64_FLOATING_POINT)),
("vector_registers", EnableOnFlag(ctypes.c_uint8 * (16 * 26),
MD_CONTEXT_AMD64_FLOATING_POINT)),
("vector_control", EnableOnFlag(ctypes.c_uint64,
MD_CONTEXT_AMD64_FLOATING_POINT)),
# MD_CONTEXT_AMD64_DEBUG_REGISTERS.
("debug_control", EnableOnFlag(ctypes.c_uint64,
MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
("last_branch_to_rip", EnableOnFlag(ctypes.c_uint64,
MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
("last_branch_from_rip", EnableOnFlag(ctypes.c_uint64,
MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
("last_exception_to_rip", EnableOnFlag(ctypes.c_uint64,
MD_CONTEXT_AMD64_DEBUG_REGISTERS)),
("last_exception_from_rip", EnableOnFlag(ctypes.c_uint64,
MD_CONTEXT_AMD64_DEBUG_REGISTERS))
])
MINIDUMP_MEMORY_DESCRIPTOR = Descriptor([
("start", ctypes.c_uint64),
("memory", MINIDUMP_LOCATION_DESCRIPTOR.ctype)
])
MINIDUMP_MEMORY_DESCRIPTOR64 = Descriptor([
("start", ctypes.c_uint64),
("size", ctypes.c_uint64)
])
MINIDUMP_MEMORY_LIST = Descriptor([
("range_count", ctypes.c_uint32),
("ranges", lambda m: MINIDUMP_MEMORY_DESCRIPTOR.ctype * m.range_count)
])
MINIDUMP_MEMORY_LIST_Mac = Descriptor([
("range_count", ctypes.c_uint32),
("junk", ctypes.c_uint32),
("ranges", lambda m: MINIDUMP_MEMORY_DESCRIPTOR.ctype * m.range_count)
])
MINIDUMP_MEMORY_LIST64 = Descriptor([
("range_count", ctypes.c_uint64),
("base_rva", ctypes.c_uint64),
("ranges", lambda m: MINIDUMP_MEMORY_DESCRIPTOR64.ctype * m.range_count)
])
MINIDUMP_THREAD = Descriptor([
("id", ctypes.c_uint32),
("suspend_count", ctypes.c_uint32),
("priority_class", ctypes.c_uint32),
("priority", ctypes.c_uint32),
("ted", ctypes.c_uint64),
("stack", MINIDUMP_MEMORY_DESCRIPTOR.ctype),
("context", MINIDUMP_LOCATION_DESCRIPTOR.ctype)
])
MINIDUMP_THREAD_LIST = Descriptor([
("thread_count", ctypes.c_uint32),
("threads", lambda t: MINIDUMP_THREAD.ctype * t.thread_count)
])
MINIDUMP_THREAD_LIST_Mac = Descriptor([
("thread_count", ctypes.c_uint32),
("junk", ctypes.c_uint32),
("threads", lambda t: MINIDUMP_THREAD.ctype * t.thread_count)
])
MINIDUMP_VS_FIXEDFILEINFO = Descriptor([
("dwSignature", ctypes.c_uint32),
("dwStrucVersion", ctypes.c_uint32),
("dwFileVersionMS", ctypes.c_uint32),
("dwFileVersionLS", ctypes.c_uint32),
("dwProductVersionMS", ctypes.c_uint32),
("dwProductVersionLS", ctypes.c_uint32),
("dwFileFlagsMask", ctypes.c_uint32),
("dwFileFlags", ctypes.c_uint32),
("dwFileOS", ctypes.c_uint32),
("dwFileType", ctypes.c_uint32),
("dwFileSubtype", ctypes.c_uint32),
("dwFileDateMS", ctypes.c_uint32),
("dwFileDateLS", ctypes.c_uint32)
])
MINIDUMP_RAW_MODULE = Descriptor([
("base_of_image", ctypes.c_uint64),
("size_of_image", ctypes.c_uint32),
("checksum", ctypes.c_uint32),
("time_date_stamp", ctypes.c_uint32),
("module_name_rva", ctypes.c_uint32),
("version_info", MINIDUMP_VS_FIXEDFILEINFO.ctype),
("cv_record", MINIDUMP_LOCATION_DESCRIPTOR.ctype),
("misc_record", MINIDUMP_LOCATION_DESCRIPTOR.ctype),
("reserved0", ctypes.c_uint32 * 2),
("reserved1", ctypes.c_uint32 * 2)
])
MINIDUMP_MODULE_LIST = Descriptor([
("number_of_modules", ctypes.c_uint32),
("modules", lambda t: MINIDUMP_RAW_MODULE.ctype * t.number_of_modules)
])
MINIDUMP_MODULE_LIST_Mac = Descriptor([
("number_of_modules", ctypes.c_uint32),
("junk", ctypes.c_uint32),
("modules", lambda t: MINIDUMP_RAW_MODULE.ctype * t.number_of_modules)
])
MINIDUMP_RAW_SYSTEM_INFO = Descriptor([
("processor_architecture", ctypes.c_uint16)
])
MD_CPU_ARCHITECTURE_X86 = 0
MD_CPU_ARCHITECTURE_ARM = 5
MD_CPU_ARCHITECTURE_AMD64 = 9
class FuncSymbol:
  """A function symbol: a named, half-open address range [start, end)."""

  def __init__(self, start, size, name):
    self.start = start
    self.end = start + size
    self.name = name

  def __cmp__(self, other):
    # Order symbols by start address.  Also comparable against a raw
    # integer address so bisect can search a sorted symbol list directly.
    other_start = other.start if isinstance(other, FuncSymbol) else other
    return self.start - other_start

  def Covers(self, addr):
    """Return True if addr falls inside this symbol's address range."""
    return self.start <= addr < self.end
class MinidumpReader(object):
  """Minidump (.dmp) reader.

  Parses the minidump header and stream directory, then exposes the dump's
  memory ranges, modules, threads, exception context and (optionally)
  Breakpad symbols through typed accessors.
  """

  _HEADER_MAGIC = 0x504d444d  # "MDMP"

  def __init__(self, options, minidump_name):
    self.minidump_name = minidump_name
    self.minidump_file = open(minidump_name, "r")
    self.minidump = mmap.mmap(self.minidump_file.fileno(), 0, mmap.MAP_PRIVATE)
    self.header = MINIDUMP_HEADER.Read(self.minidump, 0)
    if self.header.signature != MinidumpReader._HEADER_MAGIC:
      sys.stderr.write("Warning: Unsupported minidump header magic!\n")
    DebugPrint(self.header)
    # Read the stream directory: one MINIDUMP_DIRECTORY entry per stream.
    directories = []
    offset = self.header.stream_directories_rva
    for _ in xrange(self.header.stream_count):
      directories.append(MINIDUMP_DIRECTORY.Read(self.minidump, offset))
      offset += MINIDUMP_DIRECTORY.size
    self.arch = None
    self.exception = None
    self.exception_context = None
    self.memory_list = None
    self.memory_list64 = None
    self.module_list = None
    self.thread_map = {}
    self.symdir = options.symdir
    self.modules_with_symbols = []
    self.symbols = []
    # Find MDRawSystemInfo stream and determine arch first, since the
    # architecture decides which context layout the other streams use.
    for d in directories:
      if d.stream_type == MD_SYSTEM_INFO_STREAM:
        system_info = MINIDUMP_RAW_SYSTEM_INFO.Read(
            self.minidump, d.location.rva)
        self.arch = system_info.processor_architecture
        assert self.arch in [MD_CPU_ARCHITECTURE_AMD64,
                             MD_CPU_ARCHITECTURE_ARM,
                             MD_CPU_ARCHITECTURE_X86]
    assert not self.arch is None
    for d in directories:
      DebugPrint(d)
      if d.stream_type == MD_EXCEPTION_STREAM:
        self.exception = MINIDUMP_EXCEPTION_STREAM.Read(
            self.minidump, d.location.rva)
        DebugPrint(self.exception)
        if self.arch == MD_CPU_ARCHITECTURE_X86:
          self.exception_context = MINIDUMP_CONTEXT_X86.Read(
              self.minidump, self.exception.thread_context.rva)
        elif self.arch == MD_CPU_ARCHITECTURE_AMD64:
          self.exception_context = MINIDUMP_CONTEXT_AMD64.Read(
              self.minidump, self.exception.thread_context.rva)
        elif self.arch == MD_CPU_ARCHITECTURE_ARM:
          self.exception_context = MINIDUMP_CONTEXT_ARM.Read(
              self.minidump, self.exception.thread_context.rva)
        DebugPrint(self.exception_context)
      elif d.stream_type == MD_THREAD_LIST_STREAM:
        thread_list = MINIDUMP_THREAD_LIST.Read(self.minidump, d.location.rva)
        # Mac dumps carry 4 extra bytes of padding after the count;
        # detect that by comparing struct size to the stream size.
        if ctypes.sizeof(thread_list) + 4 == d.location.data_size:
          thread_list = MINIDUMP_THREAD_LIST_Mac.Read(
              self.minidump, d.location.rva)
        assert ctypes.sizeof(thread_list) == d.location.data_size
        DebugPrint(thread_list)
        for thread in thread_list.threads:
          DebugPrint(thread)
          self.thread_map[thread.id] = thread
      elif d.stream_type == MD_MODULE_LIST_STREAM:
        assert self.module_list is None
        self.module_list = MINIDUMP_MODULE_LIST.Read(
          self.minidump, d.location.rva)
        if ctypes.sizeof(self.module_list) + 4 == d.location.data_size:
          self.module_list = MINIDUMP_MODULE_LIST_Mac.Read(
              self.minidump, d.location.rva)
        assert ctypes.sizeof(self.module_list) == d.location.data_size
        DebugPrint(self.module_list)
      elif d.stream_type == MD_MEMORY_LIST_STREAM:
        # A plain memory list (as opposed to MEMORY_64) means only selected
        # ranges were captured.
        sys.stderr.write("Warning: This is not a full minidump!\n")
        assert self.memory_list is None
        self.memory_list = MINIDUMP_MEMORY_LIST.Read(
          self.minidump, d.location.rva)
        if ctypes.sizeof(self.memory_list) + 4 == d.location.data_size:
          self.memory_list = MINIDUMP_MEMORY_LIST_Mac.Read(
              self.minidump, d.location.rva)
        assert ctypes.sizeof(self.memory_list) == d.location.data_size
        DebugPrint(self.memory_list)
      elif d.stream_type == MD_MEMORY_64_LIST_STREAM:
        assert self.memory_list64 is None
        self.memory_list64 = MINIDUMP_MEMORY_LIST64.Read(
          self.minidump, d.location.rva)
        assert ctypes.sizeof(self.memory_list64) == d.location.data_size
        DebugPrint(self.memory_list64)

  def IsValidAddress(self, address):
    """Return True if the given target address is captured in the dump."""
    return self.FindLocation(address) is not None

  def ReadU8(self, address):
    """Read an unsigned 8-bit value at a target-process address."""
    location = self.FindLocation(address)
    return ctypes.c_uint8.from_buffer(self.minidump, location).value

  def ReadU32(self, address):
    """Read an unsigned 32-bit value at a target-process address."""
    location = self.FindLocation(address)
    return ctypes.c_uint32.from_buffer(self.minidump, location).value

  def ReadU64(self, address):
    """Read an unsigned 64-bit value at a target-process address."""
    location = self.FindLocation(address)
    return ctypes.c_uint64.from_buffer(self.minidump, location).value

  def ReadUIntPtr(self, address):
    """Read a pointer-sized unsigned value at a target-process address."""
    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
      return self.ReadU64(address)
    elif self.arch == MD_CPU_ARCHITECTURE_ARM:
      return self.ReadU32(address)
    elif self.arch == MD_CPU_ARCHITECTURE_X86:
      return self.ReadU32(address)

  def ReadBytes(self, address, size):
    """Read a raw byte string of |size| bytes at a target-process address."""
    location = self.FindLocation(address)
    return self.minidump[location:location + size]

  def _ReadWord(self, location):
    """Read a pointer-sized value at a *file* offset (not a target address)."""
    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
      return ctypes.c_uint64.from_buffer(self.minidump, location).value
    elif self.arch == MD_CPU_ARCHITECTURE_ARM:
      return ctypes.c_uint32.from_buffer(self.minidump, location).value
    elif self.arch == MD_CPU_ARCHITECTURE_X86:
      return ctypes.c_uint32.from_buffer(self.minidump, location).value

  def IsProbableASCIIRegion(self, location, length):
    """Heuristically classify a file region as ASCII text.

    Returns True (likely), None (maybe) or False (unlikely).
    """
    ascii_bytes = 0
    non_ascii_bytes = 0
    for loc in xrange(location, location + length):
      byte = ctypes.c_uint8.from_buffer(self.minidump, loc).value
      if byte >= 0x7f:
        non_ascii_bytes += 1
      if byte < 0x20 and byte != 0:
        non_ascii_bytes += 1
      if byte < 0x7f and byte >= 0x20:
        ascii_bytes += 1
      if byte == 0xa:  # newline
        ascii_bytes += 1
    if ascii_bytes * 10 <= length:
      return False
    if length > 0 and ascii_bytes > non_ascii_bytes * 7:
      return True
    if ascii_bytes > non_ascii_bytes * 3:
      return None  # Maybe
    return False

  def IsProbableExecutableRegion(self, location, length):
    """Heuristically classify a file region as x86/x64 machine code by
    counting common opcode bytes.  Returns True, None (maybe) or False.
    """
    opcode_bytes = 0
    sixty_four = self.arch == MD_CPU_ARCHITECTURE_AMD64
    for loc in xrange(location, location + length):
      byte = ctypes.c_uint8.from_buffer(self.minidump, loc).value
      if (byte == 0x8b or           # mov
          byte == 0x89 or           # mov reg-reg
          (byte & 0xf0) == 0x50 or  # push/pop
          (sixty_four and (byte & 0xf0) == 0x40) or  # rex prefix
          byte == 0xc3 or           # return
          byte == 0x74 or           # jeq
          byte == 0x84 or           # jeq far
          byte == 0x75 or           # jne
          byte == 0x85 or           # jne far
          byte == 0xe8 or           # call
          byte == 0xe9 or           # jmp far
          byte == 0xeb):            # jmp near
        opcode_bytes += 1
    opcode_percent = (opcode_bytes * 100) / length
    threshold = 20
    if opcode_percent > threshold + 2:
      return True
    if opcode_percent > threshold - 2:
      return None  # Maybe
    return False

  def FindRegion(self, addr):
    """Return [start, size] of the captured memory region containing addr,
    or None if the address is not in the dump."""
    answer = [-1, -1]
    def is_in(reader, start, size, location):
      if addr >= start and addr < start + size:
        answer[0] = start
        answer[1] = size
    self.ForEachMemoryRegion(is_in)
    if answer[0] == -1:
      return None
    return answer

  def ForEachMemoryRegion(self, cb):
    """Invoke cb(reader, start, size, file_location) for every captured
    memory range, in dump order."""
    if self.memory_list64 is not None:
      # In a MEMORY_64 list the range contents are laid out back-to-back
      # in the file starting at base_rva, so track a running offset.
      # BUG FIX: 'offset' was previously used before being initialized,
      # raising NameError on any full (memory_list64) dump.
      offset = 0
      for r in self.memory_list64.ranges:
        location = self.memory_list64.base_rva + offset
        cb(self, r.start, r.size, location)
        offset += r.size
    if self.memory_list is not None:
      for r in self.memory_list.ranges:
        cb(self, r.start, r.memory.data_size, r.memory.rva)

  def FindWord(self, word, alignment=0):
    """Print every dump slot whose pointer-sized content equals |word|."""
    def search_inside_region(reader, start, size, location):
      location = (location + alignment) & ~alignment
      for loc in xrange(location, location + size - self.PointerSize()):
        if reader._ReadWord(loc) == word:
          slot = start + (loc - location)
          print("%s: %s" % (reader.FormatIntPtr(slot),
                            reader.FormatIntPtr(word)))
    self.ForEachMemoryRegion(search_inside_region)

  def FindWordList(self, word):
    """Return (aligned, unaligned) lists of slot addresses containing |word|."""
    aligned_res = []
    unaligned_res = []
    def search_inside_region(reader, start, size, location):
      for loc in xrange(location, location + size - self.PointerSize()):
        if reader._ReadWord(loc) == word:
          slot = start + (loc - location)
          if slot % self.PointerSize() == 0:
            aligned_res.append(slot)
          else:
            unaligned_res.append(slot)
    self.ForEachMemoryRegion(search_inside_region)
    return (aligned_res, unaligned_res)

  def FindLocation(self, address):
    """Translate a target-process address to a file offset, or None."""
    offset = 0
    if self.memory_list64 is not None:
      for r in self.memory_list64.ranges:
        if r.start <= address < r.start + r.size:
          return self.memory_list64.base_rva + offset + address - r.start
        offset += r.size
    if self.memory_list is not None:
      for r in self.memory_list.ranges:
        if r.start <= address < r.start + r.memory.data_size:
          return r.memory.rva + address - r.start
    return None

  def GetDisasmLines(self, address, size):
    """Disassemble |size| bytes at a target address.

    For ARM both the ARM and Thumb decodings are tried and the one with
    fewer undefined instructions is returned.
    """
    def CountUndefinedInstructions(lines):
      pattern = "<UNDEFINED>"
      return sum([line.count(pattern) for (ignore, line) in lines])
    location = self.FindLocation(address)
    if location is None: return []
    arch = None
    possible_objdump_flags = [""]
    if self.arch == MD_CPU_ARCHITECTURE_X86:
      arch = "ia32"
    elif self.arch == MD_CPU_ARCHITECTURE_ARM:
      arch = "arm"
      possible_objdump_flags = ["", "--disassembler-options=force-thumb"]
    elif self.arch == MD_CPU_ARCHITECTURE_AMD64:
      arch = "x64"
    results = [ disasm.GetDisasmLines(self.minidump_name,
                                      location,
                                      size,
                                      arch,
                                      False,
                                      objdump_flags)
                for objdump_flags in possible_objdump_flags ]
    return min(results, key=CountUndefinedInstructions)

  def Dispose(self):
    """Release the mmap and the underlying file handle."""
    self.minidump.close()
    self.minidump_file.close()

  def ExceptionIP(self):
    """Program counter at the time of the exception."""
    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
      return self.exception_context.rip
    elif self.arch == MD_CPU_ARCHITECTURE_ARM:
      return self.exception_context.pc
    elif self.arch == MD_CPU_ARCHITECTURE_X86:
      return self.exception_context.eip

  def ExceptionSP(self):
    """Stack pointer at the time of the exception."""
    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
      return self.exception_context.rsp
    elif self.arch == MD_CPU_ARCHITECTURE_ARM:
      return self.exception_context.sp
    elif self.arch == MD_CPU_ARCHITECTURE_X86:
      return self.exception_context.esp

  def ExceptionFP(self):
    """Frame pointer at the time of the exception (None on ARM)."""
    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
      return self.exception_context.rbp
    elif self.arch == MD_CPU_ARCHITECTURE_ARM:
      return None
    elif self.arch == MD_CPU_ARCHITECTURE_X86:
      return self.exception_context.ebp

  def FormatIntPtr(self, value):
    """Format a pointer-sized value as zero-padded hex for this arch."""
    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
      return "%016x" % value
    elif self.arch == MD_CPU_ARCHITECTURE_ARM:
      return "%08x" % value
    elif self.arch == MD_CPU_ARCHITECTURE_X86:
      return "%08x" % value

  def PointerSize(self):
    """Pointer width in bytes for the dumped process."""
    if self.arch == MD_CPU_ARCHITECTURE_AMD64:
      return 8
    elif self.arch == MD_CPU_ARCHITECTURE_ARM:
      return 4
    elif self.arch == MD_CPU_ARCHITECTURE_X86:
      return 4

  def Register(self, name):
    """Fetch a register by name from the exception context."""
    return self.exception_context.__getattribute__(name)

  def ReadMinidumpString(self, rva):
    """Read a MINIDUMP_STRING (UTF-16, NUL-terminated) at a file offset."""
    string = bytearray(MINIDUMP_STRING.Read(self.minidump, rva).buffer)
    string = string.decode("utf16")
    return string[0:len(string) - 1]

  # Load FUNC records from a BreakPad symbol file
  #
  # http://code.google.com/p/google-breakpad/wiki/SymbolFiles
  #
  def _LoadSymbolsFrom(self, symfile, baseaddr):
    """Parse FUNC records from |symfile|, rebased to |baseaddr|."""
    print("Loading symbols from %s" % (symfile))
    funcs = []
    with open(symfile) as f:
      for line in f:
        result = re.match(
            r"^FUNC ([a-f0-9]+) ([a-f0-9]+) ([a-f0-9]+) (.*)$", line)
        if result is not None:
          start = int(result.group(1), 16)
          size = int(result.group(2), 16)
          name = result.group(4).rstrip()
          bisect.insort_left(self.symbols,
                             FuncSymbol(baseaddr + start, size, name))
    print(" ... done")

  def TryLoadSymbolsFor(self, modulename, module):
    """Best-effort load of <module>.pdb.sym from the symbol directory."""
    try:
      symfile = os.path.join(self.symdir,
                             modulename.replace('.', '_') + ".pdb.sym")
      if os.path.isfile(symfile):
        self._LoadSymbolsFrom(symfile, module.base_of_image)
        self.modules_with_symbols.append(module)
    except Exception as e:
      print(" ... failure (%s)" % (e))

  # Returns true if address is covered by some module that has loaded symbols.
  def _IsInModuleWithSymbols(self, addr):
    for module in self.modules_with_symbols:
      start = module.base_of_image
      end = start + module.size_of_image
      if (start <= addr) and (addr < end):
        return True
    return False

  # Find symbol covering the given address and return its name in format
  #     <symbol name>+<offset from the start>
  def FindSymbol(self, addr):
    if not self._IsInModuleWithSymbols(addr):
      return None
    i = bisect.bisect_left(self.symbols, addr)
    symbol = None
    if (0 < i) and self.symbols[i - 1].Covers(addr):
      symbol = self.symbols[i - 1]
    elif (i < len(self.symbols)) and self.symbols[i].Covers(addr):
      symbol = self.symbols[i]
    else:
      return None
    diff = addr - symbol.start
    return "%s+0x%x" % (symbol.name, diff)
class Printer(object):
  """Printer with indentation support."""

  def __init__(self):
    self.indent = 0

  def Indent(self):
    """Increase the indentation level by one step (two spaces)."""
    self.indent += 2

  def Dedent(self):
    """Decrease the indentation level by one step (two spaces)."""
    self.indent -= 2

  def Print(self, string):
    print("%s%s" % (self._IndentString(), string))

  def PrintLines(self, lines):
    prefix = self._IndentString()
    print("\n".join("%s%s" % (prefix, line) for line in lines))

  def _IndentString(self):
    return " " * self.indent
# Matches hex literals in disassembly text so they can be annotated with
# the heap objects they point to (see AnnotateAddresses).
ADDRESS_RE = re.compile(r"0x[0-9a-fA-F]+")
def FormatDisasmLine(start, heap, line):
  """Format one (offset, text) disassembly tuple for display.

  Lines whose address appears on the stack are marked with "=>", and on
  ia32 the absolute call target is patched into near-call instructions.
  """
  line_address = start + line[0]
  marker = "=>" if heap.stack_map.get(line_address) else "  "
  code = AnnotateAddresses(heap, line[1])
  # Compute the actual call target which the disassembler is too stupid
  # to figure out (it adds the call offset to the disassembly offset rather
  # than the absolute instruction address).
  if heap.reader.arch == MD_CPU_ARCHITECTURE_X86:
    if code.startswith("e8"):
      words = code.split()
      if len(words) > 6 and words[5] == "call":
        offset = int(words[4] + words[3] + words[2] + words[1], 16)
        target = (line_address + offset + 5) & 0xFFFFFFFF
        code = code.replace(words[6], "0x%08x" % target)
  # TODO(jkummerow): port this hack to ARM and x64.
  return "%s%08x %08x: %s" % (marker, line_address, line[0], code)
def AnnotateAddresses(heap, line):
  """Append a ';; obj, obj' annotation listing the heap objects whose
  tagged addresses appear as hex literals in the disassembly line."""
  annotations = []
  for match in ADDRESS_RE.finditer(line):
    found = heap.FindObject(int(match.group(0), 16))
    if not found:
      continue
    annotations.append(str(found))
  if not annotations:
    return line
  return "%s ;; %s" % (line, ", ".join(annotations))
class HeapObject(object):
  """Base class for wrappers around objects read out of the V8 heap."""

  def __init__(self, heap, map, address):
    self.heap = heap
    self.map = map
    self.address = address

  def Is(self, cls):
    """Type test against a wrapper class."""
    return isinstance(self, cls)

  def Print(self, p):
    p.Print(str(self))

  def __str__(self):
    if self.map is None:
      instance_type = "???"
    else:
      instance_type = INSTANCE_TYPES[self.map.instance_type]
    return "HeapObject(%s, %s)" % (self.heap.reader.FormatIntPtr(self.address),
                                   instance_type)

  def ObjectField(self, offset):
    """Read a tagged field and resolve it to an object or smi value."""
    raw = self.heap.reader.ReadUIntPtr(self.address + offset)
    return self.heap.FindObjectOrSmi(raw)

  def SmiField(self, offset):
    """Read a field expected to hold a smi; None if it is a heap pointer."""
    raw = self.heap.reader.ReadUIntPtr(self.address + offset)
    if (raw & 1) == 0:
      return raw / 2
    return None
class Map(HeapObject):
  """Wrapper for a V8 Map ("hidden class"): decodes the packed instance
  sizes, attribute bitfields, descriptor array and transition array."""
  def Decode(self, offset, size, value):
    # Extract a |size|-bit field starting at bit |offset| of |value|.
    return (value >> offset) & ((1 << size) - 1)
  # Instance Sizes
  # The following return *byte offsets* within the Map object; the packed
  # size fields are single bytes read via ReadByte().
  def InstanceSizesOffset(self):
    return self.heap.PointerSize()
  def InstanceSizeOffset(self):
    return self.InstanceSizesOffset()
  def InObjectProperties(self):
    return self.InstanceSizeOffset() + 1
  def PreAllocatedPropertyFields(self):
    return self.InObjectProperties() + 1
  def VisitorId(self):
    return self.PreAllocatedPropertyFields() + 1
  # Instance Attributes
  def InstanceAttributesOffset(self):
    return self.InstanceSizesOffset() + self.heap.IntSize()
  def InstanceTypeOffset(self):
    return self.InstanceAttributesOffset()
  def UnusedPropertyFieldsOffset(self):
    return self.InstanceTypeOffset() + 1
  def BitFieldOffset(self):
    return self.UnusedPropertyFieldsOffset() + 1
  def BitField2Offset(self):
    return self.BitFieldOffset() + 1
  # Other fields
  # These are pointer-sized fields laid out after the attribute words.
  def PrototypeOffset(self):
    return self.InstanceAttributesOffset() + self.heap.IntSize()
  def ConstructorOffset(self):
    return self.PrototypeOffset() + self.heap.PointerSize()
  def TransitionsOrBackPointerOffset(self):
    return self.ConstructorOffset() + self.heap.PointerSize()
  def DescriptorsOffset(self):
    return self.TransitionsOrBackPointerOffset() + self.heap.PointerSize()
  def CodeCacheOffset(self):
    return self.DescriptorsOffset() + self.heap.PointerSize()
  def DependentCodeOffset(self):
    return self.CodeCacheOffset() + self.heap.PointerSize()
  def BitField3Offset(self):
    return self.DependentCodeOffset() + self.heap.PointerSize()
  def ReadByte(self, offset):
    # Read one raw byte of the Map at the given offset.
    return self.heap.reader.ReadU8(self.address + offset)
  def Print(self, p):
    """Pretty-print the decoded map fields, descriptors and transitions."""
    p.Print("Map(%08x)" % (self.address))
    # NOTE(review): unlike the other three fields, 'visitor' prints the
    # field *offset* (self.VisitorId()) rather than
    # self.ReadByte(self.VisitorId()) — confirm whether that is intended.
    p.Print("- size: %d, inobject: %d, preallocated: %d, visitor: %d" % (
        self.ReadByte(self.InstanceSizeOffset()),
        self.ReadByte(self.InObjectProperties()),
        self.ReadByte(self.PreAllocatedPropertyFields()),
        self.VisitorId()))
    bitfield = self.ReadByte(self.BitFieldOffset())
    bitfield2 = self.ReadByte(self.BitField2Offset())
    p.Print("- %s, unused: %d, bf: %d, bf2: %d" % (
        INSTANCE_TYPES[self.ReadByte(self.InstanceTypeOffset())],
        self.ReadByte(self.UnusedPropertyFieldsOffset()),
        bitfield, bitfield2))
    # Elements kind lives in bits 3..7 of bitfield2.
    p.Print("- kind: %s" % (self.Decode(3, 5, bitfield2)))
    # bitfield3 is a tagged smi field, hence ObjectField not ReadByte.
    bitfield3 = self.ObjectField(self.BitField3Offset())
    p.Print(
        "- EnumLength: %d NumberOfOwnDescriptors: %d OwnsDescriptors: %s" % (
            self.Decode(0, 11, bitfield3),
            self.Decode(11, 11, bitfield3),
            self.Decode(25, 1, bitfield3)))
    p.Print("- IsShared: %s" % (self.Decode(22, 1, bitfield3)))
    p.Print("- FunctionWithPrototype: %s" % (self.Decode(23, 1, bitfield3)))
    p.Print("- DictionaryMap: %s" % (self.Decode(24, 1, bitfield3)))
    descriptors = self.ObjectField(self.DescriptorsOffset())
    if descriptors.__class__ == FixedArray:
      DescriptorArray(descriptors).Print(p)
    else:
      p.Print("Descriptors: %s" % (descriptors))
    transitions = self.ObjectField(self.TransitionsOrBackPointerOffset())
    if transitions.__class__ == FixedArray:
      TransitionArray(transitions).Print(p)
    else:
      p.Print("TransitionsOrBackPointer: %s" % (transitions))
  def __init__(self, heap, map, address):
    HeapObject.__init__(self, heap, map, address)
    # Cache the instance type byte so callers can classify without a Print.
    self.instance_type = \
        heap.reader.ReadU8(self.address + self.InstanceTypeOffset())
class String(HeapObject):
  """Base wrapper for V8 string objects; subclasses supply the chars."""

  def LengthOffset(self):
    # First word after the map is the hash, the second is the length.
    return self.heap.PointerSize() * 2

  def __init__(self, heap, map, address):
    HeapObject.__init__(self, heap, map, address)
    self.length = self.SmiField(self.LengthOffset())

  def GetChars(self):
    # Representation-specific subclasses override this.
    return "?string?"

  def Print(self, p):
    p.Print(str(self))

  def __str__(self):
    return '"%s"' % self.GetChars()
class SeqString(String):
  """Sequential string: characters stored inline after the header."""

  def CharsOffset(self):
    return self.heap.PointerSize() * 3

  def __init__(self, heap, map, address):
    String.__init__(self, heap, map, address)
    # Eagerly pull the character payload out of the dump.
    self.chars = heap.reader.ReadBytes(self.address + self.CharsOffset(),
                                       self.length)

  def GetChars(self):
    return self.chars
class ExternalString(String):
  # TODO(vegorov) fix ExternalString for X64 architecture
  # Chases resource -> WebKit StringImpl -> character data through the dump
  # using hard-coded 32-bit offsets; falls back to a placeholder if any
  # pointer on the way is not captured in the dump.
  RESOURCE_OFFSET = 12
  # NOTE(review): these two offsets assume a specific (32-bit WebKit)
  # StringImpl layout — confirm against the embedder before relying on them.
  WEBKIT_RESOUCE_STRING_IMPL_OFFSET = 4
  WEBKIT_STRING_IMPL_CHARS_OFFSET = 8
  def __init__(self, heap, map, address):
    String.__init__(self, heap, map, address)
    reader = heap.reader
    self.resource = \
        reader.ReadU32(self.address + ExternalString.RESOURCE_OFFSET)
    # Placeholder if any link in the pointer chain is missing from the dump.
    self.chars = "?external string?"
    if not reader.IsValidAddress(self.resource): return
    string_impl_address = self.resource + \
        ExternalString.WEBKIT_RESOUCE_STRING_IMPL_OFFSET
    if not reader.IsValidAddress(string_impl_address): return
    string_impl = reader.ReadU32(string_impl_address)
    chars_ptr_address = string_impl + \
        ExternalString.WEBKIT_STRING_IMPL_CHARS_OFFSET
    if not reader.IsValidAddress(chars_ptr_address): return
    chars_ptr = reader.ReadU32(chars_ptr_address)
    if not reader.IsValidAddress(chars_ptr): return
    # Payload is UTF-16: two bytes per character.
    raw_chars = reader.ReadBytes(chars_ptr, 2 * self.length)
    self.chars = codecs.getdecoder("utf16")(raw_chars)[0]
  def GetChars(self):
    return self.chars
class ConsString(String):
  """Concatenation of two strings; the characters live in the two halves."""

  def LeftOffset(self):
    return self.heap.PointerSize() * 3

  def RightOffset(self):
    return self.heap.PointerSize() * 4

  def __init__(self, heap, map, address):
    String.__init__(self, heap, map, address)
    self.left = self.ObjectField(self.LeftOffset())
    self.right = self.ObjectField(self.RightOffset())

  def GetChars(self):
    # Either half may be unreadable in a truncated dump, so this is a
    # best-effort reconstruction.
    try:
      return self.left.GetChars() + self.right.GetChars()
    # BUG FIX: was a bare 'except:', which also swallowed KeyboardInterrupt
    # and SystemExit; only ordinary errors should be converted to the
    # placeholder string.
    except Exception:
      return "***CAUGHT EXCEPTION IN GROKDUMP***"
class Oddball(HeapObject):
  """Oddball object (undefined, null, true, false, the-hole, ...)."""

  # Should match declarations in objects.h
  KINDS = [
    "False",
    "True",
    "TheHole",
    "Null",
    "ArgumentMarker",
    "Undefined",
    "Other"
  ]

  def ToStringOffset(self):
    return self.heap.PointerSize()

  def ToNumberOffset(self):
    return self.ToStringOffset() + self.heap.PointerSize()

  def KindOffset(self):
    return self.ToNumberOffset() + self.heap.PointerSize()

  def __init__(self, heap, map, address):
    HeapObject.__init__(self, heap, map, address)
    self.to_string = self.ObjectField(self.ToStringOffset())
    self.kind = self.SmiField(self.KindOffset())

  def Print(self, p):
    p.Print(str(self))

  def __str__(self):
    if self.to_string:
      return "Oddball(%08x, <%s>)" % (self.address, str(self.to_string))
    # No readable to_string: fall back to the kind name if it is in range.
    kind = "???"
    if 0 <= self.kind < len(Oddball.KINDS):
      kind = Oddball.KINDS[self.kind]
    return "Oddball(%08x, kind=%s)" % (self.address, kind)
class FixedArray(HeapObject):
  """Fixed-length array of tagged values: map word, smi length, elements."""

  def LengthOffset(self):
    return self.heap.PointerSize()

  def ElementsOffset(self):
    return self.heap.PointerSize() * 2

  def MemberOffset(self, i):
    """Byte offset of element i, scaled by the target's pointer size."""
    return self.ElementsOffset() + self.heap.PointerSize() * i

  def Get(self, i):
    return self.ObjectField(self.MemberOffset(i))

  def __init__(self, heap, map, address):
    HeapObject.__init__(self, heap, map, address)
    self.length = self.SmiField(self.LengthOffset())

  def Print(self, p):
    p.Print("FixedArray(%s) {" % self.heap.reader.FormatIntPtr(self.address))
    p.Indent()
    p.Print("length: %d" % self.length)
    for i in xrange(self.length):
      try:
        # BUG FIX: element offsets were computed with a hard-coded 4-byte
        # stride ('base_offset + 4 * i'), which mis-addresses elements on
        # 64-bit dumps; use MemberOffset(), which scales by pointer size.
        p.Print("[%08d] = %s" % (i, self.ObjectField(self.MemberOffset(i))))
      except TypeError:
        # Length or an element was unreadable; truncate the listing.
        p.Dedent()
        p.Print("...")
        p.Print("}")
        return
    p.Dedent()
    p.Print("}")

  def __str__(self):
    return "FixedArray(%08x, length=%d)" % (self.address, self.length)
class DescriptorArray(object):
  """Decodes a FixedArray holding a map's property descriptors."""

  def __init__(self, array):
    self.array = array

  def Length(self):
    # Slot 0 stores the number of descriptors.
    return self.array.Get(0)

  def Decode(self, offset, size, value):
    """Extract a |size|-bit field starting at bit |offset| of |value|."""
    return (value >> offset) & ((1 << size) - 1)

  # PropertyType names for the low 3 bits of the details word.
  TYPES = [
    "normal",
    "field",
    "function",
    "callbacks"
  ]

  def Type(self, value):
    return DescriptorArray.TYPES[self.Decode(0, 3, value)]

  def Attributes(self, value):
    """Decode the PropertyAttributes bits (3..5) of a details word."""
    attributes = self.Decode(3, 3, value)
    result = []
    # BUG FIX: the masks were 0/1/2 — '& 0' is always false, so "ReadOnly"
    # could never be reported and the other two labels were shifted onto
    # the wrong bits.  The attribute bits are READ_ONLY=1, DONT_ENUM=2,
    # DONT_DELETE=4 (see PropertyAttributes in objects.h).
    if (attributes & 1): result += ["ReadOnly"]
    if (attributes & 2): result += ["DontEnum"]
    if (attributes & 4): result += ["DontDelete"]
    return "[" + (",".join(result)) + "]"

  def Deleted(self, value):
    return self.Decode(6, 1, value) == 1

  def FieldIndex(self, value):
    return self.Decode(20, 11, value)

  def Pointer(self, value):
    return self.Decode(6, 11, value)

  def Details(self, di, value):
    """Return a tuple suitable for the Print format string below."""
    return (
      di,
      self.Type(value),
      self.Attributes(value),
      self.FieldIndex(value),
      self.Pointer(value)
    )

  def Print(self, p):
    length = self.Length()
    array = self.array
    p.Print("Descriptors(%08x, length=%d)" % (array.address, length))
    p.Print("[et] %s" % (array.Get(1)))
    # Entries are (name, details, value) triples starting at slot 2.
    for di in xrange(length):
      i = 2 + di * 3
      p.Print("0x%x" % (array.address + array.MemberOffset(i)))
      p.Print("[%i] name: %s" % (di, array.Get(i + 0)))
      p.Print("[%i] details: %s %s field-index %i pointer %i" % \
              self.Details(di, array.Get(i + 1)))
      p.Print("[%i] value: %s" % (di, array.Get(i + 2)))
    end = self.array.length // 3
    if length != end:
      p.Print("[%i-%i] slack descriptors" % (length, end))
class TransitionArray(object):
  """Decodes a FixedArray holding a map's transition table."""

  def __init__(self, array):
    self.array = array

  def IsSimpleTransition(self):
    # A single-target ("simple") transition array has at most two slots.
    return self.array.length <= 2

  def Length(self):
    # SimpleTransition cases
    if self.IsSimpleTransition():
      return self.array.length - 1
    # Full layout: back pointer, elements, prototype, then (symbol, target)
    # pairs.
    return (self.array.length - 3) // 2

  def Print(self, p):
    count = self.Length()
    array = self.array
    p.Print("Transitions(%08x, length=%d)" % (array.address, count))
    p.Print("[backpointer] %s" % (array.Get(0)))
    if self.IsSimpleTransition():
      if count == 1:
        p.Print("[simple target] %s" % (array.Get(1)))
      return
    elements = array.Get(1)
    if elements is not None:
      p.Print("[elements ] %s" % (elements))
    prototype = array.Get(2)
    if prototype is not None:
      p.Print("[prototype ] %s" % (prototype))
    for di in xrange(count):
      i = 3 + di * 2
      p.Print("[%i] symbol: %s" % (di, array.Get(i + 0)))
      p.Print("[%i] target: %s" % (di, array.Get(i + 1)))
class JSFunction(HeapObject):
  """JSFunction heap object: code entry pointer plus SharedFunctionInfo."""

  def CodeEntryOffset(self):
    return 3 * self.heap.PointerSize()

  def SharedOffset(self):
    return 5 * self.heap.PointerSize()

  def __init__(self, heap, map, address):
    HeapObject.__init__(self, heap, map, address)
    code_entry = \
        heap.reader.ReadU32(self.address + self.CodeEntryOffset())
    # The code entry points at the instructions; recover the tagged Code
    # object pointer by stepping back over the header and re-tagging (+1).
    self.code = heap.FindObject(code_entry - Code.HeaderSize(heap) + 1)
    self.shared = self.ObjectField(self.SharedOffset())

  def Print(self, p):
    # BUG FIX: removed a dead 'source' local that re-joined the function
    # source and was never used; the source is printed via PrintLines below.
    p.Print("JSFunction(%s) {" % self.heap.reader.FormatIntPtr(self.address))
    p.Indent()
    p.Print("inferred name: %s" % self.shared.inferred_name)
    if self.shared.script.Is(Script) and self.shared.script.name.Is(String):
      p.Print("script name: %s" % self.shared.script.name)
    p.Print("source:")
    p.PrintLines(self._GetSource().split("\n"))
    p.Print("code:")
    self.code.Print(p)
    if self.code != self.shared.code:
      # Optimized functions keep the unoptimized version on the SFI.
      p.Print("unoptimized code:")
      self.shared.code.Print(p)
    p.Dedent()
    p.Print("}")

  def __str__(self):
    inferred_name = ""
    if self.shared is not None and self.shared.Is(SharedFunctionInfo):
      inferred_name = self.shared.inferred_name
    return "JSFunction(%s, %s) " % \
        (self.heap.reader.FormatIntPtr(self.address), inferred_name)

  def _GetSource(self):
    """Slice this function's source text out of its script, if readable."""
    source = "?source?"
    start = self.shared.start_position
    end = self.shared.end_position
    if not self.shared.script.Is(Script): return source
    script_source = self.shared.script.source
    if not script_source.Is(String): return source
    if start and end:
      source = script_source.GetChars()[start:end]
    return source
class SharedFunctionInfo(HeapObject):
  """SharedFunctionInfo: code, script and source-position metadata."""

  def CodeOffset(self):
    return 2 * self.heap.PointerSize()

  def ScriptOffset(self):
    return 7 * self.heap.PointerSize()

  def InferredNameOffset(self):
    return 9 * self.heap.PointerSize()

  def EndPositionOffset(self):
    return 12 * self.heap.PointerSize() + 4 * self.heap.IntSize()

  def StartPositionAndTypeOffset(self):
    return 12 * self.heap.PointerSize() + 5 * self.heap.IntSize()

  def __init__(self, heap, map, address):
    HeapObject.__init__(self, heap, map, address)
    self.code = self.ObjectField(self.CodeOffset())
    self.script = self.ObjectField(self.ScriptOffset())
    self.inferred_name = self.ObjectField(self.InferredNameOffset())
    if heap.PointerSize() == 8:
      # On 64-bit targets the positions are stored as 32-bit pseudo-smis
      # (value << 2).
      # BUG FIX: the two ReadU32 calls below passed the raw field offset
      # instead of an absolute address; the object base (self.address)
      # must be added, as SmiField does implicitly in the 32-bit branch.
      start_position_and_type = \
          heap.reader.ReadU32(self.address + self.StartPositionAndTypeOffset())
      self.start_position = start_position_and_type >> 2
      pseudo_smi_end_position = \
          heap.reader.ReadU32(self.address + self.EndPositionOffset())
      self.end_position = pseudo_smi_end_position >> 2
    else:
      start_position_and_type = \
          self.SmiField(self.StartPositionAndTypeOffset())
      if start_position_and_type:
        self.start_position = start_position_and_type >> 2
      else:
        self.start_position = None
      self.end_position = \
          self.SmiField(self.EndPositionOffset())
class Script(HeapObject):
  """Script object: exposes its source string and its name."""

  def SourceOffset(self):
    return self.heap.PointerSize()

  def NameOffset(self):
    return self.SourceOffset() + self.heap.PointerSize()

  def __init__(self, heap, map, address):
    HeapObject.__init__(self, heap, map, address)
    self.source = self.ObjectField(self.SourceOffset())
    self.name = self.ObjectField(self.NameOffset())
class CodeCache(HeapObject):
  """CodeCache object: holds the default and normal-type code caches."""

  def DefaultCacheOffset(self):
    return self.heap.PointerSize()

  def NormalTypeCacheOffset(self):
    return self.DefaultCacheOffset() + self.heap.PointerSize()

  def __init__(self, heap, map, address):
    HeapObject.__init__(self, heap, map, address)
    self.default_cache = self.ObjectField(self.DefaultCacheOffset())
    self.normal_type_cache = self.ObjectField(self.NormalTypeCacheOffset())

  def Print(self, p):
    p.Print("CodeCache(%s) {" % self.heap.reader.FormatIntPtr(self.address))
    p.Indent()
    p.Print("default cache: %s" % self.default_cache)
    p.Print("normal type cache: %s" % self.normal_type_cache)
    p.Dedent()
    p.Print("}")
class Code(HeapObject):
  """Code object: a fixed-size header followed by machine instructions."""

  CODE_ALIGNMENT_MASK = (1 << 5) - 1

  def InstructionSizeOffset(self):
    return self.heap.PointerSize()

  @staticmethod
  def HeaderSize(heap):
    # Sum of the header fields, rounded up to the 32-byte code alignment.
    unaligned = (heap.PointerSize() + heap.IntSize() +
                 4 * heap.PointerSize() + 3 * heap.IntSize() +
                 Code.CODE_ALIGNMENT_MASK)
    return unaligned & ~Code.CODE_ALIGNMENT_MASK

  def __init__(self, heap, map, address):
    HeapObject.__init__(self, heap, map, address)
    # First instruction starts right after the aligned header.
    self.entry = self.address + Code.HeaderSize(heap)
    self.instruction_size = \
        heap.reader.ReadU32(self.address + self.InstructionSizeOffset())

  def Print(self, p):
    lines = self.heap.reader.GetDisasmLines(self.entry, self.instruction_size)
    p.Print("Code(%s) {" % self.heap.reader.FormatIntPtr(self.address))
    p.Indent()
    p.Print("instruction_size: %d" % self.instruction_size)
    p.PrintLines(self._FormatLine(line) for line in lines)
    p.Dedent()
    p.Print("}")

  def _FormatLine(self, line):
    return FormatDisasmLine(self.entry, self.heap, line)
class V8Heap(object):
  """Resolves tagged addresses found in the dump into wrapper objects,
  caching every object it has identified."""
  # Maps instance-type names to the wrapper class used to decode them;
  # anything not listed falls back to plain HeapObject.
  CLASS_MAP = {
    "SYMBOL_TYPE": SeqString,
    "ONE_BYTE_SYMBOL_TYPE": SeqString,
    "CONS_SYMBOL_TYPE": ConsString,
    "CONS_ONE_BYTE_SYMBOL_TYPE": ConsString,
    "EXTERNAL_SYMBOL_TYPE": ExternalString,
    "EXTERNAL_SYMBOL_WITH_ONE_BYTE_DATA_TYPE": ExternalString,
    "EXTERNAL_ONE_BYTE_SYMBOL_TYPE": ExternalString,
    "SHORT_EXTERNAL_SYMBOL_TYPE": ExternalString,
    "SHORT_EXTERNAL_SYMBOL_WITH_ONE_BYTE_DATA_TYPE": ExternalString,
    "SHORT_EXTERNAL_ONE_BYTE_SYMBOL_TYPE": ExternalString,
    "STRING_TYPE": SeqString,
    "ONE_BYTE_STRING_TYPE": SeqString,
    "CONS_STRING_TYPE": ConsString,
    "CONS_ONE_BYTE_STRING_TYPE": ConsString,
    "EXTERNAL_STRING_TYPE": ExternalString,
    "EXTERNAL_STRING_WITH_ONE_BYTE_DATA_TYPE": ExternalString,
    "EXTERNAL_ONE_BYTE_STRING_TYPE": ExternalString,
    "MAP_TYPE": Map,
    "ODDBALL_TYPE": Oddball,
    "FIXED_ARRAY_TYPE": FixedArray,
    "JS_FUNCTION_TYPE": JSFunction,
    "SHARED_FUNCTION_INFO_TYPE": SharedFunctionInfo,
    "SCRIPT_TYPE": Script,
    "CODE_CACHE_TYPE": CodeCache,
    "CODE_TYPE": Code,
  }
  def __init__(self, reader, stack_map):
    self.reader = reader
    self.stack_map = stack_map
    # Cache of tagged address -> wrapper object.
    self.objects = {}
  def FindObjectOrSmi(self, tagged_address):
    # An even value is a smi (value << 1); odd values are heap pointers.
    if (tagged_address & 1) == 0: return tagged_address / 2
    return self.FindObject(tagged_address)
  def FindObject(self, tagged_address):
    """Return a wrapper for the heap object at a tagged address, or None
    if the address is untagged, unmapped or has an unknown map."""
    if tagged_address in self.objects:
      return self.objects[tagged_address]
    if (tagged_address & self.ObjectAlignmentMask()) != 1: return None
    address = tagged_address - 1
    if not self.reader.IsValidAddress(address): return None
    map_tagged_address = self.reader.ReadUIntPtr(address)
    if tagged_address == map_tagged_address:
      # Meta map?
      # The meta map is its own map; validate via its instance type.
      meta_map = Map(self, None, address)
      instance_type_name = INSTANCE_TYPES.get(meta_map.instance_type)
      if instance_type_name != "MAP_TYPE": return None
      meta_map.map = meta_map
      object = meta_map
    else:
      map = self.FindMap(map_tagged_address)
      if map is None: return None
      instance_type_name = INSTANCE_TYPES.get(map.instance_type)
      if instance_type_name is None: return None
      cls = V8Heap.CLASS_MAP.get(instance_type_name, HeapObject)
      object = cls(self, map, address)
    self.objects[tagged_address] = object
    return object
  def FindMap(self, tagged_address):
    # Maps have their own (stricter) alignment; no caching here.
    if (tagged_address & self.MapAlignmentMask()) != 1: return None
    address = tagged_address - 1
    if not self.reader.IsValidAddress(address): return None
    object = Map(self, None, address)
    return object
  def IntSize(self):
    return 4
  def PointerSize(self):
    return self.reader.PointerSize()
  def ObjectAlignmentMask(self):
    return self.PointerSize() - 1
  def MapAlignmentMask(self):
    # NOTE(review): x86 uses a 32-byte map alignment here while x64/ARM use
    # 16 bytes — confirm these match the dumped V8 version's map space.
    if self.reader.arch == MD_CPU_ARCHITECTURE_AMD64:
      return (1 << 4) - 1
    elif self.reader.arch == MD_CPU_ARCHITECTURE_ARM:
      return (1 << 4) - 1
    elif self.reader.arch == MD_CPU_ARCHITECTURE_X86:
      return (1 << 5) - 1
  def PageAlignmentMask(self):
    # Heap pages are 1MB-aligned.
    return (1 << 20) - 1
class KnownObject(HeapObject):
  """Placeholder wrapper for a well-known object identified only by name."""

  def __init__(self, heap, known_name):
    # No map and no address: this object is purely symbolic.
    HeapObject.__init__(self, heap, None, None)
    self.known_name = known_name

  def __str__(self):
    return "<%s>" % self.known_name
class KnownMap(HeapObject):
  """Placeholder map with a fixed, well-known instance type."""

  def __init__(self, heap, known_name, instance_type):
    # No map and no address: this map is purely symbolic.
    HeapObject.__init__(self, heap, None, None)
    self.instance_type = instance_type
    self.known_name = known_name

  def __str__(self):
    return "<%s>" % self.known_name
# Line formats of the ".comments" sidecar file: user comments
# ("C <address> <text>") and remembered page addresses ("P <kind> <address>").
COMMENT_RE = re.compile(r"^C (0x[0-9a-fA-F]+) (.*)$")
PAGEADDRESS_RE = re.compile(
    r"^P (mappage|pointerpage|datapage) (0x[0-9a-fA-F]+)$")
class InspectionInfo(object):
  """Persistent annotations for a minidump (user comments and remembered
  page addresses, stored in a ".comments" sidecar file) plus precomputed
  CSS style classes for interesting addresses."""

  def __init__(self, minidump_name, reader):
    self.comment_file = minidump_name + ".comments"
    self.address_comments = {}
    self.page_address = {}
    if os.path.exists(self.comment_file):
      # NOTE: redundant f.close() calls inside the 'with' blocks were
      # removed throughout this class; the context manager closes the file.
      with open(self.comment_file, "r") as f:
        lines = f.readlines()
      for l in lines:
        m = COMMENT_RE.match(l)
        if m:
          self.address_comments[int(m.group(1), 0)] = m.group(2)
        m = PAGEADDRESS_RE.match(l)
        if m:
          self.page_address[m.group(1)] = int(m.group(2), 0)
    self.reader = reader
    self.styles = {}
    self.color_addresses()

  def get_page_address(self, page_kind):
    """Return the remembered address for a page kind, or 0 if unknown."""
    return self.page_address.get(page_kind, 0)

  def save_page_address(self, page_kind, address):
    """Append a page-address record to the sidecar file."""
    with open(self.comment_file, "a") as f:
      f.write("P %s 0x%x\n" % (page_kind, address))

  def color_addresses(self):
    """Assign style classes to stack slots, frame pointers and the PC."""
    # Color all stack addresses.
    exception_thread = self.reader.thread_map[self.reader.exception.thread_id]
    stack_top = self.reader.ExceptionSP()
    stack_bottom = exception_thread.stack.start + \
        exception_thread.stack.memory.data_size
    frame_pointer = self.reader.ExceptionFP()
    self.styles[frame_pointer] = "frame"
    for slot in xrange(stack_top, stack_bottom, self.reader.PointerSize()):
      self.styles[slot] = "stackaddress"
    for slot in xrange(stack_top, stack_bottom, self.reader.PointerSize()):
      maybe_address = self.reader.ReadUIntPtr(slot)
      self.styles[maybe_address] = "stackval"
      if slot == frame_pointer:
        self.styles[slot] = "frame"
        # Saved frame pointers form a linked list up the stack.
        frame_pointer = maybe_address
    self.styles[self.reader.ExceptionIP()] = "pc"

  def get_style_class(self, address):
    return self.styles.get(address, None)

  def get_style_class_string(self, address):
    """Return a ' class="..." ' HTML attribute fragment, or ""."""
    style = self.get_style_class(address)
    if style is not None:
      return " class=\"%s\" " % style
    else:
      return ""

  def set_comment(self, address, comment):
    """Record a comment in memory and append it to the sidecar file."""
    self.address_comments[address] = comment
    with open(self.comment_file, "a") as f:
      f.write("C 0x%x %s\n" % (address, comment))

  def get_comment(self, address):
    """Return the comment for an address, or "" if there is none."""
    return self.address_comments.get(address, "")
class InspectionPadawan(object):
  """The padawan can improve annotations by sensing well-known objects.

  Uses user-marked first pages of the old data/pointer/map spaces plus the
  generated KNOWN_OBJECTS / KNOWN_MAPS tables to name heap values that a
  plain V8Heap lookup cannot decode.
  """
  def __init__(self, reader, heap):
    self.reader = reader
    self.heap = heap
    # First-page addresses of each space as marked by the user; 0 = unknown.
    self.known_first_map_page = 0
    self.known_first_data_page = 0
    self.known_first_pointer_page = 0

  def __getattr__(self, name):
    """An InspectionPadawan can be used instead of V8Heap, even though
    it does not inherit from V8Heap (aka. mixin)."""
    return getattr(self.heap, name)

  def GetPageOffset(self, tagged_address):
    # Offset of the address within its heap page.
    return tagged_address & self.heap.PageAlignmentMask()

  def IsInKnownMapSpace(self, tagged_address):
    page_address = tagged_address & ~self.heap.PageAlignmentMask()
    return page_address == self.known_first_map_page

  def IsInKnownOldSpace(self, tagged_address):
    page_address = tagged_address & ~self.heap.PageAlignmentMask()
    return page_address in [self.known_first_data_page,
                            self.known_first_pointer_page]

  def ContainingKnownOldSpaceName(self, tagged_address):
    # Name of the old space whose known first page contains the address.
    page_address = tagged_address & ~self.heap.PageAlignmentMask()
    if page_address == self.known_first_data_page: return "OLD_DATA_SPACE"
    if page_address == self.known_first_pointer_page: return "OLD_POINTER_SPACE"
    return None

  def SenseObject(self, tagged_address):
    """Identify the heap object at a tagged address, or return None.

    Tries, in order: the well-known-objects table (for known old-space
    pages), the well-known-maps table, the real heap, and finally decoding
    an object header directly from the dump via its map pointer.
    """
    if self.IsInKnownOldSpace(tagged_address):
      offset = self.GetPageOffset(tagged_address)
      lookup_key = (self.ContainingKnownOldSpaceName(tagged_address), offset)
      known_obj_name = KNOWN_OBJECTS.get(lookup_key)
      if known_obj_name:
        return KnownObject(self, known_obj_name)
    if self.IsInKnownMapSpace(tagged_address):
      known_map = self.SenseMap(tagged_address)
      if known_map:
        return known_map
    found_obj = self.heap.FindObject(tagged_address)
    if found_obj: return found_obj
    address = tagged_address - 1
    if self.reader.IsValidAddress(address):
      # First word of a heap object is its map pointer; use the map's
      # instance type to pick a wrapper class.
      map_tagged_address = self.reader.ReadUIntPtr(address)
      map = self.SenseMap(map_tagged_address)
      if map is None: return None
      instance_type_name = INSTANCE_TYPES.get(map.instance_type)
      if instance_type_name is None: return None
      cls = V8Heap.CLASS_MAP.get(instance_type_name, HeapObject)
      return cls(self, map, address)
    return None

  def SenseMap(self, tagged_address):
    """Like SenseObject, but for Map objects in the map space."""
    if self.IsInKnownMapSpace(tagged_address):
      offset = self.GetPageOffset(tagged_address)
      known_map_info = KNOWN_MAPS.get(offset)
      if known_map_info:
        known_map_type, known_map_name = known_map_info
        return KnownMap(self, known_map_name, known_map_type)
    found_map = self.heap.FindMap(tagged_address)
    if found_map: return found_map
    return None

  def FindObjectOrSmi(self, tagged_address):
    """When used as a mixin in place of V8Heap."""
    found_obj = self.SenseObject(tagged_address)
    if found_obj: return found_obj
    if (tagged_address & 1) == 0:
      # Low tag bit clear: a small integer (Smi), value in the upper bits.
      return "Smi(%d)" % (tagged_address / 2)
    else:
      return "Unknown(%s)" % self.reader.FormatIntPtr(tagged_address)

  def FindObject(self, tagged_address):
    """When used as a mixin in place of V8Heap."""
    raise NotImplementedError

  def FindMap(self, tagged_address):
    """When used as a mixin in place of V8Heap."""
    raise NotImplementedError

  def PrintKnowledge(self):
    # Report the currently marked first-page addresses.
    print " known_first_map_page = %s\n"\
          " known_first_data_page = %s\n"\
          " known_first_pointer_page = %s" % (
          self.reader.FormatIntPtr(self.known_first_map_page),
          self.reader.FormatIntPtr(self.known_first_data_page),
          self.reader.FormatIntPtr(self.known_first_pointer_page))
# HTML emitted at the top of every inspector page. Two %-placeholders are
# filled in by InspectionWebFormatter.output_header:
#   query_dump - URL-encoded "dump=<name>" query string
#   dump_name  - escaped file name of the minidump being shown
WEB_HEADER = """
<!DOCTYPE html>
<html>
<head>
<meta content="text/html; charset=utf-8" http-equiv="content-type">
<style media="screen" type="text/css">
.code {
font-family: monospace;
}
.dmptable {
border-collapse : collapse;
border-spacing : 0px;
}
.codedump {
border-collapse : collapse;
border-spacing : 0px;
}
.addrcomments {
border : 0px;
}
.register {
padding-right : 1em;
}
.header {
clear : both;
}
.header .navigation {
float : left;
}
.header .dumpname {
float : right;
}
tr.highlight-line {
background-color : yellow;
}
.highlight {
background-color : magenta;
}
tr.inexact-highlight-line {
background-color : pink;
}
input {
background-color: inherit;
border: 1px solid LightGray;
}
.dumpcomments {
border : 1px solid LightGray;
width : 32em;
}
.regions td {
padding:0 15px 0 15px;
}
.stackframe td {
background-color : cyan;
}
.stackaddress {
background-color : LightGray;
}
.stackval {
background-color : LightCyan;
}
.frame {
background-color : cyan;
}
.commentinput {
width : 20em;
}
a.nodump:visited {
color : black;
text-decoration : none;
}
a.nodump:link {
color : black;
text-decoration : none;
}
a:visited {
color : blueviolet;
}
a:link {
color : blue;
}
.disasmcomment {
color : DarkGreen;
}
</style>
<script type="application/javascript">
var address_str = "address-";
var address_len = address_str.length;
function comment() {
var s = event.srcElement.id;
var index = s.indexOf(address_str);
if (index >= 0) {
send_comment(s.substring(index + address_len), event.srcElement.value);
}
}
function send_comment(address, comment) {
xmlhttp = new XMLHttpRequest();
address = encodeURIComponent(address)
comment = encodeURIComponent(comment)
xmlhttp.open("GET",
"setcomment?%(query_dump)s&address=" + address +
"&comment=" + comment, true);
xmlhttp.send();
}
var dump_str = "dump-";
var dump_len = dump_str.length;
function dump_comment() {
var s = event.srcElement.id;
var index = s.indexOf(dump_str);
if (index >= 0) {
send_dump_desc(s.substring(index + dump_len), event.srcElement.value);
}
}
function send_dump_desc(name, desc) {
xmlhttp = new XMLHttpRequest();
name = encodeURIComponent(name)
desc = encodeURIComponent(desc)
xmlhttp.open("GET",
"setdumpdesc?dump=" + name +
"&description=" + desc, true);
xmlhttp.send();
}
function onpage(kind, address) {
xmlhttp = new XMLHttpRequest();
kind = encodeURIComponent(kind)
address = encodeURIComponent(address)
xmlhttp.onreadystatechange = function() {
if (xmlhttp.readyState==4 && xmlhttp.status==200) {
location.reload(true)
}
};
xmlhttp.open("GET",
"setpageaddress?%(query_dump)s&kind=" + kind +
"&address=" + address);
xmlhttp.send();
}
</script>
<title>Dump %(dump_name)s</title>
</head>
<body>
<div class="header">
<form class="navigation" action="search.html">
<a href="summary.html?%(query_dump)s">Context info</a>
<a href="info.html?%(query_dump)s">Dump info</a>
<a href="modules.html?%(query_dump)s">Modules</a>
<input type="search" name="val">
<input type="submit" name="search" value="Search">
<input type="hidden" name="dump" value="%(dump_name)s">
</form>
<form class="navigation" action="disasm.html#highlight">
<input type="search" name="val">
<input type="submit" name="disasm" value="Disasm">
<a href="dumps.html">Dumps...</a>
</form>
</div>
<br>
<hr>
"""

# Closing markup matching WEB_HEADER; written by output_footer.
WEB_FOOTER = """
</body>
</html>
"""
class WebParameterError(Exception):
  """Raised when a web request carries invalid or unusable parameters."""

  def __init__(self, message):
    super(WebParameterError, self).__init__(message)
class InspectionWebHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """Routes HTTP GET requests of the inspection UI.

  Each path maps to a method of the InspectionWebFormatter selected by the
  "dump" query parameter (falling back to the server's default dump).
  """
  def formatter(self, query_components):
    # Select the formatter for the "dump" query parameter (None -> default).
    name = query_components.get("dump", [None])[0]
    return self.server.get_dump_formatter(name)

  def send_success_html_headers(self):
    # 200 response with caching fully disabled so that edited annotations
    # (comments, page marks) are always re-fetched.
    self.send_response(200)
    self.send_header("Cache-Control", "no-cache, no-store, must-revalidate")
    self.send_header("Pragma", "no-cache")
    self.send_header("Expires", "0")
    self.send_header('Content-type','text/html')
    self.end_headers()
    return

  def do_GET(self):
    """Dispatch on the request path; unknown paths or bad params yield 404."""
    try:
      parsedurl = urlparse.urlparse(self.path)
      query_components = urlparse.parse_qs(parsedurl.query)
      if parsedurl.path == "/dumps.html":
        self.send_success_html_headers()
        self.server.output_dumps(self.wfile)
      elif parsedurl.path == "/summary.html":
        self.send_success_html_headers()
        self.formatter(query_components).output_summary(self.wfile)
      elif parsedurl.path == "/info.html":
        self.send_success_html_headers()
        self.formatter(query_components).output_info(self.wfile)
      elif parsedurl.path == "/modules.html":
        self.send_success_html_headers()
        self.formatter(query_components).output_modules(self.wfile)
      elif parsedurl.path == "/search.html":
        address = query_components.get("val", [])
        if len(address) != 1:
          self.send_error(404, "Invalid params")
          return
        self.send_success_html_headers()
        self.formatter(query_components).output_search_res(
            self.wfile, address[0])
      elif parsedurl.path == "/disasm.html":
        address = query_components.get("val", [])
        exact = query_components.get("exact", ["on"])
        if len(address) != 1:
          self.send_error(404, "Invalid params")
          return
        self.send_success_html_headers()
        self.formatter(query_components).output_disasm(
            self.wfile, address[0], exact[0])
      elif parsedurl.path == "/data.html":
        address = query_components.get("val", [])
        datakind = query_components.get("type", ["address"])
        if len(address) == 1 and len(datakind) == 1:
          self.send_success_html_headers()
          self.formatter(query_components).output_data(
              self.wfile, address[0], datakind[0])
        else:
          self.send_error(404,'Invalid params')
      elif parsedurl.path == "/setdumpdesc":
        name = query_components.get("dump", [""])
        description = query_components.get("description", [""])
        if len(name) == 1 and len(description) == 1:
          name = name[0]
          description = description[0]
          if self.server.set_dump_desc(name, description):
            self.send_success_html_headers()
            self.wfile.write("OK")
            return
        self.send_error(404,'Invalid params')
      elif parsedurl.path == "/setcomment":
        address = query_components.get("address", [])
        comment = query_components.get("comment", [""])
        if len(address) == 1 and len(comment) == 1:
          address = address[0]
          comment = comment[0]
          self.formatter(query_components).set_comment(address, comment)
          self.send_success_html_headers()
          self.wfile.write("OK")
        else:
          self.send_error(404,'Invalid params')
      elif parsedurl.path == "/setpageaddress":
        kind = query_components.get("kind", [])
        address = query_components.get("address", [""])
        if len(kind) == 1 and len(address) == 1:
          kind = kind[0]
          address = address[0]
          self.formatter(query_components).set_page_address(kind, address)
          self.send_success_html_headers()
          self.wfile.write("OK")
        else:
          self.send_error(404,'Invalid params')
      else:
        self.send_error(404,'File Not Found: %s' % self.path)
    except IOError:
      self.send_error(404,'File Not Found: %s' % self.path)
    except WebParameterError as e:
      self.send_error(404, 'Web parameter error: %s' % e.message)
HTML_REG_FORMAT = "<span class=\"register\"><b>%s</b>: %s</span><br/>\n"
class InspectionWebFormatter(object):
  """Renders the HTML pages of the inspection web UI for a single dump.

  Wraps a MinidumpReader plus the V8Heap/InspectionPadawan/InspectionInfo
  helpers, and emits the summary, info, modules, data, disassembly and
  search pages.
  """
  # Detail levels for output_context().
  CONTEXT_FULL = 0
  CONTEXT_SHORT = 1

  def __init__(self, switches, minidump_name, http_server):
    self.dumpfilename = os.path.split(minidump_name)[1]
    self.encfilename = urllib.urlencode({ 'dump' : self.dumpfilename })
    self.reader = MinidumpReader(switches, minidump_name)
    self.server = http_server
    # Set up the heap
    exception_thread = self.reader.thread_map[self.reader.exception.thread_id]
    stack_top = self.reader.ExceptionSP()
    stack_bottom = exception_thread.stack.start + \
        exception_thread.stack.memory.data_size
    # Map from value found on the stack to the slot holding it (the pc is
    # mapped to -1); used to mark stack-referenced code addresses.
    stack_map = {self.reader.ExceptionIP(): -1}
    for slot in xrange(stack_top, stack_bottom, self.reader.PointerSize()):
      maybe_address = self.reader.ReadUIntPtr(slot)
      if not maybe_address in stack_map:
        stack_map[maybe_address] = slot
    self.heap = V8Heap(self.reader, stack_map)
    self.padawan = InspectionPadawan(self.reader, self.heap)
    self.comments = InspectionInfo(minidump_name, self.reader)
    # Restore previously saved page marks into the padawan.
    self.padawan.known_first_data_page = (
        self.comments.get_page_address("datapage"))
    self.padawan.known_first_map_page = (
        self.comments.get_page_address("mappage"))
    self.padawan.known_first_pointer_page = (
        self.comments.get_page_address("pointerpage"))

  def set_comment(self, straddress, comment):
    """Store a user comment for the address given as a string."""
    try:
      address = int(straddress, 0)
      self.comments.set_comment(address, comment)
    except ValueError:
      print "Invalid address"

  def set_page_address(self, kind, straddress):
    """Mark the first page of a heap space and persist the mark."""
    try:
      address = int(straddress, 0)
      if kind == "datapage":
        self.padawan.known_first_data_page = address
      elif kind == "mappage":
        self.padawan.known_first_map_page = address
      elif kind == "pointerpage":
        self.padawan.known_first_pointer_page = address
      self.comments.save_page_address(kind, address)
    except ValueError:
      print "Invalid address"

  def td_from_address(self, f, address):
    # Open a table cell styled according to the address' annotation.
    f.write("<td %s>" % self.comments.get_style_class_string(address))

  def format_address(self, maybeaddress, straddress = None):
    """Format an address as a search link (class "nodump" if not in dump)."""
    if maybeaddress is None:
      return "not in dump"
    else:
      if straddress is None:
        straddress = "0x" + self.reader.FormatIntPtr(maybeaddress)
      style_class = ""
      if not self.reader.IsValidAddress(maybeaddress):
        style_class = " class=\"nodump\""
      return ("<a %s href=\"search.html?%s&val=%s\">%s</a>" %
              (style_class, self.encfilename, straddress, straddress))

  def output_header(self, f):
    # Fill the WEB_HEADER template for this dump.
    f.write(WEB_HEADER %
        { "query_dump" : self.encfilename,
          "dump_name" : cgi.escape(self.dumpfilename) })

  def output_footer(self, f):
    f.write(WEB_FOOTER)

  # Cap on how much of the stack the summary page shows (bytes).
  MAX_CONTEXT_STACK = 4096

  def output_summary(self, f):
    """Emit the summary page: short context, disasm at pc, top of stack."""
    self.output_header(f)
    f.write('<div class="code">')
    self.output_context(f, InspectionWebFormatter.CONTEXT_SHORT)
    self.output_disasm_pc(f)
    # Output stack
    exception_thread = self.reader.thread_map[self.reader.exception.thread_id]
    stack_bottom = exception_thread.stack.start + \
        min(exception_thread.stack.memory.data_size, self.MAX_CONTEXT_STACK)
    stack_top = self.reader.ExceptionSP()
    self.output_words(f, stack_top - 16, stack_bottom, stack_top, "Stack")
    f.write('</div>')
    self.output_footer(f)
    return

  def output_info(self, f):
    """Emit the dump info page: description, timestamp, full context."""
    self.output_header(f)
    f.write("<h3>Dump info</h3>\n")
    f.write("Description: ")
    self.server.output_dump_desc_field(f, self.dumpfilename)
    f.write("<br>\n")
    f.write("Filename: ")
    f.write("<span class=\"code\">%s</span><br>\n" % (self.dumpfilename))
    # NOTE(review): "time_date_stampt" is presumably the reader's attribute
    # spelling (sic) - defined elsewhere in this tool.
    dt = datetime.datetime.fromtimestamp(self.reader.header.time_date_stampt)
    f.write("Timestamp: %s<br>\n" % dt.strftime('%Y-%m-%d %H:%M:%S'))
    self.output_context(f, InspectionWebFormatter.CONTEXT_FULL)
    self.output_address_ranges(f)
    self.output_footer(f)
    return

  def output_address_ranges(self, f):
    """Emit a table of all memory regions present in the dump."""
    regions = {}
    def print_region(_reader, start, size, _location):
      regions[start] = size
    self.reader.ForEachMemoryRegion(print_region)
    f.write("<h3>Available memory regions</h3>\n")
    f.write('<div class="code">')
    f.write("<table class=\"regions\">\n")
    f.write("<thead><tr>")
    f.write("<th>Start address</th>")
    f.write("<th>End address</th>")
    f.write("<th>Number of bytes</th>")
    f.write("</tr></thead>\n")
    for start in sorted(regions):
      size = regions[start]
      f.write("<tr>")
      f.write("<td>%s</td>" % self.format_address(start))
      f.write("<td>&nbsp;%s</td>" % self.format_address(start + size))
      f.write("<td>&nbsp;%d</td>" % size)
      f.write("</tr>\n")
    f.write("</table>\n")
    f.write('</div>')
    return

  def output_module_details(self, f, module):
    """Emit name, address range, versions and timestamp of one module."""
    f.write("<b>%s</b>" % GetModuleName(self.reader, module))
    file_version = GetVersionString(module.version_info.dwFileVersionMS,
                                    module.version_info.dwFileVersionLS)
    product_version = GetVersionString(module.version_info.dwProductVersionMS,
                                       module.version_info.dwProductVersionLS)
    f.write("<br>&nbsp;&nbsp;\n")
    f.write("base: %s" % self.reader.FormatIntPtr(module.base_of_image))
    f.write("<br>&nbsp;&nbsp;\n")
    f.write("  end: %s" % self.reader.FormatIntPtr(module.base_of_image +
                                            module.size_of_image))
    f.write("<br>&nbsp;&nbsp;\n")
    f.write("  file version: %s" % file_version)
    f.write("<br>&nbsp;&nbsp;\n")
    f.write("  product version: %s" % product_version)
    f.write("<br>&nbsp;&nbsp;\n")
    time_date_stamp = datetime.datetime.fromtimestamp(module.time_date_stamp)
    f.write("  timestamp: %s" % time_date_stamp)
    f.write("<br>\n");

  def output_modules(self, f):
    """Emit the modules page listing every loaded module in the dump."""
    self.output_header(f)
    f.write('<div class="code">')
    for module in self.reader.module_list.modules:
      self.output_module_details(f, module)
    f.write("</div>")
    self.output_footer(f)
    return

  def output_context(self, f, details):
    """Emit thread id, exception code and registers; 'details' selects
    CONTEXT_FULL (adds exception parameters) or CONTEXT_SHORT."""
    exception_thread = self.reader.thread_map[self.reader.exception.thread_id]
    f.write("<h3>Exception context</h3>")
    f.write('<div class="code">\n')
    f.write("Thread id: %d" % exception_thread.id)
    f.write("&nbsp;&nbsp; Exception code: %08X<br/>\n" %
            self.reader.exception.exception.code)
    if details == InspectionWebFormatter.CONTEXT_FULL:
      if self.reader.exception.exception.parameter_count > 0:
        f.write("&nbsp;&nbsp; Exception parameters: \n")
        for i in xrange(0, self.reader.exception.exception.parameter_count):
          f.write("%08x" % self.reader.exception.exception.information[i])
        f.write("<br><br>\n")
    for r in CONTEXT_FOR_ARCH[self.reader.arch]:
      f.write(HTML_REG_FORMAT %
              (r, self.format_address(self.reader.Register(r))))
    # TODO(vitalyr): decode eflags.
    if self.reader.arch == MD_CPU_ARCHITECTURE_ARM:
      f.write("<b>cpsr</b>: %s" % bin(self.reader.exception_context.cpsr)[2:])
    else:
      f.write("<b>eflags</b>: %s" %
              bin(self.reader.exception_context.eflags)[2:])
    f.write('</div>\n')
    return

  def align_down(self, a, size):
    """Round 'a' down to a multiple of 'size'."""
    alignment_correction = a % size
    return a - alignment_correction

  def align_up(self, a, size):
    """Round 'a' up to a multiple of 'size'."""
    alignment_correction = (size - 1) - ((a + size - 1) % size)
    return a + alignment_correction

  def format_object(self, address):
    # Escaped string form of the sensed heap object at 'address' ("" if none).
    heap_object = self.padawan.SenseObject(address)
    return cgi.escape(str(heap_object or ""))

  def output_data(self, f, straddress, datakind):
    """Emit the data page for an address, as words or as ASCII."""
    try:
      self.output_header(f)
      address = int(straddress, 0)
      if not self.reader.IsValidAddress(address):
        f.write("<h3>Address 0x%x not found in the dump.</h3>" % address)
        return
      region = self.reader.FindRegion(address)
      if datakind == "address":
        self.output_words(f, region[0], region[0] + region[1], address, "Dump")
      elif datakind == "ascii":
        self.output_ascii(f, region[0], region[0] + region[1], address)
      self.output_footer(f)
    except ValueError:
      f.write("<h3>Unrecognized address format \"%s\".</h3>" % straddress)
    return

  def output_words(self, f, start_address, end_address,
                   highlight_address, desc):
    """Emit pointer-sized words of [start_address, end_address) as a table,
    annotating each word and anchoring/highlighting highlight_address."""
    region = self.reader.FindRegion(highlight_address)
    if region is None:
      f.write("<h3>Address 0x%x not found in the dump.</h3>\n" %
              (highlight_address))
      return
    size = self.heap.PointerSize()
    # Clamp the requested range to the containing region, word-aligned.
    start_address = self.align_down(start_address, size)
    low = self.align_down(region[0], size)
    high = self.align_up(region[0] + region[1], size)
    if start_address < low:
      start_address = low
    end_address = self.align_up(end_address, size)
    if end_address > high:
      end_address = high
    expand = ""
    if start_address != low or end_address != high:
      expand = ("(<a href=\"data.html?%s&val=0x%x#highlight\">"
                " more..."
                " </a>)" %
                (self.encfilename, highlight_address))
    f.write("<h3>%s 0x%x - 0x%x, "
            "highlighting <a href=\"#highlight\">0x%x</a> %s</h3>\n" %
            (desc, start_address, end_address, highlight_address, expand))
    f.write('<div class="code">')
    f.write("<table class=\"codedump\">\n")
    for slot in xrange(start_address, end_address, size):
      heap_object = ""
      maybe_address = None
      end_region = region[0] + region[1]
      if slot < region[0] or slot + size > end_region:
        # Partial word at a region edge: print the available bytes in hex
        # (most significant first) and pad missing bytes with "??".
        straddress = "0x"
        for i in xrange(end_region, slot + size):
          straddress += "??"
        for i in reversed(
            xrange(max(slot, region[0]), min(slot + size, end_region))):
          straddress += "%02x" % self.reader.ReadU8(i)
        for i in xrange(slot, region[0]):
          straddress += "??"
      else:
        maybe_address = self.reader.ReadUIntPtr(slot)
        straddress = self.format_address(maybe_address)
        if maybe_address:
          heap_object = self.format_object(maybe_address)
      address_fmt = "%s&nbsp;</td>\n"
      if slot == highlight_address:
        f.write("<tr class=\"highlight-line\">\n")
        address_fmt = "<a id=\"highlight\"></a>%s&nbsp;</td>\n"
      elif slot < highlight_address and highlight_address < slot + size:
        f.write("<tr class=\"inexact-highlight-line\">\n")
        address_fmt = "<a id=\"highlight\"></a>%s&nbsp;</td>\n"
      else:
        f.write("<tr>\n")
      f.write("  <td>")
      self.output_comment_box(f, "da-", slot)
      f.write("</td>\n")
      f.write("  ")
      self.td_from_address(f, slot)
      f.write(address_fmt % self.format_address(slot))
      f.write("  ")
      self.td_from_address(f, maybe_address)
      f.write(":&nbsp;%s&nbsp;</td>\n" % straddress)
      f.write("  <td>")
      if maybe_address != None:
        self.output_comment_box(
            f, "sv-" + self.reader.FormatIntPtr(slot), maybe_address)
      f.write("  </td>\n")
      f.write("  <td>%s</td>\n" % (heap_object or ''))
      f.write("</tr>\n")
    f.write("</table>\n")
    f.write("</div>")
    return

  def output_ascii(self, f, start_address, end_address, highlight_address):
    """Emit the bytes of [start_address, end_address) as ASCII text, 64
    characters per line, highlighting highlight_address."""
    region = self.reader.FindRegion(highlight_address)
    if region is None:
      f.write("<h3>Address %x not found in the dump.</h3>" %
          highlight_address)
      return
    if start_address < region[0]:
      start_address = region[0]
    if end_address > region[0] + region[1]:
      end_address = region[0] + region[1]
    expand = ""
    if start_address != region[0] or end_address != region[0] + region[1]:
      link = ("data.html?%s&val=0x%x&type=ascii#highlight" %
              (self.encfilename, highlight_address))
      expand = "(<a href=\"%s\">more...</a>)" % link
    f.write("<h3>ASCII dump 0x%x - 0x%x, highlighting 0x%x %s</h3>" %
            (start_address, end_address, highlight_address, expand))
    line_width = 64
    f.write('<div class="code">')
    start = self.align_down(start_address, line_width)
    for address in xrange(start, end_address):
      if address % 64 == 0:
        if address != start:
          f.write("<br>")
        f.write("0x%08x:&nbsp;" % address)
      if address < start_address:
        f.write("&nbsp;")
      else:
        if address == highlight_address:
          f.write("<span class=\"highlight\">")
        code = self.reader.ReadU8(address)
        if code < 127 and code >= 32:
          # Printable ASCII: emit as a numeric character reference.
          f.write("&#")
          f.write(str(code))
          f.write(";")
        else:
          f.write("·")
        if address == highlight_address:
          f.write("</span>")
    f.write("</div>")
    return

  def output_disasm(self, f, straddress, strexact):
    """Emit the disassembly page around an address; strexact "on" requires
    the highlight to fall on an instruction boundary."""
    try:
      self.output_header(f)
      address = int(straddress, 0)
      if not self.reader.IsValidAddress(address):
        f.write("<h3>Address 0x%x not found in the dump.</h3>" % address)
        return
      region = self.reader.FindRegion(address)
      self.output_disasm_range(
          f, region[0], region[0] + region[1], address, strexact == "on")
      self.output_footer(f)
    except ValueError:
      f.write("<h3>Unrecognized address format \"%s\".</h3>" % straddress)
    return

  def output_disasm_range(
      self, f, start_address, end_address, highlight_address, exact):
    """Disassemble [start_address, end_address), highlighting one address.

    With 'exact', if the highlight does not land on a decoded instruction
    the range is re-disassembled starting at the highlight address.
    """
    region = self.reader.FindRegion(highlight_address)
    if start_address < region[0]:
      start_address = region[0]
    if end_address > region[0] + region[1]:
      end_address = region[0] + region[1]
    count = end_address - start_address
    lines = self.reader.GetDisasmLines(start_address, count)
    found = False
    if exact:
      for line in lines:
        if line[0] + start_address == highlight_address:
          found = True
          break
      if not found:
        start_address = highlight_address
        count = end_address - start_address
        lines = self.reader.GetDisasmLines(highlight_address, count)
    expand = ""
    if start_address != region[0] or end_address != region[0] + region[1]:
      exactness = ""
      if exact and not found and end_address == region[0] + region[1]:
        exactness = "&exact=off"
      expand = ("(<a href=\"disasm.html?%s%s"
                "&val=0x%x#highlight\">more...</a>)" %
                (self.encfilename, exactness, highlight_address))
    f.write("<h3>Disassembling 0x%x - 0x%x, highlighting 0x%x %s</h3>" %
            (start_address, end_address, highlight_address, expand))
    f.write('<div class="code">')
    f.write("<table class=\"codedump\">\n");
    for i in xrange(0, len(lines)):
      line = lines[i]
      next_address = count
      if i + 1 < len(lines):
        next_line = lines[i + 1]
        next_address = next_line[0]
      self.format_disasm_line(
          f, start_address, line, next_address, highlight_address)
    f.write("</table>\n")
    f.write("</div>")
    return

  def annotate_disasm_addresses(self, line):
    """Replace raw hex addresses in a disasm line with search links and
    append a comment naming any sensed heap objects."""
    extra = []
    for m in ADDRESS_RE.finditer(line):
      maybe_address = int(m.group(0), 16)
      formatted_address = self.format_address(maybe_address, m.group(0))
      line = line.replace(m.group(0), formatted_address)
      object_info = self.padawan.SenseObject(maybe_address)
      if not object_info:
        continue
      extra.append(cgi.escape(str(object_info)))
    if len(extra) == 0:
      return line
    return ("%s <span class=\"disasmcomment\">;; %s</span>" %
            (line, ", ".join(extra)))

  def format_disasm_line(
      self, f, start, line, next_address, highlight_address):
    """Emit one table row for a disassembled instruction; 'line' is the
    (offset, text) pair returned by GetDisasmLines."""
    line_address = start + line[0]
    address_fmt = "  <td>%s</td>\n"
    if line_address == highlight_address:
      f.write("<tr class=\"highlight-line\">\n")
      address_fmt = "  <td><a id=\"highlight\">%s</a></td>\n"
    elif (line_address < highlight_address and
          highlight_address < next_address + start):
      f.write("<tr class=\"inexact-highlight-line\">\n")
      address_fmt = "  <td><a id=\"highlight\">%s</a></td>\n"
    else:
      f.write("<tr>\n")
    num_bytes = next_address - line[0]
    stack_slot = self.heap.stack_map.get(line_address)
    marker = ""
    if stack_slot:
      # Instruction address is referenced from the stack.
      marker = "=>"
    op_offset = 3 * num_bytes - 1
    code = line[1]
    # Compute the actual call target which the disassembler is too stupid
    # to figure out (it adds the call offset to the disassembly offset rather
    # than the absolute instruction address).
    if self.heap.reader.arch == MD_CPU_ARCHITECTURE_X86:
      if code.startswith("e8"):
        words = code.split()
        if len(words) > 6 and words[5] == "call":
          offset = int(words[4] + words[3] + words[2] + words[1], 16)
          target = (line_address + offset + 5) & 0xFFFFFFFF
          code = code.replace(words[6], "0x%08x" % target)
    # TODO(jkummerow): port this hack to ARM and x64.
    opcodes = code[:op_offset]
    code = self.annotate_disasm_addresses(code[op_offset:])
    f.write("  <td>")
    self.output_comment_box(f, "codel-", line_address)
    f.write("</td>\n")
    f.write(address_fmt % marker)
    f.write("  ")
    self.td_from_address(f, line_address)
    f.write("%s (+0x%x)</td>\n" %
            (self.format_address(line_address), line[0]))
    f.write("  <td>:&nbsp;%s&nbsp;</td>\n" % opcodes)
    f.write("  <td>%s</td>\n" % code)
    f.write("</tr>\n")

  def output_comment_box(self, f, prefix, address):
    # Editable comment input; the id encodes the address for the JS handler.
    f.write("<input type=\"text\" class=\"commentinput\" "
            "id=\"%s-address-0x%s\" onchange=\"comment()\" value=\"%s\">" %
            (prefix,
             self.reader.FormatIntPtr(address),
             cgi.escape(self.comments.get_comment(address)) or ""))

  # Cap on how many search hits are listed.
  MAX_FOUND_RESULTS = 100

  def output_find_results(self, f, results):
    """List (up to MAX_FOUND_RESULTS) addresses where a value was found."""
    f.write("Addresses")
    toomany = len(results) > self.MAX_FOUND_RESULTS
    if toomany:
      f.write("(found %i results, displaying only first %i)" %
              (len(results), self.MAX_FOUND_RESULTS))
    f.write(": \n")
    results = sorted(results)
    results = results[:min(len(results), self.MAX_FOUND_RESULTS)]
    for address in results:
      f.write("<span %s>%s</span>\n" %
              (self.comments.get_style_class_string(address),
               self.format_address(address)))
    if toomany:
      f.write("...\n")

  def output_page_info(self, f, page_kind, page_address, my_page_address):
    """Show whether this page is the marked first page of 'page_kind', and
    offer a button to mark it."""
    if my_page_address == page_address and page_address != 0:
      f.write("Marked first %s page.\n" % page_kind)
    else:
      f.write("<span id=\"%spage\" style=\"display:none\">" % page_kind)
      f.write("Marked first %s page." % page_kind)
      f.write("</span>\n")
      f.write("<button onclick=\"onpage('%spage', '0x%x')\">" %
              (page_kind, my_page_address))
      f.write("Mark as first %s page</button>\n" % page_kind)
    return

  def output_search_res(self, f, straddress):
    """Emit the search page for an address: comment box, page marks, word /
    ASCII / disasm views, and all occurrences of the value in the dump."""
    try:
      self.output_header(f)
      f.write("<h3>Search results for %s</h3>" % straddress)
      address = int(straddress, 0)
      f.write("Comment: ")
      self.output_comment_box(f, "search-", address)
      f.write("<br>\n")
      page_address = address & ~self.heap.PageAlignmentMask()
      f.write("Page info: \n")
      self.output_page_info(f, "data", self.padawan.known_first_data_page, \
                            page_address)
      self.output_page_info(f, "map", self.padawan.known_first_map_page, \
                            page_address)
      self.output_page_info(f, "pointer", \
                            self.padawan.known_first_pointer_page, \
                            page_address)
      if not self.reader.IsValidAddress(address):
        f.write("<h3>The contents at address %s not found in the dump.</h3>" % \
                straddress)
      else:
        # Print as words
        self.output_words(f, address - 8, address + 32, address, "Dump")
        # Print as ASCII
        f.write("<hr>\n")
        self.output_ascii(f, address, address + 256, address)
        # Print as code
        f.write("<hr>\n")
        self.output_disasm_range(f, address - 16, address + 16, address, True)
      aligned_res, unaligned_res = self.reader.FindWordList(address)
      if len(aligned_res) > 0:
        f.write("<h3>Occurrences of 0x%x at aligned addresses</h3>\n" %
                address)
        self.output_find_results(f, aligned_res)
      if len(unaligned_res) > 0:
        f.write("<h3>Occurrences of 0x%x at unaligned addresses</h3>\n" % \
                address)
        self.output_find_results(f, unaligned_res)
      if len(aligned_res) + len(unaligned_res) == 0:
        f.write("<h3>No occurences of 0x%x found in the dump</h3>\n" % address)
      self.output_footer(f)
    except ValueError:
      f.write("<h3>Unrecognized address format \"%s\".</h3>" % straddress)
    return

  def output_disasm_pc(self, f):
    # Disassemble a short window around the faulting instruction pointer.
    address = self.reader.ExceptionIP()
    if not self.reader.IsValidAddress(address):
      return
    self.output_disasm_range(f, address - 16, address + 16, address, True)
# Static HTML for the top of the dump-list page ("/dumps.html").
WEB_DUMPS_HEADER = """
<!DOCTYPE html>
<html>
<head>
<meta content="text/html; charset=utf-8" http-equiv="content-type">
<style media="screen" type="text/css">
.dumplist {
border-collapse : collapse;
border-spacing : 0px;
font-family: monospace;
}
.dumpcomments {
border : 1px solid LightGray;
width : 32em;
}
</style>
<script type="application/javascript">
var dump_str = "dump-";
var dump_len = dump_str.length;
function dump_comment() {
var s = event.srcElement.id;
var index = s.indexOf(dump_str);
if (index >= 0) {
send_dump_desc(s.substring(index + dump_len), event.srcElement.value);
}
}
function send_dump_desc(name, desc) {
xmlhttp = new XMLHttpRequest();
name = encodeURIComponent(name)
desc = encodeURIComponent(desc)
xmlhttp.open("GET",
"setdumpdesc?dump=" + name +
"&description=" + desc, true);
xmlhttp.send();
}
</script>
<title>Dump list</title>
</head>
<body>
"""

# Closing markup matching WEB_DUMPS_HEADER.
WEB_DUMPS_FOOTER = """
</body>
</html>
"""
DUMP_FILE_RE = re.compile(r"[-_0-9a-zA-Z][-\._0-9a-zA-Z]*\.dmp$")
class InspectionWebServer(BaseHTTPServer.HTTPServer):
  """HTTP server for interactive minidump inspection.

  Serves every "*.dmp" file in the directory of the initially opened dump,
  caching one InspectionWebFormatter per opened dump.
  """
  def __init__(self, port_number, switches, minidump_name):
    BaseHTTPServer.HTTPServer.__init__(
        self, ('', port_number), InspectionWebHandler)
    splitpath = os.path.split(minidump_name)
    self.dumppath = splitpath[0]
    self.dumpfilename = splitpath[1]
    self.default_formatter = InspectionWebFormatter(
        switches, minidump_name, self)
    self.formatters = { self.dumpfilename : self.default_formatter }
    self.switches = switches

  def output_dump_desc_field(self, f, name):
    """Write an editable description <input> for dump 'name' to stream f."""
    try:
      # The description lives in a "<dump>.desc" sidecar file; a missing
      # file simply means there is no description yet.
      with open(os.path.join(self.dumppath, name + ".desc"), "r") as descfile:
        desc = descfile.readline()
    except IOError:
      desc = ""
    f.write("<input type=\"text\" class=\"dumpcomments\" "
            "id=\"dump-%s\" onchange=\"dump_comment()\" value=\"%s\">\n" %
            (cgi.escape(name), desc))

  def set_dump_desc(self, name, description):
    """Save 'description' for dump 'name'; returns False on invalid name."""
    if not DUMP_FILE_RE.match(name):
      return False
    fname = os.path.join(self.dumppath, name)
    if not os.path.isfile(fname):
      return False
    with open(fname + ".desc", "w") as descfile:
      descfile.write(description)
    return True

  def get_dump_formatter(self, name):
    """Return (creating and caching if needed) the formatter for 'name'.

    None selects the dump the server was started with. Raises
    WebParameterError for invalid names or unopenable dumps.
    """
    if name is None:
      return self.default_formatter
    else:
      if not DUMP_FILE_RE.match(name):
        raise WebParameterError("Invalid name '%s'" % name)
      formatter = self.formatters.get(name, None)
      if formatter is None:
        try:
          formatter = InspectionWebFormatter(
              self.switches, os.path.join(self.dumppath, name), self)
          self.formatters[name] = formatter
        except IOError:
          raise WebParameterError("Could not open dump '%s'" % name)
      return formatter

  def output_dumps(self, f):
    """Write the list-of-dumps page (newest first) to stream f."""
    f.write(WEB_DUMPS_HEADER)
    f.write("<h3>List of available dumps</h3>")
    f.write("<table class=\"dumplist\">\n")
    f.write("<thead><tr>")
    f.write("<th>Name</th>")
    f.write("<th>File time</th>")
    f.write("<th>Comment</th>")
    f.write("</tr></thead>")
    dumps_by_time = {}
    for fname in os.listdir(self.dumppath):
      if DUMP_FILE_RE.match(fname):
        mtime = os.stat(os.path.join(self.dumppath, fname)).st_mtime
        fnames = dumps_by_time.get(mtime, [])
        fnames.append(fname)
        dumps_by_time[mtime] = fnames
    for mtime in sorted(dumps_by_time, reverse=True):
      fnames = dumps_by_time[mtime]
      for fname in fnames:
        f.write("<tr>\n")
        f.write("<td><a href=\"summary.html?%s\">%s</a></td>\n" % (
            urllib.urlencode({ 'dump' : fname }), fname))
        f.write("<td>&nbsp;&nbsp;&nbsp;")
        # Bug fix: the original passed the datetime object itself to
        # f.write(), which requires a string and raised a TypeError.
        f.write(str(datetime.datetime.fromtimestamp(mtime)))
        f.write("</td>")
        f.write("<td>&nbsp;&nbsp;&nbsp;")
        self.output_dump_desc_field(f, fname)
        f.write("</td>")
        f.write("</tr>\n")
    f.write("</table>\n")
    f.write(WEB_DUMPS_FOOTER)
    return
class InspectionShell(cmd.Cmd):
  """Interactive '(grok)' shell for inspecting a minidump.

  Each do_* method implements one shell command (cmd.Cmd convention);
  the docstrings double as the shell's online help. Addresses are always
  parsed as hexadecimal.
  """

  def __init__(self, reader, heap):
    cmd.Cmd.__init__(self)
    self.reader = reader
    self.heap = heap
    # Helper that annotates raw words with guessed V8 heap-object info.
    self.padawan = InspectionPadawan(reader, heap)
    self.prompt = "(grok) "

  def do_da(self, address):
    """
    Print ASCII string starting at specified address.
    """
    address = int(address, 16)
    string = ""
    # Read forward byte by byte until the first non-ASCII byte or the end
    # of mapped memory.
    while self.reader.IsValidAddress(address):
      code = self.reader.ReadU8(address)
      if code < 128:
        string += chr(code)
      else:
        break
      address += 1
    if string == "":
      print "Not an ASCII string at %s" % self.reader.FormatIntPtr(address)
    else:
      print "%s\n" % string

  def do_dd(self, args):
    """
    Interpret memory in the given region [address, address + num * word_size)
    (if available) as a sequence of words. Automatic alignment is not performed.
    If the num is not specified, a default value of 16 words is used.
    Synopsis: dd 0x<address> 0x<num>
    """
    args = args.split(' ')
    start = int(args[0], 16)
    num = int(args[1], 16) if len(args) > 1 else 0x10
    if (start & self.heap.ObjectAlignmentMask()) != 0:
      print "Warning: Dumping un-aligned memory, is this what you had in mind?"
    for slot in xrange(start,
                       start + self.reader.PointerSize() * num,
                       self.reader.PointerSize()):
      if not self.reader.IsValidAddress(slot):
        print "Address is not contained within the minidump!"
        return
      maybe_address = self.reader.ReadUIntPtr(slot)
      heap_object = self.padawan.SenseObject(maybe_address)
      print "%s: %s %s" % (self.reader.FormatIntPtr(slot),
                           self.reader.FormatIntPtr(maybe_address),
                           heap_object or '')

  def do_do(self, address):
    """
    Interpret memory at the given address as a V8 object. Automatic
    alignment makes sure that you can pass tagged as well as un-tagged
    addresses.
    """
    address = int(address, 16)
    # V8 heap pointers are tagged with a low bit of 1; convert an aligned
    # (un-tagged) address into its tagged form, and reject anything else.
    if (address & self.heap.ObjectAlignmentMask()) == 0:
      address = address + 1
    elif (address & self.heap.ObjectAlignmentMask()) != 1:
      print "Address doesn't look like a valid pointer!"
      return
    heap_object = self.padawan.SenseObject(address)
    if heap_object:
      heap_object.Print(Printer())
    else:
      print "Address cannot be interpreted as object!"

  def do_do_desc(self, address):
    """
    Print a descriptor array in a readable format.
    """
    start = int(address, 16)
    # Strip the heap-pointer tag bit if present.
    if ((start & 1) == 1): start = start - 1
    DescriptorArray(FixedArray(self.heap, None, start)).Print(Printer())

  def do_do_map(self, address):
    """
    Print a Map object in a readable format.
    """
    start = int(address, 16)
    # Strip the heap-pointer tag bit if present.
    if ((start & 1) == 1): start = start - 1
    Map(self.heap, None, start).Print(Printer())

  def do_do_trans(self, address):
    """
    Print a transition array in a readable format.
    """
    start = int(address, 16)
    # Strip the heap-pointer tag bit if present.
    if ((start & 1) == 1): start = start - 1
    TransitionArray(FixedArray(self.heap, None, start)).Print(Printer())

  def do_dp(self, address):
    """
    Interpret memory at the given address as being on a V8 heap page
    and print information about the page header (if available).
    """
    address = int(address, 16)
    page_address = address & ~self.heap.PageAlignmentMask()
    if self.reader.IsValidAddress(page_address):
      # Page-header decoding has not been implemented yet.
      raise NotImplementedError
    else:
      print "Page header is not available!"

  def do_k(self, arguments):
    """
    Teach V8 heap layout information to the inspector. This increases
    the amount of annotations the inspector can produce while dumping
    data. The first page of each heap space is of particular interest
    because it contains known objects that do not move.
    """
    self.padawan.PrintKnowledge()

  def do_kd(self, address):
    """
    Teach V8 heap layout information to the inspector. Set the first
    data-space page by passing any pointer into that page.
    """
    address = int(address, 16)
    page_address = address & ~self.heap.PageAlignmentMask()
    self.padawan.known_first_data_page = page_address

  def do_km(self, address):
    """
    Teach V8 heap layout information to the inspector. Set the first
    map-space page by passing any pointer into that page.
    """
    address = int(address, 16)
    page_address = address & ~self.heap.PageAlignmentMask()
    self.padawan.known_first_map_page = page_address

  def do_kp(self, address):
    """
    Teach V8 heap layout information to the inspector. Set the first
    pointer-space page by passing any pointer into that page.
    """
    address = int(address, 16)
    page_address = address & ~self.heap.PageAlignmentMask()
    self.padawan.known_first_pointer_page = page_address

  def do_list(self, smth):
    """
    List all available memory regions.
    """
    def print_region(reader, start, size, location):
      print "  %s - %s (%d bytes)" % (reader.FormatIntPtr(start),
                                      reader.FormatIntPtr(start + size),
                                      size)
    print "Available memory regions:"
    self.reader.ForEachMemoryRegion(print_region)

  def do_lm(self, arg):
    """
    List details for all loaded modules in the minidump. An argument can
    be passed to limit the output to only those modules that contain the
    argument as a substring (case insensitive match).
    """
    for module in self.reader.module_list.modules:
      if arg:
        name = GetModuleName(self.reader, module).lower()
        if name.find(arg.lower()) >= 0:
          PrintModuleDetails(self.reader, module)
      else:
        PrintModuleDetails(self.reader, module)
    print

  def do_s(self, word):
    """
    Search for a given word in available memory regions. The given word
    is expanded to full pointer size and searched at aligned as well as
    un-aligned memory locations. Use 'sa' to search aligned locations
    only.
    """
    try:
      # Base 0 lets int() honor '0x' prefixes while accepting decimals.
      word = int(word, 0)
    except ValueError:
      print "Malformed word, prefix with '0x' to use hexadecimal format."
      return
    print "Searching for word %d/0x%s:" % (word, self.reader.FormatIntPtr(word))
    self.reader.FindWord(word)

  def do_sh(self, none):
    """
    Search for the V8 Heap object in all available memory regions. You
    might get lucky and find this rare treasure full of invaluable
    information.
    """
    raise NotImplementedError

  def do_u(self, args):
    """
    Unassemble memory in the region [address, address + size). If the
    size is not specified, a default value of 32 bytes is used.
    Synopsis: u 0x<address> 0x<size>
    """
    args = args.split(' ')
    start = int(args[0], 16)
    size = int(args[1], 16) if len(args) > 1 else 0x20
    if not self.reader.IsValidAddress(start):
      print "Address is not contained within the minidump!"
      return
    lines = self.reader.GetDisasmLines(start, size)
    for line in lines:
      print FormatDisasmLine(start, self.heap, line)
    print

  def do_EOF(self, none):
    # Ctrl-D exits the shell; the KeyboardInterrupt is caught by the caller.
    raise KeyboardInterrupt
# Number of bytes disassembled on either side of the faulting instruction
# when printing "Disassembly around exception.eip".
EIP_PROXIMITY = 64

# Register names to dump for each minidump CPU architecture.
CONTEXT_FOR_ARCH = {
    MD_CPU_ARCHITECTURE_AMD64:
      ['rax', 'rbx', 'rcx', 'rdx', 'rdi', 'rsi', 'rbp', 'rsp', 'rip',
       'r8', 'r9', 'r10', 'r11', 'r12', 'r13', 'r14', 'r15'],
    MD_CPU_ARCHITECTURE_ARM:
      ['r0', 'r1', 'r2', 'r3', 'r4', 'r5', 'r6', 'r7', 'r8', 'r9',
       'r10', 'r11', 'r12', 'sp', 'lr', 'pc'],
    MD_CPU_ARCHITECTURE_X86:
      ['eax', 'ebx', 'ecx', 'edx', 'edi', 'esi', 'ebp', 'esp', 'eip']
}

# Modules for which symbol loading is attempted during analysis.
KNOWN_MODULES = {'chrome.exe', 'chrome.dll'}
def GetVersionString(ms, ls):
  """Format two packed 32-bit version dwords as 'major.minor.build.patch'.

  Each dword packs two 16-bit components: the high word and the low word
  (as in the minidump VS_FIXEDFILEINFO layout).
  """
  major, minor = divmod(ms, 0x10000)
  build, patch = divmod(ls, 0x10000)
  return "%d.%d.%d.%d" % (major, minor, build, patch)
def GetModuleName(reader, module):
  """Return the base file name of a minidump module (e.g. 'chrome.dll')."""
  name = reader.ReadMinidumpString(module.module_name_rva)
  # simplify for path manipulation
  # (Python 2: minidump strings are unicode; encode to a utf-8 byte string
  # and normalize Windows backslashes so os.path.basename works.)
  name = name.encode('utf-8')
  return str(os.path.basename(str(name).replace("\\", "/")))
def PrintModuleDetails(reader, module):
  """Print name, load range, file/product versions and timestamp of a module."""
  print "%s" % GetModuleName(reader, module)
  file_version = GetVersionString(module.version_info.dwFileVersionMS,
                                  module.version_info.dwFileVersionLS)
  product_version = GetVersionString(module.version_info.dwProductVersionMS,
                                     module.version_info.dwProductVersionLS)
  print "  base: %s" % reader.FormatIntPtr(module.base_of_image)
  print "  end: %s" % reader.FormatIntPtr(module.base_of_image +
                                          module.size_of_image)
  print "  file version: %s" % file_version
  print "  product version: %s" % product_version
  # NOTE(review): fromtimestamp interprets the stamp in local time — confirm
  # whether the minidump stores UTC.
  time_date_stamp = datetime.datetime.fromtimestamp(module.time_date_stamp)
  print "  timestamp: %s" % time_date_stamp
def AnalyzeMinidump(options, minidump_name):
  """Run the non-interactive analysis of a minidump.

  Prints exception info (registers, flags, modules), disassembly around
  the faulting instruction, and — unless a shell/command was requested —
  an annotated stack dump. Honors options.full, options.command and
  options.shell.
  """
  reader = MinidumpReader(options, minidump_name)
  heap = None
  DebugPrint("========================================")
  if reader.exception is None:
    print "Minidump has no exception info"
  else:
    print "Exception info:"
    exception_thread = reader.thread_map[reader.exception.thread_id]
    print "  thread id: %d" % exception_thread.id
    print "  code: %08X" % reader.exception.exception.code
    print "  context:"
    for r in CONTEXT_FOR_ARCH[reader.arch]:
      print "    %s: %s" % (r, reader.FormatIntPtr(reader.Register(r)))
    # TODO(vitalyr): decode eflags.
    if reader.arch == MD_CPU_ARCHITECTURE_ARM:
      print "    cpsr: %s" % bin(reader.exception_context.cpsr)[2:]
    else:
      print "    eflags: %s" % bin(reader.exception_context.eflags)[2:]
    print
    print "  modules:"
    for module in reader.module_list.modules:
      name = GetModuleName(reader, module)
      if name in KNOWN_MODULES:
        print "  %s at %08X" % (name, module.base_of_image)
        reader.TryLoadSymbolsFor(name, module)
    print

    stack_top = reader.ExceptionSP()
    stack_bottom = exception_thread.stack.start + \
        exception_thread.stack.memory.data_size
    # Map every word found on the stack back to the first slot that holds
    # it; the exception IP gets the sentinel slot -1.
    stack_map = {reader.ExceptionIP(): -1}
    for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
      maybe_address = reader.ReadUIntPtr(slot)
      if not maybe_address in stack_map:
        stack_map[maybe_address] = slot
    heap = V8Heap(reader, stack_map)

    print "Disassembly around exception.eip:"
    eip_symbol = reader.FindSymbol(reader.ExceptionIP())
    if eip_symbol is not None:
      print eip_symbol
    disasm_start = reader.ExceptionIP() - EIP_PROXIMITY
    disasm_bytes = 2 * EIP_PROXIMITY
    if (options.full):
      # With --full, disassemble the entire region containing the IP.
      full_range = reader.FindRegion(reader.ExceptionIP())
      if full_range is not None:
        disasm_start = full_range[0]
        disasm_bytes = full_range[1]
    lines = reader.GetDisasmLines(disasm_start, disasm_bytes)
    for line in lines:
      print FormatDisasmLine(disasm_start, heap, line)
    print

  if heap is None:
    heap = V8Heap(reader, None)

  if options.full:
    FullDump(reader, heap)

  if options.command:
    InspectionShell(reader, heap).onecmd(options.command)

  if options.shell:
    try:
      InspectionShell(reader, heap).cmdloop("type help to get help")
    except KeyboardInterrupt:
      print "Kthxbye."
  elif not options.command:
    if reader.exception is not None:
      frame_pointer = reader.ExceptionFP()
      print "Annotated stack (from exception.esp to bottom):"
      for slot in xrange(stack_top, stack_bottom, reader.PointerSize()):
        # Printable-ASCII rendering of the raw bytes in this stack slot.
        ascii_content = [c if c >= '\x20' and c < '\x7f' else '.'
                         for c in reader.ReadBytes(slot, reader.PointerSize())]
        maybe_address = reader.ReadUIntPtr(slot)
        heap_object = heap.FindObject(maybe_address)
        maybe_symbol = reader.FindSymbol(maybe_address)
        if slot == frame_pointer:
          # Follow the saved-frame-pointer chain down the stack.
          maybe_symbol = "<---- frame pointer"
          frame_pointer = maybe_address
        print "%s: %s %s %s" % (reader.FormatIntPtr(slot),
                                reader.FormatIntPtr(maybe_address),
                                "".join(ascii_content),
                                maybe_symbol or "")
        if heap_object:
          heap_object.Print(Printer())
          print

  reader.Dispose()
if __name__ == "__main__":
  # Command-line front end: exactly one positional argument (the minidump),
  # plus mode switches for web server / shell / single command / full dump.
  parser = optparse.OptionParser(USAGE)
  parser.add_option("-s", "--shell", dest="shell", action="store_true",
                    help="start an interactive inspector shell")
  parser.add_option("-w", "--web", dest="web", action="store_true",
                    help="start a web server on localhost:%i" % PORT_NUMBER)
  parser.add_option("-c", "--command", dest="command", default="",
                    help="run an interactive inspector shell command and exit")
  parser.add_option("-f", "--full", dest="full", action="store_true",
                    help="dump all information contained in the minidump")
  parser.add_option("--symdir", dest="symdir", default=".",
                    help="directory containing *.pdb.sym file with symbols")
  parser.add_option("--objdump",
                    default="/usr/bin/objdump",
                    help="objdump tool to use [default: %default]")
  options, args = parser.parse_args()
  if os.path.exists(options.objdump):
    # Point both the disasm helper module and this module at the chosen tool.
    disasm.OBJDUMP_BIN = options.objdump
    OBJDUMP_BIN = options.objdump
  else:
    print "Cannot find %s, falling back to default objdump" % options.objdump
  if len(args) != 1:
    parser.print_help()
    sys.exit(1)
  if options.web:
    try:
      server = InspectionWebServer(PORT_NUMBER, options, args[0])
      print 'Started httpserver on port ' , PORT_NUMBER
      webbrowser.open('http://localhost:%i/summary.html' % PORT_NUMBER)
      server.serve_forever()
    except KeyboardInterrupt:
      print '^C received, shutting down the web server'
      server.socket.close()
  else:
    AnalyzeMinidump(options, args[0])
| {
"content_hash": "c6b855b279f96d17118862df20287f44",
"timestamp": "",
"source": "github",
"line_count": 3165,
"max_line_length": 81,
"avg_line_length": 33.95608214849921,
"alnum_prop": 0.6333522531659703,
"repo_name": "nekulin/arangodb",
"id": "8986a91b5c318302c0f8ee8601d8919df675d9d6",
"size": "109067",
"binary": false,
"copies": "13",
"ref": "refs/heads/devel",
"path": "3rdParty/V8-4.3.61/tools/grokdump.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "89080"
},
{
"name": "AppleScript",
"bytes": "1429"
},
{
"name": "Assembly",
"bytes": "142084"
},
{
"name": "Batchfile",
"bytes": "9073"
},
{
"name": "C",
"bytes": "1938354"
},
{
"name": "C#",
"bytes": "55625"
},
{
"name": "C++",
"bytes": "79307771"
},
{
"name": "CLIPS",
"bytes": "5291"
},
{
"name": "CMake",
"bytes": "109682"
},
{
"name": "CSS",
"bytes": "1683781"
},
{
"name": "CoffeeScript",
"bytes": "94"
},
{
"name": "DIGITAL Command Language",
"bytes": "27303"
},
{
"name": "Emacs Lisp",
"bytes": "15477"
},
{
"name": "Go",
"bytes": "1018005"
},
{
"name": "Groff",
"bytes": "263567"
},
{
"name": "HTML",
"bytes": "458914"
},
{
"name": "JavaScript",
"bytes": "57970034"
},
{
"name": "LLVM",
"bytes": "39361"
},
{
"name": "Lua",
"bytes": "16189"
},
{
"name": "Makefile",
"bytes": "177932"
},
{
"name": "Module Management System",
"bytes": "1545"
},
{
"name": "NSIS",
"bytes": "26909"
},
{
"name": "Objective-C",
"bytes": "4430"
},
{
"name": "Objective-C++",
"bytes": "1857"
},
{
"name": "Pascal",
"bytes": "145262"
},
{
"name": "Perl",
"bytes": "227308"
},
{
"name": "Protocol Buffer",
"bytes": "5837"
},
{
"name": "Python",
"bytes": "3563935"
},
{
"name": "Ruby",
"bytes": "1000569"
},
{
"name": "SAS",
"bytes": "1847"
},
{
"name": "Scheme",
"bytes": "19885"
},
{
"name": "Shell",
"bytes": "488744"
},
{
"name": "VimL",
"bytes": "4075"
},
{
"name": "Yacc",
"bytes": "36950"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<!--
To change this license header, choose License Headers in Project Properties.
To change this template file, choose Tools | Templates
and open the template in the editor.
-->
<html>
<head>
<title>Multiplayer - Pong</title>
<meta charset="windows-1252">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script src="https://code.jquery.com/jquery-2.2.1.min.js"></script>
<script src="https://cdn.socket.io/socket.io-1.2.0.js"></script>
<script src="js/player.js" type="text/javascript"></script>
<script src="js/projectile.js" type="text/javascript"></script>
<script src="js/PongJS.js" type="text/javascript"></script>
<link href='https://fonts.googleapis.com/css?family=Orbitron:500' rel='stylesheet' type='text/css'>
<link href='https://fonts.googleapis.com/css?family=Inconsolata' rel='stylesheet' type='text/css'>
<link href="css/splashCSS.css" rel="stylesheet" type="text/css"/>
<link href="css/style.css" rel="stylesheet" type="text/css"/>
<link href="Test/TestStijl.css" rel="stylesheet" type="text/css"/>
</head>
<body>
<!-- From the menu you can navigate using the arrow keys and Enter. -->
<div id="splash">
<span id="header">Multiplayer - Pong</span><br>
<img id="splash_img" src="images/splashPong.png" alt="splash"><br>
<span id="info">Press space to continue...</span>
</div>
</body>
</html>
| {
"content_hash": "a554afbd14290b37675ae284daf0187d",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 103,
"avg_line_length": 43.06060606060606,
"alnum_prop": 0.6790992258972555,
"repo_name": "ICTSE2bSMR/multi-pong",
"id": "5b0e361403f6a8f1371868c296ac5bc8cf9ef6af",
"size": "1421",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "app/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3028"
},
{
"name": "HTML",
"bytes": "7397"
},
{
"name": "JavaScript",
"bytes": "28725"
}
],
"symlink_target": ""
} |
import sys
import config_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(config_util.Config):
  """Basic Config class for the Skia repository."""

  @staticmethod
  def fetch_spec(_props):
    # Describe the Skia checkout as a single unmanaged gclient solution.
    spec = {
        'solutions': [
            {
                'name'     : 'skia',
                'url'      : 'https://skia.googlesource.com/skia.git',
                'deps_file': 'DEPS',
                'managed'  : False,
            },
        ],
    }
    return {
        'type': 'gclient_git',
        'gclient_git_spec': spec,
    }

  @staticmethod
  def expected_root(_props):
    # The checkout lands in a directory named after the solution.
    return 'skia'
def main(argv=None):
  # Delegate all argument handling to the shared Config base class.
  return Skia().handle_args(argv)


if __name__ == '__main__':
  sys.exit(main(sys.argv))
| {
"content_hash": "91cd64ea3449c40cb4f58bd2413c9a88",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 71,
"avg_line_length": 20.405405405405407,
"alnum_prop": 0.5894039735099338,
"repo_name": "junhuac/MQUIC",
"id": "930173a0c22e07732143fc3abcc0ff44d69486fb",
"size": "918",
"binary": false,
"copies": "12",
"ref": "refs/heads/master",
"path": "depot_tools/fetch_configs/skia.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "25707"
},
{
"name": "Assembly",
"bytes": "5386"
},
{
"name": "Batchfile",
"bytes": "42909"
},
{
"name": "C",
"bytes": "1168925"
},
{
"name": "C#",
"bytes": "81308"
},
{
"name": "C++",
"bytes": "43919800"
},
{
"name": "CMake",
"bytes": "46379"
},
{
"name": "CSS",
"bytes": "19668"
},
{
"name": "Emacs Lisp",
"bytes": "32613"
},
{
"name": "Go",
"bytes": "7247"
},
{
"name": "Groff",
"bytes": "127224"
},
{
"name": "HTML",
"bytes": "2548385"
},
{
"name": "Java",
"bytes": "1332462"
},
{
"name": "JavaScript",
"bytes": "851006"
},
{
"name": "M4",
"bytes": "29823"
},
{
"name": "Makefile",
"bytes": "459525"
},
{
"name": "Objective-C",
"bytes": "120158"
},
{
"name": "Objective-C++",
"bytes": "330017"
},
{
"name": "PHP",
"bytes": "11283"
},
{
"name": "Protocol Buffer",
"bytes": "2991"
},
{
"name": "Python",
"bytes": "16872234"
},
{
"name": "R",
"bytes": "1842"
},
{
"name": "Ruby",
"bytes": "937"
},
{
"name": "Shell",
"bytes": "764509"
},
{
"name": "Swift",
"bytes": "116"
},
{
"name": "VimL",
"bytes": "12288"
},
{
"name": "nesC",
"bytes": "14779"
}
],
"symlink_target": ""
} |
// Convert an Arrow IPC file (random-access format) into the streaming IPC
// format. Input comes from argv[2] (or stdin), output goes to argv[3]
// (or stdout).
const fs = require('fs');
const path = require('path');
// Promise that resolves when a stream has finished (or errored).
const eos = require('util').promisify(require('stream').finished);
// ARROW_JS_DEBUG=src runs against the TypeScript sources instead of the build.
const extension = process.env.ARROW_JS_DEBUG === 'src' ? '.ts' : '.cjs';
const { RecordBatchReader, RecordBatchStreamWriter } = require(`../index${extension}`);

(async () => {
    const readable = process.argv.length < 3 ? process.stdin : fs.createReadStream(path.resolve(process.argv[2]));
    const writable = process.argv.length < 4 ? process.stdout : fs.createWriteStream(path.resolve(process.argv[3]));
    // input -> parse record batches -> re-serialize as a stream -> output
    const fileToStream = readable
        .pipe(RecordBatchReader.throughNode())
        .pipe(RecordBatchStreamWriter.throughNode())
        .pipe(writable);
    await eos(fileToStream);
})().catch((e) => { console.error(e); process.exit(1); });
| {
"content_hash": "6fab86644bd18f4920b30dd1a24f3d0f",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 116,
"avg_line_length": 41.21052631578947,
"alnum_prop": 0.6743295019157088,
"repo_name": "kou/arrow",
"id": "7fab54e2cb09fc98e96ca6437504d1d34ccbffef",
"size": "1621",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "js/bin/file-to-stream.js",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "3709"
},
{
"name": "Batchfile",
"bytes": "30689"
},
{
"name": "C",
"bytes": "1400442"
},
{
"name": "C#",
"bytes": "1029129"
},
{
"name": "C++",
"bytes": "24695324"
},
{
"name": "CMake",
"bytes": "711360"
},
{
"name": "Cython",
"bytes": "1554440"
},
{
"name": "Dockerfile",
"bytes": "147322"
},
{
"name": "Emacs Lisp",
"bytes": "1064"
},
{
"name": "FreeMarker",
"bytes": "2312"
},
{
"name": "Go",
"bytes": "4586449"
},
{
"name": "HTML",
"bytes": "3430"
},
{
"name": "Java",
"bytes": "7045674"
},
{
"name": "JavaScript",
"bytes": "127157"
},
{
"name": "Jinja",
"bytes": "19948"
},
{
"name": "Lua",
"bytes": "8771"
},
{
"name": "MATLAB",
"bytes": "40399"
},
{
"name": "Makefile",
"bytes": "32873"
},
{
"name": "Meson",
"bytes": "69508"
},
{
"name": "Objective-C++",
"bytes": "11472"
},
{
"name": "Perl",
"bytes": "3803"
},
{
"name": "Python",
"bytes": "3059136"
},
{
"name": "R",
"bytes": "1561613"
},
{
"name": "Ruby",
"bytes": "1615226"
},
{
"name": "Shell",
"bytes": "390773"
},
{
"name": "Thrift",
"bytes": "34246"
},
{
"name": "TypeScript",
"bytes": "1075563"
},
{
"name": "Vala",
"bytes": "24798"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Powersheets {

    /// <summary>
    /// Builds spreadsheet-style text output from data sets.
    /// </summary>
    public interface IPowersheetExporter {

        /// <summary>
        /// Writes all properties of each <see cref="IPowersheetExporterDump"/> row.
        /// </summary>
        /// <param name="dataSet">Rows to write.</param>
        /// <param name="writeHeadings">Whether to emit a heading row.</param>
        /// <param name="writeAutoIncrement">Whether to emit an auto-increment column.</param>
        StringBuilder Dump(IEnumerable<IPowersheetExporterDump> dataSet, bool writeHeadings, bool writeAutoIncrement);

        /// <summary>
        /// Writes only the named property columns of each row.
        /// </summary>
        StringBuilder Dump(IEnumerable<IPowersheetExporterDump> dataSet, IEnumerable<string> propertyColumns, bool writeHeadings, bool writeAutoIncrement);

        /// <summary>
        /// Writes arbitrary objects as rows.
        /// </summary>
        StringBuilder Export(IEnumerable<object> dataSet, bool writeHeadings, bool writeAutoIncrement);

        /// <summary>
        /// Writes arbitrary objects as rows, restricted to the named columns.
        /// </summary>
        StringBuilder Export(IEnumerable<object> dataSet, IEnumerable<string> columns, bool writeHeadings, bool writeAutoIncrement);
    }
}
| {
"content_hash": "7b5ceea89b58b06d04551b79f7e7612b",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 155,
"avg_line_length": 37.578947368421055,
"alnum_prop": 0.788515406162465,
"repo_name": "Bigbudddo/Powersheets",
"id": "1ec29ca29182d0e764629119f3aa60185861bdcf",
"size": "716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Powersheets/Interfaces/IPowersheetExporter.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "237243"
}
],
"symlink_target": ""
} |
require 'json'

# Reads a webhook payload (JSON) from STDIN and prints the r10k environment
# prefix embedded in the repository description as "[prefix:value]".
if STDIN.tty?
  # Run interactively: there is no payload to parse.
  puts 'invalid'
  puts 'This command is meant be launched by webhook'
else
  data = JSON.parse(STDIN.read)
  description = data['repository']['description']
  # Capture the prefix token; surrounding single/double quotes are optional.
  parsed = description.scan(/^.*\[prefix:['"]?(\S+)['"]?\].*$/)
  prefix = parsed[0]
  puts prefix
end
| {
"content_hash": "9b9eaab7d7f47b11381588c669e85ad3",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 63,
"avg_line_length": 24.25,
"alnum_prop": 0.6494845360824743,
"repo_name": "mld/puppet-ca-multi-master",
"id": "5bbb842b12ae5c2565324185490a1f00fede3cca",
"size": "314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "puppet/modules/r10k/files/prefix_command.rb",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Pascal",
"bytes": "2266"
},
{
"name": "Puppet",
"bytes": "321757"
},
{
"name": "Ruby",
"bytes": "1725854"
},
{
"name": "Shell",
"bytes": "6817"
}
],
"symlink_target": ""
} |
/* @flow */
// Application entry point: builds the Redux store with routing + thunk
// middleware and mounts the root component.
import React from 'react'
import ReactDOM from 'react-dom'
import { createStore, applyMiddleware, compose } from 'redux'
import { Provider } from 'react-redux'
import thunkMiddleware from 'redux-thunk'
import { ConnectedRouter, routerMiddleware } from 'react-router-redux'
import createHistory from 'history/createBrowserHistory'
import './index.css'
import rootReducer from './reducers/index'
import RootContainer from './containers/RootContainer'

// Single browser-history instance shared by the router middleware and
// the ConnectedRouter below.
const history = createHistory()
const middleware = routerMiddleware(history)
// Use the Redux DevTools extension's composer when the extension is installed.
const composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose

// Exported so non-component modules can dispatch/select against the store.
export const store = createStore(
  rootReducer,
  composeEnhancers(applyMiddleware(middleware, thunkMiddleware)),
)

ReactDOM.render(
  <Provider store={store}>
    <ConnectedRouter history={history}>
      <RootContainer />
    </ConnectedRouter>
  </Provider>,
  document.getElementById('root'),
)
| {
"content_hash": "2f2ed3de5caa743c6f983761060ecfa1",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 79,
"avg_line_length": 28.875,
"alnum_prop": 0.762987012987013,
"repo_name": "picatic/picatic-examples",
"id": "59ddc29fb8ad63061e30d139b5f9cc0912a628a6",
"size": "924",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "create-event/src/index.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "594826"
},
{
"name": "HTML",
"bytes": "23308"
},
{
"name": "JavaScript",
"bytes": "55127"
}
],
"symlink_target": ""
} |
<div class="content">
<div id="example_title">
<h1>Tooltips</h1>
Toolbar items can display tooltips on any of the four sides; use the radio buttons below to change the tooltip position.
</div>
<div id="example_view"></div>
<div id="example_code"></div>
</div>
<!--CODE-->
<div id="toolbar" style="padding: 4px; border: 1px solid #dfdfdf; border-radius: 3px"></div>
<div style="height: 40px"></div>
Tooltip position:<br>
<div style="padding: 10px">
<label><input type="radio" name="position" onclick="w2ui.toolbar.tooltip = 'top'; w2ui.toolbar.refresh()" checked> Top</label><br>
<label><input type="radio" name="position" onclick="w2ui.toolbar.tooltip = 'bottom'; w2ui.toolbar.refresh()"> Bottom</label><br>
<label><input type="radio" name="position" onclick="w2ui.toolbar.tooltip = 'left'; w2ui.toolbar.refresh()"> Left</label><br>
<label><input type="radio" name="position" onclick="w2ui.toolbar.tooltip = 'right'; w2ui.toolbar.refresh()"> Right</label><br>
</div>
<!--CODE-->
<script>
$(function () {
$('#toolbar').w2toolbar({
name: 'toolbar',
tooltip: 'top',
items: [
{ type: 'check', id: 'item1', text: 'Check', icon: 'fa fa-check-square-o', checked: true, tooltip: 'Tooltip text' },
{ type: 'break', id: 'break0' },
{ type: 'menu', id: 'item2', text: 'Menu', icon: 'fa fa-table', count: 17, tooltip: 'Tooltip text can be very very long',
items: [
{ text: 'Item 1', icon: 'fa fa-camera', count: 5 },
{ text: 'Item 2', icon: 'fa fa-picture-o', disabled: true },
{ text: 'Item 3', icon: 'fa fa-glass', count: 12 }
]
},
{ type: 'break', id: 'break1' },
{ type: 'radio', id: 'item3', group: '1', text: 'Radio 1', icon: 'fa fa-star', checked: true, tooltip: 'Tooltip text can also be<br>in multiple lines' },
{ type: 'radio', id: 'item4', group: '1', text: 'Radio 2', icon: 'fa fa-heart',
tooltip: function (item) {
return 'Can also be result of a function: ' + item.text;
}
},
],
onClick: function (event) {
console.log('Selected:', event.item.selected);
}
});
});
</script>
| {
"content_hash": "4f2ac1d5fcd8a16b392a32af29a0d276",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 167,
"avg_line_length": 44.80392156862745,
"alnum_prop": 0.5431072210065645,
"repo_name": "mpf82/w2ui",
"id": "363072f5c6d8903bc0282b9a8b228902ea612dbd",
"size": "2285",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demos/examples/toolbar/8.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "428242"
},
{
"name": "HTML",
"bytes": "504869"
},
{
"name": "Hack",
"bytes": "2551"
},
{
"name": "Java",
"bytes": "213640"
},
{
"name": "JavaScript",
"bytes": "3093884"
},
{
"name": "LiveScript",
"bytes": "6103"
},
{
"name": "PHP",
"bytes": "426362"
},
{
"name": "Python",
"bytes": "182361"
},
{
"name": "Ruby",
"bytes": "7095"
},
{
"name": "Shell",
"bytes": "1567"
}
],
"symlink_target": ""
} |
// Baidu Analytics bootstrap: asynchronously injects the hm.js tracking
// script before the first <script> tag on the page.
// NOTE(review): Baidu's standard snippet names the command queue `_hmt`;
// this file declares `hmt` — confirm which name hm.js expects here.
var hmt = hmt || [];
(function() {
  var hm = document.createElement("script");
  hm.src = "https://hm.baidu.com/hm.js?5573716a80598952ad73aca7f896ef45";
  var s = document.getElementsByTagName("script")[0];
  s.parentNode.insertBefore(hm, s);
})();
"content_hash": "925225a61170560d1f7eb766cce9e433",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 71,
"avg_line_length": 34.42857142857143,
"alnum_prop": 0.7053941908713693,
"repo_name": "FreeCodeCamp/FreeCodeCamp",
"id": "fc19e952518989ace1783d8162afb4d8b76cb82d",
"size": "241",
"binary": false,
"copies": "2",
"ref": "refs/heads/i18n-sync-client",
"path": "client/static/misc/cap.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "190263"
},
{
"name": "HTML",
"bytes": "160430"
},
{
"name": "JavaScript",
"bytes": "546299"
}
],
"symlink_target": ""
} |
package ssh
// Config holds the connection settings applied to an Agent by Configure.
type Config struct {
	User string // login user name
	Host string // remote host to connect to
	Key  string // path to the private key file
}
// configTypeError reports that Configure was handed a value that is not
// a *Config. Defined locally to avoid importing errors/fmt.
type configTypeError struct{}

func (configTypeError) Error() string {
	return "ssh: Configure requires a *ssh.Config"
}

// Configure the SSH agent with a *Config object: sets the user and host
// and loads the private key from the configured file path.
//
// It returns an error when config is not a *Config (previously this case
// was a silent no-op marked "TODO err").
func (a *Agent) Configure(config interface{}) error {
	c, ok := config.(*Config)
	if !ok {
		return configTypeError{}
	}
	a.SetUser(c.User)
	a.SetHost(c.Host)
	// NOTE(review): any error from loading the key file is not propagated
	// here — matches the original behavior; confirm whether it should be.
	a.SetPrivateKeyFromFile(c.Key)
	return nil
}
| {
"content_hash": "34ee8ba6d9339567f89c993df1de4272",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 53,
"avg_line_length": 16.857142857142858,
"alnum_prop": 0.6892655367231638,
"repo_name": "xor-gate/cicd",
"id": "aaa9091b5d3b688a4d9bf2e0027b96d19b736fee",
"size": "354",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "modules/agent/ssh/config.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "107051"
},
{
"name": "Shell",
"bytes": "986"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "28937849dfb0fa52595ab6edbf1a4957",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "54d33b8935a6e4c3b5a86415c035fc14608f8978",
"size": "190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Gesneriaceae/Cyrtandra/Cyrtandra macrocalyx/ Syn. Cyrtandra kamokuensis/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
'use strict';
const tag = require('../middlewares/tag');
const cleanUpQueryParams = require('../middlewares/clean-up-query-params');
const credentials = require('../middlewares/credentials');
const dbConnSetup = require('../middlewares/db-conn-setup');
const authorize = require('../middlewares/authorize');
const checkJsonContentType = require('../middlewares/check-json-content-type');
const incrementMapViewCount = require('../middlewares/increment-map-view-count');
const augmentLayergroupData = require('../middlewares/augment-layergroup-data');
const cacheControlHeader = require('../middlewares/cache-control-header');
const cacheChannelHeader = require('../middlewares/cache-channel-header');
const surrogateKeyHeader = require('../middlewares/surrogate-key-header');
const lastModifiedHeader = require('../middlewares/last-modified-header');
const lastUpdatedTimeLayergroup = require('../middlewares/last-updated-time-layergroup');
const layerStats = require('../middlewares/layer-stats');
const layergroupIdHeader = require('../middlewares/layergroup-id-header');
const layergroupMetadata = require('../middlewares/layergroup-metadata');
const mapError = require('../middlewares/map-error');
const NamedMapMapConfigProvider = require('../../models/mapconfig/provider/named-map-provider');
const CreateLayergroupMapConfigProvider = require('../../models/mapconfig/provider/create-layergroup-provider');
const rateLimit = require('../middlewares/rate-limit');
const { RATE_LIMIT_ENDPOINTS_GROUPS } = rateLimit;
const metrics = require('../middlewares/metrics');
// Controller that instantiates a layergroup from a named map template.
// Mounted on the template router; the JSONP GET and JSON POST endpoints
// share the exact same middleware chain.
module.exports = class NamedMapController {
    /**
     * @param {Object} config - global configuration (reads config.pubSubMetrics.enabled)
     * @param {PgConnection} pgConnection
     * @param {TemplateMaps} templateMaps
     * @param {MapBackend} mapBackend
     * @param metadataBackend
     * @param {SurrogateKeysCache} surrogateKeysCache
     * @param {UserLimitsBackend} userLimitsBackend
     * @param {LayergroupAffectedTables} layergroupAffectedTables
     * @param {MapConfigAdapter} mapConfigAdapter
     * @param {StatsBackend} statsBackend
     * @param {AuthBackend} authBackend
     * @param layergroupMetadata
     * @param metricsBackend - backend the metrics() middleware publishes map_view events to
     * @constructor
     */
    constructor (
        config,
        pgConnection,
        templateMaps,
        mapBackend,
        metadataBackend,
        surrogateKeysCache,
        userLimitsBackend,
        layergroupAffectedTables,
        mapConfigAdapter,
        statsBackend,
        authBackend,
        layergroupMetadata,
        metricsBackend
    ) {
        this.config = config;
        this.pgConnection = pgConnection;
        this.templateMaps = templateMaps;
        this.mapBackend = mapBackend;
        this.metadataBackend = metadataBackend;
        this.surrogateKeysCache = surrogateKeysCache;
        this.userLimitsBackend = userLimitsBackend;
        this.layergroupAffectedTables = layergroupAffectedTables;
        this.mapConfigAdapter = mapConfigAdapter;
        this.statsBackend = statsBackend;
        this.authBackend = authBackend;
        this.layergroupMetadata = layergroupMetadata;
        this.metricsBackend = metricsBackend;
    }

    // Both routes run the same chain; the GET variant is the JSONP flavour
    // (checkInstantiteLayergroup() enforces the callback/config query params).
    route (templateRouter) {
        templateRouter.get('/:template_id/jsonp', this.middlewares());
        templateRouter.post('/:template_id', this.middlewares());
    }

    middlewares () {
        const useTemplateHash = true; // layergroup id header embeds the template hash
        const includeQuery = false; // do not echo queries back in layergroup metadata
        const label = 'NAMED MAP LAYERGROUP';
        const addContext = false;

        // Tags attached to every published 'map_view' metric; the `client`
        // attribute is read from the request's ?client= query parameter.
        const metricsTags = {
            event: 'map_view',
            attributes: { map_type: 'named' },
            from: {
                req: {
                    query: { client: 'client' }
                }
            }
        };

        // Order matters: auth/limits run first, then template resolution and
        // layergroup creation, and finally the response header/metadata steps.
        return [
            // NOTE(review): tag() is not among the requires visible in this
            // excerpt — presumably imported earlier in the file; confirm.
            tag({ tags: ['map', 'named'] }),
            metrics({
                enabled: this.config.pubSubMetrics.enabled,
                metricsBackend: this.metricsBackend,
                tags: metricsTags
            }),
            credentials(),
            authorize(this.authBackend),
            dbConnSetup(this.pgConnection),
            rateLimit(this.userLimitsBackend, RATE_LIMIT_ENDPOINTS_GROUPS.NAMED),
            cleanUpQueryParams(['aggregation']),
            checkJsonContentType(),
            checkInstantiteLayergroup(),
            getTemplate(
                this.templateMaps,
                this.pgConnection,
                this.metadataBackend,
                this.userLimitsBackend,
                this.mapConfigAdapter,
                this.layergroupAffectedTables
            ),
            instantiateLayergroup(
                this.mapBackend,
                this.userLimitsBackend,
                this.pgConnection,
                this.layergroupAffectedTables
            ),
            incrementMapViewCount(this.metadataBackend),
            augmentLayergroupData(),
            lastUpdatedTimeLayergroup(),
            cacheControlHeader({ ttl: global.environment.varnish.layergroupTtl, revalidate: true, overwriteWithTTLAlways: true }),
            cacheChannelHeader(),
            surrogateKeyHeader({ surrogateKeysCache: this.surrogateKeysCache }),
            lastModifiedHeader(),
            layerStats(this.pgConnection, this.statsBackend),
            layergroupIdHeader(this.templateMaps, useTemplateHash),
            layergroupMetadata(this.layergroupMetadata, includeQuery),
            mapError({ label, addContext })
        ];
    }
};
function checkInstantiteLayergroup () {
return function checkInstantiteLayergroupMiddleware (req, res, next) {
if (req.method === 'GET') {
const { callback, config } = req.query;
if (callback === undefined || callback.length === 0) {
return next(new Error('callback parameter should be present and be a function name'));
}
if (config) {
try {
req.body = JSON.parse(config);
} catch (e) {
return next(new Error('Invalid config parameter, should be a valid JSON'));
}
}
}
return next();
};
}
// Resolves the named map template into a MapConfig. The resulting mapConfig,
// rendererParams and the provider itself are stashed in res.locals for the
// downstream instantiation middleware.
function getTemplate (
    templateMaps,
    pgConnection,
    metadataBackend,
    userLimitsBackend,
    mapConfigAdapter,
    affectedTablesCache
) {
    return function getTemplateMiddleware (req, res, next) {
        const { user, dbuser, dbname, dbpassword, dbhost, dbport } = res.locals;
        const { template_id: templateId } = req.params;
        const { auth_token: authToken } = req.query;

        // Connection settings first, then every query param layered on top.
        const providerParams = Object.assign({ dbuser, dbname, dbpassword, dbhost, dbport }, req.query);

        const provider = new NamedMapMapConfigProvider(
            templateMaps,
            pgConnection,
            metadataBackend,
            userLimitsBackend,
            mapConfigAdapter,
            affectedTablesCache,
            user,
            templateId,
            req.body,
            authToken,
            providerParams
        );
        provider.logger = res.locals.logger;

        provider.getMapConfig((err, mapConfig, rendererParams, context, stats = {}) => {
            req.profiler.add(stats);

            if (err) {
                return next(err);
            }

            Object.assign(res.locals, { mapConfig, rendererParams, mapConfigProvider: provider });

            return next();
        });
    };
}
// Creates the layergroup out of the MapConfig resolved by getTemplate().
// On success the layergroup becomes the response body and template-related
// data from the named-map provider is copied into res.locals.
function instantiateLayergroup (mapBackend, userLimitsBackend, pgConnection, affectedTablesCache) {
    return function instantiateLayergroupMiddleware (req, res, next) {
        const { user, mapConfig, rendererParams } = res.locals;

        const createProvider = new CreateLayergroupMapConfigProvider(
            mapConfig,
            user,
            userLimitsBackend,
            pgConnection,
            affectedTablesCache,
            rendererParams
        );

        mapBackend.createLayergroup(mapConfig, rendererParams, createProvider, (err, layergroup, stats = {}) => {
            req.profiler.add(stats);

            if (err) {
                return next(err);
            }

            res.statusCode = 200;
            res.body = layergroup;

            // This is the named-map provider stored by getTemplate(),
            // not the CreateLayergroup provider built above.
            const namedMapProvider = res.locals.mapConfigProvider;
            res.locals.analysesResults = namedMapProvider.analysesResults;
            res.locals.template = namedMapProvider.template;
            res.locals.context = namedMapProvider.context;

            return next();
        });
    };
}
| {
"content_hash": "b33ec0c26cc6f2c011c7b4afc97f22e6",
"timestamp": "",
"source": "github",
"line_count": 235,
"max_line_length": 130,
"avg_line_length": 36.36595744680851,
"alnum_prop": 0.6185349871284812,
"repo_name": "CartoDB/Windshaft-cartodb",
"id": "2dc0c09d642d33633f8033087653bd83bef45941",
"size": "8546",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/api/template/named-template-controller.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "7484"
},
{
"name": "HTML",
"bytes": "639"
},
{
"name": "JavaScript",
"bytes": "1966891"
},
{
"name": "PLpgSQL",
"bytes": "50586"
},
{
"name": "Python",
"bytes": "1051"
},
{
"name": "Shell",
"bytes": "1673"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_20) on Sat Jul 16 17:38:36 CEST 2011 -->
<TITLE>
SecurityRoleTypeImplTestCase
</TITLE>
<META NAME="date" CONTENT="2011-07-16">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="SecurityRoleTypeImplTestCase";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/SecurityRoleTypeImplTestCase.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/SecurityRoleTypeImpl.html" title="class in org.jboss.shrinkwrap.descriptor.impl.jbosscommon51"><B>PREV CLASS</B></A>
<A HREF="../../../../../../org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/ServiceRefTypeImpl.html" title="class in org.jboss.shrinkwrap.descriptor.impl.jbosscommon51"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/SecurityRoleTypeImplTestCase.html" target="_top"><B>FRAMES</B></A>
<A HREF="SecurityRoleTypeImplTestCase.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | FIELD | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
org.jboss.shrinkwrap.descriptor.impl.jbosscommon51</FONT>
<BR>
Class SecurityRoleTypeImplTestCase</H2>
<PRE>
java.lang.Object
<IMG SRC="../../../../../../resources/inherit.gif" ALT="extended by "><B>org.jboss.shrinkwrap.descriptor.impl.jbosscommon51.SecurityRoleTypeImplTestCase</B>
</PRE>
<HR>
<DL>
<DT><PRE>public class <B>SecurityRoleTypeImplTestCase</B><DT>extends java.lang.Object</DL>
</PRE>
<P>
<HR>
<P>
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<A NAME="constructor_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Constructor Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../../../org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/SecurityRoleTypeImplTestCase.html#SecurityRoleTypeImplTestCase()">SecurityRoleTypeImplTestCase</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<!-- ========== METHOD SUMMARY =========== -->
<A NAME="method_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Method Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/SecurityRoleTypeImplTestCase.html#testDescription()">testDescription</A></B>()</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/SecurityRoleTypeImplTestCase.html#testPrincipalName()">testPrincipalName</A></B>()</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../../org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/SecurityRoleTypeImplTestCase.html#testRoleName()">testRoleName</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_java.lang.Object"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from class java.lang.Object</B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE>equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</CODE></TD>
</TR>
</TABLE>
<P>
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<A NAME="constructor_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Constructor Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="SecurityRoleTypeImplTestCase()"><!-- --></A><H3>
SecurityRoleTypeImplTestCase</H3>
<PRE>
public <B>SecurityRoleTypeImplTestCase</B>()</PRE>
<DL>
</DL>
<!-- ============ METHOD DETAIL ========== -->
<A NAME="method_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Method Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="testDescription()"><!-- --></A><H3>
testDescription</H3>
<PRE>
public void <B>testDescription</B>()
throws java.lang.Exception</PRE>
<DL>
<DD><DL>
<DT><B>Throws:</B>
<DD><CODE>java.lang.Exception</CODE></DL>
</DD>
</DL>
<HR>
<A NAME="testRoleName()"><!-- --></A><H3>
testRoleName</H3>
<PRE>
public void <B>testRoleName</B>()
throws java.lang.Exception</PRE>
<DL>
<DD><DL>
<DT><B>Throws:</B>
<DD><CODE>java.lang.Exception</CODE></DL>
</DD>
</DL>
<HR>
<A NAME="testPrincipalName()"><!-- --></A><H3>
testPrincipalName</H3>
<PRE>
public void <B>testPrincipalName</B>()
throws java.lang.Exception</PRE>
<DL>
<DD><DL>
<DT><B>Throws:</B>
<DD><CODE>java.lang.Exception</CODE></DL>
</DD>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/SecurityRoleTypeImplTestCase.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/SecurityRoleTypeImpl.html" title="class in org.jboss.shrinkwrap.descriptor.impl.jbosscommon51"><B>PREV CLASS</B></A>
<A HREF="../../../../../../org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/ServiceRefTypeImpl.html" title="class in org.jboss.shrinkwrap.descriptor.impl.jbosscommon51"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/SecurityRoleTypeImplTestCase.html" target="_top"><B>FRAMES</B></A>
<A HREF="SecurityRoleTypeImplTestCase.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | FIELD | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | <A HREF="#constructor_detail">CONSTR</A> | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
| {
"content_hash": "fc8ff0d325eca0899811dba04a6ba67e",
"timestamp": "",
"source": "github",
"line_count": 299,
"max_line_length": 210,
"avg_line_length": 40.080267558528426,
"alnum_prop": 0.6330106809078772,
"repo_name": "shrinkwrap/descriptors",
"id": "05ebe38b007d163da9a584c960c07dc10b1aa0f4",
"size": "11984",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gen/doc/org/jboss/shrinkwrap/descriptor/impl/jbosscommon51/SecurityRoleTypeImplTestCase.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1391"
},
{
"name": "HTML",
"bytes": "43640737"
},
{
"name": "Java",
"bytes": "986076"
},
{
"name": "XSLT",
"bytes": "1164486"
}
],
"symlink_target": ""
} |
<?php //-->
namespace Eden\Facebook;
use Eden\Curl\Base as Curl;
use Eden\Facebook\Graph\Base as GraphBase;
/**
 * Facebook Graph API client. Wraps authenticated HTTP calls against
 * https://graph.facebook.com/ using a preloaded OAuth access token.
 *
 * @vendor Eden
 * @package Facebook
 * @author Ian Mark Muninio <ianmuninio@openovate.com>
 */
class Graph extends Base
{
    const INSTANCE = 0; // sets to multiton
    const GRAPH_URL = 'https://graph.facebook.com/';

    // OAuth access token appended to every authenticated request
    protected $token = null;

    /**
     * Preloads the token.
     *
     * @param string $token a valid Facebook OAuth access token
     * @return void
     */
    public function __construct($token)
    {
        Argument::i()->test(1, 'string');
        $this->token = $token;
    }

    /**
     * Returns the facebook object.
     *
     * Any undefined method call is forwarded to Graph\Base, which receives
     * the same access token.
     *
     * @param string $name name of the facebook object
     * @param scalar $args the constructor arguments
     * @return \Eden\Facebook\Graph\Base
     */
    public function __call($name, $args)
    {
        Argument::i()->test(1, 'string');
        return GraphBase::i($this->token)
            ->__call($name, $args);
    }

    /**
     * Deletes an object based on id.
     *
     * Issues an authenticated HTTP DELETE against the object (or one of its
     * connections when $connection is given).
     *
     * @param string $id id of the object
     * @param string|null $connection [optional] the connection
     * @return array decoded JSON response
     */
    public function delete($id, $connection = null)
    {
        Argument::i()
            ->test(1, 'string')
            ->test(2, 'string', 'null');
        // NOTE(review): GRAPH_URL already ends with '/', so this yields a
        // double slash ("...com//<id>"); getObject() does not — confirm the
        // Graph API tolerates it.
        $url = self::GRAPH_URL . '/' . $id;
        if ($connection) {
            $url .= '/' . $connection;
        }
        $url .= '?access_token=' . $this->token;
        return $this->getResponse($url, array(), Curl::DELETE);
    }

    /**
     * Returns specific fields of an object.
     *
     * Convenience wrapper around getObject() with a `fields` query parameter.
     *
     * @param string|int $id [optional] (default: me) id of the object
     * @param string|array $fields field name(s); an array is joined with commas
     * @return array
     */
    public function getFields($id = 'me', $fields = array())
    {
        Argument::i()
            ->test(1, 'string', 'int')
            ->test(2, 'string', 'array');
        // if fields is an array
        if (is_array($fields)) {
            //make it into a string
            $fields = implode(',', $fields);
        }
        // call it
        return $this->getObject($id, null, array('fields' => $fields));
    }

    /**
     * Returns the detail of any object.
     *
     * @param string|int $id [optional] (default: me) id of the object
     * @param string|null $connection [optional] the page name
     * @param array $query [optional] extra query string parameters
     * @param bool $auth [optional] (default: true) append the access token
     * @return array decoded JSON response
     */
    public function getObject($id = 'me', $connection = null, array $query = array(), $auth = true)
    {
        Argument::i()
            ->test(1, 'string', 'int')
            ->test(2, 'string', 'null')
            ->test(3, 'array')
            ->test(4, 'bool');
        // if we have a connection
        if ($connection) {
            //prepend a slash
            $connection = '/' . $connection;
        }
        // for the url
        $url = self::GRAPH_URL . $id . $connection;
        // if this requires authentication
        if ($auth) {
            // add the token
            $query['access_token'] = $this->token;
        }
        // if we have a query
        if (!empty($query)) {
            //append it to the url
            $url .= '?' . http_build_query($query);
        }
        // call it
        $object = $this->getResponse($url, array());
        return $object;
    }

    /**
     * Search over all public objects in the social graph.
     *
     * @param string $query the search keyword
     * @param string $type the object type to search for
     * @param bool $auth [optional] (default: true) append the access token
     * @return array decoded JSON response
     */
    public function search($query, $type, $auth = true)
    {
        Argument::i()
            ->test(1, 'string')
            ->test(2, 'string')
            ->test(3, 'bool');
        // fix query
        $query = array(
            'q' => $query,
            'type' => $type);
        // fix url, append the search word
        // NOTE(review): GRAPH_URL already ends with '/', producing '//search'
        // here — confirm the Graph API tolerates it.
        $url = self::GRAPH_URL.'/search';
        // if this requires authentication
        if ($auth) {
            // add the token
            $query['access_token'] = $this->token;
        }
        // if we have a query
        if (!empty($query)) {
            //append it to the url
            $url .= '?' . http_build_query($query);
        }
        // call it
        $object = $this->getResponse($url, array());
        return $object;
    }

    /**
     * Get response using curl.
     *
     * Sends the request with the HTTP verb selected by $request and decodes
     * the JSON reply. POST bodies are form-encoded via http_build_query().
     *
     * @param string $url graph url
     * @param array $post post fields (used for POST requests only)
     * @param string $request the request method (a Curl:: verb constant)
     * @return array decoded JSON response
     */
    protected function getResponse($url, array $post = array(), $request = Curl::GET)
    {
        Argument::i()
            ->test(1, 'string')
            ->test(2, 'array')
            ->test(3, 'string');
        //send it off
        $curl = Curl::i()
            ->setUrl($url)
            ->setConnectTimeout(10)
            ->setFollowLocation(true)
            ->setTimeout(60)
            ->verifyPeer(false)
            ->setUserAgent(Auth::USER_AGENT)
            ->setHeaders('Expect');
        switch ($request) {
            case Curl::PUT:
                $curl->setCustomPut();
                break;
            case Curl::GET:
                $curl->setCustomGet();
                break;
            case Curl::DELETE:
                $curl->setCustomDelete();
                break;
            case Curl::POST:
                $curl->setPost(true)
                    ->setPostFields(http_build_query($post));
                break;
            default:
                // unrecognized verbs fall through with no extra setup
        }
        $response = $curl->getJsonResponse();
        return $response;
    }
}
| {
"content_hash": "e548bc602736e6325fb10a97bb91da02",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 99,
"avg_line_length": 25.51528384279476,
"alnum_prop": 0.47064863939756973,
"repo_name": "Eden-PHP/Facebook",
"id": "a03c37f2844db35654f6c0ff1ddfee3ab9e5a21e",
"size": "6028",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Graph.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "116128"
}
],
"symlink_target": ""
} |
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2009 Gael Guennebaud <gael.guennebaud@inria.fr>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_TRIANGULARMATRIXVECTOR_H
#define EIGEN_TRIANGULARMATRIXVECTOR_H
namespace Eigen {
namespace internal {
// Kernel computing res += alpha * triangular(lhs) * rhs for a triangular
// matrix times a dense vector. Only declared here; the storage order of the
// triangular factor selects one of the two specializations below.
template<typename Index, int Mode, typename LhsScalar, bool ConjLhs, typename RhsScalar, bool ConjRhs, int StorageOrder, int Version=Specialized>
struct triangular_matrix_vector_product;

// Column-major specialization: the update is applied column by column
// (axpy-like) over vertical panels.
template<typename Index, int Mode, typename LhsScalar, bool ConjLhs, typename RhsScalar, bool ConjRhs, int Version>
struct triangular_matrix_vector_product<Index,Mode,LhsScalar,ConjLhs,RhsScalar,ConjRhs,ColMajor,Version>
{
  typedef typename scalar_product_traits<LhsScalar, RhsScalar>::ReturnType ResScalar;
  enum {
    IsLower = ((Mode&Lower)==Lower),            // triangular part is below the diagonal
    HasUnitDiag = (Mode & UnitDiag)==UnitDiag,  // diagonal entries implicitly 1 (not stored)
    HasZeroDiag = (Mode & ZeroDiag)==ZeroDiag   // diagonal entries implicitly 0 (not stored)
  };
  static EIGEN_DONT_INLINE void run(Index _rows, Index _cols, const LhsScalar* _lhs, Index lhsStride,
                                    const RhsScalar* _rhs, Index rhsIncr, ResScalar* _res, Index resIncr, const ResScalar& alpha);
};
template<typename Index, int Mode, typename LhsScalar, bool ConjLhs, typename RhsScalar, bool ConjRhs, int Version>
EIGEN_DONT_INLINE void triangular_matrix_vector_product<Index,Mode,LhsScalar,ConjLhs,RhsScalar,ConjRhs,ColMajor,Version>
  ::run(Index _rows, Index _cols, const LhsScalar* _lhs, Index lhsStride,
        const RhsScalar* _rhs, Index rhsIncr, ResScalar* _res, Index resIncr, const ResScalar& alpha)
{
  static const Index PanelWidth = EIGEN_TUNE_TRIANGULAR_PANEL_WIDTH;
  // Only a size x size block is triangular; anything beyond it is a plain
  // rectangular block handled by the general matrix-vector kernel.
  Index size = (std::min)(_rows,_cols);
  Index rows = IsLower ? _rows : (std::min)(_rows,_cols);
  Index cols = IsLower ? (std::min)(_rows,_cols) : _cols;

  // Map the raw pointers, wrapping them with conjugation when requested.
  typedef Map<const Matrix<LhsScalar,Dynamic,Dynamic,ColMajor>, 0, OuterStride<> > LhsMap;
  const LhsMap lhs(_lhs,rows,cols,OuterStride<>(lhsStride));
  typename conj_expr_if<ConjLhs,LhsMap>::type cjLhs(lhs);

  typedef Map<const Matrix<RhsScalar,Dynamic,1>, 0, InnerStride<> > RhsMap;
  const RhsMap rhs(_rhs,cols,InnerStride<>(rhsIncr));
  typename conj_expr_if<ConjRhs,RhsMap>::type cjRhs(rhs);

  typedef Map<Matrix<ResScalar,Dynamic,1> > ResMap;
  ResMap res(_res,rows);

  typedef const_blas_data_mapper<LhsScalar,Index,ColMajor> LhsMapper;
  typedef const_blas_data_mapper<RhsScalar,Index,RowMajor> RhsMapper;

  // Walk the triangular block in vertical panels of at most PanelWidth columns.
  for (Index pi=0; pi<size; pi+=PanelWidth)
  {
    Index actualPanelWidth = (std::min)(PanelWidth, size-pi);
    // Triangular part of the panel: one axpy per column, restricted to the
    // stored (non-diagonal when implicit) segment [s, s+r).
    for (Index k=0; k<actualPanelWidth; ++k)
    {
      Index i = pi + k;
      Index s = IsLower ? ((HasUnitDiag||HasZeroDiag) ? i+1 : i ) : pi;
      Index r = IsLower ? actualPanelWidth-k : k+1;
      if ((!(HasUnitDiag||HasZeroDiag)) || (--r)>0) // skip implicit diagonal entry
        res.segment(s,r) += (alpha * cjRhs.coeff(i)) * cjLhs.col(i).segment(s,r);
      if (HasUnitDiag)
        res.coeffRef(i) += alpha * cjRhs.coeff(i); // implicit 1 on the diagonal
    }
    // Rectangular remainder of the panel (below it for Lower, above for
    // Upper): delegate to the general gemv kernel.
    Index r = IsLower ? rows - pi - actualPanelWidth : pi;
    if (r>0)
    {
      Index s = IsLower ? pi+actualPanelWidth : 0;
      general_matrix_vector_product<Index,LhsScalar,LhsMapper,ColMajor,ConjLhs,RhsScalar,RhsMapper,ConjRhs,BuiltIn>::run(
          r, actualPanelWidth,
          LhsMapper(&lhs.coeffRef(s,pi), lhsStride),
          RhsMapper(&rhs.coeffRef(pi), rhsIncr),
          &res.coeffRef(s), resIncr, alpha);
    }
  }
  // For a wide Upper matrix, the columns past the square triangular block are
  // fully dense: one last gemv over the trailing cols-size columns.
  if((!IsLower) && cols>size)
  {
    general_matrix_vector_product<Index,LhsScalar,LhsMapper,ColMajor,ConjLhs,RhsScalar,RhsMapper,ConjRhs>::run(
        rows, cols-size,
        LhsMapper(&lhs.coeffRef(0,size), lhsStride),
        RhsMapper(&rhs.coeffRef(size), rhsIncr),
        _res, resIncr, alpha);
  }
}
// Row-major specialization: the update is applied row by row (dot-product
// formulation) over horizontal panels.
template<typename Index, int Mode, typename LhsScalar, bool ConjLhs, typename RhsScalar, bool ConjRhs,int Version>
struct triangular_matrix_vector_product<Index,Mode,LhsScalar,ConjLhs,RhsScalar,ConjRhs,RowMajor,Version>
{
  typedef typename scalar_product_traits<LhsScalar, RhsScalar>::ReturnType ResScalar;
  enum {
    IsLower = ((Mode&Lower)==Lower),            // triangular part is below the diagonal
    HasUnitDiag = (Mode & UnitDiag)==UnitDiag,  // diagonal entries implicitly 1 (not stored)
    HasZeroDiag = (Mode & ZeroDiag)==ZeroDiag   // diagonal entries implicitly 0 (not stored)
  };
  static EIGEN_DONT_INLINE void run(Index _rows, Index _cols, const LhsScalar* _lhs, Index lhsStride,
                                    const RhsScalar* _rhs, Index rhsIncr, ResScalar* _res, Index resIncr, const ResScalar& alpha);
};
template<typename Index, int Mode, typename LhsScalar, bool ConjLhs, typename RhsScalar, bool ConjRhs,int Version>
EIGEN_DONT_INLINE void triangular_matrix_vector_product<Index,Mode,LhsScalar,ConjLhs,RhsScalar,ConjRhs,RowMajor,Version>
  ::run(Index _rows, Index _cols, const LhsScalar* _lhs, Index lhsStride,
        const RhsScalar* _rhs, Index rhsIncr, ResScalar* _res, Index resIncr, const ResScalar& alpha)
{
  static const Index PanelWidth = EIGEN_TUNE_TRIANGULAR_PANEL_WIDTH;
  // Only a diagSize x diagSize block is triangular; the rest is rectangular.
  Index diagSize = (std::min)(_rows,_cols);
  Index rows = IsLower ? _rows : diagSize;
  Index cols = IsLower ? diagSize : _cols;

  // Map the raw pointers, wrapping them with conjugation when requested.
  typedef Map<const Matrix<LhsScalar,Dynamic,Dynamic,RowMajor>, 0, OuterStride<> > LhsMap;
  const LhsMap lhs(_lhs,rows,cols,OuterStride<>(lhsStride));
  typename conj_expr_if<ConjLhs,LhsMap>::type cjLhs(lhs);

  typedef Map<const Matrix<RhsScalar,Dynamic,1> > RhsMap;
  const RhsMap rhs(_rhs,cols);
  typename conj_expr_if<ConjRhs,RhsMap>::type cjRhs(rhs);

  typedef Map<Matrix<ResScalar,Dynamic,1>, 0, InnerStride<> > ResMap;
  ResMap res(_res,rows,InnerStride<>(resIncr));

  typedef const_blas_data_mapper<LhsScalar,Index,RowMajor> LhsMapper;
  typedef const_blas_data_mapper<RhsScalar,Index,RowMajor> RhsMapper;

  // Walk the triangular block in horizontal panels of at most PanelWidth rows.
  for (Index pi=0; pi<diagSize; pi+=PanelWidth)
  {
    Index actualPanelWidth = (std::min)(PanelWidth, diagSize-pi);
    // Triangular part of the panel: one dot product per row, restricted to
    // the stored (non-diagonal when implicit) segment [s, s+r).
    for (Index k=0; k<actualPanelWidth; ++k)
    {
      Index i = pi + k;
      Index s = IsLower ? pi  : ((HasUnitDiag||HasZeroDiag) ? i+1 : i);
      Index r = IsLower ? k+1 : actualPanelWidth-k;
      if ((!(HasUnitDiag||HasZeroDiag)) || (--r)>0) // skip implicit diagonal entry
        res.coeffRef(i) += alpha * (cjLhs.row(i).segment(s,r).cwiseProduct(cjRhs.segment(s,r).transpose())).sum();
      if (HasUnitDiag)
        res.coeffRef(i) += alpha * cjRhs.coeff(i); // implicit 1 on the diagonal
    }
    // Rectangular remainder of the panel (left of it for Lower, right for
    // Upper): delegate to the general gemv kernel.
    Index r = IsLower ? pi : cols - pi - actualPanelWidth;
    if (r>0)
    {
      Index s = IsLower ? 0 : pi + actualPanelWidth;
      general_matrix_vector_product<Index,LhsScalar,LhsMapper,RowMajor,ConjLhs,RhsScalar,RhsMapper,ConjRhs,BuiltIn>::run(
          actualPanelWidth, r,
          LhsMapper(&lhs.coeffRef(pi,s), lhsStride),
          RhsMapper(&rhs.coeffRef(s), rhsIncr),
          &res.coeffRef(pi), resIncr, alpha);
    }
  }
  // For a tall Lower matrix, the rows past the square triangular block are
  // fully dense: one last gemv over the trailing rows-diagSize rows.
  if(IsLower && rows>diagSize)
  {
    general_matrix_vector_product<Index,LhsScalar,LhsMapper,RowMajor,ConjLhs,RhsScalar,RhsMapper,ConjRhs>::run(
        rows-diagSize, cols,
        LhsMapper(&lhs.coeffRef(diagSize,0), lhsStride),
        RhsMapper(&rhs.coeffRef(0), rhsIncr),
        &res.coeffRef(diagSize), resIncr, alpha);
  }
}
/***************************************************************************
* Wrapper to product_triangular_vector
***************************************************************************/

// Expression traits for the two triangular-times-vector cases:
// triangular matrix * dense column vector ...
template<int Mode, bool LhsIsTriangular, typename Lhs, typename Rhs>
struct traits<TriangularProduct<Mode,LhsIsTriangular,Lhs,false,Rhs,true> >
 : traits<ProductBase<TriangularProduct<Mode,LhsIsTriangular,Lhs,false,Rhs,true>, Lhs, Rhs> >
{};

// ... and dense row vector * triangular matrix.
template<int Mode, bool LhsIsTriangular, typename Lhs, typename Rhs>
struct traits<TriangularProduct<Mode,LhsIsTriangular,Lhs,true,Rhs,false> >
 : traits<ProductBase<TriangularProduct<Mode,LhsIsTriangular,Lhs,true,Rhs,false>, Lhs, Rhs> >
{};

// Dispatcher on the storage order of the triangular factor; specialized
// further below for ColMajor and RowMajor.
template<int StorageOrder>
struct trmv_selector;
} // end namespace internal
// Product expression: triangular lhs times dense column vector rhs.
template<int Mode, typename Lhs, typename Rhs>
struct TriangularProduct<Mode,true,Lhs,false,Rhs,true>
  : public ProductBase<TriangularProduct<Mode,true,Lhs,false,Rhs,true>, Lhs, Rhs >
{
  EIGEN_PRODUCT_PUBLIC_INTERFACE(TriangularProduct)

  TriangularProduct(const Lhs& lhs, const Rhs& rhs) : Base(lhs,rhs) {}

  // Evaluates dst += alpha * lhs * rhs, dispatching on the lhs storage order.
  template<typename Dest> void scaleAndAddTo(Dest& dst, const Scalar& alpha) const
  {
    eigen_assert(dst.rows()==m_lhs.rows() && dst.cols()==m_rhs.cols());
  
    internal::trmv_selector<(int(internal::traits<Lhs>::Flags)&RowMajorBit) ? RowMajor : ColMajor>::run(*this, dst, alpha);
  }
};
// Product expression: dense row vector lhs times triangular rhs.
// Reduced to the previous case by transposition: (l * r)^T = r^T * l^T,
// which flips Lower<->Upper and the effective storage order.
template<int Mode, typename Lhs, typename Rhs>
struct TriangularProduct<Mode,false,Lhs,true,Rhs,false>
  : public ProductBase<TriangularProduct<Mode,false,Lhs,true,Rhs,false>, Lhs, Rhs >
{
  EIGEN_PRODUCT_PUBLIC_INTERFACE(TriangularProduct)

  TriangularProduct(const Lhs& lhs, const Rhs& rhs) : Base(lhs,rhs) {}

  template<typename Dest> void scaleAndAddTo(Dest& dst, const Scalar& alpha) const
  {
    eigen_assert(dst.rows()==m_lhs.rows() && dst.cols()==m_rhs.cols());

    // Keep the UnitDiag/ZeroDiag bits, swap the Lower/Upper bit, and run the
    // triangular-lhs kernel on the transposed operands into a transposed dst.
    typedef TriangularProduct<(Mode & (UnitDiag|ZeroDiag)) | ((Mode & Lower) ? Upper : Lower),true,Transpose<const Rhs>,false,Transpose<const Lhs>,true> TriangularProductTranspose;
    Transpose<Dest> dstT(dst);
    internal::trmv_selector<(int(internal::traits<Rhs>::Flags)&RowMajorBit) ? ColMajor : RowMajor>::run(
      TriangularProductTranspose(m_rhs.transpose(),m_lhs.transpose()), dstT, alpha);
  }
};
namespace internal {
// TODO: find a way to factorize this piece of code with gemv_selector since the logic is exactly the same.
template<> struct trmv_selector<ColMajor>
{
template<int Mode, typename Lhs, typename Rhs, typename Dest>
static void run(const TriangularProduct<Mode,true,Lhs,false,Rhs,true>& prod, Dest& dest, const typename TriangularProduct<Mode,true,Lhs,false,Rhs,true>::Scalar& alpha)
{
typedef TriangularProduct<Mode,true,Lhs,false,Rhs,true> ProductType;
typedef typename ProductType::Index Index;
typedef typename ProductType::LhsScalar LhsScalar;
typedef typename ProductType::RhsScalar RhsScalar;
typedef typename ProductType::Scalar ResScalar;
typedef typename ProductType::RealScalar RealScalar;
typedef typename ProductType::ActualLhsType ActualLhsType;
typedef typename ProductType::ActualRhsType ActualRhsType;
typedef typename ProductType::LhsBlasTraits LhsBlasTraits;
typedef typename ProductType::RhsBlasTraits RhsBlasTraits;
typedef Map<Matrix<ResScalar,Dynamic,1>, Aligned> MappedDest;
typename internal::add_const_on_value_type<ActualLhsType>::type actualLhs = LhsBlasTraits::extract(prod.lhs());
typename internal::add_const_on_value_type<ActualRhsType>::type actualRhs = RhsBlasTraits::extract(prod.rhs());
ResScalar actualAlpha = alpha * LhsBlasTraits::extractScalarFactor(prod.lhs())
* RhsBlasTraits::extractScalarFactor(prod.rhs());
enum {
// FIXME find a way to allow an inner stride on the result if packet_traits<Scalar>::size==1
// on, the other hand it is good for the cache to pack the vector anyways...
EvalToDestAtCompileTime = Dest::InnerStrideAtCompileTime==1,
ComplexByReal = (NumTraits<LhsScalar>::IsComplex) && (!NumTraits<RhsScalar>::IsComplex),
MightCannotUseDest = (Dest::InnerStrideAtCompileTime!=1) || ComplexByReal
};
gemv_static_vector_if<ResScalar,Dest::SizeAtCompileTime,Dest::MaxSizeAtCompileTime,MightCannotUseDest> static_dest;
bool alphaIsCompatible = (!ComplexByReal) || (numext::imag(actualAlpha)==RealScalar(0));
bool evalToDest = EvalToDestAtCompileTime && alphaIsCompatible;
RhsScalar compatibleAlpha = get_factor<ResScalar,RhsScalar>::run(actualAlpha);
ei_declare_aligned_stack_constructed_variable(ResScalar,actualDestPtr,dest.size(),
evalToDest ? dest.data() : static_dest.data());
if(!evalToDest)
{
#ifdef EIGEN_DENSE_STORAGE_CTOR_PLUGIN
Index size = dest.size();
EIGEN_DENSE_STORAGE_CTOR_PLUGIN
#endif
if(!alphaIsCompatible)
{
MappedDest(actualDestPtr, dest.size()).setZero();
compatibleAlpha = RhsScalar(1);
}
else
MappedDest(actualDestPtr, dest.size()) = dest;
}
internal::triangular_matrix_vector_product
<Index,Mode,
LhsScalar, LhsBlasTraits::NeedToConjugate,
RhsScalar, RhsBlasTraits::NeedToConjugate,
ColMajor>
::run(actualLhs.rows(),actualLhs.cols(),
actualLhs.data(),actualLhs.outerStride(),
actualRhs.data(),actualRhs.innerStride(),
actualDestPtr,1,compatibleAlpha);
if (!evalToDest)
{
if(!alphaIsCompatible)
dest += actualAlpha * MappedDest(actualDestPtr, dest.size());
else
dest = MappedDest(actualDestPtr, dest.size());
}
}
};
// Row-major counterpart: the kernel writes directly into dest (any inner
// stride is honoured), but it requires a packed rhs vector, so a strided rhs
// is first copied into an aligned temporary.
template<> struct trmv_selector<RowMajor>
{
template<int Mode, typename Lhs, typename Rhs, typename Dest>
static void run(const TriangularProduct<Mode,true,Lhs,false,Rhs,true>& prod, Dest& dest, const typename TriangularProduct<Mode,true,Lhs,false,Rhs,true>::Scalar& alpha)
{
typedef TriangularProduct<Mode,true,Lhs,false,Rhs,true> ProductType;
typedef typename ProductType::LhsScalar LhsScalar;
typedef typename ProductType::RhsScalar RhsScalar;
typedef typename ProductType::Scalar ResScalar;
typedef typename ProductType::Index Index;
typedef typename ProductType::ActualLhsType ActualLhsType;
typedef typename ProductType::ActualRhsType ActualRhsType;
typedef typename ProductType::_ActualRhsType _ActualRhsType;
typedef typename ProductType::LhsBlasTraits LhsBlasTraits;
typedef typename ProductType::RhsBlasTraits RhsBlasTraits;
// Strip transpose/conjugate/scalar-multiple wrappers to reach the bare operands.
typename add_const<ActualLhsType>::type actualLhs = LhsBlasTraits::extract(prod.lhs());
typename add_const<ActualRhsType>::type actualRhs = RhsBlasTraits::extract(prod.rhs());
// Fold the scalar factors carried by the expression wrappers into alpha.
ResScalar actualAlpha = alpha * LhsBlasTraits::extractScalarFactor(prod.lhs())
* RhsBlasTraits::extractScalarFactor(prod.rhs());
enum {
// The kernel expects a packed rhs (inner stride 1).
DirectlyUseRhs = _ActualRhsType::InnerStrideAtCompileTime==1
};
// Fixed-size fallback buffer, only engaged when the rhs is not packed.
gemv_static_vector_if<RhsScalar,_ActualRhsType::SizeAtCompileTime,_ActualRhsType::MaxSizeAtCompileTime,!DirectlyUseRhs> static_rhs;
// Use the rhs in place when packed; otherwise stage it in an aligned buffer
// (the static buffer above or a constructed temporary -- the macro decides).
ei_declare_aligned_stack_constructed_variable(RhsScalar,actualRhsPtr,actualRhs.size(),
DirectlyUseRhs ? const_cast<RhsScalar*>(actualRhs.data()) : static_rhs.data());
if(!DirectlyUseRhs)
{
#ifdef EIGEN_DENSE_STORAGE_CTOR_PLUGIN
int size = actualRhs.size();
EIGEN_DENSE_STORAGE_CTOR_PLUGIN
#endif
// Pack the strided rhs into the temporary.
Map<typename _ActualRhsType::PlainObject>(actualRhsPtr, actualRhs.size()) = actualRhs;
}
// Invoke the row-major triangular matrix*vector kernel; it writes straight
// into dest, honouring dest's inner stride, with the full (possibly complex) alpha.
internal::triangular_matrix_vector_product
<Index,Mode,
LhsScalar, LhsBlasTraits::NeedToConjugate,
RhsScalar, RhsBlasTraits::NeedToConjugate,
RowMajor>
::run(actualLhs.rows(),actualLhs.cols(),
actualLhs.data(),actualLhs.outerStride(),
actualRhsPtr,1,
dest.data(),dest.innerStride(),
actualAlpha);
}
};
} // end namespace internal
} // end namespace Eigen
#endif // EIGEN_TRIANGULARMATRIXVECTOR_H
| {
"content_hash": "9a54fe7644938506d0cb65cbc0a6046a",
"timestamp": "",
"source": "github",
"line_count": 354,
"max_line_length": 180,
"avg_line_length": 43.59604519774011,
"alnum_prop": 0.6881358128685284,
"repo_name": "liyu1990/tensorflow",
"id": "9863076958e41079f91dea1f981fe4c1afa3266d",
"size": "15433",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "third_party/eigen3/Eigen/src/Core/products/TriangularMatrixVector.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "127080"
},
{
"name": "C++",
"bytes": "4875335"
},
{
"name": "CSS",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "631255"
},
{
"name": "Java",
"bytes": "44192"
},
{
"name": "JavaScript",
"bytes": "5067"
},
{
"name": "Objective-C",
"bytes": "630"
},
{
"name": "Protocol Buffer",
"bytes": "44898"
},
{
"name": "Python",
"bytes": "2425565"
},
{
"name": "Shell",
"bytes": "1036"
},
{
"name": "TypeScript",
"bytes": "236089"
}
],
"symlink_target": ""
} |
package org.willianzhao.omnitureanalysis.mapred.commons.model;
import org.willianzhao.omnitureanalysis.mapred.commons.dm.LocationIdentifier;
import org.willianzhao.omnitureanalysis.mapred.commons.dm.PageIdentifier;
import org.willianzhao.omnitureanalysis.mapred.commons.misc.ProjectConstant;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
/**
* Created by weilzhao on 8/25/14.
*/
public class TransactionStudyFactory extends MRBaseFactory {

    public TransactionStudyFactory(Configuration conf, Path inputSplitPath) throws Exception {
        super(conf, inputSplitPath);
    }

    /**
     * Builds a {@link TransactionRecord} from a pre-split stub-trans row.
     *
     * A well-formed row has exactly 10 fields; anything else yields an empty
     * record (no fields populated), preserving the lenient behaviour
     * expected by downstream consumers.
     *
     * @param rawFields the raw, already tokenized input row
     * @return a populated record, or an empty one for malformed rows
     */
    public TransactionRecord readFromStubTrans(String[] rawFields) {
        TransactionRecord record = new TransactionRecord();
        if (rawFields.length != 10) {
            return record;
        }
        // Use buyerID (field 8), not the guid, as the user ID in the record.
        record.setUserID(rawFields[8]);
        record.setPurchaseTime(rawFields[1]);
        record.setTransactionID(rawFields[2]);
        record.setTicketID(rawFields[3]);
        record.setPurchaseEventID(rawFields[4]);
        record.setPurchaseGenreID(rawFields[5]);
        record.setDataSource(ProjectConstant.SOURCE_STUB_TRANS);
        return record;
    }

    /**
     * Builds a {@link TransactionRecord} from an Omniture hit row, using the
     * page and location identifier helpers to extract the transaction ID and
     * the geo/IP attributes.
     *
     * @param rawFields the raw, already tokenized Omniture row
     * @return a record carrying the transaction ID and location fields
     */
    public TransactionRecord readFromOmniture(String[] rawFields) {
        TransactionRecord record = new TransactionRecord();
        PageIdentifier pageIdentifier = new PageIdentifier(conf, rawFields);
        LocationIdentifier locationIdentifier = new LocationIdentifier(conf, rawFields);
        record.setTransactionID(pageIdentifier.getTransactionID());
        record.setIp(locationIdentifier.getIPAddress());
        record.setGeoCity(locationIdentifier.getGeoCity());
        record.setGeoRegion(locationIdentifier.getGeoRegion());
        record.setGeoCountry(locationIdentifier.getGeoCountry());
        record.setGeoZipcode(locationIdentifier.getGeoZipcode());
        record.setDataSource(ProjectConstant.SOURCE_OMNITURE);
        return record;
    }
}
| {
"content_hash": "222ba0936746f7a1c934cc8858a21b74",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 94,
"avg_line_length": 40.88135593220339,
"alnum_prop": 0.6811774461028193,
"repo_name": "willianzhao/omnitureanalysis",
"id": "981173eebda2f8521a54faa92694299f040ef2bc",
"size": "2412",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "projects/public/src/main/java/org/willianzhao/omnitureanalysis/mapred/commons/model/TransactionStudyFactory.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "421634"
}
],
"symlink_target": ""
} |
package com.clockwork.asset;
import com.clockwork.material.Material;
import com.clockwork.shader.Shader;
/**
 * An AssetProcessor applies processing to assets after they have been
 * loaded.
 *
 * Each processor is assigned to a particular asset type (represented by a
 * {@link Class}); any loaded asset of that class is handed to the assigned
 * processor.
 */
public interface AssetProcessor {
/**
 * Applies post processing to an asset.
 * The method may return an object that is not the same
 * instance as the parameter object, and it could be from a different class.
 *
 * @param key the key under which the asset was loaded
 * @param obj the asset that was loaded from an AssetLoader
 * @return either the same object with processing applied, or an instance
 * of a new object
 */
public Object postProcess(AssetKey key, Object obj);
/**
 * Creates a clone of the given asset.
 * If no clone is desired, then the same instance can be returned,
 * otherwise, a clone should be created.
 * For example, a clone of a Material should have its own set
 * of unique parameters that can be changed just for that instance,
 * but it may share certain other data if it sees fit (like the Shader).
 *
 * @param obj the asset to clone
 * @return the cloned asset, or the same as the given argument if no
 * clone is needed
 */
public Object createClone(Object obj);
}
| {
"content_hash": "55941b25307a958186f9c622a60b7dfe",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 80,
"avg_line_length": 35.170731707317074,
"alnum_prop": 0.6865464632454924,
"repo_name": "PlanetWaves/clockworkengine",
"id": "8ed5187116cd2a0934c88a683aad505d1bfbe442",
"size": "1442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "branches/3.0/engine/src/core/com/clockwork/asset/AssetProcessor.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "43610"
},
{
"name": "C++",
"bytes": "613083"
},
{
"name": "GLSL",
"bytes": "564936"
},
{
"name": "HTML",
"bytes": "73891"
},
{
"name": "Java",
"bytes": "19438695"
},
{
"name": "Makefile",
"bytes": "26492"
},
{
"name": "Shell",
"bytes": "1085"
},
{
"name": "XSLT",
"bytes": "10130"
}
],
"symlink_target": ""
} |
<?php
/*
* (c) Kévin Dunglas <dunglas@gmail.com>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
*/
namespace Dunglas\ActionBundle\Tests\Fixtures\TestBundle\Action;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Annotation\Route;
/**
 * Single-action (invokable) controller used to exercise annotation-based
 * routing in the test suite.
 *
 * @author Kévin Dunglas <dunglas@gmail.com>
 */
class RouteAnnotationAction
{
/**
 * Handles the route declared by the annotation below and returns a fixed
 * plain-text response.
 *
 * @Route("/annotation")
 *
 * @return Response
 */
public function __invoke()
{
return new Response('Hey, ho, let\'s go!');
}
}
| {
"content_hash": "20c05b84237cc25ba4f045660d0d782a",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 65,
"avg_line_length": 20.666666666666668,
"alnum_prop": 0.6756272401433692,
"repo_name": "yoan-durand/test-api-platform",
"id": "243d9d74add27a7b3e549ea1c9367b9d7dc1f886",
"size": "560",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "apps/platform/vendor/dunglas/action-bundle/Tests/Fixtures/TestBundle/Action/RouteAnnotationAction.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "102118"
},
{
"name": "HTML",
"bytes": "729"
},
{
"name": "JavaScript",
"bytes": "335574"
},
{
"name": "PHP",
"bytes": "68732"
},
{
"name": "Shell",
"bytes": "893"
},
{
"name": "TypeScript",
"bytes": "1933"
}
],
"symlink_target": ""
} |
<?php
/**
 * Cms module setup upgrade script, 0.7.5 -> 0.7.6.
 *
 * Rebuilds the `identifier` index on the CMS page table and removes the
 * `store_id` columns from the page and block tables.
 */
$installer = $this;
/* @var $installer Mage_Core_Model_Resource_Setup */
$installer->startSetup();
// Drop the existing `identifier` key and recreate it as a plain KEY
// (non-unique index).  NOTE(review): whether the original key was UNIQUE is
// not visible here -- confirm against the prior install script.
$installer->getConnection()->dropKey($this->getTable('cms/page'), 'identifier');
$installer->run("ALTER TABLE `{$this->getTable('cms/page')}` ADD KEY `identifier` (`identifier`)");
// Remove the single-store `store_id` columns -- presumably superseded by
// store-link tables; verify against the 0.7.6 schema.
$installer->getConnection()->dropColumn($this->getTable('cms/page'), 'store_id');
$installer->getConnection()->dropColumn($this->getTable('cms/block'), 'store_id');
$installer->endSetup();
| {
"content_hash": "70dc834e54a738e6a068b852b1f8c584",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 99,
"avg_line_length": 28.41176470588235,
"alnum_prop": 0.6873706004140787,
"repo_name": "tagalpha/library",
"id": "454d6232b9eca0d2bedade5e5b7be7a768e50257",
"size": "1433",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "app/code/core/Mage/Cms/sql/cms_setup/mysql4-upgrade-0.7.5-0.7.6.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "20063"
},
{
"name": "ApacheConf",
"bytes": "8117"
},
{
"name": "Batchfile",
"bytes": "1036"
},
{
"name": "CSS",
"bytes": "1805855"
},
{
"name": "HTML",
"bytes": "5531269"
},
{
"name": "JavaScript",
"bytes": "1295882"
},
{
"name": "PHP",
"bytes": "45317581"
},
{
"name": "PowerShell",
"bytes": "1028"
},
{
"name": "Ruby",
"bytes": "288"
},
{
"name": "Shell",
"bytes": "19717"
},
{
"name": "XSLT",
"bytes": "2066"
}
],
"symlink_target": ""
} |
function e = testSetReset()
% test the reset and setState methods of the pelican object
%
% Runs a battery of sub-tests exercising platform setX, qrsim reset and
% resetSeed under fixed/random seeds, with and without wind/GPS noise.
% Returns 0 when every sub-test passes, nonzero otherwise.
%
% NOTE(review): 'clear all' inside a function also clears breakpoints and
% cached functions -- presumably intentional test isolation; confirm.
clear all;
cd('setreset');
e = 0;
% after a setState X should be what was set
e = e | simpleSetState();
% with random seed
% init should give different eX than an init + reset but the same X
e = e | loudTest('initAndResetFromRandomSeed','init and reset with random seed');
% with fixed seed
% init should give same eX and X than an init + reset
e = e | loudTest('initAndResetFromFixedSeed','init and reset with fixed seed');
% with fixed seed, no wind no GPS:
e = e | loudTest('initAndResetFromFixedSeedNoiseless','init and reset with fixed seed no wind no GPS');
% with random seed:
% two setState to the same state should give the same X but different eX
e = e | loudTest('setAndRunFromRandomSeed','set and run twice with random seed');
% with fixed seed:
% two setState should give the same X and same eX
e = e | loudTest('setAndRunFromFixedSeed','set and run twice with fixed seed');
% with fixed seed, no wind no GPS:
% two setState should give the same X and same eX
e = e | loudTest('setAndRunNoiselessFromFixedSeed','set and run twice with fixed seed no wind no GPS');
% with random seed:
% two reset of qrsim should give the same X but different eX
e = e | loudTest('doubleQRSimResetWithRandomSeed','double qrsim reset with random seed','TaskNoWindRandomSeed');
e = e | loudTest('doubleQRSimResetWithRandomSeed','double qrsim reset with random seed windy','TaskWindRandomSeed');
% with fixed seed:
% two reset of qrsim should give the same X and same eX
e = e | loudTest('doubleQRSimResetWithFixedSeed','double qrsim reset with fixed seed','TaskNoWindFixedSeed');
e = e | loudTest('doubleQRSimResetWithFixedSeed','double qrsim reset with fixed seed windy','TaskWindFixedSeed');
% with random seed:
% two reset of qrsim should give the same X but different eX
e = e | loudTest('initAndQRSimResetWithRandomSeed','init vs reset qrsim with random seed','TaskNoWindRandomSeed');
e = e | loudTest('initAndQRSimResetWithRandomSeed','init vs reset qrsim with random seed windy','TaskWindRandomSeed');
% with fixed seed:
% the init E and eX and the ones after a reset of qrsim should be the same
e = e | loudTest('initAndQRSimResetWithFixedSeed','init vs reset qrsim with fixed seed','TaskNoWindFixedSeed');
e = e | loudTest('initAndQRSimResetWithFixedSeed','init vs reset qrsim with fixed seed windy','TaskWindFixedSeed');
cd('..');
end
function e = initAndQRSimResetWithFixedSeed(tsk)
% With a fixed seed, 50 steps after init must match 50 steps after a
% resetSeed+reset: both the state X and the noisy/estimated state eX are
% expected to be identical.  Returns 0 on success, nonzero on failure.
e = 0;
% constant control input applied at every step
U = [0;0;0.59004353928;0;11];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init(tsk);
for i=1:50
qrsim.step(U);
end
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
qrsim.resetSeed();
qrsim.reset();
for i=1:50
qrsim.step(U);
end
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% fixed seed: both X and eX must match exactly
e = e || ~all(X1==X2) || ~all(eX1==eX2);
% clear the state
clear state;
end
function e = initAndQRSimResetWithRandomSeed(tsk)
% With a random seed, 50 steps after init and 50 steps after a
% resetSeed+reset must yield the same state X but DIFFERENT noisy eX
% (the noise streams are re-randomized).  Returns 0 on success.
e = 0;
% constant control input applied at every step
U = [0;0;0.59004353928;0;11];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init(tsk);
for i=1:50
qrsim.step(U);
end
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
qrsim.resetSeed();
qrsim.reset();
for i=1:50
qrsim.step(U);
end
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% X must match; identical eX would indicate the noise was NOT re-seeded
e = e || ~all(X1==X2) || all(eX1==eX2);
% clear the state
clear state;
end
function e = doubleQRSimResetWithRandomSeed(tsk)
% Two consecutive resetSeed+reset cycles under a random seed must produce
% the same state X but different noisy eX.  Returns 0 on success.
e = 0;
% constant control input applied at every step
U = [0;0;0.59004353928;0;11];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init(tsk);
qrsim.resetSeed();
qrsim.reset();
for i=1:50
qrsim.step(U);
end
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
qrsim.resetSeed();
qrsim.reset();
for i=1:50
qrsim.step(U);
end
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% X must match; identical eX would indicate the noise was NOT re-seeded
e = e || ~all(X1==X2) || all(eX1==eX2);
% clear the state
clear state;
end
function e = doubleQRSimResetWithFixedSeed(tsk)
% Two consecutive resetSeed+reset cycles under a fixed seed must reproduce
% both the state X and the noisy eX exactly.  Returns 0 on success.
e = 0;
% constant control input applied at every step
U = [0;0;0.59004353928;0;11];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init(tsk);
qrsim.resetSeed();
qrsim.reset();
for i=1:50
qrsim.step(U);
end
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
qrsim.resetSeed();
qrsim.reset();
for i=1:50
qrsim.step(U);
end
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% fixed seed: both X and eX must match exactly
e = e || ~all(X1==X2) || ~all(eX1==eX2);
% clear the state
clear state;
end
function e = initAndResetFromRandomSeed()
% After init, a setX to the platform's (presumed) initial pose must leave
% the state X unchanged while the noisy eX is re-drawn, so eX must differ.
% NOTE(review): assumes the task initializes the platform exactly at setX
% below -- confirm against TaskNoWindRandomSeed.  Returns 0 on success.
e = 0;
setX = [1;2;3;0;0;pi;0;0;0;0;0;0];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init('TaskNoWindRandomSeed');
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
state.platforms{1}.setX(setX);
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% X must match; identical eX would indicate the noise was not re-drawn
e = e || ~all(X1==X2) || all(eX1==eX2);
% clear the state
clear state;
end
function e = initAndResetFromFixedSeed()
% Manually re-seeding the simulator streams (fixed seed 12345), resetting
% the GPS space segment and re-applying the same setX twice must reproduce
% both the state X and the noisy eX exactly.  Returns 0 on success.
e = 0;
setX = [1;2;3;0;0;pi;0;0;0;0;0;0];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init('TaskNoWindFixedSeed');
% first pass: rewind time, recreate all random streams from a fixed seed
state.t=0;
state.rStreams = RandStream.create('mrg32k3a','seed',12345,'NumStreams',state.numRStreams,'CellOutput',1);
state.environment.gpsspacesegment.reset();
state.platforms{1}.setX(setX);
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
% second pass: identical re-seeding must reproduce identical noise draws
state.t=0;
state.rStreams = RandStream.create('mrg32k3a','seed',12345,'NumStreams',state.numRStreams,'CellOutput',1);
state.environment.gpsspacesegment.reset();
state.platforms{1}.setX(setX);
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% fixed seed: both X and eX must match exactly
e = e || ~all(X1==X2) || ~all(eX1==eX2);
% clear the state
clear state;
end
function e = initAndResetFromFixedSeedNoiseless()
% With a fixed seed and noiseless GPS / no wind, a resetSeed+reset followed
% by the same setX must reproduce both X and eX exactly.
% Returns 0 on success, nonzero on failure.
e = 0;
setX = [1;2;3;0;0;pi;0;0;0;0;0;0];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init('TaskNoiselessGPSNoWindFixedSeed');
state.platforms{1}.setX(setX);
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
qrsim.resetSeed();
qrsim.reset();
state.platforms{1}.setX(setX);
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% both X and eX must match exactly
e = e || ~all(X1==X2) || ~all(eX1==eX2);
% clear the state
clear state;
end
function e = setAndRunFromRandomSeed()
% Run 50 steps after init, then setX back to the (presumed) initial pose
% and run 50 more: with a random seed the state X must repeat but the
% noisy eX must differ.  Returns 0 on success, nonzero on failure.
e = 0;
% constant control input applied at every step
U = [0;0;0.59004353928;0;11];
setX = [1;2;3;0;0;pi;0;0;0;0;0;0];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init('TaskNoWindRandomSeed');
for i=1:50
qrsim.step(U);
end
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
state.platforms{1}.setX(setX);
for i=1:50
qrsim.step(U);
end
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% X must match; identical eX would indicate the noise repeated
e = e || ~all(X1==X2) || all(eX1==eX2);
% clear the state
clear state;
end
function e = setAndRunFromFixedSeed()
% Re-seed the simulator streams (fixed seed 12345), setX, run 50 steps --
% twice.  Both the state X and the noisy eX must repeat exactly.
% Returns 0 on success, nonzero on failure.
e = 0;
% constant control input applied at every step
U = [0;0;0.59004353928;0;11];
setX = [1;2;3;0;0;pi;0;0;0;0;0;0];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init('TaskNoWindFixedSeed');
% first pass: rewind time, recreate all random streams from a fixed seed
state.t=0;
state.rStreams = RandStream.create('mrg32k3a','seed',12345,'NumStreams',state.numRStreams,'CellOutput',1);
state.environment.gpsspacesegment.reset();
state.platforms{1}.setX(setX);
for i=1:50
qrsim.step(U);
end
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
% second pass: identical re-seeding must reproduce identical noise draws
state.t=0;
state.rStreams = RandStream.create('mrg32k3a','seed',12345,'NumStreams',state.numRStreams,'CellOutput',1);
state.environment.gpsspacesegment.reset();
state.platforms{1}.setX(setX);
for i=1:50
qrsim.step(U);
end
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% fixed seed: both X and eX must match exactly
e = e || ~all(X1==X2) || ~all(eX1==eX2);
% clear the state
clear state;
end
function e = setAndRunNoiselessFromFixedSeed()
% With a fixed seed and noiseless GPS / no wind: setX + 50 steps, then
% resetSeed+reset + setX + 50 steps must reproduce both X and eX exactly.
% Returns 0 on success, nonzero on failure.
e = 0;
% constant control input applied at every step
U = [0;0;0.59004353928;0;11];
setX = [1;2;3;0;0;pi;0;0;0;0;0;0];
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init('TaskNoiselessGPSNoWindFixedSeed');
state.platforms{1}.setX(setX);
for i=1:50
qrsim.step(U);
end
X1 = state.platforms{1}.getX();
eX1 = state.platforms{1}.getEX();
qrsim.resetSeed();
qrsim.reset();
state.platforms{1}.setX(setX);
for i=1:50
qrsim.step(U);
end
X2 = state.platforms{1}.getX();
eX2 = state.platforms{1}.getEX();
% fixed seed: both X and eX must match exactly
e = e || ~all(X1==X2) || ~all(eX1==eX2);
% clear the state
clear state;
end
function e = simpleSetState()
% Exercises the argument validation of platform setX: wrong-size and
% out-of-bounds states must raise 'pelican:wrongsetstate', while valid
% 6/12/13-element states must be accepted and read back via getX.
% Returns 0 on success, nonzero on failure.
e = 0;
% create simulator object
qrsim = QRSim();
% load task parameters and do housekeeping
state = qrsim.init('TaskNoWindRandomSeed');
% failing
shortX = [0;1;2];
e = e | loudTest('failingState','state too short',shortX,state,'pelican:wrongsetstate');
longX = [1;2;3;4;5;6;7;8;9;10;11;12;13;14];
e = e | loudTest('failingState','state too long',longX,state,'pelican:wrongsetstate');
wrongX = [1;2;3;4;5];
e = e | loudTest('failingState','state size wrong 1',wrongX,state,'pelican:wrongsetstate');
wrongX = [1;2;3;4;5;6;7;8];
e = e | loudTest('failingState','state size wrong 2',wrongX,state,'pelican:wrongsetstate');
limits = state.platforms{1}.getStateLimits();
% NOTE(review): the oobX vectors below have only 3 elements, which is itself
% an invalid size (cf. shortX); since the exception identifier is the same,
% these cases cannot distinguish "out of bounds" from "wrong size".
oobX = [limits(1,2)*1.1;0;0];
e = e | loudTest('failingState','posx value out of bounds',oobX,state,'pelican:wrongsetstate');
oobX = [0;limits(2,2)*1.1;0];
e = e | loudTest('failingState','posy value out of bounds',oobX,state,'pelican:wrongsetstate');
oobX = [0;0;limits(3,2)*1.1];
e = e | loudTest('failingState','posz value out of bounds',oobX,state,'pelican:wrongsetstate');
validX = [1;2;3;0.01;0.01;1];
e = e | loudTest('validSetState','valid state of size 6',validX,state);
validX = [1;2;3;0.01;0.01;1;0.01;0.01;0.01;0.01;0.01;0.01];
e = e | loudTest('validSetState','valid state of size 12',validX,state);
validX = [1;2;3;0.01;0.01;1;0.01;0.01;0.01;0.01;0.01;0.01;state.platforms{1}.MASS*state.platforms{1}.G];
e = e | loudTest('validSetState','valid state of size 13',validX,state);
% clear the state
clear state;
end
function e = failingState(X,state,id)
% Verify that setting the invalid state X raises the exception identified
% by id.  Returns 0 when the expected exception is observed, 1 otherwise
% (no exception at all, or an exception with a different identifier).
e = 1; % assume failure until the expected exception is observed
try
    state.platforms{1}.setX(X);
    % falling through means setX accepted the invalid state: e stays 1
catch exception
    if(strcmp(exception.identifier,id))
        e = 0;
    else
        fprintf('\nUNEXPECTED EXCEPTION:%s \nMESSAGE:%s\n',exception.identifier,exception.message);
    end
end
end
function e = validSetState(x,state)
% Verify that the valid state x is accepted by setX and correctly read back
% by getX.  For a 6-element x the velocity/rate part must read back as
% zeros; for 12/13 elements the state must read back verbatim.
% Returns 0 on success, 1 on failure.
e = 0;
try
    state.platforms{1}.setX(x);
catch exception
    e = 1;
    fprintf('\nUNEXPECTED EXCEPTION:%s \nMESSAGE:%s\n',exception.identifier,exception.message);
end
X = state.platforms{1}.getX();
% compare the read-back state against the requested one
switch length(x)
    case 6
        e = e | ~all(X(1:12)==[x;zeros(6,1)]);
    case 12
        e = e | ~all(X(1:12)==x);
    case 13
        e = e | ~all(X==x);
end
end
function [ e ] = loudTest(fun,msg,varargin)
%LOUDTEST Run a test function and print its result to the console.
%
% e = LOUDTEST(fun,msg,...) invokes the function named (or handled) by fun,
% forwarding any additional arguments, prints "Test <msg> [PASSED]" when
% the returned error flag is false and "Test <msg> [FAILED]" otherwise,
% and returns that error flag.

% varargin{:} expands to a comma-separated argument list, so this forwards
% any number of extra arguments (the previous switch only handled 0 to 3).
e = feval(fun,varargin{:});
if(e)
    fprintf(['Test ',msg,' [FAILED]\n']);
else
    fprintf(['Test ',msg,' [PASSED]\n']);
end
end
| {
"content_hash": "38a19e9ac72cca33fc2fb0cbbe2f9fc3",
"timestamp": "",
"source": "github",
"line_count": 534,
"max_line_length": 118,
"avg_line_length": 21.09550561797753,
"alnum_prop": 0.6824678206835331,
"repo_name": "UCL-CompLACS/qrsim",
"id": "e773b551d8f9991d71e2d289d6f3797aedad1805",
"size": "11265",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/testSetReset.m",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "38977"
},
{
"name": "C++",
"bytes": "34935"
},
{
"name": "Java",
"bytes": "4907"
},
{
"name": "M",
"bytes": "236788"
},
{
"name": "Matlab",
"bytes": "1526777"
},
{
"name": "Objective-C",
"bytes": "961"
},
{
"name": "TeX",
"bytes": "160387"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Z. Mykol. 46(2): 217 (1980)
#### Original name
Lamproderma laxum H. Neubert, 1980
### Remarks
null | {
"content_hash": "75b724abf811b1e4c5de0e3dda55ebd3",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 14.384615384615385,
"alnum_prop": 0.6898395721925134,
"repo_name": "mdoering/backbone",
"id": "da23c4f027468add7f05daa67606656a862a6452",
"size": "245",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Protozoa/Mycetozoa/Myxomycetes/Stemonitales/Stemonitidaceae/Lamproderma/Lamproderma laxum/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
from unittest import mock
from oslotest import base
from monasca_api.common.rest import exceptions
from monasca_api.common.rest import utils
class TestRestUtils(base.BaseTestCase):
    """Unit tests for ``monasca_api.common.rest.utils``.

    The ``json`` module used inside ``utils`` is patched for every test so
    the success and failure paths of (de)serialization can be driven
    explicitly through the mock.
    """

    def setUp(self):
        super(TestRestUtils, self).setUp()
        self.mock_json_patcher = mock.patch(
            'monasca_api.common.rest.utils.json')
        self.mock_json = self.mock_json_patcher.start()

    def tearDown(self):
        super(TestRestUtils, self).tearDown()
        self.mock_json_patcher.stop()

    def test_read_body_with_success(self):
        """read_body() feeds the payload's raw content to json.loads."""
        self.mock_json.loads.return_value = ""
        stream = mock.Mock()

        utils.read_body(stream)

        self.mock_json.loads.assert_called_once_with(
            stream.read.return_value)

    def test_read_body_empty_content_in_payload(self):
        """A payload whose read() yields None makes read_body() return None."""
        self.mock_json.loads.return_value = ""
        stream = mock.Mock()
        stream.read.return_value = None

        self.assertIsNone(utils.read_body(stream))

    def test_read_body_json_loads_exception(self):
        """A json.loads failure surfaces as DataConversionException."""
        self.mock_json.loads.side_effect = Exception
        stream = mock.Mock()

        self.assertRaises(exceptions.DataConversionException,
                          utils.read_body, stream)

    def test_read_body_unsupported_content_type(self):
        """An unrecognized content type raises UnsupportedContentTypeException."""
        content_type = mock.Mock()

        self.assertRaises(
            exceptions.UnsupportedContentTypeException, utils.read_body, None,
            content_type)

    def test_read_body_unreadable_content_error(self):
        """A payload whose read() raises is reported as UnreadableContentError."""
        stream = mock.Mock()
        stream.read.side_effect = Exception

        self.assertRaises(
            exceptions.UnreadableContentError,
            utils.read_body, stream)

    def test_as_json_success(self):
        """as_json() returns whatever json.dumps produces."""
        self.assertEqual(utils.as_json(mock.Mock()),
                         self.mock_json.dumps.return_value)

    def test_as_json_with_exception(self):
        """A json.dumps failure surfaces as DataConversionException."""
        self.mock_json.dumps.side_effect = Exception

        self.assertRaises(exceptions.DataConversionException,
                          utils.as_json, mock.Mock())
| {
"content_hash": "35be15991a7f5ef91ea6bba151f2520a",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 81,
"avg_line_length": 31.159420289855074,
"alnum_prop": 0.6576744186046511,
"repo_name": "openstack/monasca-api",
"id": "33e60066c6c2117e0895af01a1004f9b04497e18",
"size": "2696",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "monasca_api/tests/test_rest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2638"
},
{
"name": "Java",
"bytes": "883947"
},
{
"name": "Jinja",
"bytes": "32747"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "936668"
},
{
"name": "Shell",
"bytes": "129514"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.