<?php
/**
* Base class that represents a query for the 'address' table.
*
*
*
* @method Oops_Db_AddressQuery orderByIdAddress($order = Criteria::ASC) Order by the id_address column
* @method Oops_Db_AddressQuery orderByIdCountry($order = Criteria::ASC) Order by the id_country column
* @method Oops_Db_AddressQuery orderByIdState($order = Criteria::ASC) Order by the id_state column
* @method Oops_Db_AddressQuery orderByIdCustomer($order = Criteria::ASC) Order by the id_customer column
* @method Oops_Db_AddressQuery orderByIdManufacturer($order = Criteria::ASC) Order by the id_manufacturer column
* @method Oops_Db_AddressQuery orderByIdSupplier($order = Criteria::ASC) Order by the id_supplier column
* @method Oops_Db_AddressQuery orderByAlias($order = Criteria::ASC) Order by the alias column
* @method Oops_Db_AddressQuery orderByCompany($order = Criteria::ASC) Order by the company column
* @method Oops_Db_AddressQuery orderByLastname($order = Criteria::ASC) Order by the lastname column
* @method Oops_Db_AddressQuery orderByFirstname($order = Criteria::ASC) Order by the firstname column
* @method Oops_Db_AddressQuery orderByAddress1($order = Criteria::ASC) Order by the address1 column
* @method Oops_Db_AddressQuery orderByAddress2($order = Criteria::ASC) Order by the address2 column
* @method Oops_Db_AddressQuery orderByPostcode($order = Criteria::ASC) Order by the postcode column
* @method Oops_Db_AddressQuery orderByCity($order = Criteria::ASC) Order by the city column
* @method Oops_Db_AddressQuery orderByOther($order = Criteria::ASC) Order by the other column
* @method Oops_Db_AddressQuery orderByPhone($order = Criteria::ASC) Order by the phone column
* @method Oops_Db_AddressQuery orderByPhoneMobile($order = Criteria::ASC) Order by the phone_mobile column
* @method Oops_Db_AddressQuery orderByVatNumber($order = Criteria::ASC) Order by the vat_number column
* @method Oops_Db_AddressQuery orderByDni($order = Criteria::ASC) Order by the dni column
* @method Oops_Db_AddressQuery orderByDateAdd($order = Criteria::ASC) Order by the date_add column
* @method Oops_Db_AddressQuery orderByDateUpd($order = Criteria::ASC) Order by the date_upd column
* @method Oops_Db_AddressQuery orderByActive($order = Criteria::ASC) Order by the active column
* @method Oops_Db_AddressQuery orderByDeleted($order = Criteria::ASC) Order by the deleted column
*
* @method Oops_Db_AddressQuery groupByIdAddress() Group by the id_address column
* @method Oops_Db_AddressQuery groupByIdCountry() Group by the id_country column
* @method Oops_Db_AddressQuery groupByIdState() Group by the id_state column
* @method Oops_Db_AddressQuery groupByIdCustomer() Group by the id_customer column
* @method Oops_Db_AddressQuery groupByIdManufacturer() Group by the id_manufacturer column
* @method Oops_Db_AddressQuery groupByIdSupplier() Group by the id_supplier column
* @method Oops_Db_AddressQuery groupByAlias() Group by the alias column
* @method Oops_Db_AddressQuery groupByCompany() Group by the company column
* @method Oops_Db_AddressQuery groupByLastname() Group by the lastname column
* @method Oops_Db_AddressQuery groupByFirstname() Group by the firstname column
* @method Oops_Db_AddressQuery groupByAddress1() Group by the address1 column
* @method Oops_Db_AddressQuery groupByAddress2() Group by the address2 column
* @method Oops_Db_AddressQuery groupByPostcode() Group by the postcode column
* @method Oops_Db_AddressQuery groupByCity() Group by the city column
* @method Oops_Db_AddressQuery groupByOther() Group by the other column
* @method Oops_Db_AddressQuery groupByPhone() Group by the phone column
* @method Oops_Db_AddressQuery groupByPhoneMobile() Group by the phone_mobile column
* @method Oops_Db_AddressQuery groupByVatNumber() Group by the vat_number column
* @method Oops_Db_AddressQuery groupByDni() Group by the dni column
* @method Oops_Db_AddressQuery groupByDateAdd() Group by the date_add column
* @method Oops_Db_AddressQuery groupByDateUpd() Group by the date_upd column
* @method Oops_Db_AddressQuery groupByActive() Group by the active column
* @method Oops_Db_AddressQuery groupByDeleted() Group by the deleted column
*
* @method Oops_Db_AddressQuery leftJoin($relation) Adds a LEFT JOIN clause to the query
* @method Oops_Db_AddressQuery rightJoin($relation) Adds a RIGHT JOIN clause to the query
* @method Oops_Db_AddressQuery innerJoin($relation) Adds a INNER JOIN clause to the query
*
* @method Oops_Db_Address findOne(PropelPDO $con = null) Return the first Oops_Db_Address matching the query
* @method Oops_Db_Address findOneOrCreate(PropelPDO $con = null) Return the first Oops_Db_Address matching the query, or a new Oops_Db_Address object populated from the query conditions when no match is found
*
* @method Oops_Db_Address findOneByIdAddress(int $id_address) Return the first Oops_Db_Address filtered by the id_address column
* @method Oops_Db_Address findOneByIdCountry(int $id_country) Return the first Oops_Db_Address filtered by the id_country column
* @method Oops_Db_Address findOneByIdState(int $id_state) Return the first Oops_Db_Address filtered by the id_state column
* @method Oops_Db_Address findOneByIdCustomer(int $id_customer) Return the first Oops_Db_Address filtered by the id_customer column
* @method Oops_Db_Address findOneByIdManufacturer(int $id_manufacturer) Return the first Oops_Db_Address filtered by the id_manufacturer column
* @method Oops_Db_Address findOneByIdSupplier(int $id_supplier) Return the first Oops_Db_Address filtered by the id_supplier column
* @method Oops_Db_Address findOneByAlias(string $alias) Return the first Oops_Db_Address filtered by the alias column
* @method Oops_Db_Address findOneByCompany(string $company) Return the first Oops_Db_Address filtered by the company column
* @method Oops_Db_Address findOneByLastname(string $lastname) Return the first Oops_Db_Address filtered by the lastname column
* @method Oops_Db_Address findOneByFirstname(string $firstname) Return the first Oops_Db_Address filtered by the firstname column
* @method Oops_Db_Address findOneByAddress1(string $address1) Return the first Oops_Db_Address filtered by the address1 column
* @method Oops_Db_Address findOneByAddress2(string $address2) Return the first Oops_Db_Address filtered by the address2 column
* @method Oops_Db_Address findOneByPostcode(string $postcode) Return the first Oops_Db_Address filtered by the postcode column
* @method Oops_Db_Address findOneByCity(string $city) Return the first Oops_Db_Address filtered by the city column
* @method Oops_Db_Address findOneByOther(string $other) Return the first Oops_Db_Address filtered by the other column
* @method Oops_Db_Address findOneByPhone(string $phone) Return the first Oops_Db_Address filtered by the phone column
* @method Oops_Db_Address findOneByPhoneMobile(string $phone_mobile) Return the first Oops_Db_Address filtered by the phone_mobile column
* @method Oops_Db_Address findOneByVatNumber(string $vat_number) Return the first Oops_Db_Address filtered by the vat_number column
* @method Oops_Db_Address findOneByDni(string $dni) Return the first Oops_Db_Address filtered by the dni column
* @method Oops_Db_Address findOneByDateAdd(string $date_add) Return the first Oops_Db_Address filtered by the date_add column
* @method Oops_Db_Address findOneByDateUpd(string $date_upd) Return the first Oops_Db_Address filtered by the date_upd column
* @method Oops_Db_Address findOneByActive(boolean $active) Return the first Oops_Db_Address filtered by the active column
* @method Oops_Db_Address findOneByDeleted(boolean $deleted) Return the first Oops_Db_Address filtered by the deleted column
*
* @method array findByIdAddress(int $id_address) Return Oops_Db_Address objects filtered by the id_address column
* @method array findByIdCountry(int $id_country) Return Oops_Db_Address objects filtered by the id_country column
* @method array findByIdState(int $id_state) Return Oops_Db_Address objects filtered by the id_state column
* @method array findByIdCustomer(int $id_customer) Return Oops_Db_Address objects filtered by the id_customer column
* @method array findByIdManufacturer(int $id_manufacturer) Return Oops_Db_Address objects filtered by the id_manufacturer column
* @method array findByIdSupplier(int $id_supplier) Return Oops_Db_Address objects filtered by the id_supplier column
* @method array findByAlias(string $alias) Return Oops_Db_Address objects filtered by the alias column
* @method array findByCompany(string $company) Return Oops_Db_Address objects filtered by the company column
* @method array findByLastname(string $lastname) Return Oops_Db_Address objects filtered by the lastname column
* @method array findByFirstname(string $firstname) Return Oops_Db_Address objects filtered by the firstname column
* @method array findByAddress1(string $address1) Return Oops_Db_Address objects filtered by the address1 column
* @method array findByAddress2(string $address2) Return Oops_Db_Address objects filtered by the address2 column
* @method array findByPostcode(string $postcode) Return Oops_Db_Address objects filtered by the postcode column
* @method array findByCity(string $city) Return Oops_Db_Address objects filtered by the city column
* @method array findByOther(string $other) Return Oops_Db_Address objects filtered by the other column
* @method array findByPhone(string $phone) Return Oops_Db_Address objects filtered by the phone column
* @method array findByPhoneMobile(string $phone_mobile) Return Oops_Db_Address objects filtered by the phone_mobile column
* @method array findByVatNumber(string $vat_number) Return Oops_Db_Address objects filtered by the vat_number column
* @method array findByDni(string $dni) Return Oops_Db_Address objects filtered by the dni column
* @method array findByDateAdd(string $date_add) Return Oops_Db_Address objects filtered by the date_add column
* @method array findByDateUpd(string $date_upd) Return Oops_Db_Address objects filtered by the date_upd column
* @method array findByActive(boolean $active) Return Oops_Db_Address objects filtered by the active column
* @method array findByDeleted(boolean $deleted) Return Oops_Db_Address objects filtered by the deleted column
*
* @package propel.generator.prestashop.om
*/
abstract class Oops_Db_Propel_AddressQuery extends ModelCriteria
{
/**
 * Initializes internal state of Oops_Db_Propel_AddressQuery object.
 *
 * Delegates straight to ModelCriteria with this table's defaults.
 *
 * @param string $dbName The database name
 * @param string $modelName The phpName of a model, e.g. 'Book'
 * @param string $modelAlias The alias for the model in this query, e.g. 'b'
 */
public function __construct($dbName = 'prestashop', $modelName = 'Oops_Db_Address', $modelAlias = null)
{
parent::__construct($dbName, $modelName, $modelAlias);
}
/**
 * Returns a new Oops_Db_AddressQuery object.
 *
 * An existing Oops_Db_AddressQuery passed as $criteria is returned unchanged;
 * any other Criteria is merged into a freshly created query.
 *
 * @param string $modelAlias The alias of a model in the query
 * @param Criteria $criteria Optional Criteria to build the query from
 *
 * @return Oops_Db_AddressQuery
 */
public static function create($modelAlias = null, $criteria = null)
{
    // reuse the caller's query object when it is already the right type
    if ($criteria instanceof Oops_Db_AddressQuery) {
        return $criteria;
    }
    $instance = new Oops_Db_AddressQuery();
    if ($modelAlias !== null) {
        $instance->setModelAlias($modelAlias);
    }
    if ($criteria instanceof Criteria) {
        $instance->mergeWith($criteria);
    }
    return $instance;
}
/**
 * Find object by primary key.
 * Propel uses the instance pool to skip the database if the object exists.
 * Go fast if the query is untouched.
 *
 * <code>
 * $obj = $c->findPk(12, $con);
 * </code>
 *
 * @param mixed $key Primary key to use for the query
 * @param PropelPDO $con an optional connection object
 *
 * @return Oops_Db_Address|array|mixed the result, formatted by the current formatter
 */
public function findPk($key, $con = null)
{
    if ($key === null) {
        return null;
    }
    $pooled = Oops_Db_AddressPeer::getInstanceFromPool((string) $key);
    if ($pooled !== null && !$this->formatter) {
        // already hydrated in the instance pool and no custom formatter:
        // short-circuit the database entirely
        return $pooled;
    }
    if ($con === null) {
        $con = Propel::getConnection(Oops_Db_AddressPeer::DATABASE_NAME, Propel::CONNECTION_READ);
    }
    $this->basePreSelect($con);
    // any customization on the query forces the slow, Criteria-based path
    $isCustomized = $this->formatter || $this->modelAlias || $this->with || $this->select
        || $this->selectColumns || $this->asColumns || $this->selectModifiers
        || $this->map || $this->having || $this->joins;
    return $isCustomized
        ? $this->findPkComplex($key, $con)
        : $this->findPkSimple($key, $con);
}
/**
 * Find object by primary key using raw SQL to go fast.
 * Bypass doSelect() and the object formatter by using generated code.
 *
 * @param mixed $key Primary key to use for the query
 * @param PropelPDO $con A connection object
 *
 * @return Oops_Db_Address A model object, or null if the key is not found
 *
 * @throws PropelException when the SELECT statement cannot be executed
 */
protected function findPkSimple($key, $con)
{
    $sql = 'SELECT `ID_ADDRESS`, `ID_COUNTRY`, `ID_STATE`, `ID_CUSTOMER`, `ID_MANUFACTURER`, `ID_SUPPLIER`, `ALIAS`, `COMPANY`, `LASTNAME`, `FIRSTNAME`, `ADDRESS1`, `ADDRESS2`, `POSTCODE`, `CITY`, `OTHER`, `PHONE`, `PHONE_MOBILE`, `VAT_NUMBER`, `DNI`, `DATE_ADD`, `DATE_UPD`, `ACTIVE`, `DELETED` FROM `' . _DB_PREFIX_ . 'address` WHERE `ID_ADDRESS` = :p0';
    try {
        $stmt = $con->prepare($sql);
        $stmt->bindValue(':p0', $key, PDO::PARAM_INT);
        $stmt->execute();
    } catch (Exception $e) {
        // log and rethrow wrapped so callers get a Propel-level exception
        Propel::log($e->getMessage(), Propel::LOG_ERR);
        throw new PropelException(sprintf('Unable to execute SELECT statement [%s]', $sql), $e);
    }
    $result = null;
    $row = $stmt->fetch(PDO::FETCH_NUM);
    if ($row) {
        $result = new Oops_Db_Address();
        $result->hydrate($row);
        // seed the pool so subsequent findPk() calls skip the database
        Oops_Db_AddressPeer::addInstanceToPool($result, (string) $row[0]);
    }
    $stmt->closeCursor();
    return $result;
}
/**
 * Find object by primary key.
 *
 * Runs the full Criteria pipeline so formatters, joins, aliases etc. apply.
 *
 * @param mixed $key Primary key to use for the query
 * @param PropelPDO $con A connection object
 *
 * @return Oops_Db_Address|array|mixed the result, formatted by the current formatter
 */
protected function findPkComplex($key, $con)
{
    // a PK condition selects at most one row, so no limit(1) is needed;
    // clone only when the query must stay reusable
    $query = $this->isKeepQuery() ? clone $this : $this;
    $stmt = $query->filterByPrimaryKey($key)->doSelect($con);
    return $query->getFormatter()->init($query)->formatOne($stmt);
}
/**
 * Find objects by primary key
 * <code>
 * $objs = $c->findPks(array(12, 56, 832), $con);
 * </code>
 * @param array $keys Primary keys to use for the query
 * @param PropelPDO $con an optional connection object
 *
 * @return PropelObjectCollection|array|mixed the list of results, formatted by the current formatter
 */
public function findPks($keys, $con = null)
{
    if ($con === null) {
        $con = Propel::getConnection($this->getDbName(), Propel::CONNECTION_READ);
    }
    $this->basePreSelect($con);
    // clone only when the query must stay reusable after execution
    $query = $this->isKeepQuery() ? clone $this : $this;
    $stmt = $query->filterByPrimaryKeys($keys)->doSelect($con);
    return $query->getFormatter()->init($query)->format($stmt);
}
/**
 * Filter the query by primary key
 *
 * @param mixed $key Primary key to use for the query
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByPrimaryKey($key)
{
// the address table has a single-column primary key: id_address
return $this->addUsingAlias(Oops_Db_AddressPeer::ID_ADDRESS, $key, Criteria::EQUAL);
}
/**
 * Filter the query by a list of primary keys
 *
 * @param array $keys The list of primary key to use for the query
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByPrimaryKeys($keys)
{
// builds an IN (...) condition on the single-column PK id_address
return $this->addUsingAlias(Oops_Db_AddressPeer::ID_ADDRESS, $keys, Criteria::IN);
}
/**
 * Filter the query on the id_address column
 *
 * Example usage:
 * <code>
 * $query->filterByIdAddress(1234); // WHERE id_address = 1234
 * $query->filterByIdAddress(array(12, 34)); // WHERE id_address IN (12, 34)
 * $query->filterByIdAddress(array('min' => 12)); // WHERE id_address >= 12
 * </code>
 *
 * @param mixed $idAddress The value to use as filter.
 *              Use scalar values for equality.
 *              Use array values for in_array() equivalent.
 *              Use associative array('min' => $minValue, 'max' => $maxValue) for intervals.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByIdAddress($idAddress = null, $comparison = null)
{
    if (is_array($idAddress)) {
        // Honor the documented min/max interval form, consistent with the
        // other integer-column filters (previously it fell through to IN).
        $useMinMax = false;
        if (isset($idAddress['min'])) {
            $this->addUsingAlias(Oops_Db_AddressPeer::ID_ADDRESS, $idAddress['min'], Criteria::GREATER_EQUAL);
            $useMinMax = true;
        }
        if (isset($idAddress['max'])) {
            $this->addUsingAlias(Oops_Db_AddressPeer::ID_ADDRESS, $idAddress['max'], Criteria::LESS_EQUAL);
            $useMinMax = true;
        }
        if ($useMinMax) {
            return $this;
        }
        // plain value list: default to an IN comparison
        if (null === $comparison) {
            $comparison = Criteria::IN;
        }
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::ID_ADDRESS, $idAddress, $comparison);
}
/**
 * Filter the query on the id_country column
 *
 * Scalars filter with the given (or EQUAL) comparison, plain arrays build an
 * IN clause, and array('min' => ..., 'max' => ...) builds a range condition.
 *
 * @param mixed $idCountry Scalar, value list, or min/max interval array.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByIdCountry($idCountry = null, $comparison = null)
{
    if (is_array($idCountry)) {
        $rangeApplied = false;
        // 'min' maps to >=, 'max' to <=; both bounds may be combined
        $bounds = array('min' => Criteria::GREATER_EQUAL, 'max' => Criteria::LESS_EQUAL);
        foreach ($bounds as $bound => $operator) {
            if (isset($idCountry[$bound])) {
                $this->addUsingAlias(Oops_Db_AddressPeer::ID_COUNTRY, $idCountry[$bound], $operator);
                $rangeApplied = true;
            }
        }
        if ($rangeApplied) {
            return $this;
        }
        // plain value list: default to an IN comparison
        if ($comparison === null) {
            $comparison = Criteria::IN;
        }
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::ID_COUNTRY, $idCountry, $comparison);
}
/**
 * Filter the query on the id_state column
 *
 * Scalars filter with the given (or EQUAL) comparison, plain arrays build an
 * IN clause, and array('min' => ..., 'max' => ...) builds a range condition.
 *
 * @param mixed $idState Scalar, value list, or min/max interval array.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByIdState($idState = null, $comparison = null)
{
    if (is_array($idState)) {
        $rangeApplied = false;
        // 'min' maps to >=, 'max' to <=; both bounds may be combined
        $bounds = array('min' => Criteria::GREATER_EQUAL, 'max' => Criteria::LESS_EQUAL);
        foreach ($bounds as $bound => $operator) {
            if (isset($idState[$bound])) {
                $this->addUsingAlias(Oops_Db_AddressPeer::ID_STATE, $idState[$bound], $operator);
                $rangeApplied = true;
            }
        }
        if ($rangeApplied) {
            return $this;
        }
        // plain value list: default to an IN comparison
        if ($comparison === null) {
            $comparison = Criteria::IN;
        }
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::ID_STATE, $idState, $comparison);
}
/**
 * Filter the query on the id_customer column
 *
 * Scalars filter with the given (or EQUAL) comparison, plain arrays build an
 * IN clause, and array('min' => ..., 'max' => ...) builds a range condition.
 *
 * @param mixed $idCustomer Scalar, value list, or min/max interval array.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByIdCustomer($idCustomer = null, $comparison = null)
{
    if (is_array($idCustomer)) {
        $rangeApplied = false;
        // 'min' maps to >=, 'max' to <=; both bounds may be combined
        $bounds = array('min' => Criteria::GREATER_EQUAL, 'max' => Criteria::LESS_EQUAL);
        foreach ($bounds as $bound => $operator) {
            if (isset($idCustomer[$bound])) {
                $this->addUsingAlias(Oops_Db_AddressPeer::ID_CUSTOMER, $idCustomer[$bound], $operator);
                $rangeApplied = true;
            }
        }
        if ($rangeApplied) {
            return $this;
        }
        // plain value list: default to an IN comparison
        if ($comparison === null) {
            $comparison = Criteria::IN;
        }
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::ID_CUSTOMER, $idCustomer, $comparison);
}
/**
 * Filter the query on the id_manufacturer column
 *
 * Scalars filter with the given (or EQUAL) comparison, plain arrays build an
 * IN clause, and array('min' => ..., 'max' => ...) builds a range condition.
 *
 * @param mixed $idManufacturer Scalar, value list, or min/max interval array.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByIdManufacturer($idManufacturer = null, $comparison = null)
{
    if (is_array($idManufacturer)) {
        $rangeApplied = false;
        // 'min' maps to >=, 'max' to <=; both bounds may be combined
        $bounds = array('min' => Criteria::GREATER_EQUAL, 'max' => Criteria::LESS_EQUAL);
        foreach ($bounds as $bound => $operator) {
            if (isset($idManufacturer[$bound])) {
                $this->addUsingAlias(Oops_Db_AddressPeer::ID_MANUFACTURER, $idManufacturer[$bound], $operator);
                $rangeApplied = true;
            }
        }
        if ($rangeApplied) {
            return $this;
        }
        // plain value list: default to an IN comparison
        if ($comparison === null) {
            $comparison = Criteria::IN;
        }
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::ID_MANUFACTURER, $idManufacturer, $comparison);
}
/**
 * Filter the query on the id_supplier column
 *
 * Scalars filter with the given (or EQUAL) comparison, plain arrays build an
 * IN clause, and array('min' => ..., 'max' => ...) builds a range condition.
 *
 * @param mixed $idSupplier Scalar, value list, or min/max interval array.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByIdSupplier($idSupplier = null, $comparison = null)
{
    if (is_array($idSupplier)) {
        $rangeApplied = false;
        // 'min' maps to >=, 'max' to <=; both bounds may be combined
        $bounds = array('min' => Criteria::GREATER_EQUAL, 'max' => Criteria::LESS_EQUAL);
        foreach ($bounds as $bound => $operator) {
            if (isset($idSupplier[$bound])) {
                $this->addUsingAlias(Oops_Db_AddressPeer::ID_SUPPLIER, $idSupplier[$bound], $operator);
                $rangeApplied = true;
            }
        }
        if ($rangeApplied) {
            return $this;
        }
        // plain value list: default to an IN comparison
        if ($comparison === null) {
            $comparison = Criteria::IN;
        }
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::ID_SUPPLIER, $idSupplier, $comparison);
}
/**
 * Filter the query on the alias column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $alias Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByAlias($alias = null, $comparison = null)
{
    if ($comparison === null && is_array($alias)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $alias)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $alias = strtr($alias, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::ALIAS, $alias, $comparison);
}
/**
 * Filter the query on the company column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $company Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByCompany($company = null, $comparison = null)
{
    if ($comparison === null && is_array($company)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $company)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $company = strtr($company, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::COMPANY, $company, $comparison);
}
/**
 * Filter the query on the lastname column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $lastname Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByLastname($lastname = null, $comparison = null)
{
    if ($comparison === null && is_array($lastname)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $lastname)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $lastname = strtr($lastname, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::LASTNAME, $lastname, $comparison);
}
/**
 * Filter the query on the firstname column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $firstname Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByFirstname($firstname = null, $comparison = null)
{
    if ($comparison === null && is_array($firstname)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $firstname)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $firstname = strtr($firstname, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::FIRSTNAME, $firstname, $comparison);
}
/**
 * Filter the query on the address1 column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $address1 Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByAddress1($address1 = null, $comparison = null)
{
    if ($comparison === null && is_array($address1)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $address1)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $address1 = strtr($address1, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::ADDRESS1, $address1, $comparison);
}
/**
 * Filter the query on the address2 column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $address2 Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByAddress2($address2 = null, $comparison = null)
{
    if ($comparison === null && is_array($address2)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $address2)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $address2 = strtr($address2, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::ADDRESS2, $address2, $comparison);
}
/**
 * Filter the query on the postcode column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $postcode Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByPostcode($postcode = null, $comparison = null)
{
    if ($comparison === null && is_array($postcode)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $postcode)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $postcode = strtr($postcode, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::POSTCODE, $postcode, $comparison);
}
/**
 * Filter the query on the city column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $city Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByCity($city = null, $comparison = null)
{
    if ($comparison === null && is_array($city)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $city)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $city = strtr($city, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::CITY, $city, $comparison);
}
/**
 * Filter the query on the other column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $other Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByOther($other = null, $comparison = null)
{
    if ($comparison === null && is_array($other)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $other)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $other = strtr($other, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::OTHER, $other, $comparison);
}
/**
 * Filter the query on the phone column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $phone Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByPhone($phone = null, $comparison = null)
{
    if ($comparison === null && is_array($phone)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $phone)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $phone = strtr($phone, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::PHONE, $phone, $comparison);
}
/**
 * Filter the query on the phone_mobile column
 *
 * A '*' or '%' in the value is treated as a wildcard and switches the
 * comparison to LIKE ('*' is rewritten to SQL '%').
 *
 * @param string|array $phoneMobile Filter value; an array builds an IN clause.
 * @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
 *
 * @return Oops_Db_AddressQuery The current query, for fluid interface
 */
public function filterByPhoneMobile($phoneMobile = null, $comparison = null)
{
    if ($comparison === null && is_array($phoneMobile)) {
        $comparison = Criteria::IN;
    } elseif ($comparison === null && preg_match('/[\%\*]/', $phoneMobile)) {
        // wildcard detected: normalize '*' to '%' and match with LIKE
        $phoneMobile = strtr($phoneMobile, array('*' => '%'));
        $comparison = Criteria::LIKE;
    }
    return $this->addUsingAlias(Oops_Db_AddressPeer::PHONE_MOBILE, $phoneMobile, $comparison);
}
/**
* Filter the query on the vat_number column
*
* Example usage:
* <code>
* $query->filterByVatNumber('fooValue'); // WHERE vat_number = 'fooValue'
* $query->filterByVatNumber('%fooValue%'); // WHERE vat_number LIKE '%fooValue%'
* </code>
*
* @param string $vatNumber The value to use as filter.
* Accepts wildcards (* and % trigger a LIKE)
* @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
*
* @return Oops_Db_AddressQuery The current query, for fluid interface
*/
public function filterByVatNumber($vatNumber = null, $comparison = null)
{
if (null === $comparison) {
if (is_array($vatNumber)) {
$comparison = Criteria::IN;
} elseif (preg_match('/[\%\*]/', $vatNumber)) {
$vatNumber = str_replace('*', '%', $vatNumber);
$comparison = Criteria::LIKE;
}
}
return $this->addUsingAlias(Oops_Db_AddressPeer::VAT_NUMBER, $vatNumber, $comparison);
}
/**
* Filter the query on the dni column
*
* Example usage:
* <code>
* $query->filterByDni('fooValue'); // WHERE dni = 'fooValue'
* $query->filterByDni('%fooValue%'); // WHERE dni LIKE '%fooValue%'
* </code>
*
* @param string $dni The value to use as filter.
* Accepts wildcards (* and % trigger a LIKE)
* @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
*
* @return Oops_Db_AddressQuery The current query, for fluid interface
*/
public function filterByDni($dni = null, $comparison = null)
{
if (null === $comparison) {
if (is_array($dni)) {
$comparison = Criteria::IN;
} elseif (preg_match('/[\%\*]/', $dni)) {
$dni = str_replace('*', '%', $dni);
$comparison = Criteria::LIKE;
}
}
return $this->addUsingAlias(Oops_Db_AddressPeer::DNI, $dni, $comparison);
}
/**
* Filter the query on the date_add column
*
* Example usage:
* <code>
* $query->filterByDateAdd('2011-03-14'); // WHERE date_add = '2011-03-14'
* $query->filterByDateAdd('now'); // WHERE date_add = '2011-03-14'
* $query->filterByDateAdd(array('max' => 'yesterday')); // WHERE date_add > '2011-03-13'
* </code>
*
* @param mixed $dateAdd The value to use as filter.
* Values can be integers (unix timestamps), DateTime objects, or strings.
* Empty strings are treated as NULL.
* Use scalar values for equality.
* Use array values for in_array() equivalent.
* Use associative array('min' => $minValue, 'max' => $maxValue) for intervals.
* @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
*
* @return Oops_Db_AddressQuery The current query, for fluid interface
*/
public function filterByDateAdd($dateAdd = null, $comparison = null)
{
if (is_array($dateAdd)) {
$useMinMax = false;
if (isset($dateAdd['min'])) {
$this->addUsingAlias(Oops_Db_AddressPeer::DATE_ADD, $dateAdd['min'], Criteria::GREATER_EQUAL);
$useMinMax = true;
}
if (isset($dateAdd['max'])) {
$this->addUsingAlias(Oops_Db_AddressPeer::DATE_ADD, $dateAdd['max'], Criteria::LESS_EQUAL);
$useMinMax = true;
}
if ($useMinMax) {
return $this;
}
if (null === $comparison) {
$comparison = Criteria::IN;
}
}
return $this->addUsingAlias(Oops_Db_AddressPeer::DATE_ADD, $dateAdd, $comparison);
}
/**
* Filter the query on the date_upd column
*
* Example usage:
* <code>
* $query->filterByDateUpd('2011-03-14'); // WHERE date_upd = '2011-03-14'
* $query->filterByDateUpd('now'); // WHERE date_upd = '2011-03-14'
* $query->filterByDateUpd(array('max' => 'yesterday')); // WHERE date_upd > '2011-03-13'
* </code>
*
* @param mixed $dateUpd The value to use as filter.
* Values can be integers (unix timestamps), DateTime objects, or strings.
* Empty strings are treated as NULL.
* Use scalar values for equality.
* Use array values for in_array() equivalent.
* Use associative array('min' => $minValue, 'max' => $maxValue) for intervals.
* @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
*
* @return Oops_Db_AddressQuery The current query, for fluid interface
*/
public function filterByDateUpd($dateUpd = null, $comparison = null)
{
if (is_array($dateUpd)) {
$useMinMax = false;
if (isset($dateUpd['min'])) {
$this->addUsingAlias(Oops_Db_AddressPeer::DATE_UPD, $dateUpd['min'], Criteria::GREATER_EQUAL);
$useMinMax = true;
}
if (isset($dateUpd['max'])) {
$this->addUsingAlias(Oops_Db_AddressPeer::DATE_UPD, $dateUpd['max'], Criteria::LESS_EQUAL);
$useMinMax = true;
}
if ($useMinMax) {
return $this;
}
if (null === $comparison) {
$comparison = Criteria::IN;
}
}
return $this->addUsingAlias(Oops_Db_AddressPeer::DATE_UPD, $dateUpd, $comparison);
}
/**
* Filter the query on the active column
*
* Example usage:
* <code>
* $query->filterByActive(true); // WHERE active = true
* $query->filterByActive('yes'); // WHERE active = true
* </code>
*
* @param boolean|string $active The value to use as filter.
* Non-boolean arguments are converted using the following rules:
* * 1, '1', 'true', 'on', and 'yes' are converted to boolean true
* * 0, '0', 'false', 'off', and 'no' are converted to boolean false
* Check on string values is case insensitive (so 'FaLsE' is seen as 'false').
* @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
*
* @return Oops_Db_AddressQuery The current query, for fluid interface
*/
public function filterByActive($active = null, $comparison = null)
{
if (is_string($active)) {
$active = in_array(strtolower($active), array('false', 'off', '-', 'no', 'n', '0', '')) ? false : true;
}
return $this->addUsingAlias(Oops_Db_AddressPeer::ACTIVE, $active, $comparison);
}
/**
* Filter the query on the deleted column
*
* Example usage:
* <code>
* $query->filterByDeleted(true); // WHERE deleted = true
* $query->filterByDeleted('yes'); // WHERE deleted = true
* </code>
*
* @param boolean|string $deleted The value to use as filter.
* Non-boolean arguments are converted using the following rules:
* * 1, '1', 'true', 'on', and 'yes' are converted to boolean true
* * 0, '0', 'false', 'off', and 'no' are converted to boolean false
* Check on string values is case insensitive (so 'FaLsE' is seen as 'false').
* @param string $comparison Operator to use for the column comparison, defaults to Criteria::EQUAL
*
* @return Oops_Db_AddressQuery The current query, for fluid interface
*/
public function filterByDeleted($deleted = null, $comparison = null)
{
if (is_string($deleted)) {
$deleted = in_array(strtolower($deleted), array('false', 'off', '-', 'no', 'n', '0', '')) ? false : true;
}
return $this->addUsingAlias(Oops_Db_AddressPeer::DELETED, $deleted, $comparison);
}
/**
* Exclude object from result
*
* @param Oops_Db_Address $address Object to remove from the list of results
*
* @return Oops_Db_AddressQuery The current query, for fluid interface
*/
public function prune($address = null)
{
if ($address) {
$this->addUsingAlias(Oops_Db_AddressPeer::ID_ADDRESS, $address->getIdAddress(), Criteria::NOT_EQUAL);
}
return $this;
}
} // Oops_Db_Propel_AddressQuery | alexsegura/Oops | library/Oops/Db/Propel/AddressQuery.php | PHP | bsd-3-clause | 42,728 |
#!/usr/bin/env python
"""
New Drawing class to create new mark and style on axes.
"""
# from copy import deepcopy, copy
from decimal import Decimal
import numpy as np
import toyplot
# from .Admixture import AdmixEdges
# for setting values from iterables
ITERABLE = (list, tuple, np.ndarray)
class GridSetup:
    """
    Returns Canvas and Cartesian axes objects to fit a grid of trees.
    """
    def __init__(self, nrows, ncols, width, height, layout):
        # grid shape and optional user-supplied canvas dimensions
        self.nrows = nrows
        self.ncols = ncols
        self.width = width
        self.height = height
        self.layout = layout

        # fill in missing width/height, then build .canvas and .axes
        self.get_tree_dims()
        self.get_canvas_and_axes()

    def get_canvas_and_axes(self):
        """
        Set .canvas and .axes objects
        """
        self.canvas = toyplot.Canvas(
            height=self.height,
            width=self.width,
        )
        ncells = self.nrows * self.ncols
        self.axes = []
        for cell in range(ncells):
            self.axes.append(
                self.canvas.cartesian(
                    grid=(self.nrows, self.ncols, cell),
                    padding=10,
                    margin=25,
                )
            )

    def get_tree_dims(self):
        """
        get height and width if not set by user
        """
        # small grids get roomier per-tree cells
        if self.nrows * self.ncols < 4:
            cell_w, cell_h = 250, 250
        else:
            cell_w, cell_h = 200, 140

        if self.layout in ("d", "u"):
            # down/up layouts are wider than tall
            if not self.width:
                self.width = min(750, cell_w * self.ncols)
            if not self.height:
                self.height = min(750, cell_h * self.nrows)
        else:
            # left/right layouts are taller than wide
            if not self.height:
                self.height = min(750, cell_w * self.nrows)
            if not self.width:
                self.width = min(750, cell_h * self.ncols)
class CanvasSetup:
    """
    Returns Canvas and Cartesian axes objects.

    The right/left ("r", "l") and up/down ("u", "d") scalebar branches
    previously duplicated the tick-location logic; it is now shared via
    the _scalebar_locator helper.
    """
    def __init__(self, tree, axes, style):
        # tree to draw, optional externally-supplied axes, and style object
        self.tree = tree
        self.axes = axes
        self.style = style
        self.canvas = None
        self.external_axis = False

        # get the longest tip name for dimension fitting
        self.lname = 0
        if not all([i is None for i in self.style.tip_labels]):
            self.lname = max([len(str(i)) for i in self.style.tip_labels])

        # ntips and shape to fit with provided args
        self.get_dims_from_tree_size()

        # fills canvas and axes
        self.get_canvas_and_axes()

        # ticks for tree and scalebar
        self.add_axes_style()

    def get_dims_from_tree_size(self):
        """
        Calculate reasonable canvas height and width for tree given N tips.
        """
        if self.style.layout == "c":
            # circular layouts are square; use the larger provided dimension
            radius = max(
                [0] + [i for i in [self.style.height, self.style.width] if i])
            if not radius:
                radius = 400
            self.style.width = self.style.height = radius
            return

        if self.style.layout in ("r", "l"):
            # height fit by tree size
            if not self.style.height:
                self.style.height = max(275, min(1000, 18 * self.tree.ntips))
            # width fit by name size
            if not self.style.width:
                self.style.width = max(250, min(500, 250 + 5 * self.lname))
        else:
            # height fit by name size
            if not self.style.height:
                self.style.height = max(250, min(500, 250 + 5 * self.lname))
            # width fit by tree size
            if not self.style.width:
                self.style.width = max(350, min(1000, 18 * self.tree.ntips))

    def get_canvas_and_axes(self):
        """
        Use externally provided axes, or create a new Canvas with axes.
        """
        if self.axes is not None:
            self.canvas = None
            self.external_axis = True
        else:
            self.canvas = toyplot.Canvas(
                height=self.style.height,
                width=self.style.width,
            )
            self.axes = self.canvas.cartesian(
                padding=self.style.padding
            )

    def _tree_span(self):
        """
        Span of the tree along the layout axis (treenode height, or unit
        branch lengths when use_edge_lengths is off).
        """
        if self.style.use_edge_lengths:
            return self.tree.treenode.height
        return self.tree.treenode.get_farthest_leaf(True)[1] + 1

    def _scalebar_locator(self, baseline, sign, nticks):
        """
        Build an Explicit tick locator with nticks ticks running from
        baseline to baseline + sign * tree-span, labeled with just enough
        decimal places for the tick spacing.
        """
        top = baseline + sign * self._tree_span()
        locs = np.linspace(baseline, top, nticks)
        # choose decimals from the magnitude of the first non-baseline tick
        zer = abs(min(0, Decimal(locs[1]).adjusted()))
        fmt = "{:." + str(zer) + "f}"
        return toyplot.locator.Explicit(
            locations=locs,
            labels=[fmt.format(i) for i in np.abs(locs)],
        )

    def add_axes_style(self):
        """
        Apply padding, axis visibility, and optional scalebar ticks.
        """
        # style axes with padding and show axes
        self.axes.padding = self.style.padding
        if not self.external_axis:
            self.axes.show = True
            if not self.style.scalebar:
                self.axes.show = False

        if not self.style.scalebar:
            return

        if self.style.layout in ("r", "l"):
            nticks = max((3, np.floor(self.style.width / 100).astype(int)))
            self.axes.y.show = False
            self.axes.x.show = True
            self.axes.x.ticks.show = True
            # "r" layouts grow leftward from the baseline, "l" rightward
            sign = -1 if self.style.layout == "r" else 1
            self.axes.x.ticks.locator = self._scalebar_locator(
                self.style.xbaseline, sign, nticks)

        elif self.style.layout in ("u", "d"):
            nticks = max((3, np.floor(self.style.height / 100).astype(int)))
            self.axes.x.show = False
            self.axes.y.show = True
            self.axes.y.ticks.show = True
            # "d" layouts grow upward from the baseline, "u" downward
            sign = 1 if self.style.layout == "d" else -1
            self.axes.y.ticks.locator = self._scalebar_locator(
                self.style.ybaseline, sign, nticks)
# def fit_tip_labels(self):
# """
# DEPRECATED SINCE V2 since Mark now sets its own extents correctly.
# Modifies display range to ensure tip labels fit. This is a bit hackish
# still. The problem is that the 'extents' range of the rendered text
# is not totally correct. So we add a little buffer here. Should add for
# user to be able to modify this if needed. If not using edge lengths
# then need to use unit length for treeheight.
# """
# # bail on unrooted for now; TODO
# if self.style.layout == "c":
# return
# # if names
# if self.lname:
# # get ratio of names to tree in plot
# ratio = max(self.lname / 10, 0.15)
# # have tree figure make up 85% of plot
# if self.style.use_edge_lengths:
# addon = self.tree.treenode.height
# else:
# addon = self.tree.treenode.get_farthest_leaf(True)[1] + 1
# addon *= ratio
# # modify display for layout
# if self.style.layout == "r":
# self.axes.x.domain.max = (addon / 2.) + self.style.xbaseline
# elif self.style.layout == "l":
# self.axes.x.domain.min = (-addon / 2.) + self.style.xbaseline
# # self.axes.x.domain.min -= self.style.xbaseline
# elif self.style.layout == "d":
# self.axes.y.domain.min = (-addon / 2.) + self.style.ybaseline
# elif self.style.layout == "u":
# self.axes.y.domain.max = (addon / 2.) + self.style.ybaseline
# # print(addon, ratio, self.axes.x.domain.min, self.axes.x.domain.max)
| eaton-lab/toytree | toytree/CanvasSetup.py | Python | bsd-3-clause | 9,455 |
# Copyright (c) 2014, Salesforce.com, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# - Neither the name of Salesforce.com nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy
import scipy.stats
from collections import defaultdict
def scores_to_probs(scores):
    """
    Convert a vector of log-scores into a normalized probability vector.

    The maximum score is subtracted before exponentiating for numerical
    stability; the input is copied, so the caller's data is untouched.
    """
    shifted = numpy.array(scores)
    shifted -= shifted.max()
    numpy.exp(shifted, out=shifted)
    shifted /= shifted.sum()
    return shifted
def score_to_empirical_kl(score, count):
    """
    Convert total log score to KL( empirical || model ),
    where the empirical pdf is uniform over `count` datapoints.
    """
    n = float(count)
    return -(score / n) - numpy.log(n)
def print_histogram(probs, counts):
    """
    Print an ASCII bar chart of (probability, count) pairs, sorted by
    descending probability, with bars scaled to the largest count.

    Fix: the Python-2-only ``print`` statements were a syntax error under
    Python 3; they are now print() calls, valid on both interpreters.
    """
    WIDTH = 60.0
    max_count = max(counts)
    print('{: >8} {: >8}'.format('Prob', 'Count'))
    for prob, count in sorted(zip(probs, counts), reverse=True):
        width = int(round(WIDTH * count / max_count))
        print('{: >8.3f} {: >8d} {}'.format(prob, count, '-' * width))
def multinomial_goodness_of_fit(
probs,
counts,
total_count,
truncated=False,
plot=False):
"""
Pearson's chi^2 test, on possibly truncated data.
http://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test
Returns:
p-value of truncated multinomial sample.
"""
assert len(probs) == len(counts)
assert truncated or total_count == sum(counts)
chi_squared = 0
dof = 0
if plot:
print_histogram(probs, counts)
for p, c in zip(probs, counts):
if p == 1:
return 1 if c == total_count else 0
assert p < 1, 'bad probability: %g' % p
if p > 0:
mean = total_count * p
variance = total_count * p * (1 - p)
assert variance > 1,\
'WARNING goodness of fit is inaccurate; use more samples'
chi_squared += (c - mean) ** 2 / variance
dof += 1
else:
print 'WARNING zero probability in goodness-of-fit test'
if c > 0:
return float('inf')
if not truncated:
dof -= 1
survival = scipy.stats.chi2.sf(chi_squared, dof)
return survival
def unif01_goodness_of_fit(samples, plot=False):
    """
    Bin uniformly distributed samples and apply Pearson's chi^2 test.

    Fixes:
    - ``numpy.float`` / ``numpy.int`` aliases were removed in NumPy 1.24;
      use the builtin ``float`` / ``int`` dtypes instead.
    - a sample exactly equal to 1.0 (allowed by the max() assertion)
      previously indexed one past the last bin; it is now clamped.
    """
    samples = numpy.array(samples, dtype=float)
    assert samples.min() >= 0.0
    assert samples.max() <= 1.0
    bin_count = int(round(len(samples) ** 0.333))
    assert bin_count >= 7, 'WARNING imprecise test, use more samples'
    probs = numpy.ones(bin_count, dtype=float) / bin_count
    counts = numpy.zeros(bin_count, dtype=int)
    for sample in samples:
        counts[min(int(bin_count * sample), bin_count - 1)] += 1
    return multinomial_goodness_of_fit(probs, counts, len(samples), plot=plot)
def density_goodness_of_fit(samples, probs, plot=False):
    """
    Transform arbitrary continuous samples to unif01 distribution
    and assess goodness of fit via Pearson's chi^2 test.

    Inputs:
        samples - a list of real-valued samples from a distribution
        probs - a list of probability densities evaluated at those samples

    Fix: ``zip(...)`` returns an iterator on Python 3, so the old
    ``pairs = zip(...); pairs.sort()`` raised AttributeError; sorted()
    works on both interpreters.
    """
    assert len(samples) == len(probs)
    assert len(samples) > 100, 'WARNING imprecision; use more samples'
    pairs = sorted(zip(samples, probs))
    samples = numpy.array([x for x, p in pairs])
    probs = numpy.array([p for x, p in pairs])
    # geometric mean of adjacent densities approximates density on each gap
    density = numpy.sqrt(probs[1:] * probs[:-1])
    gaps = samples[1:] - samples[:-1]
    # probability integral transform: gaps ~ Exp(n * density) if model fits
    unif01_samples = 1.0 - numpy.exp(-len(samples) * gaps * density)
    return unif01_goodness_of_fit(unif01_samples, plot=plot)
def discrete_goodness_of_fit(
        samples,
        probs_dict,
        truncate_beyond=8,
        plot=False):
    """
    Transform arbitrary discrete data to multinomial
    and assess goodness of fit via Pearson's chi^2 test.

    Inputs:
        samples - a list of hashable sample values
        probs_dict - maps each possible value to its probability
        truncate_beyond - keep only this many highest-probability cells
                          (0/None disables truncation)
        plot - print an ASCII histogram of the cells

    Fix: ``dict.iteritems()`` does not exist on Python 3; ``items()``
    works on both interpreters.
    """
    assert len(samples) > 100, 'WARNING imprecision; use more samples'
    counts = defaultdict(lambda: 0)
    for sample in samples:
        assert sample in probs_dict
        counts[sample] += 1
    items = [(prob, counts.get(i, 0)) for i, prob in probs_dict.items()]
    items.sort(reverse=True)
    truncated = (truncate_beyond and truncate_beyond < len(items))
    if truncated:
        items = items[:truncate_beyond]
    probs = [prob for prob, count in items]
    counts = [count for prob, count in items]
    return multinomial_goodness_of_fit(
        probs,
        counts,
        len(samples),
        truncated=truncated,
        plot=plot)
def bin_samples(samples, k=10, support=None):
    """
    Bins a collection of univariate samples into k bins of equal
    fill via the empirical cdf, to be used in goodness of fit testing.

    Returns
        counts : array k x 1
        bin_ranges : array k x 2

    each count is the number of samples in [bin_min, bin_max)
    except for the last bin which is [bin_min, bin_max]

    list partitioning algorithm adapted from Mark Dickinson:
    http://stackoverflow.com/questions/2659900

    Fix: the default for `support` was a shared mutable list ([]); it is
    now None, which behaves identically under the `if support:` check.
    """
    samples = sorted(samples)

    N = len(samples)
    q, r = divmod(N, k)
    # distribute the remainder relatively evenly; the test would be
    # inaccurate if the final bins ended up nearly empty
    indices = [i * q + min(r, i) for i in range(k + 1)]
    bins = [samples[indices[i]: indices[i + 1]] for i in range(k)]
    bin_ranges = []
    counts = []
    for i in range(k):
        bin_min = bins[i][0]
        try:
            # each bin's upper edge is the next bin's first sample
            bin_max = bins[i + 1][0]
        except IndexError:
            # last bin: close the interval at its own largest sample
            bin_max = bins[i][-1]
        bin_ranges.append([bin_min, bin_max])
        counts.append(len(bins[i]))
    if support:
        # widen the outermost bins to cover the full support interval
        bin_ranges[0][0] = support[0]
        bin_ranges[-1][1] = support[1]
    return numpy.array(counts), numpy.array(bin_ranges)
def histogram(samples, bin_count=None):
    """
    Count occurrences of each non-negative integer sample.

    When bin_count is None it defaults to max(samples) + 1 so that every
    sample has a bin. Returns an int array of length bin_count.
    """
    if bin_count is None:
        bin_count = numpy.max(samples) + 1
    result = numpy.zeros(bin_count, dtype=int)
    for value in samples:
        result[value] += 1
    return result
| forcedotcom/distributions | distributions/util.py | Python | bsd-3-clause | 7,375 |
// -----------------------------------------------------------------------
// <copyright file="BestSplitter.cs" company="Sharpkit.Learn">
// Authors: Gilles Louppe <g.louppe@gmail.com>
// Peter Prettenhofer <peter.prettenhofer@gmail.com>
// Brian Holt <bdholt1@gmail.com>
// Noel Dawe <noel@dawe.me>
// Satrajit Gosh <satrajit.ghosh@gmail.com>
// Lars Buitinck <L.J.Buitinck@uva.nl>
// Sergey Zyuzin
// Licence: BSD 3 clause
// </copyright>
// -----------------------------------------------------------------------
namespace Sharpkit.Learn.Tree
{
using System;
    /// <summary>
    /// Splitter for finding the best split.
    /// </summary>
    /// <remarks>
    /// Port of the splitter from scikit-learn:
    /// https://github.com/scikit-learn/scikit-learn/tree/30eb78de8d1e7b25fb1a4b0d8c63afdcc972ee84/sklearn/tree/_tree.pyx
    /// </remarks>
    internal class BestSplitter : SplitterBase
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="BestSplitter"/> class.
        /// </summary>
        /// <param name="criterion">Impurity criterion used to evaluate candidate splits.</param>
        /// <param name="max_features">Maximum number of features to inspect per split.</param>
        /// <param name="min_samples_leaf">Minimum number of samples each child must keep.</param>
        /// <param name="random_state">Random source used when shuffling candidate features.</param>
        public BestSplitter(ICriterion criterion, uint max_features, uint min_samples_leaf, Random random_state)
            : base(criterion, max_features, min_samples_leaf, random_state)
        {
        }
        /// <summary>
        /// Find the best split on node samples[start:end].
        /// </summary>
        /// <param name="pos">Output: index separating the left and right sample partitions.</param>
        /// <param name="feature">Output: index of the feature chosen for the split.</param>
        /// <param name="threshold">Output: threshold value on the chosen feature.</param>
        public override void node_split(ref uint pos, ref uint feature, ref double threshold)
        {
            // Find the best split
            double best_impurity = double.PositiveInfinity;
            uint best_pos = end;
            uint best_feature = 0;
            double best_threshold = 0;
            int visited_features = 0;
            for (uint f_idx = 0; f_idx < n_features; f_idx++)
            {
                // Draw a feature at random (partial Fisher-Yates shuffle of `features`)
                uint f_i = n_features - f_idx - 1;
                uint f_j = Util.rand_int(n_features - f_idx, ref rand_r_state);
                uint tmp = features[f_i];
                features[f_i] = features[f_j];
                features[f_j] = tmp;
                uint current_feature = features[f_i];
                // Sort samples along that feature
                Sort(X, X_stride, current_feature, samples, start, end - start);
                // Evaluate all splits
                this.criterion.Reset();
                uint p = start;
                while (p < end)
                {
                    // Skip over (near-)equal feature values so candidate thresholds
                    // only fall between distinct values (1e-7 tolerance).
                    while ((p + 1 < end) &&
                           (X[X_stride * samples[p + 1] + current_feature] <=
                            X[X_stride * samples[p] + current_feature] + 1e-7))
                    {
                        p += 1;
                    }
                    // (p + 1 >= end) or (X[samples[p + 1], current_feature] >
                    //                    X[samples[p], current_feature])
                    p += 1;
                    // (p >= end) or (X[samples[p], current_feature] >
                    //                X[samples[p - 1], current_feature])
                    if (p < end)
                    {
                        uint current_pos = p;
                        // Reject if min_samples_leaf is not guaranteed
                        if (((current_pos - start) < min_samples_leaf) ||
                            ((end - current_pos) < min_samples_leaf))
                        {
                            continue;
                        }
                        this.criterion.Update(current_pos);
                        double current_impurity = this.criterion.ChildrenImpurity();
                        // Keep this split if it improves impurity by more than the tolerance.
                        if (current_impurity < (best_impurity - 1e-7))
                        {
                            best_impurity = current_impurity;
                            best_pos = current_pos;
                            best_feature = current_feature;
                            // Midpoint between the two adjacent distinct values.
                            double current_threshold = (X[X_stride * samples[p - 1] + current_feature] +
                                                        X[X_stride * samples[p] + current_feature]) / 2.0;
                            if (current_threshold == X[X_stride * samples[p] + current_feature])
                            {
                                current_threshold = X[X_stride * samples[p - 1] + current_feature];
                            }
                            best_threshold = current_threshold;
                        }
                    }
                }
                if (best_pos == end) // No valid split was ever found
                {
                    continue;
                }
                // Count one more visited feature
                visited_features += 1;
                if (visited_features >= max_features)
                {
                    break;
                }
            }
            // Reorganize into samples[start:best_pos] + samples[best_pos:end]
            if (best_pos < end)
            {
                uint partition_start = start;
                uint partition_end = end;
                uint p = start;
                while (p < partition_end)
                {
                    if (X[X_stride * samples[p] + best_feature] <= best_threshold)
                        p += 1;
                    else
                    {
                        // Swap the offending sample to the right partition.
                        partition_end -= 1;
                        uint tmp = samples[partition_end];
                        samples[partition_end] = samples[p];
                        samples[p] = tmp;
                    }
                }
            }
            // Return values
            pos = best_pos;
            feature = best_feature;
            threshold = best_threshold;
        }
        /// <summary>
        /// In-place sorting of samples[start:end] using
        /// X[sample[i], current_feature] as key.
        /// </summary>
        /// <param name="x">Flattened row-major feature matrix.</param>
        /// <param name="xStride">Number of columns per row of <paramref name="x"/>.</param>
        /// <param name="currentFeature">Column index used as the sort key.</param>
        /// <param name="samples">Sample indices to reorder.</param>
        /// <param name="samplesOffset">First index of the slice to sort.</param>
        /// <param name="length">Number of elements in the slice to sort.</param>
        private static void Sort(
            double[] x,
            uint xStride,
            uint currentFeature,
            uint[] samples,
            uint samplesOffset,
            uint length)
        {
            // Heapsort, adapted from Numerical Recipes in C
            uint n = length;
            uint parent = length / 2;
            while (true)
            {
                uint tmp;
                if (parent > 0)
                {
                    // Heap-construction phase: sift down each parent.
                    parent -= 1;
                    tmp = samples[parent + samplesOffset];
                }
                else
                {
                    // Extraction phase: move the max to the end, shrink the heap.
                    n -= 1;
                    if (n == 0)
                    {
                        return;
                    }
                    tmp = samples[n + samplesOffset];
                    samples[n + samplesOffset] = samples[0 + samplesOffset];
                }
                double tmp_value = x[xStride * tmp + currentFeature];
                uint index = parent;
                uint child = index * 2 + 1;
                // Sift tmp down until both children are smaller.
                while (child < n)
                {
                    if ((child + 1 < n) &&
                        (x[xStride * samples[child + 1 + samplesOffset] + currentFeature] >
                         x[xStride * samples[child + samplesOffset] + currentFeature]))
                    {
                        child += 1;
                    }
                    if (x[xStride * samples[child + samplesOffset] + currentFeature] > tmp_value)
                    {
                        samples[index + samplesOffset] = samples[child + samplesOffset];
                        index = child;
                        child = index * 2 + 1;
                    }
                    else
                    {
                        break;
                    }
                }
                samples[index + samplesOffset] = tmp;
            }
        }
    }
}
| foreverzet/Sharpkit.Learn | src/Sharpkit.Learn/Tree/BestSplitter.cs | C# | bsd-3-clause | 7,788 |
/**
 * The Affix Component
 *
 * @module aui-affix
 */
var win = A.config.win;
/**
 * A base class for Affix.
 *
 * Check the [live demo](http://alloyui.com/examples/affix/).
 *
 * @class A.Affix
 * @extends Base
 * @param {Object} config Object literal specifying affix configuration
 *     properties.
 * @constructor
 */
A.Affix = A.Base.create('affix', A.Base, [], {
    /**
     * Holds the subscription handle for the window scroll listener, so it
     * can be detached in the destructor.
     *
     * @property _eventHandle
     * @type {EventHandle}
     * @private
     */
    _eventHandle: null,
    /**
     * Holds the last fired position event name (bottom, default, top).
     * Used to avoid re-firing the same event on every scroll tick.
     *
     * @property _lastPosition
     * @type {String}
     * @private
     */
    _lastPosition: null,
    /**
     * Constructor for the Affix component. Publishes the three position
     * events, wires offset-change listeners, and starts scroll tracking.
     *
     * @method initializer
     * @protected
     */
    initializer: function() {
        this.publish({
            bottom: {
                defaultFn: this._defAffixBottomFn
            },
            'default': {
                defaultFn: this._defAffixFn
            },
            top: {
                defaultFn: this._defAffixTopFn
            }
        });
        this.after({
            offsetBottomChange: this._afterOffsetChange,
            offsetTopChange: this._afterOffsetChange
        });
        this.refresh();
        this._eventHandle = A.one(win).on('scroll', this._onScroll, this);
    },
    /**
     * Destructor for the Affix component. Detaches the scroll listener.
     *
     * @method destructor
     * @private
     */
    destructor: function() {
        this._eventHandle.detach();
    },
    /**
     * Refreshes the affix component to its current state by comparing the
     * document scroll position against the configured offsets.
     *
     * @method refresh
     */
    refresh: function() {
        var scrollY = A.DOM.docScrollY(),
            offsetBottom = this.get('offsetBottom'),
            offsetTop = this.get('offsetTop'),
            targetRegion;
        if ((offsetTop >= 0) && (offsetTop >= scrollY)) {
            this._handleAffixEvent(A.Affix.EVENTS.TOP);
            return;
        }
        targetRegion = this.get('target').get('region');
        if ((offsetBottom >= 0) && ((A.DOM.docHeight() - A.DOM.winHeight() - offsetBottom) <= targetRegion.bottom)) {
            this._handleAffixEvent(A.Affix.EVENTS.BOTTOM);
            return;
        }
        this._handleAffixEvent(A.Affix.EVENTS.DEFAULT);
    },
    /**
     * Calls the refresh method after either offset attribute changes.
     *
     * @method _afterOffsetChange
     * @private
     */
    _afterOffsetChange: function() {
        this.refresh();
    },
    /**
     * Affix bottom position syncing callback function.
     *
     * @method _defAffixBottomFn
     * @private
     */
    _defAffixBottomFn: function() {
        this._syncClassesUI(A.Affix.EVENTS.BOTTOM);
    },
    /**
     * Affix default position syncing callback function.
     *
     * @method _defAffixFn
     * @private
     */
    _defAffixFn: function() {
        this._syncClassesUI(A.Affix.EVENTS.DEFAULT);
    },
    /**
     * Affix top position syncing callback function.
     *
     * @method _defAffixTopFn
     * @private
     */
    _defAffixTopFn: function() {
        this._syncClassesUI(A.Affix.EVENTS.TOP);
    },
    /**
     * Getter for the offset attributes; resolves function values by
     * invoking them with this instance as context.
     *
     * @method _getOffset
     * @param {Number | Number.NEGATIVE_INFINITY} val
     * @protected
     */
    _getOffset: function(val) {
        if (A.Lang.isFunction(val)) {
            val = val.call(this);
        }
        return val;
    },
    /**
     * Safeguard function for firing the affix change event only when the
     * position actually differs from the last fired one.
     *
     * @method _handleAffixEvent
     * @param {String} Position value, could be 'bottom', 'default' or 'top'.
     * @private
     */
    _handleAffixEvent: function(position) {
        if (position !== this._lastPosition) {
            this.fire(position);
        }
    },
    /**
     * Scroll event listener function.
     *
     * @method _onScroll
     * @private
     */
    _onScroll: function() {
        this.refresh();
    },
    /**
     * Sync the target element classes based on the affix positioning:
     * exactly one of the bottom/default/top classes is left enabled.
     *
     * @method _syncClassesUI
     * @param {String} Position value, could be 'bottom', 'default' or 'top'.
     * @private
     */
    _syncClassesUI: function(position) {
        var target = this.get('target');
        target.toggleClass(A.Affix.CSS_CLASSES.BOTTOM, position === A.Affix.EVENTS.BOTTOM);
        target.toggleClass(A.Affix.CSS_CLASSES.DEFAULT, position === A.Affix.EVENTS.DEFAULT);
        target.toggleClass(A.Affix.CSS_CLASSES.TOP, position === A.Affix.EVENTS.TOP);
        this._lastPosition = position;
    },
    /**
     * Validates the offset type: a number, a function returning a number,
     * or Number.NEGATIVE_INFINITY (the "disabled" sentinel).
     *
     * @method _validateOffset
     * @param {Function | Number | Number.NEGATIVE_INFINITY} val
     * @private
     */
    _validateOffset: function(val) {
        if (A.Lang.isFunction(val)) {
            val = val.call(this);
        }
        return A.Lang.isNumber(val) ||
            A.Lang.isFunction(val) ||
            (val === Number.NEGATIVE_INFINITY);
    }
}, {
    ATTRS: {
        /**
         * Defines the bottom offset. Number.NEGATIVE_INFINITY disables
         * bottom tracking.
         *
         * @attribute offsetBottom
         * @type {Function | Number}
         */
        offsetBottom: {
            getter: '_getOffset',
            validator: '_validateOffset',
            value: Number.NEGATIVE_INFINITY
        },
        /**
         * Defines the top offset. Number.NEGATIVE_INFINITY disables
         * top tracking.
         *
         * @attribute offsetTop
         * @type {Function | Number}
         */
        offsetTop: {
            getter: '_getOffset',
            validator: '_validateOffset',
            value: Number.NEGATIVE_INFINITY
        },
        /**
         * Defines the target element whose classes are toggled.
         *
         * @attribute target
         * @type {Node | String}
         */
        target: {
            setter: A.one
        }
    },
    /**
     * Map of events containing `BOTTOM`, `DEFAULT` or `TOP` keys.
     *
     * @type {Object}
     */
    EVENTS: {
        BOTTOM: 'bottom',
        DEFAULT: 'default',
        TOP: 'top'
    },
    /**
     * Map of class names containing `BOTTOM`, `DEFAULT` or `TOP` keys.
     *
     * @type {Object}
     */
    CSS_CLASSES: {
        BOTTOM: A.getClassName('affix', 'bottom'),
        DEFAULT: A.getClassName('affix'),
        TOP: A.getClassName('affix', 'top')
    }
});
| adorjan/alloy-ui | src/aui-affix/js/aui-affix.js | JavaScript | bsd-3-clause | 6,261 |
<?php
declare(strict_types=1);
class MonthField extends CustomField
{
    /**
     * Render the month field by exposing it to the view layer and
     * executing the month field template.
     *
     * @param mixed $value current field value, if any
     * @return string rendered HTML for the field
     */
    public function render($value = null): string
    {
        // Content-type specific fields are namespaced as "<type>_<name>".
        $fieldName = is_null($this->contentType)
            ? $this->name
            : $this->contentType . "_" . $this->name;

        ViewBag::set("field", $this);
        ViewBag::set("field_value", $value);
        ViewBag::set("field_name", $fieldName);

        return Template::executeDefaultOrOwnTemplate("fields/monthfield.php");
    }
}
| derUli/ulicms | ulicms/classes/objects/content/types/fields/MonthField.php | PHP | bsd-3-clause | 444 |
# Mailer for contact-us inquiries.
class Spree::InquiryMailer < ActionMailer::Base
  # NOTE(review): "from@example.com" looks like a generator placeholder —
  # confirm the intended sender address before relying on this mailer.
  default from: "from@example.com"
end
| shimoyamadaniel/spree_contact_us | app/mailers/spree/inquiry_mailer.rb | Ruby | bsd-3-clause | 87 |
<?php
namespace frontend\controllers;
use common\models\Order;
use common\models\OrderDetail;
use common\models\Product;
use common\models\Subcriber;
use frontend\models\Cart;
use Yii;
use yii\web\Controller;
use yii\helpers\Json;
/**
 * Product Controller.
 *
 * Serves the public product catalogue, manages the session-based shopping
 * cart and persists orders / call-back requests posted from the storefront.
 */
class ProductController extends Controller
{
    /**
     * Lists all active products.
     *
     * @return string rendered index view
     */
    public function actionIndex()
    {
        $product = Product::find()->andWhere(['status' => Product::STATUS_ACTIVE])->all();
        return $this->render('index', [
            'product' => $product
        ]);
    }

    /**
     * Shows a single product page.
     *
     * @param int $id product primary key
     * @return string rendered detail view
     */
    public function actionDetail($id)
    {
        $product = Product::findOne(['id' => $id]);
        return $this->render('view', [
            'product' => $product
        ]);
    }

    /**
     * Adds one product to the session cart and refreshes the cart badge.
     *
     * @param int $id product primary key
     * @return string AJAX-rendered cart partial
     */
    public function actionAddCart($id)
    {
        $productInfo = Product::findOne($id);
        $cart = new Cart();
        $cart->addCart($id, $productInfo);
        return $this->renderAjax('cart', ['cartInfo' => $this->getCartQuantity()]);
    }

    /**
     * Renders the cart page with the quantity and money totals.
     *
     * @return string rendered cart view
     */
    public function actionListMyCart()
    {
        $cart = Yii::$app->session['cart'];
        $totalAmount = $total_all = 0;
        if (isset($cart)) {
            foreach ($cart as $value) {
                $totalAmount += $value['amount'];
                $total_all += $value['price'] * $value['amount'];
            }
        }
        return $this->render('list-my-cart', ['cart' => $cart, 'total_all' => $total_all, 'totalAmount' => $totalAmount]);
    }

    /**
     * Changes the quantity of one cart line and refreshes the cart badge.
     *
     * @param int $id product primary key
     * @param int $amount new quantity
     * @return string AJAX-rendered cart partial
     */
    public function actionUpdateCart($id, $amount)
    {
        $cart = new Cart();
        $cart->updateItem($id, $amount);
        return $this->renderAjax('cart', ['cartInfo' => $this->getCartQuantity()]);
    }

    /**
     * Removes one line from the cart and refreshes the cart badge.
     *
     * @param int $id product primary key
     * @return string AJAX-rendered cart partial
     */
    public function actionDelCart($id)
    {
        $cart = new Cart();
        $cart->deleteItem($id);
        return $this->renderAjax('cart', ['cartInfo' => $this->getCartQuantity()]);
    }

    /**
     * Persists a checkout: creates the Subcriber contact, the Order and one
     * OrderDetail per cart line, then empties the session cart.
     *
     * @return string JSON-encoded {success, message}
     */
    public function actionSaveBuy()
    {
        // Guard every field: this endpoint is hit via AJAX and a missing key
        // must not raise an undefined-index error.
        $name = $_POST['name'] ?? '';
        $phone = $_POST['phone'] ?? '';
        $address = $_POST['address'] ?? '';
        $state = $_POST['state'] ?? '';
        // Save the customer contact first; the order references its id.
        $sub = new Subcriber();
        $sub->phone = $phone;
        $sub->name = $name;
        $sub->state = $state;
        $sub->status = Subcriber::STATUS_ORDER;
        $sub->address = $address;
        if (!$sub->save()) {
            $message = 'Đặt hàng không thành công vui lòng thử lại.';
            return Json::encode(['success' => false, 'message' => $message]);
        }
        $cart = Yii::$app->session['cart'] ?? [];
        $totalAmount = $total = 0;
        foreach ($cart as $value) {
            $totalAmount += $value['amount'];
            $total += $value['price'] * $value['amount'];
        }
        $order = new Order();
        $order->phone = $phone;
        $order->number = $totalAmount;
        $order->address = $address;
        $order->id_sub = $sub->id;
        $order->status = Order::STATUS_TP;
        $order->total = $total;
        if (!$order->save()) {
            $message = 'Đặt hàng không thành công vui lòng thử lại.';
            return Json::encode(['success' => false, 'message' => $message]);
        }
        // NOTE(review): these writes are not wrapped in a DB transaction, so a
        // failed detail row leaves a partial order behind — confirm intent.
        foreach ($cart as $value) {
            $order_detail = new OrderDetail();
            $order_detail->order_id = $order->id;
            $order_detail->product_id = $value['id'];
            $order_detail->price = $value["price"];
            $order_detail->number = $value["amount"];
            $order_detail->total = $value['amount'] * $value['price'];
            $order_detail->price_sale = null;
            if (!$order_detail->save()) {
                $message = 'Đặt hàng không thành công. không lưu thành công chi tiết đơn hàng!';
                return Json::encode(['success' => false, 'message' => $message]);
            }
        }
        Yii::$app->session->remove('cart');
        $message = 'Đặt hàng thành công, bộ phận chăm sóc khách hàng sẽ gọi lại để xác nhận đơn hàng.';
        return Json::encode(['success' => true, 'message' => $message]);
    }

    /**
     * Saves a subscriber / call-back request.
     *
     * @return string JSON-encoded {success, message}
     */
    public function actionSaveSub()
    {
        $phone = $_POST['phone'] ?? '';
        $name = '';
        $message = '';
        if (isset($_POST['name'])) {
            $name = $_POST['name'] ?: '';
            // 'message' is not always posted together with 'name'; default it
            // to avoid an undefined-index error.
            $message = ($_POST['message'] ?? '') ?: '';
        }
        $sub = new Subcriber();
        $sub->phone = $phone;
        $sub->name = $name;
        $sub->state = $message;
        $sub->status = Subcriber::STATUS_NOTCALL;
        if ($sub->save()) {
            if ($sub->name == '') {
                $message = 'Đã yêu cầu gọi lại thành công, bộ phận chăm sóc khách hàng sẽ gọi lại cho quý khách sau ít phút.';
            } else {
                $message = 'Đã gửi yêu cầu thành công, bộ phận chăm sóc khách hàng sẽ liên hệ lại cho quý khách sớm nhất.';
            }
            return Json::encode(['success' => true, 'message' => $message]);
        }
        $message = 'Yêu cầu chưa được thực hiện vui lòng thử lại.';
        return Json::encode(['success' => false, 'message' => $message]);
    }

    /**
     * Sums the quantities of all lines in the session cart.
     *
     * Shared by the add/update/delete cart actions, which previously each
     * duplicated this loop.
     *
     * @return int total number of items (0 when the cart is empty or unset)
     */
    private function getCartQuantity()
    {
        $totalAmount = 0;
        foreach (Yii::$app->session['cart'] ?? [] as $value) {
            $totalAmount += $value['amount'];
        }
        return $totalAmount;
    }
}
| tuanpv1/drugstore | frontend/controllers/ProductController.php | PHP | bsd-3-clause | 6,595 |
<?php
// User administration index view: renders a grid of users plus a "create"
// button. Expects the variables documented in the @var annotations below.
use yii\helpers\Html;
use yii\grid\GridView;
/* @var $this yii\web\View */
/* @var $searchModel backend\models\UserSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */
$this->title = 'Управление пользователями';
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="user-index">
    <h1><?= Html::encode($this->title) ?></h1>
    <?php // echo $this->render('_search', ['model' => $searchModel]); ?>
    <div class="col-md-8">
        <?= GridView::widget([
            'dataProvider' => $dataProvider,
            'filterModel' => $searchModel,
            <?php /* NOTE(review): header row is hidden while a filterModel is
               set; the filter row may still render — confirm this is the
               desired look. */ ?>
            'showHeader' => false,
            'columns' => [
                ['class' => 'yii\grid\SerialColumn'],
                'username',
                'fio',
                ['class' => 'yii\grid\ActionColumn',
                'headerOptions' => ['width' => '80'],
                'template' => '{update} - {view}',
                ],
            ],
        ]); ?>
    </div>
    <div class="col-md-4">
        <p>
            <?= Html::a('Создать пользователя', ['create'], ['class' => 'btn btn-success']) ?>
        </p>
    </div>
</div>
</div> | N1kolayS/PCExpert | backend/views/user/index.php | PHP | bsd-3-clause | 1,178 |
// Queues an image for inclusion in the rendered output and returns `this`
// so calls can be chained. Naming the default export (previously anonymous)
// improves stack traces without changing the module's interface.
//
// path   - location of the image resource
// width  - width to record for the image
// height - height to record for the image
export default function images(path, width, height) {
  this._images.push({path, width, height});
  return this;
}
| magjac/d3-graphviz | src/images.js | JavaScript | bsd-3-clause | 133 |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/task/thread_pool/task_tracker.h"
#include <atomic>
#include <string>
#include <vector>
#include "base/base_switches.h"
#include "base/callback.h"
#include "base/command_line.h"
#include "base/compiler_specific.h"
#include "base/json/json_writer.h"
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/optional.h"
#include "base/sequence_token.h"
#include "base/synchronization/condition_variable.h"
#include "base/task/scoped_set_task_priority_for_current_thread.h"
#include "base/task/task_executor.h"
#include "base/threading/sequence_local_storage_map.h"
#include "base/threading/sequenced_task_runner_handle.h"
#include "base/threading/thread_restrictions.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
#include "base/trace_event/trace_event.h"
#include "base/values.h"
#include "build/build_config.h"
namespace base {
namespace internal {
namespace {
// Human-readable execution-mode names, indexed by TaskSourceExecutionMode and
// used as trace-event labels (see TaskTracingInfo below).
constexpr const char* kExecutionModeString[] = {"parallel", "sequenced",
                                                "single thread", "job"};
static_assert(
    size(kExecutionModeString) ==
        static_cast<size_t>(TaskSourceExecutionMode::kMax) + 1,
    "Array kExecutionModeString is out of sync with TaskSourceExecutionMode.");
// An immutable copy of a thread pool task's info required by tracing.
class TaskTracingInfo : public trace_event::ConvertableToTraceFormat {
 public:
  TaskTracingInfo(const TaskTraits& task_traits,
                  const char* execution_mode,
                  const SequenceToken& sequence_token)
      : task_traits_(task_traits),
        execution_mode_(execution_mode),
        sequence_token_(sequence_token) {}
  // trace_event::ConvertableToTraceFormat implementation.
  void AppendAsTraceFormat(std::string* out) const override;
 private:
  const TaskTraits task_traits_;
  // Not owned. Callers in this file pass a kExecutionModeString entry.
  const char* const execution_mode_;
  const SequenceToken sequence_token_;
  DISALLOW_COPY_AND_ASSIGN(TaskTracingInfo);
};
// Serializes the task info as a JSON dictionary appended to |out|.
void TaskTracingInfo::AppendAsTraceFormat(std::string* out) const {
  DictionaryValue dict;
  dict.SetStringKey("task_priority",
                    base::TaskPriorityToString(task_traits_.priority()));
  dict.SetStringKey("execution_mode", execution_mode_);
  // The sequence token key is omitted when the token is invalid.
  if (sequence_token_.IsValid())
    dict.SetIntKey("sequence_token", sequence_token_.ToInternalValue());
  std::string tmp;
  JSONWriter::Write(dict, &tmp);
  out->append(tmp);
}
// Constructs a histogram to track latency which is logging to
// "ThreadPool.{histogram_name}.{histogram_label}.{task_type_suffix}".
// Returns nullptr when |histogram_label| is empty (histogram disabled).
HistogramBase* GetLatencyHistogram(StringPiece histogram_name,
                                   StringPiece histogram_label,
                                   StringPiece task_type_suffix) {
  DCHECK(!histogram_name.empty());
  DCHECK(!task_type_suffix.empty());
  if (histogram_label.empty())
    return nullptr;
  // Mimics the UMA_HISTOGRAM_HIGH_RESOLUTION_CUSTOM_TIMES macro. The minimums
  // and maximums were chosen to place the 1ms mark at around the 70% range
  // coverage for buckets giving us good info for tasks that have a latency
  // below 1ms (most of them) and enough info to assess how bad the latency is
  // for tasks that exceed this threshold.
  const std::string histogram = JoinString(
      {"ThreadPool", histogram_name, histogram_label, task_type_suffix}, ".");
  return Histogram::FactoryMicrosecondsTimeGet(
      histogram, TimeDelta::FromMicroseconds(1),
      TimeDelta::FromMilliseconds(20), 50,
      HistogramBase::kUmaTargetedHistogramFlag);
}
// Constructs a histogram to track task count which is logging to
// "ThreadPool.{histogram_name}.{histogram_label}.{task_type_suffix}".
// Returns nullptr when |histogram_label| is empty (histogram disabled).
HistogramBase* GetCountHistogram(StringPiece histogram_name,
                                 StringPiece histogram_label,
                                 StringPiece task_type_suffix) {
  DCHECK(!histogram_name.empty());
  DCHECK(!task_type_suffix.empty());
  if (histogram_label.empty())
    return nullptr;
  // Mimics the UMA_HISTOGRAM_CUSTOM_COUNTS macro.
  const std::string histogram = JoinString(
      {"ThreadPool", histogram_name, histogram_label, task_type_suffix}, ".");
  // 500 was chosen as the maximum number of tasks run while queuing because
  // values this high would likely indicate an error, beyond which knowing the
  // actual number of tasks is not informative.
  return Histogram::FactoryGet(histogram, 1, 500, 50,
                               HistogramBase::kUmaTargetedHistogramFlag);
}
// Returns a histogram stored in an array indexed by task priority.
// |histograms| is assumed to hold one entry per TaskPriority value, in enum
// order — this matches the 3-entry arrays initialized in the TaskTracker
// constructor.
// TODO(jessemckenna): use the STATIC_HISTOGRAM_POINTER_GROUP macro from
// histogram_macros.h instead.
HistogramBase* GetHistogramForTaskPriority(TaskPriority task_priority,
                                           HistogramBase* const histograms[3]) {
  return histograms[static_cast<int>(task_priority)];
}
// Returns true iff the --log-best-effort-tasks switch is on the command line.
bool HasLogBestEffortTasksSwitch() {
  // The CommandLine might not be initialized if ThreadPool is initialized in a
  // dynamic library which doesn't have access to argc/argv.
  return CommandLine::InitializedForCurrentProcess() &&
         CommandLine::ForCurrentProcess()->HasSwitch(
             switches::kLogBestEffortTasks);
}
// Needed for PostTaskHere and CurrentThread. This executor lives for the
// duration of a threadpool task invocation.
class EphemeralTaskExecutor : public TaskExecutor {
 public:
  // |sequenced_task_runner| and |single_thread_task_runner| must outlive this
  // EphemeralTaskExecutor.
  EphemeralTaskExecutor(SequencedTaskRunner* sequenced_task_runner,
                        SingleThreadTaskRunner* single_thread_task_runner,
                        const TaskTraits* sequence_traits)
      : sequenced_task_runner_(sequenced_task_runner),
        single_thread_task_runner_(single_thread_task_runner),
        sequence_traits_(sequence_traits) {
    // Registers itself as the thread-local executor; unregistered in the
    // destructor, so instances must be strictly scoped to one task run.
    SetTaskExecutorForCurrentThread(this);
  }
  ~EphemeralTaskExecutor() override {
    SetTaskExecutorForCurrentThread(nullptr);
  }
  // TaskExecutor:
  bool PostDelayedTask(const Location& from_here,
                       const TaskTraits& traits,
                       OnceClosure task,
                       TimeDelta delay) override {
    CheckTraitsCompatibleWithSequenceTraits(traits);
    return sequenced_task_runner_->PostDelayedTask(from_here, std::move(task),
                                                   delay);
  }
  scoped_refptr<TaskRunner> CreateTaskRunner(
      const TaskTraits& traits) override {
    CheckTraitsCompatibleWithSequenceTraits(traits);
    return sequenced_task_runner_;
  }
  scoped_refptr<SequencedTaskRunner> CreateSequencedTaskRunner(
      const TaskTraits& traits) override {
    CheckTraitsCompatibleWithSequenceTraits(traits);
    return sequenced_task_runner_;
  }
  scoped_refptr<SingleThreadTaskRunner> CreateSingleThreadTaskRunner(
      const TaskTraits& traits,
      SingleThreadTaskRunnerThreadMode thread_mode) override {
    CheckTraitsCompatibleWithSequenceTraits(traits);
    return single_thread_task_runner_;
  }
#if defined(OS_WIN)
  scoped_refptr<SingleThreadTaskRunner> CreateCOMSTATaskRunner(
      const TaskTraits& traits,
      SingleThreadTaskRunnerThreadMode thread_mode) override {
    CheckTraitsCompatibleWithSequenceTraits(traits);
    return single_thread_task_runner_;
  }
#endif  // defined(OS_WIN)
 private:
  // DCHECKs that |traits| are compatible with the current sequence's traits.
  // Currently ignores |traits.priority()|.
  void CheckTraitsCompatibleWithSequenceTraits(const TaskTraits& traits) {
    if (traits.shutdown_behavior_set_explicitly()) {
      DCHECK_EQ(traits.shutdown_behavior(),
                sequence_traits_->shutdown_behavior());
    }
    DCHECK(!traits.may_block() ||
           traits.may_block() == sequence_traits_->may_block());
    DCHECK(!traits.with_base_sync_primitives() ||
           traits.with_base_sync_primitives() ==
               sequence_traits_->with_base_sync_primitives());
  }
  SequencedTaskRunner* const sequenced_task_runner_;
  SingleThreadTaskRunner* const single_thread_task_runner_;
  const TaskTraits* const sequence_traits_;
};
} // namespace
// Atomic internal state used by TaskTracker to track items that are blocking
// Shutdown. An "item" consist of either:
// - A running SKIP_ON_SHUTDOWN task
// - A queued/running BLOCK_SHUTDOWN TaskSource.
// Sequential consistency shouldn't be assumed from these calls (i.e. a thread
// reading |HasShutdownStarted() == true| isn't guaranteed to see all writes
// made before |StartShutdown()| on the thread that invoked it).
class TaskTracker::State {
 public:
  State() = default;
  // Sets a flag indicating that shutdown has started. Returns true if there are
  // items blocking shutdown. Can only be called once.
  bool StartShutdown() {
    const auto new_value =
        subtle::NoBarrier_AtomicIncrement(&bits_, kShutdownHasStartedMask);
    // Check that the "shutdown has started" bit isn't zero. This would happen
    // if it was incremented twice.
    DCHECK(new_value & kShutdownHasStartedMask);
    const auto num_items_blocking_shutdown =
        new_value >> kNumItemsBlockingShutdownBitOffset;
    return num_items_blocking_shutdown != 0;
  }
  // Returns true if shutdown has started.
  bool HasShutdownStarted() const {
    return subtle::NoBarrier_Load(&bits_) & kShutdownHasStartedMask;
  }
  // Returns true if there are items blocking shutdown.
  bool AreItemsBlockingShutdown() const {
    const auto num_items_blocking_shutdown =
        subtle::NoBarrier_Load(&bits_) >> kNumItemsBlockingShutdownBitOffset;
    DCHECK_GE(num_items_blocking_shutdown, 0);
    return num_items_blocking_shutdown != 0;
  }
  // Increments the number of items blocking shutdown. Returns true if
  // shutdown has started.
  bool IncrementNumItemsBlockingShutdown() {
#if DCHECK_IS_ON()
    // Verify that no overflow will occur.
    const auto num_items_blocking_shutdown =
        subtle::NoBarrier_Load(&bits_) >> kNumItemsBlockingShutdownBitOffset;
    DCHECK_LT(num_items_blocking_shutdown,
              std::numeric_limits<subtle::Atomic32>::max() -
                  kNumItemsBlockingShutdownIncrement);
#endif
    const auto new_bits = subtle::NoBarrier_AtomicIncrement(
        &bits_, kNumItemsBlockingShutdownIncrement);
    return new_bits & kShutdownHasStartedMask;
  }
  // Decrements the number of items blocking shutdown. Returns true if shutdown
  // has started and the number of tasks blocking shutdown becomes zero.
  bool DecrementNumItemsBlockingShutdown() {
    const auto new_bits = subtle::NoBarrier_AtomicIncrement(
        &bits_, -kNumItemsBlockingShutdownIncrement);
    const bool shutdown_has_started = new_bits & kShutdownHasStartedMask;
    const auto num_items_blocking_shutdown =
        new_bits >> kNumItemsBlockingShutdownBitOffset;
    DCHECK_GE(num_items_blocking_shutdown, 0);
    return shutdown_has_started && num_items_blocking_shutdown == 0;
  }
 private:
  // Bit layout of |bits_|: see comment below the constants.
  static constexpr subtle::Atomic32 kShutdownHasStartedMask = 1;
  static constexpr subtle::Atomic32 kNumItemsBlockingShutdownBitOffset = 1;
  static constexpr subtle::Atomic32 kNumItemsBlockingShutdownIncrement =
      1 << kNumItemsBlockingShutdownBitOffset;
  // The LSB indicates whether shutdown has started. The other bits count the
  // number of items blocking shutdown.
  // No barriers are required to read/write |bits_| as this class is only used
  // as an atomic state checker, it doesn't provide sequential consistency
  // guarantees w.r.t. external state. Sequencing of the TaskTracker::State
  // operations themselves is guaranteed by the AtomicIncrement RMW (read-
  // modify-write) semantics however. For example, if two threads are racing to
  // call IncrementNumItemsBlockingShutdown() and StartShutdown() respectively,
  // either the first thread will win and the StartShutdown() call will see the
  // blocking task or the second thread will win and
  // IncrementNumItemsBlockingShutdown() will know that shutdown has started.
  subtle::Atomic32 bits_ = 0;
  DISALLOW_COPY_AND_ASSIGN(State);
};
// TODO(jessemckenna): Write a helper function to avoid code duplication below.
// The three histogram arrays are indexed by TaskPriority via
// GetHistogramForTaskPriority(); their entry order must therefore match the
// TaskPriority enum. Entries are null when |histogram_label| is empty.
TaskTracker::TaskTracker(StringPiece histogram_label)
    : histogram_label_(histogram_label),
      has_log_best_effort_tasks_switch_(HasLogBestEffortTasksSwitch()),
      state_(new State),
      can_run_policy_(CanRunPolicy::kAll),
      flush_cv_(flush_lock_.CreateConditionVariable()),
      shutdown_lock_(&flush_lock_),
      task_latency_histograms_{GetLatencyHistogram("TaskLatencyMicroseconds",
                                                   histogram_label,
                                                   "BackgroundTaskPriority"),
                               GetLatencyHistogram("TaskLatencyMicroseconds",
                                                   histogram_label,
                                                   "UserVisibleTaskPriority"),
                               GetLatencyHistogram("TaskLatencyMicroseconds",
                                                   histogram_label,
                                                   "UserBlockingTaskPriority")},
      heartbeat_latency_histograms_{
          GetLatencyHistogram("HeartbeatLatencyMicroseconds",
                              histogram_label,
                              "BackgroundTaskPriority"),
          GetLatencyHistogram("HeartbeatLatencyMicroseconds",
                              histogram_label,
                              "UserVisibleTaskPriority"),
          GetLatencyHistogram("HeartbeatLatencyMicroseconds",
                              histogram_label,
                              "UserBlockingTaskPriority")},
      num_tasks_run_while_queuing_histograms_{
          GetCountHistogram("NumTasksRunWhileQueuing",
                            histogram_label,
                            "BackgroundTaskPriority"),
          GetCountHistogram("NumTasksRunWhileQueuing",
                            histogram_label,
                            "UserVisibleTaskPriority"),
          GetCountHistogram("NumTasksRunWhileQueuing",
                            histogram_label,
                            "UserBlockingTaskPriority")},
      tracked_ref_factory_(this) {}
TaskTracker::~TaskTracker() = default;
// Flips the tracker into shutdown mode. If nothing currently blocks shutdown,
// |shutdown_event_| is signaled immediately; otherwise the last item blocking
// shutdown signals it via OnBlockingShutdownTasksComplete().
void TaskTracker::StartShutdown() {
  CheckedAutoLock auto_lock(shutdown_lock_);
  // This method can only be called once.
  DCHECK(!shutdown_event_);
  DCHECK(!state_->HasShutdownStarted());
  shutdown_event_ = std::make_unique<WaitableEvent>();
  const bool tasks_are_blocking_shutdown = state_->StartShutdown();
  // From now, if a thread causes the number of tasks blocking shutdown to
  // become zero, it will call OnBlockingShutdownTasksComplete().
  if (!tasks_are_blocking_shutdown) {
    // If another thread posts a BLOCK_SHUTDOWN task at this moment, it will
    // block until this method releases |shutdown_lock_|. Then, it will fail
    // DCHECK(!shutdown_event_->IsSignaled()). This is the desired behavior
    // because posting a BLOCK_SHUTDOWN task after StartShutdown() when no
    // tasks are blocking shutdown isn't allowed.
    shutdown_event_->Signal();
    return;
  }
}
// Blocks until shutdown (started by StartShutdown()) has fully completed,
// then wakes any FlushForTesting()/FlushAsyncForTesting() waiters.
void TaskTracker::CompleteShutdown() {
  // It is safe to access |shutdown_event_| without holding |lock_| because the
  // pointer never changes after being set by StartShutdown(), which must
  // happen-before before this.
  DCHECK(TS_UNCHECKED_READ(shutdown_event_));
  {
    base::ScopedAllowBaseSyncPrimitives allow_wait;
    TS_UNCHECKED_READ(shutdown_event_)->Wait();
  }
  // Unblock FlushForTesting() and perform the FlushAsyncForTesting callback
  // when shutdown completes.
  {
    CheckedAutoLock auto_lock(flush_lock_);
    flush_cv_->Signal();
  }
  CallFlushCallbackForTesting();
}
// Blocks until there are no incomplete task sources, or shutdown completes.
void TaskTracker::FlushForTesting() {
  CheckedAutoLock auto_lock(flush_lock_);
  while (num_incomplete_task_sources_.load(std::memory_order_acquire) != 0 &&
         !IsShutdownComplete()) {
    flush_cv_->Wait();
  }
}
// Registers |flush_callback| to run once there are no incomplete task sources
// (or shutdown completes); runs it immediately if that is already the case.
// Only one pending callback is supported at a time.
void TaskTracker::FlushAsyncForTesting(OnceClosure flush_callback) {
  DCHECK(flush_callback);
  {
    CheckedAutoLock auto_lock(flush_lock_);
    DCHECK(!flush_callback_for_testing_)
        << "Only one FlushAsyncForTesting() may be pending at any time.";
    flush_callback_for_testing_ = std::move(flush_callback);
  }
  if (num_incomplete_task_sources_.load(std::memory_order_acquire) == 0 ||
      IsShutdownComplete()) {
    CallFlushCallbackForTesting();
  }
}
// Atomically replaces the policy consulted by CanRunPriority().
void TaskTracker::SetCanRunPolicy(CanRunPolicy can_run_policy) {
  can_run_policy_.store(can_run_policy);
}
// Returns true iff |task| is allowed to be posted given the shutdown state;
// also records the task in the annotator for queue-time tracking.
bool TaskTracker::WillPostTask(Task* task,
                               TaskShutdownBehavior shutdown_behavior) {
  DCHECK(task);
  DCHECK(task->task);
  if (state_->HasShutdownStarted()) {
    // A non BLOCK_SHUTDOWN task is allowed to be posted iff shutdown hasn't
    // started and the task is not delayed.
    if (shutdown_behavior != TaskShutdownBehavior::BLOCK_SHUTDOWN ||
        !task->delayed_run_time.is_null()) {
      return false;
    }
    // A BLOCK_SHUTDOWN task posted after shutdown has completed is an
    // ordering bug. This aims to catch those early.
    CheckedAutoLock auto_lock(shutdown_lock_);
    DCHECK(shutdown_event_);
    DCHECK(!shutdown_event_->IsSignaled());
  }
  // TODO(scheduler-dev): Record the task traits here.
  task_annotator_.WillQueueTask("ThreadPool_PostTask", task, "");
  return true;
}
// Returns true iff |task| may be queued for immediate execution. Delayed
// tasks are rejected once shutdown has started. Optionally logs the posting
// location of BEST_EFFORT tasks (--log-best-effort-tasks).
bool TaskTracker::WillPostTaskNow(const Task& task, TaskPriority priority) {
  if (!task.delayed_run_time.is_null() && state_->HasShutdownStarted())
    return false;
  if (has_log_best_effort_tasks_switch_ &&
      priority == TaskPriority::BEST_EFFORT) {
    // A TaskPriority::BEST_EFFORT task is being posted.
    LOG(INFO) << task.posted_from.ToString();
  }
  return true;
}
// Registers |task_source| with this tracker, accounting for shutdown rules.
// Returns a null RegisteredTaskSource if the source may not be queued (e.g.
// non-BLOCK_SHUTDOWN source after shutdown started).
RegisteredTaskSource TaskTracker::RegisterTaskSource(
    scoped_refptr<TaskSource> task_source) {
  DCHECK(task_source);
  TaskShutdownBehavior shutdown_behavior = task_source->shutdown_behavior();
  if (!BeforeQueueTaskSource(shutdown_behavior))
    return nullptr;
  num_incomplete_task_sources_.fetch_add(1, std::memory_order_relaxed);
  return RegisteredTaskSource(std::move(task_source), this);
}
bool TaskTracker::CanRunPriority(TaskPriority priority) const {
  // A priority may run when the current policy allows everything, or when it
  // allows foreground work and |priority| is at least USER_VISIBLE.
  const CanRunPolicy policy = can_run_policy_.load();
  return policy == CanRunPolicy::kAll ||
         (policy == CanRunPolicy::kForegroundOnly &&
          priority >= TaskPriority::USER_VISIBLE);
}
// Runs (or clears, if tasks may no longer run) the next task of |task_source|.
// Returns |task_source| when it must be re-enqueued, null otherwise.
RegisteredTaskSource TaskTracker::RunAndPopNextTask(
    RegisteredTaskSource task_source) {
  DCHECK(task_source);
  const bool should_run_tasks = BeforeRunTask(task_source->shutdown_behavior());
  // Run the next task in |task_source|.
  Optional<Task> task;
  TaskTraits traits;
  {
    auto transaction = task_source->BeginTransaction();
    task = should_run_tasks ? task_source.TakeTask(&transaction)
                            : task_source.Clear(&transaction);
    traits = transaction.traits();
  }
  if (task) {
    // Run the |task| (whether it's a worker task or the Clear() closure).
    RunTask(std::move(task.value()), task_source.get(), traits);
  }
  if (should_run_tasks)
    AfterRunTask(task_source->shutdown_behavior());
  const bool task_source_must_be_queued = task_source.DidProcessTask();
  // |task_source| should be reenqueued iff requested by DidProcessTask().
  if (task_source_must_be_queued)
    return task_source;
  return nullptr;
}
// Returns true once StartShutdown() has been called.
bool TaskTracker::HasShutdownStarted() const {
  return state_->HasShutdownStarted();
}
// Returns true once shutdown has started AND |shutdown_event_| was signaled.
bool TaskTracker::IsShutdownComplete() const {
  CheckedAutoLock auto_lock(shutdown_lock_);
  return shutdown_event_ && shutdown_event_->IsSignaled();
}
// Records the time elapsed since |posted_time| in the latency histogram for
// |priority|. No-op when histograms are disabled (empty label).
void TaskTracker::RecordLatencyHistogram(TaskPriority priority,
                                         TimeTicks posted_time) const {
  if (histogram_label_.empty())
    return;
  const TimeDelta task_latency = TimeTicks::Now() - posted_time;
  GetHistogramForTaskPriority(priority, task_latency_histograms_)
      ->AddTimeMicrosecondsGranularity(task_latency);
}
// Records heartbeat latency and the number of tasks that ran between posting
// and now. No-op when histograms are disabled (empty label).
void TaskTracker::RecordHeartbeatLatencyAndTasksRunWhileQueuingHistograms(
    TaskPriority priority,
    TimeTicks posted_time,
    int num_tasks_run_when_posted) const {
  if (histogram_label_.empty())
    return;
  const TimeDelta task_latency = TimeTicks::Now() - posted_time;
  GetHistogramForTaskPriority(priority, heartbeat_latency_histograms_)
      ->AddTimeMicrosecondsGranularity(task_latency);
  GetHistogramForTaskPriority(priority, num_tasks_run_while_queuing_histograms_)
      ->Add(GetNumTasksRun() - num_tasks_run_when_posted);
}
// Returns the relaxed count of tasks run so far (monotonically increasing).
int TaskTracker::GetNumTasksRun() const {
  return num_tasks_run_.load(std::memory_order_relaxed);
}
// Bumps the run-task counter; called once per task from AfterRunTask().
void TaskTracker::IncrementNumTasksRun() {
  num_tasks_run_.fetch_add(1, std::memory_order_relaxed);
}
// Executes |task| with the thread state (restrictions, sequence token, task
// priority, sequence-local storage, TaskRunner handles) appropriate for
// |task_source| and |traits| installed for the duration of the run.
void TaskTracker::RunTask(Task task,
                          TaskSource* task_source,
                          const TaskTraits& traits) {
  DCHECK(task_source);
  RecordLatencyHistogram(traits.priority(), task.queue_time);
  const auto environment = task_source->GetExecutionEnvironment();
  // Save previous restriction states so they can be restored below.
  const bool previous_singleton_allowed =
      ThreadRestrictions::SetSingletonAllowed(
          traits.shutdown_behavior() !=
          TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN);
  const bool previous_io_allowed =
      ThreadRestrictions::SetIOAllowed(traits.may_block());
  const bool previous_wait_allowed =
      ThreadRestrictions::SetWaitAllowed(traits.with_base_sync_primitives());
  {
    DCHECK(environment.token.IsValid());
    ScopedSetSequenceTokenForCurrentThread
        scoped_set_sequence_token_for_current_thread(environment.token);
    ScopedSetTaskPriorityForCurrentThread
        scoped_set_task_priority_for_current_thread(traits.priority());
    // Local storage map used if none is provided by |environment|.
    Optional<SequenceLocalStorageMap> local_storage_map;
    if (!environment.sequence_local_storage)
      local_storage_map.emplace();
    ScopedSetSequenceLocalStorageMapForCurrentThread
        scoped_set_sequence_local_storage_map_for_current_thread(
            environment.sequence_local_storage
                ? environment.sequence_local_storage
                : &local_storage_map.value());
    // Set up TaskRunnerHandle as expected for the scope of the task.
    Optional<SequencedTaskRunnerHandle> sequenced_task_runner_handle;
    Optional<ThreadTaskRunnerHandle> single_thread_task_runner_handle;
    Optional<EphemeralTaskExecutor> ephemeral_task_executor;
    switch (task_source->execution_mode()) {
      case TaskSourceExecutionMode::kJob:
      case TaskSourceExecutionMode::kParallel:
        // No handle installed for parallel/job tasks.
        break;
      case TaskSourceExecutionMode::kSequenced:
        DCHECK(task_source->task_runner());
        sequenced_task_runner_handle.emplace(
            static_cast<SequencedTaskRunner*>(task_source->task_runner()));
        ephemeral_task_executor.emplace(
            static_cast<SequencedTaskRunner*>(task_source->task_runner()),
            nullptr, &traits);
        break;
      case TaskSourceExecutionMode::kSingleThread:
        DCHECK(task_source->task_runner());
        single_thread_task_runner_handle.emplace(
            static_cast<SingleThreadTaskRunner*>(task_source->task_runner()));
        ephemeral_task_executor.emplace(
            static_cast<SequencedTaskRunner*>(task_source->task_runner()),
            static_cast<SingleThreadTaskRunner*>(task_source->task_runner()),
            &traits);
        break;
    }
    TRACE_TASK_EXECUTION("ThreadPool_RunTask", task);
    // TODO(gab): In a better world this would be tacked on as an extra arg
    // to the trace event generated above. This is not possible however until
    // http://crbug.com/652692 is resolved.
    TRACE_EVENT1("thread_pool", "ThreadPool_TaskInfo", "task_info",
                 std::make_unique<TaskTracingInfo>(
                     traits,
                     kExecutionModeString[static_cast<size_t>(
                         task_source->execution_mode())],
                     environment.token));
    RunTaskWithShutdownBehavior(traits.shutdown_behavior(), &task);
    // Make sure the arguments bound to the callback are deleted within the
    // scope in which the callback runs.
    task.task = OnceClosure();
  }
  // Restore restrictions in reverse order of the sets above.
  ThreadRestrictions::SetWaitAllowed(previous_wait_allowed);
  ThreadRestrictions::SetIOAllowed(previous_io_allowed);
  ThreadRestrictions::SetSingletonAllowed(previous_singleton_allowed);
}
// Test-only: true while any registered task source hasn't completed.
bool TaskTracker::HasIncompleteTaskSourcesForTesting() const {
  return num_incomplete_task_sources_.load(std::memory_order_acquire) != 0;
}
// Shutdown accounting performed before a task source is queued. Returns true
// iff queueing is allowed under the current shutdown state.
bool TaskTracker::BeforeQueueTaskSource(
    TaskShutdownBehavior shutdown_behavior) {
  if (shutdown_behavior == TaskShutdownBehavior::BLOCK_SHUTDOWN) {
    // BLOCK_SHUTDOWN task sources block shutdown between the moment they are
    // queued and the moment their last task completes its execution.
    const bool shutdown_started = state_->IncrementNumItemsBlockingShutdown();
    if (shutdown_started) {
      // A BLOCK_SHUTDOWN task posted after shutdown has completed is an
      // ordering bug. This aims to catch those early.
      CheckedAutoLock auto_lock(shutdown_lock_);
      DCHECK(shutdown_event_);
      DCHECK(!shutdown_event_->IsSignaled());
    }
    return true;
  }
  // A non BLOCK_SHUTDOWN task is allowed to be posted iff shutdown hasn't
  // started.
  return !state_->HasShutdownStarted();
}
// Shutdown accounting performed right before a task runs. Returns true iff
// the task is still allowed to run under the current shutdown state.
bool TaskTracker::BeforeRunTask(TaskShutdownBehavior shutdown_behavior) {
  switch (shutdown_behavior) {
    case TaskShutdownBehavior::BLOCK_SHUTDOWN: {
      // The number of tasks blocking shutdown has been incremented when the
      // task was posted.
      DCHECK(state_->AreItemsBlockingShutdown());
      // Trying to run a BLOCK_SHUTDOWN task after shutdown has completed is
      // unexpected as it either shouldn't have been posted if shutdown
      // completed or should be blocking shutdown if it was posted before it
      // did.
      DCHECK(!state_->HasShutdownStarted() || !IsShutdownComplete());
      return true;
    }
    case TaskShutdownBehavior::SKIP_ON_SHUTDOWN: {
      // SKIP_ON_SHUTDOWN tasks block shutdown while they are running.
      const bool shutdown_started = state_->IncrementNumItemsBlockingShutdown();
      if (shutdown_started) {
        // The SKIP_ON_SHUTDOWN task isn't allowed to run during shutdown.
        // Decrement the number of tasks blocking shutdown that was wrongly
        // incremented.
        DecrementNumItemsBlockingShutdown();
        return false;
      }
      return true;
    }
    case TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN: {
      return !state_->HasShutdownStarted();
    }
  }
  NOTREACHED();
  return false;
}
// Shutdown accounting performed after a task ran: bumps the run counter and
// releases the shutdown-blocking ref taken by BeforeRunTask() for
// SKIP_ON_SHUTDOWN tasks.
void TaskTracker::AfterRunTask(TaskShutdownBehavior shutdown_behavior) {
  IncrementNumTasksRun();
  if (shutdown_behavior == TaskShutdownBehavior::SKIP_ON_SHUTDOWN) {
    DecrementNumItemsBlockingShutdown();
  }
}
// Undoes RegisterTaskSource(): releases the shutdown-blocking ref taken for
// BLOCK_SHUTDOWN sources and the incomplete-source count. Returns the source
// so the caller keeps ownership.
scoped_refptr<TaskSource> TaskTracker::UnregisterTaskSource(
    scoped_refptr<TaskSource> task_source) {
  DCHECK(task_source);
  if (task_source->shutdown_behavior() ==
      TaskShutdownBehavior::BLOCK_SHUTDOWN) {
    DecrementNumItemsBlockingShutdown();
  }
  DecrementNumIncompleteTaskSources();
  return task_source;
}
// Releases one shutdown-blocking item; signals |shutdown_event_| when this
// was the last one after shutdown has started.
void TaskTracker::DecrementNumItemsBlockingShutdown() {
  const bool shutdown_started_and_no_items_block_shutdown =
      state_->DecrementNumItemsBlockingShutdown();
  if (!shutdown_started_and_no_items_block_shutdown)
    return;
  CheckedAutoLock auto_lock(shutdown_lock_);
  DCHECK(shutdown_event_);
  shutdown_event_->Signal();
}
// Releases one incomplete task source; wakes flush waiters / runs the async
// flush callback when the count reaches zero.
void TaskTracker::DecrementNumIncompleteTaskSources() {
  const auto prev_num_incomplete_task_sources =
      num_incomplete_task_sources_.fetch_sub(1);
  DCHECK_GE(prev_num_incomplete_task_sources, 1);
  if (prev_num_incomplete_task_sources == 1) {
    {
      CheckedAutoLock auto_lock(flush_lock_);
      flush_cv_->Signal();
    }
    CallFlushCallbackForTesting();
  }
}
// Takes the pending FlushAsyncForTesting() callback (if any) out from under
// |flush_lock_| and runs it without the lock held.
void TaskTracker::CallFlushCallbackForTesting() {
  OnceClosure flush_callback;
  {
    CheckedAutoLock auto_lock(flush_lock_);
    flush_callback = std::move(flush_callback_for_testing_);
  }
  if (flush_callback)
    std::move(flush_callback).Run();
}
// The three NOINLINE wrappers below give each shutdown behavior a distinct
// stack frame, which keeps them distinguishable in crash reports.
NOINLINE void TaskTracker::RunContinueOnShutdown(Task* task) {
  task_annotator_.RunTask("ThreadPool_RunTask_ContinueOnShutdown", task);
}
NOINLINE void TaskTracker::RunSkipOnShutdown(Task* task) {
  task_annotator_.RunTask("ThreadPool_RunTask_SkipOnShutdown", task);
}
NOINLINE void TaskTracker::RunBlockShutdown(Task* task) {
  task_annotator_.RunTask("ThreadPool_RunTask_BlockShutdown", task);
}
// Dispatches |task| to the dedicated runner matching |shutdown_behavior|.
// No default case: the switch is exhaustive over TaskShutdownBehavior, so a
// newly added enum value triggers a compiler diagnostic here.
void TaskTracker::RunTaskWithShutdownBehavior(
    TaskShutdownBehavior shutdown_behavior,
    Task* task) {
  switch (shutdown_behavior) {
    case TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN:
      RunContinueOnShutdown(task);
      return;
    case TaskShutdownBehavior::SKIP_ON_SHUTDOWN:
      RunSkipOnShutdown(task);
      return;
    case TaskShutdownBehavior::BLOCK_SHUTDOWN:
      RunBlockShutdown(task);
      return;
  }
}
} // namespace internal
} // namespace base
| endlessm/chromium-browser | base/task/thread_pool/task_tracker.cc | C++ | bsd-3-clause | 29,383 |
<div id="loi-giai">
    <strong><p><u>Giải:</u></p></strong>
    <p>Gọi ma trận đầu vào là ma trận $A$ vậy </p>
    $$
    A=<?= $Thuvienchung->hienthimatran($matran,'pmatrix')?>
    $$
    <?php
    // Solution view: raises the input matrix to the integer power $somu and
    // renders the worked solution (#loi-giai) plus a short answer (#dap-an),
    // both using MathJax delimiters.
    // Expected variables: $matran (matrix), $hang / $cot (row / column
    // counts), $somu (exponent), $Thuvienchung (math helper component).
    $ketqua=[];
    $nghichdao=[];
    $dinhthuc =0;
    // The power is only defined for square matrices.
    if($hang==$cot){
        $dinhthuc = $Thuvienchung->dinhthucthuong( $matran, $hang);
        if($somu > 0){
    ?>
    <p> và số mũ là <?=$somu?> </p>
    <?php
        // For exponents > 2, decompose $somu into a sum of powers of two so
        // the solution can proceed by repeated squaring.
        if($somu>2){
            $tongluythua2=$Thuvienchung->tongluythua2($somu);
            $textmu='';
            echo '$$ '.$somu.' = ';
            $sophantumu=count($tongluythua2)-1;
            for($i=0;$i<$sophantumu;$i++){
                echo $tongluythua2[$i].' + ';
                $textmu=$textmu.$tongluythua2[$i].' + ';
            }
            // After the loop $i == $sophantumu: prints the last term.
            echo $tongluythua2[$i].'$$';
            $textmu=$textmu.$tongluythua2[$i];
        }
        $ketqua = $Thuvienchung->luythuamatran($matran, $hang, $somu);
    }else if($somu == 0){
        // Exponent 0: the answer is the identity matrix, defined only when
        // det(A) != 0.
    ?>
    <p>Do số mũ bằng 0 nên ta thực hiện tính định thức </p>
    $$
    <?= $Thuvienchung->hienthimatran($matran,'vmatrix')?> =
    <?= $dinhthuc?>
    $$
    <?php
        if($dinhthuc!=0){
            $ketqua= $Thuvienchung->matrandonvi($hang);
            echo '<p>Định thức của $A$ khác 0 nên </p>';
        }else{
    ?>
    <div class="alert alert-danger" role="alert">
        <?=Yii::t('app','Ma trận $A$ có định thức bằng 0 nên không '
                . 'thể mũ 0.')?></div>
    <?php
        }
    // Negative exponent: A^n = (A^{-1})^{-n}, which requires det(A) != 0.
    // NOTE(review): a bare `if` is used here instead of `else if`; harmless
    // because $somu == 0 and $somu < 0 are mutually exclusive, but
    // `else if` would read better.
    }if($somu < 0){
    ?>
    <p>Do số mũ bé hơn 0 nên ta thực hiện tính định thức </p>
    $$
    det(A)= <?= $Thuvienchung->hienthimatran($matran,'vmatrix')?> =
    <?= $dinhthuc?>
    $$
    <?php
        if($dinhthuc!=0){
            echo '<p>Định thức $A$ khác 0 nên ta tính ma trận nghịch đảo </p>';
            $nghichdao =$Thuvienchung->matrannghichdao($matran, $hang);
            // Compute (A^{-1})^{|somu|}.
            $ketqua= $Thuvienchung->luythuamatran($nghichdao, $hang, -1*$somu);
    ?>
    $$
    A^{-1} = <?= $Thuvienchung->hienthimatran($matran,'pmatrix')?>^{-1}
    =<?= $Thuvienchung->hienthimatran($nghichdao,'pmatrix')?>
    $$
    <?php
        }else{
    ?>
    <div class="alert alert-danger" role="alert">
        <?=Yii::t('app','Ma trận $A$ có định thức bằng 0 nên không '
                . 'thể mũ một số âm.')?></div>
    <?php
        }
    }
    }else{
    ?>
    <div class="alert alert-danger" role="alert">
        <?=Yii::t('app','Ma trận $A$ không phải là ma trận vuông
            nên không thực hiện được phép lũy thừa.')?></div>
    <?php
    }
    // Rendering section: only reached with a computed $ketqua on a square
    // input. The #loi-giai div is closed inside each branch below, right
    // before its #dap-an answer block opens.
    if($ketqua&&$hang==$cot){
        if($somu<0){
    ?>
    <p> Kết quả </p>
    $$
    A^{<?=$somu?>}
    =
    \begin{pmatrix}
    <?= $Thuvienchung->hienthimatran($matran,'pmatrix')?>
    ^{-1}
    \end{pmatrix}^{<?=-1*$somu?>}
    =
    <?= $Thuvienchung->hienthimatran($nghichdao,'pmatrix')?>
    ^{<?=-1*$somu?>}
    =
    <?= $Thuvienchung->hienthimatran($ketqua,'pmatrix');?>
    $$
    </div>
    <div id="dap-an">
        <p> Kết quả </p>
        $$
        <?= $Thuvienchung->hienthimatran($matran,'pmatrix')?>
        ^{<?=$somu?>}
        =
        <?= $Thuvienchung->hienthimatran($ketqua,'pmatrix');?>
        $$
    </div>
    <?php
        }else if($somu<=2){
    ?>
    <p> Kết quả </p>
    $$
    A^{<?=$somu?>}
    =
    <?= $Thuvienchung->hienthimatran($matran,'pmatrix')?>
    ^{<?=$somu?>}
    =
    <?= $Thuvienchung->hienthimatran($ketqua,'pmatrix');?>
    $$
    </div>
    <div id="dap-an">
        <p> Kết quả </p>
        $$
        <?= $Thuvienchung->hienthimatran($matran,'pmatrix')?>
        ^{<?=$somu?>}
        =
        <?= $Thuvienchung->hienthimatran($ketqua,'pmatrix');?>
        $$
    </div>
    <?php
        }else{
    ?>
    <p>Vậy ta cần phải thực hiện bình phương liên tiếp từ $A$ đến $A^{<?=end($tongluythua2)?>}$ là </p>
    <?php
    // Repeated-squaring steps: show A^{2k} = (A^k)^2 up to the largest
    // power-of-two term of the exponent decomposition.
    $lt2=1;
    while($lt2<end($tongluythua2)){
    ?>
    $
    A^{<?=$lt2*2?>}=(A^{<?=$lt2?>})^2
    =
    <?=$Thuvienchung->hienthimatran($Thuvienchung->
            luythuamatran($matran, $hang, $lt2 )
            ,'pmatrix')
    ?>^2
    =
    <?=$Thuvienchung->hienthimatran($Thuvienchung->
            luythuamatran($matran, $hang, $lt2*2 )
            ,'pmatrix')
    ?>
    $
    $$$$
    <?php
        $lt2 = $lt2 * 2;
    }
    ?>
    <p> Phép biến đổi là </p>
    $
    A^{<?=$somu?>}
    =
    A^{<?=$textmu?>}
    =
    <?php
    // Emit the exponent factorization A^{t_0} * A^{t_1} * ...
    for($i=0;$i<$sophantumu;$i++){
    ?>
    A^{<?=$tongluythua2[$i]?>} \cdot
    <?php
    }
    ?>
    <?php // After the loop $i == $sophantumu: this is the final factor. ?>
    A^{<?=$tongluythua2[$i]?>}\\
    =
    <?php
    // Same product again, with each factor rendered as a matrix.
    for($i=0;$i<$sophantumu;$i++){
    ?>
    <?=$Thuvienchung->hienthimatran($Thuvienchung->
            luythuamatran($matran, $hang, $tongluythua2[$i])
            ,'pmatrix')
    ?> \cdot
    <?php
    }
    ?>
    <?=$Thuvienchung->hienthimatran($Thuvienchung->
            luythuamatran($matran, $hang, $tongluythua2[$i])
            ,'pmatrix')
    ?>\\
    =
    <?= $Thuvienchung->hienthimatran($ketqua,'pmatrix');?>
    $
    <hr>
    <p> Vậy kết quả là </p>
    $
    A^{<?=$somu?>}
    =
    <?= $Thuvienchung->hienthimatran($ketqua,'pmatrix');?>
    $
    </div>
    <div id="dap-an">
        <p> Kết quả là </p>
        $
        <?= $Thuvienchung->hienthimatran($matran,'pmatrix')?>
        ^{<?=$somu?>}
        =
        <?= $Thuvienchung->hienthimatran($ketqua,'pmatrix');?>
        $
    </div>
    <?php
        }
    }
    ?>
<?php
/**
* CodeIgniter
*
* An open source application development framework for PHP
*
* This content is released under the MIT License (MIT)
*
* Copyright (c) 2014 - 2015, British Columbia Institute of Technology
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @package CodeIgniter
* @author EllisLab Dev Team
* @copyright Copyright (c) 2008 - 2014, EllisLab, Inc. (http://ellislab.com/)
* @copyright Copyright (c) 2014 - 2015, British Columbia Institute of Technology (http://bcit.ca/)
* @license http://opensource.org/licenses/MIT MIT License
* @link http://codeigniter.com
* @since Version 1.0.0
* @filesource
*/
// Security guard: abort when this file is requested directly instead of
// being loaded through the framework front controller.
defined('BASEPATH') OR exit('No direct script access allowed');
/**
* Query Builder Class
*
* This is the platform-independent base Query Builder implementation class.
*
* @package CodeIgniter
* @subpackage Drivers
* @category Database
* @author EllisLab Dev Team
* @link http://codeigniter.com/user_guide/database/
*/
abstract class CI_DB_query_builder extends CI_DB_driver {
/**
* Return DELETE SQL flag
*
* @var bool
*/
protected $return_delete_sql = FALSE;
/**
* Reset DELETE data flag
*
* @var bool
*/
protected $reset_delete_data = FALSE;
/**
* QB SELECT data
*
* @var array
*/
protected $qb_select = array();
/**
* QB DISTINCT flag
*
* @var bool
*/
protected $qb_distinct = FALSE;
/**
* QB FROM data
*
* @var array
*/
protected $qb_from = array();
/**
* QB JOIN data
*
* @var array
*/
protected $qb_join = array();
/**
* QB WHERE data
*
* @var array
*/
protected $qb_where = array();
/**
* QB GROUP BY data
*
* @var array
*/
protected $qb_groupby = array();
/**
* QB HAVING data
*
* @var array
*/
protected $qb_having = array();
/**
* QB keys
*
* @var array
*/
protected $qb_keys = array();
/**
* QB LIMIT data
*
* @var int
*/
protected $qb_limit = FALSE;
/**
* QB OFFSET data
*
* @var int
*/
protected $qb_offset = FALSE;
/**
* QB ORDER BY data
*
* @var array
*/
protected $qb_orderby = array();
/**
* QB data sets
*
* @var array
*/
protected $qb_set = array();
/**
* QB aliased tables list
*
* @var array
*/
protected $qb_aliased_tables = array();
/**
* QB WHERE group started flag
*
* @var bool
*/
protected $qb_where_group_started = FALSE;
/**
* QB WHERE group count
*
* @var int
*/
protected $qb_where_group_count = 0;
// Query Builder Caching variables
/**
* QB Caching flag
*
* @var bool
*/
protected $qb_caching = FALSE;
/**
* QB Cache exists list
*
* @var array
*/
protected $qb_cache_exists = array();
/**
* QB Cache SELECT data
*
* @var array
*/
protected $qb_cache_select = array();
/**
* QB Cache FROM data
*
* @var array
*/
protected $qb_cache_from = array();
/**
* QB Cache JOIN data
*
* @var array
*/
protected $qb_cache_join = array();
/**
* QB Cache WHERE data
*
* @var array
*/
protected $qb_cache_where = array();
/**
* QB Cache GROUP BY data
*
* @var array
*/
protected $qb_cache_groupby = array();
/**
* QB Cache HAVING data
*
* @var array
*/
protected $qb_cache_having = array();
/**
* QB Cache ORDER BY data
*
* @var array
*/
protected $qb_cache_orderby = array();
/**
* QB Cache data sets
*
* @var array
*/
protected $qb_cache_set = array();
/**
* QB No Escape data
*
* @var array
*/
protected $qb_no_escape = array();
/**
* QB Cache No Escape data
*
* @var array
*/
protected $qb_cache_no_escape = array();
// --------------------------------------------------------------------
/**
* Select
*
* Generates the SELECT portion of the query
*
* @param string
* @param mixed
* @return CI_DB_query_builder
*/
public function select($select = '*', $escape = NULL)
{
if (is_string($select))
{
$select = explode(',', $select);
}
// If the escape value was not set, we will base it on the global setting
is_bool($escape) OR $escape = $this->_protect_identifiers;
foreach ($select as $val)
{
$val = trim($val);
if ($val !== '')
{
$this->qb_select[] = $val;
$this->qb_no_escape[] = $escape;
if ($this->qb_caching === TRUE)
{
$this->qb_cache_select[] = $val;
$this->qb_cache_exists[] = 'select';
$this->qb_cache_no_escape[] = $escape;
}
}
}
return $this;
}
// --------------------------------------------------------------------
	/**
	 * Select Max
	 *
	 * Generates a SELECT MAX(field) portion of a query.
	 * Thin wrapper delegating to _max_min_avg_sum() with type 'MAX'.
	 *
	 * @param	string	the field
	 * @param	string	an alias for the aggregate column
	 * @return	CI_DB_query_builder
	 */
	public function select_max($select = '', $alias = '')
	{
		return $this->_max_min_avg_sum($select, $alias, 'MAX');
	}
// --------------------------------------------------------------------
	/**
	 * Select Min
	 *
	 * Generates a SELECT MIN(field) portion of a query.
	 * Thin wrapper delegating to _max_min_avg_sum() with type 'MIN'.
	 *
	 * @param	string	the field
	 * @param	string	an alias for the aggregate column
	 * @return	CI_DB_query_builder
	 */
	public function select_min($select = '', $alias = '')
	{
		return $this->_max_min_avg_sum($select, $alias, 'MIN');
	}
// --------------------------------------------------------------------
	/**
	 * Select Average
	 *
	 * Generates a SELECT AVG(field) portion of a query.
	 * Thin wrapper delegating to _max_min_avg_sum() with type 'AVG'.
	 *
	 * @param	string	the field
	 * @param	string	an alias for the aggregate column
	 * @return	CI_DB_query_builder
	 */
	public function select_avg($select = '', $alias = '')
	{
		return $this->_max_min_avg_sum($select, $alias, 'AVG');
	}
// --------------------------------------------------------------------
	/**
	 * Select Sum
	 *
	 * Generates a SELECT SUM(field) portion of a query.
	 * Thin wrapper delegating to _max_min_avg_sum() with type 'SUM'.
	 *
	 * @param	string	the field
	 * @param	string	an alias for the aggregate column
	 * @return	CI_DB_query_builder
	 */
	public function select_sum($select = '', $alias = '')
	{
		return $this->_max_min_avg_sum($select, $alias, 'SUM');
	}
// --------------------------------------------------------------------
/**
* SELECT [MAX|MIN|AVG|SUM]()
*
* @used-by select_max()
* @used-by select_min()
* @used-by select_avg()
* @used-by select_sum()
*
* @param string $select Field name
* @param string $alias
* @param string $type
* @return CI_DB_query_builder
*/
protected function _max_min_avg_sum($select = '', $alias = '', $type = 'MAX')
{
if ( ! is_string($select) OR $select === '')
{
$this->display_error('db_invalid_query');
}
$type = strtoupper($type);
if ( ! in_array($type, array('MAX', 'MIN', 'AVG', 'SUM')))
{
show_error('Invalid function type: '.$type);
}
if ($alias === '')
{
$alias = $this->_create_alias_from_table(trim($select));
}
$sql = $type.'('.$this->protect_identifiers(trim($select)).') AS '.$this->escape_identifiers(trim($alias));
$this->qb_select[] = $sql;
$this->qb_no_escape[] = NULL;
if ($this->qb_caching === TRUE)
{
$this->qb_cache_select[] = $sql;
$this->qb_cache_exists[] = 'select';
}
return $this;
}
// --------------------------------------------------------------------
/**
* Determines the alias name based on the table
*
* @param string $item
* @return string
*/
protected function _create_alias_from_table($item)
{
if (strpos($item, '.') !== FALSE)
{
$item = explode('.', $item);
return end($item);
}
return $item;
}
// --------------------------------------------------------------------
/**
* DISTINCT
*
* Sets a flag which tells the query string compiler to add DISTINCT
*
* @param bool $val
* @return CI_DB_query_builder
*/
public function distinct($val = TRUE)
{
$this->qb_distinct = is_bool($val) ? $val : TRUE;
return $this;
}
// --------------------------------------------------------------------
/**
* From
*
* Generates the FROM portion of the query
*
* @param mixed $from can be a string or array
* @return CI_DB_query_builder
*/
public function from($from)
{
foreach ((array) $from as $val)
{
if (strpos($val, ',') !== FALSE)
{
foreach (explode(',', $val) as $v)
{
$v = trim($v);
$this->_track_aliases($v);
$this->qb_from[] = $v = $this->protect_identifiers($v, TRUE, NULL, FALSE);
if ($this->qb_caching === TRUE)
{
$this->qb_cache_from[] = $v;
$this->qb_cache_exists[] = 'from';
}
}
}
else
{
$val = trim($val);
// Extract any aliases that might exist. We use this information
// in the protect_identifiers to know whether to add a table prefix
$this->_track_aliases($val);
$this->qb_from[] = $val = $this->protect_identifiers($val, TRUE, NULL, FALSE);
if ($this->qb_caching === TRUE)
{
$this->qb_cache_from[] = $val;
$this->qb_cache_exists[] = 'from';
}
}
}
return $this;
}
// --------------------------------------------------------------------
	/**
	 * JOIN
	 *
	 * Generates the JOIN portion of the query.
	 *
	 * @param	string	$table	table name (may carry an alias)
	 * @param	string	$cond	the join condition, e.g. 'a.id = b.a_id', or a column name for USING()
	 * @param	string	$type	the type of join (LEFT, RIGHT, OUTER, INNER, LEFT OUTER, RIGHT OUTER)
	 * @param	string	$escape	whether not to try to escape identifiers (NULL = global setting)
	 * @return	CI_DB_query_builder
	 */
	public function join($table, $cond, $type = '', $escape = NULL)
	{
		if ($type !== '')
		{
			$type = strtoupper(trim($type));

			// Unrecognized join types are silently dropped rather than emitted.
			if ( ! in_array($type, array('LEFT', 'RIGHT', 'OUTER', 'INNER', 'LEFT OUTER', 'RIGHT OUTER'), TRUE))
			{
				$type = '';
			}
			else
			{
				$type .= ' ';
			}
		}

		// Extract any aliases that might exist. We use this information
		// in the protect_identifiers to know whether to add a table prefix
		$this->_track_aliases($table);

		is_bool($escape) OR $escape = $this->_protect_identifiers;

		// Split multiple conditions: walk the condition segment by segment
		// (segments are delimited by AND/OR, located via offset capture) and
		// protect the identifiers on both sides of each operator.
		if ($escape === TRUE && preg_match_all('/\sAND\s|\sOR\s/i', $cond, $m, PREG_OFFSET_CAPTURE))
		{
			$newcond = '';
			$m[0][] = array('', strlen($cond));	// sentinel so the last segment runs to end-of-string

			for ($i = 0, $c = count($m[0]), $s = 0;
				$i < $c;
				$s = $m[0][$i][1] + strlen($m[0][$i][0]), $i++)
			{
				$temp = substr($cond, $s, ($m[0][$i][1] - $s));

				// Only rewrite segments that look like "ident <op> ident".
				$newcond .= preg_match("/([\[\]\w\.'-]+)(\s*[^\"\[`'\w]+\s*)(.+)/i", $temp, $match)
					? $this->protect_identifiers($match[1]).$match[2].$this->protect_identifiers($match[3])
					: $temp;

				$newcond .= $m[0][$i][0];
			}

			$cond = ' ON '.$newcond;
		}
		// Split apart the condition and protect the identifiers
		elseif ($escape === TRUE && preg_match("/([\[\]\w\.'-]+)(\s*[^\"\[`'\w]+\s*)(.+)/i", $cond, $match))
		{
			$cond = ' ON '.$this->protect_identifiers($match[1]).$match[2].$this->protect_identifiers($match[3]);
		}
		elseif ( ! $this->_has_operator($cond))
		{
			// No operator at all: treat the condition as a USING(column) join.
			$cond = ' USING ('.($escape ? $this->escape_identifiers($cond) : $cond).')';
		}
		else
		{
			$cond = ' ON '.$cond;
		}

		// Do we want to escape the table name?
		if ($escape === TRUE)
		{
			$table = $this->protect_identifiers($table, TRUE, NULL, FALSE);
		}

		// Assemble the JOIN statement
		$this->qb_join[] = $join = $type.'JOIN '.$table.$cond;

		if ($this->qb_caching === TRUE)
		{
			$this->qb_cache_join[] = $join;
			$this->qb_cache_exists[] = 'join';
		}

		return $this;
	}
// --------------------------------------------------------------------
	/**
	 * WHERE
	 *
	 * Generates the WHERE portion of the query.
	 * Separates multiple calls with 'AND'.
	 * Thin wrapper delegating to _wh() with the 'AND ' connector.
	 *
	 * @param	mixed	field name, custom condition string, or array of pairs
	 * @param	mixed	comparison value (NULL yields an IS NULL test)
	 * @param	bool	whether to escape values/identifiers
	 * @return	CI_DB_query_builder
	 */
	public function where($key, $value = NULL, $escape = NULL)
	{
		return $this->_wh('qb_where', $key, $value, 'AND ', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * OR WHERE
	 *
	 * Generates the WHERE portion of the query.
	 * Separates multiple calls with 'OR'.
	 * Thin wrapper delegating to _wh() with the 'OR ' connector.
	 *
	 * @param	mixed	field name, custom condition string, or array of pairs
	 * @param	mixed	comparison value (NULL yields an IS NULL test)
	 * @param	bool	whether to escape values/identifiers
	 * @return	CI_DB_query_builder
	 */
	public function or_where($key, $value = NULL, $escape = NULL)
	{
		return $this->_wh('qb_where', $key, $value, 'OR ', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * WHERE, HAVING
	 *
	 * Shared backend for where()/or_where()/having()/or_having().
	 * Appends one condition entry per key/value pair, prefixing each with
	 * AND/OR unless it is the very first condition of the query.
	 *
	 * @used-by	where()
	 * @used-by	or_where()
	 * @used-by	having()
	 * @used-by	or_having()
	 *
	 * @param	string	$qb_key	'qb_where' or 'qb_having'
	 * @param	mixed	$key	field name, custom condition string, or array of them
	 * @param	mixed	$value	comparison value (NULL yields an IS NULL test)
	 * @param	string	$type	connector: 'AND ' or 'OR '
	 * @param	bool	$escape	whether to escape values/identifiers
	 * @return	CI_DB_query_builder
	 */
	protected function _wh($qb_key, $key, $value = NULL, $type = 'AND ', $escape = NULL)
	{
		$qb_cache_key = ($qb_key === 'qb_having') ? 'qb_cache_having' : 'qb_cache_where';

		if ( ! is_array($key))
		{
			$key = array($key => $value);
		}

		// If the escape value was not set will base it on the global setting
		is_bool($escape) OR $escape = $this->_protect_identifiers;

		foreach ($key as $k => $v)
		{
			// The AND/OR connector is omitted only for the first condition
			// of the query (both the live and the cached lists empty).
			$prefix = (count($this->$qb_key) === 0 && count($this->$qb_cache_key) === 0)
				? $this->_group_get_type('')
				: $this->_group_get_type($type);

			if ($v !== NULL)
			{
				if ($escape === TRUE)
				{
					$v = ' '.$this->escape($v);
				}

				// Bare field names get an implicit equality operator.
				if ( ! $this->_has_operator($k))
				{
					$k .= ' = ';
				}
			}
			elseif ( ! $this->_has_operator($k))
			{
				// value appears not to have been set, assign the test to IS NULL
				$k .= ' IS NULL';
			}
			elseif (preg_match('/\s*(!?=|<>|IS(?:\s+NOT)?)\s*$/i', $k, $match, PREG_OFFSET_CAPTURE))
			{
				// A trailing =, !=, <> or IS [NOT] with a NULL value becomes
				// the corresponding IS [NOT] NULL test.
				$k = substr($k, 0, $match[0][1]).($match[1][0] === '=' ? ' IS NULL' : ' IS NOT NULL');
			}

			$this->{$qb_key}[] = array('condition' => $prefix.$k.$v, 'escape' => $escape);
			if ($this->qb_caching === TRUE)
			{
				$this->{$qb_cache_key}[] = array('condition' => $prefix.$k.$v, 'escape' => $escape);
				$this->qb_cache_exists[] = substr($qb_key, 3);
			}
		}

		return $this;
	}
// --------------------------------------------------------------------
	/**
	 * WHERE IN
	 *
	 * Generates a WHERE field IN('item', 'item') SQL query,
	 * joined with 'AND' if appropriate.
	 * Thin wrapper delegating to _where_in().
	 *
	 * @param	string	$key	The field to search
	 * @param	array	$values	The values searched on
	 * @param	bool	$escape	Whether to escape identifiers
	 * @return	CI_DB_query_builder
	 */
	public function where_in($key = NULL, $values = NULL, $escape = NULL)
	{
		return $this->_where_in($key, $values, FALSE, 'AND ', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * OR WHERE IN
	 *
	 * Generates a WHERE field IN('item', 'item') SQL query,
	 * joined with 'OR' if appropriate.
	 * Thin wrapper delegating to _where_in().
	 *
	 * @param	string	$key	The field to search
	 * @param	array	$values	The values searched on
	 * @param	bool	$escape	Whether to escape identifiers
	 * @return	CI_DB_query_builder
	 */
	public function or_where_in($key = NULL, $values = NULL, $escape = NULL)
	{
		return $this->_where_in($key, $values, FALSE, 'OR ', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * WHERE NOT IN
	 *
	 * Generates a WHERE field NOT IN('item', 'item') SQL query,
	 * joined with 'AND' if appropriate.
	 * Thin wrapper delegating to _where_in() with $not = TRUE.
	 *
	 * @param	string	$key	The field to search
	 * @param	array	$values	The values searched on
	 * @param	bool	$escape	Whether to escape identifiers
	 * @return	CI_DB_query_builder
	 */
	public function where_not_in($key = NULL, $values = NULL, $escape = NULL)
	{
		return $this->_where_in($key, $values, TRUE, 'AND ', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * OR WHERE NOT IN
	 *
	 * Generates a WHERE field NOT IN('item', 'item') SQL query,
	 * joined with 'OR' if appropriate.
	 * Thin wrapper delegating to _where_in() with $not = TRUE.
	 *
	 * @param	string	$key	The field to search
	 * @param	array	$values	The values searched on
	 * @param	bool	$escape	Whether to escape identifiers
	 * @return	CI_DB_query_builder
	 */
	public function or_where_not_in($key = NULL, $values = NULL, $escape = NULL)
	{
		return $this->_where_in($key, $values, TRUE, 'OR ', $escape);
	}
// --------------------------------------------------------------------
/**
* Internal WHERE IN
*
* @used-by where_in()
* @used-by or_where_in()
* @used-by where_not_in()
* @used-by or_where_not_in()
*
* @param string $key The field to search
* @param array $values The values searched on
* @param bool $not If the statement would be IN or NOT IN
* @param string $type
* @param bool $escape
* @return CI_DB_query_builder
*/
protected function _where_in($key = NULL, $values = NULL, $not = FALSE, $type = 'AND ', $escape = NULL)
{
if ($key === NULL OR $values === NULL)
{
return $this;
}
if ( ! is_array($values))
{
$values = array($values);
}
is_bool($escape) OR $escape = $this->_protect_identifiers;
$not = ($not) ? ' NOT' : '';
$where_in = array();
foreach ($values as $value)
{
$where_in[] = $this->escape($value);
}
$prefix = (count($this->qb_where) === 0) ? $this->_group_get_type('') : $this->_group_get_type($type);
$where_in = array(
'condition' => $prefix.$key.$not.' IN('.implode(', ', $where_in).')',
'escape' => $escape
);
$this->qb_where[] = $where_in;
if ($this->qb_caching === TRUE)
{
$this->qb_cache_where[] = $where_in;
$this->qb_cache_exists[] = 'where';
}
return $this;
}
// --------------------------------------------------------------------
	/**
	 * LIKE
	 *
	 * Generates a %LIKE% portion of the query.
	 * Separates multiple calls with 'AND'.
	 * Thin wrapper delegating to _like().
	 *
	 * @param	mixed	$field
	 * @param	string	$match
	 * @param	string	$side	'both', 'before', 'after' or 'none' (wildcard placement)
	 * @param	bool	$escape
	 * @return	CI_DB_query_builder
	 */
	public function like($field, $match = '', $side = 'both', $escape = NULL)
	{
		return $this->_like($field, $match, 'AND ', $side, '', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * NOT LIKE
	 *
	 * Generates a NOT LIKE portion of the query.
	 * Separates multiple calls with 'AND'.
	 * Thin wrapper delegating to _like() with the NOT modifier.
	 *
	 * @param	mixed	$field
	 * @param	string	$match
	 * @param	string	$side	'both', 'before', 'after' or 'none' (wildcard placement)
	 * @param	bool	$escape
	 * @return	CI_DB_query_builder
	 */
	public function not_like($field, $match = '', $side = 'both', $escape = NULL)
	{
		return $this->_like($field, $match, 'AND ', $side, 'NOT', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * OR LIKE
	 *
	 * Generates a %LIKE% portion of the query.
	 * Separates multiple calls with 'OR'.
	 * Thin wrapper delegating to _like().
	 *
	 * @param	mixed	$field
	 * @param	string	$match
	 * @param	string	$side	'both', 'before', 'after' or 'none' (wildcard placement)
	 * @param	bool	$escape
	 * @return	CI_DB_query_builder
	 */
	public function or_like($field, $match = '', $side = 'both', $escape = NULL)
	{
		return $this->_like($field, $match, 'OR ', $side, '', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * OR NOT LIKE
	 *
	 * Generates a NOT LIKE portion of the query.
	 * Separates multiple calls with 'OR'.
	 * Thin wrapper delegating to _like() with the NOT modifier.
	 *
	 * @param	mixed	$field
	 * @param	string	$match
	 * @param	string	$side	'both', 'before', 'after' or 'none' (wildcard placement)
	 * @param	bool	$escape
	 * @return	CI_DB_query_builder
	 */
	public function or_not_like($field, $match = '', $side = 'both', $escape = NULL)
	{
		return $this->_like($field, $match, 'OR ', $side, 'NOT', $escape);
	}
// --------------------------------------------------------------------
/**
* Internal LIKE
*
* @used-by like()
* @used-by or_like()
* @used-by not_like()
* @used-by or_not_like()
*
* @param mixed $field
* @param string $match
* @param string $type
* @param string $side
* @param string $not
* @param bool $escape
* @return CI_DB_query_builder
*/
protected function _like($field, $match = '', $type = 'AND ', $side = 'both', $not = '', $escape = NULL)
{
if ( ! is_array($field))
{
$field = array($field => $match);
}
is_bool($escape) OR $escape = $this->_protect_identifiers;
foreach ($field as $k => $v)
{
$prefix = (count($this->qb_where) === 0 && count($this->qb_cache_where) === 0)
? $this->_group_get_type('') : $this->_group_get_type($type);
$v = $this->escape_like_str($v);
if ($side === 'none')
{
$like_statement = "{$prefix} {$k} {$not} LIKE '{$v}'";
}
elseif ($side === 'before')
{
$like_statement = "{$prefix} {$k} {$not} LIKE '%{$v}'";
}
elseif ($side === 'after')
{
$like_statement = "{$prefix} {$k} {$not} LIKE '{$v}%'";
}
else
{
$like_statement = "{$prefix} {$k} {$not} LIKE '%{$v}%'";
}
// some platforms require an escape sequence definition for LIKE wildcards
if ($this->_like_escape_str !== '')
{
$like_statement .= sprintf($this->_like_escape_str, $this->_like_escape_chr);
}
$this->qb_where[] = array('condition' => $like_statement, 'escape' => $escape);
if ($this->qb_caching === TRUE)
{
$this->qb_cache_where[] = array('condition' => $like_statement, 'escape' => $escape);
$this->qb_cache_exists[] = 'where';
}
}
return $this;
}
// --------------------------------------------------------------------
/**
* Starts a query group.
*
* @param string $not (Internal use only)
* @param string $type (Internal use only)
* @return CI_DB_query_builder
*/
public function group_start($not = '', $type = 'AND ')
{
$type = $this->_group_get_type($type);
$this->qb_where_group_started = TRUE;
$prefix = (count($this->qb_where) === 0 && count($this->qb_cache_where) === 0) ? '' : $type;
$where = array(
'condition' => $prefix.$not.str_repeat(' ', ++$this->qb_where_group_count).' (',
'escape' => FALSE
);
$this->qb_where[] = $where;
if ($this->qb_caching)
{
$this->qb_cache_where[] = $where;
}
return $this;
}
// --------------------------------------------------------------------
	/**
	 * Starts a query group, but ORs the group
	 *
	 * Thin wrapper delegating to group_start() with the 'OR ' connector.
	 *
	 * @return	CI_DB_query_builder
	 */
	public function or_group_start()
	{
		return $this->group_start('', 'OR ');
	}
// --------------------------------------------------------------------
	/**
	 * Starts a query group, but NOTs the group
	 *
	 * Thin wrapper delegating to group_start() with the NOT modifier.
	 *
	 * @return	CI_DB_query_builder
	 */
	public function not_group_start()
	{
		return $this->group_start('NOT ', 'AND ');
	}
// --------------------------------------------------------------------
	/**
	 * Starts a query group, but OR NOTs the group
	 *
	 * Thin wrapper delegating to group_start() with both OR and NOT.
	 *
	 * @return	CI_DB_query_builder
	 */
	public function or_not_group_start()
	{
		return $this->group_start('NOT ', 'OR ');
	}
// --------------------------------------------------------------------
/**
* Ends a query group
*
* @return CI_DB_query_builder
*/
public function group_end()
{
$this->qb_where_group_started = FALSE;
$where = array(
'condition' => str_repeat(' ', $this->qb_where_group_count--).')',
'escape' => FALSE
);
$this->qb_where[] = $where;
if ($this->qb_caching)
{
$this->qb_cache_where[] = $where;
}
return $this;
}
// --------------------------------------------------------------------
/**
* Group_get_type
*
* @used-by group_start()
* @used-by _like()
* @used-by _wh()
* @used-by _where_in()
*
* @param string $type
* @return string
*/
protected function _group_get_type($type)
{
if ($this->qb_where_group_started)
{
$type = '';
$this->qb_where_group_started = FALSE;
}
return $type;
}
// --------------------------------------------------------------------
/**
* GROUP BY
*
* @param string $by
* @param bool $escape
* @return CI_DB_query_builder
*/
public function group_by($by, $escape = NULL)
{
is_bool($escape) OR $escape = $this->_protect_identifiers;
if (is_string($by))
{
$by = ($escape === TRUE)
? explode(',', $by)
: array($by);
}
foreach ($by as $val)
{
$val = trim($val);
if ($val !== '')
{
$val = array('field' => $val, 'escape' => $escape);
$this->qb_groupby[] = $val;
if ($this->qb_caching === TRUE)
{
$this->qb_cache_groupby[] = $val;
$this->qb_cache_exists[] = 'groupby';
}
}
}
return $this;
}
// --------------------------------------------------------------------
	/**
	 * HAVING
	 *
	 * Separates multiple calls with 'AND'.
	 * Thin wrapper delegating to _wh() against the HAVING list.
	 *
	 * @param	string	$key
	 * @param	string	$value
	 * @param	bool	$escape
	 * @return	object
	 */
	public function having($key, $value = NULL, $escape = NULL)
	{
		return $this->_wh('qb_having', $key, $value, 'AND ', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * OR HAVING
	 *
	 * Separates multiple calls with 'OR'.
	 * Thin wrapper delegating to _wh() against the HAVING list.
	 *
	 * @param	string	$key
	 * @param	string	$value
	 * @param	bool	$escape
	 * @return	object
	 */
	public function or_having($key, $value = NULL, $escape = NULL)
	{
		return $this->_wh('qb_having', $key, $value, 'OR ', $escape);
	}
// --------------------------------------------------------------------
	/**
	 * ORDER BY
	 *
	 * Adds one or more ORDER BY clauses. Supports 'RANDOM' ordering (with
	 * an optional numeric seed passed via $orderby) and per-field ASC/DESC
	 * suffixes inside a comma-separated $orderby string.
	 *
	 * @param	string	$orderby	field name(s); a seed value when $direction is RANDOM
	 * @param	string	$direction	ASC, DESC or RANDOM
	 * @param	bool	$escape	whether to escape identifiers
	 * @return	CI_DB_query_builder
	 */
	public function order_by($orderby, $direction = '', $escape = NULL)
	{
		$direction = strtoupper(trim($direction));

		if ($direction === 'RANDOM')
		{
			$direction = '';

			// Do we have a seed value?
			$orderby = ctype_digit((string) $orderby)
				? sprintf($this->_random_keyword[1], $orderby)
				: $this->_random_keyword[0];
		}
		elseif (empty($orderby))
		{
			return $this;
		}
		elseif ($direction !== '')
		{
			// Anything other than ASC/DESC is silently dropped.
			$direction = in_array($direction, array('ASC', 'DESC'), TRUE) ? ' '.$direction : '';
		}

		is_bool($escape) OR $escape = $this->_protect_identifiers;

		if ($escape === FALSE)
		{
			// Unescaped: store the whole string as a single expression.
			$qb_orderby[] = array('field' => $orderby, 'direction' => $direction, 'escape' => FALSE);
		}
		else
		{
			$qb_orderby = array();
			foreach (explode(',', $orderby) as $field)
			{
				// A per-field trailing " ASC"/" DESC" suffix wins when no
				// explicit $direction was given.
				$qb_orderby[] = ($direction === '' && preg_match('/\s+(ASC|DESC)$/i', rtrim($field), $match, PREG_OFFSET_CAPTURE))
					? array('field' => ltrim(substr($field, 0, $match[0][1])), 'direction' => ' '.$match[1][0], 'escape' => TRUE)
					: array('field' => trim($field), 'direction' => $direction, 'escape' => TRUE);
			}
		}

		$this->qb_orderby = array_merge($this->qb_orderby, $qb_orderby);
		if ($this->qb_caching === TRUE)
		{
			$this->qb_cache_orderby = array_merge($this->qb_cache_orderby, $qb_orderby);
			$this->qb_cache_exists[] = 'orderby';
		}

		return $this;
	}
// --------------------------------------------------------------------
/**
* LIMIT
*
* @param int $value LIMIT value
* @param int $offset OFFSET value
* @return CI_DB_query_builder
*/
public function limit($value, $offset = 0)
{
is_null($value) OR $this->qb_limit = (int) $value;
empty($offset) OR $this->qb_offset = (int) $offset;
return $this;
}
// --------------------------------------------------------------------
/**
* Sets the OFFSET value
*
* @param int $offset OFFSET value
* @return CI_DB_query_builder
*/
public function offset($offset)
{
empty($offset) OR $this->qb_offset = (int) $offset;
return $this;
}
// --------------------------------------------------------------------
/**
* LIMIT string
*
* Generates a platform-specific LIMIT clause.
*
* @param string $sql SQL Query
* @return string
*/
protected function _limit($sql)
{
return $sql.' LIMIT '.($this->qb_offset ? $this->qb_offset.', ' : '').$this->qb_limit;
}
// --------------------------------------------------------------------
/**
* The "set" function.
*
* Allows key/value pairs to be set for inserting or updating
*
* @param mixed
* @param string
* @param bool
* @return CI_DB_query_builder
*/
public function set($key, $value = '', $escape = NULL)
{
$key = $this->_object_to_array($key);
if ( ! is_array($key))
{
$key = array($key => $value);
}
is_bool($escape) OR $escape = $this->_protect_identifiers;
foreach ($key as $k => $v)
{
if($v == '') {
$this->qb_set[$this->protect_identifiers($k, FALSE, $escape)] = 'NULL';
} else {
$this->qb_set[$this->protect_identifiers($k, FALSE, $escape)] = ($escape) ? $this->escape($v) : $v;
}
}
return $this;
}
// --------------------------------------------------------------------
/**
* Get SELECT query string
*
* Compiles a SELECT query string and returns the sql.
*
* @param string the table name to select from (optional)
* @param bool TRUE: resets QB values; FALSE: leave QB vaules alone
* @return string
*/
public function get_compiled_select($table = '', $reset = TRUE)
{
if ($table !== '')
{
$this->_track_aliases($table);
$this->from($table);
}
$select = $this->_compile_select();
if ($reset === TRUE)
{
$this->_reset_select();
}
return $select;
}
// --------------------------------------------------------------------
/**
* Get
*
* Compiles the select statement based on the other functions called
* and runs the query
*
* @param string the table
* @param string the limit clause
* @param string the offset clause
* @return object
*/
public function get($table = '', $limit = NULL, $offset = NULL)
{
	if ($table !== '')
	{
		$this->_track_aliases($table);
		$this->from($table);
	}

	// A limit passed here takes effect just like a prior limit() call
	empty($limit) OR $this->limit($limit, $offset);

	$result = $this->query($this->_compile_select());
	$this->_reset_select();

	return $result;
}
// --------------------------------------------------------------------
/**
* "Count All Results" query
*
* Generates a platform-specific query string that counts all records
* returned by an Query Builder query.
*
* @param string
* @param bool the reset clause
* @return int
*/
public function count_all_results($table = '', $reset = TRUE)
{
	if ($table !== '')
	{
		$this->_track_aliases($table);
		$this->from($table);
	}

	// DISTINCT queries are wrapped in a subquery so the count is taken
	// after duplicates are removed; otherwise the count projection is
	// substituted directly into the compiled SELECT.
	$result = ($this->qb_distinct === TRUE)
		? $this->query($this->_count_string.$this->protect_identifiers('numrows')."\nFROM (\n".$this->_compile_select()."\n) CI_count_all_results")
		: $this->query($this->_compile_select($this->_count_string.$this->protect_identifiers('numrows')));

	if ($reset === TRUE)
	{
		$this->_reset_select();
	}

	// No result row means there is nothing to count
	if ($result->num_rows() === 0)
	{
		return 0;
	}

	$row = $result->row();
	return (int) $row->numrows;
}
// --------------------------------------------------------------------
/**
* Get_Where
*
* Allows the where clause, limit and offset to be added directly
*
* @param string $table
* @param string $where
* @param int $limit
* @param int $offset
* @return object
*/
public function get_where($table = '', $where = NULL, $limit = NULL, $offset = NULL)
{
	// Shorthand for from()/where()/limit() followed by get()
	($table === '') OR $this->from($table);
	($where === NULL) OR $this->where($where);
	empty($limit) OR $this->limit($limit, $offset);

	$result = $this->query($this->_compile_select());
	$this->_reset_select();

	return $result;
}
// --------------------------------------------------------------------
/**
* Insert_Batch
*
* Compiles batch insert strings and runs the queries
*
* @param string $table Table to insert into
* @param array $set An associative array of insert values
* @param bool $escape Whether to escape values and identifiers
* @return int Number of rows inserted or FALSE on failure
*/
public function insert_batch($table = '', $set = NULL, $escape = NULL)
{
	if ($set !== NULL)
	{
		$this->set_insert_batch($set, '', $escape);
	}

	// Nothing queued - either no data was given or the rows were rejected
	if (count($this->qb_set) === 0)
	{
		return ($this->db_debug) ? $this->display_error('db_must_use_set') : FALSE;
	}

	if ($table === '')
	{
		if ( ! isset($this->qb_from[0]))
		{
			return ($this->db_debug) ? $this->display_error('db_must_set_table') : FALSE;
		}

		$table = $this->qb_from[0];
	}

	// Insert in chunks of 100 rows to keep each statement at a sane size
	$affected_rows = 0;
	$total = count($this->qb_set);
	for ($i = 0; $i < $total; $i += 100)
	{
		$this->query($this->_insert_batch($this->protect_identifiers($table, TRUE, $escape, FALSE), $this->qb_keys, array_slice($this->qb_set, $i, 100)));
		$affected_rows += $this->affected_rows();
	}

	$this->_reset_write();
	return $affected_rows;
}
// --------------------------------------------------------------------
/**
* Insert batch statement
*
* Generates a platform-specific insert string from the supplied data.
*
* @param string $table Table name
* @param array $keys INSERT keys
* @param array $values INSERT values
* @return string
*/
protected function _insert_batch($table, $keys, $values)
{
	// $values entries are already formatted as "(v1,v2,...)" tuples
	$columns = implode(', ', $keys);
	$rows = implode(', ', $values);

	return 'INSERT INTO '.$table.' ('.$columns.') VALUES '.$rows;
}
// --------------------------------------------------------------------
/**
* The "set_insert_batch" function. Allows key/value pairs to be set for batch inserts
*
* @param mixed
* @param string
* @param bool
* @return CI_DB_query_builder
*/
public function set_insert_batch($key, $value = '', $escape = NULL)
{
	$key = $this->_object_to_array_batch($key);

	if ( ! is_array($key))
	{
		$key = array($key => $value);
	}

	is_bool($escape) OR $escape = $this->_protect_identifiers;

	// The first row defines the column set; every other row must match it exactly
	$keys = array_keys($this->_object_to_array(current($key)));
	sort($keys);

	foreach ($key as $row)
	{
		$row = $this->_object_to_array($row);
		if (count(array_diff($keys, array_keys($row))) > 0 OR count(array_diff(array_keys($row), $keys)) > 0)
		{
			// batch function above returns an error on an empty array
			// NOTE(review): this returns NULL (not $this), which breaks
			// method chaining - confirm whether that is intended
			$this->qb_set[] = array();
			return;
		}

		ksort($row); // puts $row in the same order as our keys

		if ($escape !== FALSE)
		{
			// NOTE: $value here shadows the $value parameter; harmless,
			// as the parameter is no longer used at this point
			$clean = array();
			foreach ($row as $value)
			{
				$clean[] = $this->escape($value);
			}

			$row = $clean;
		}

		// Each queued entry is a pre-formatted "(v1,v2,...)" tuple
		$this->qb_set[] = '('.implode(',', $row).')';
	}

	// Column identifiers are protected once, after all rows were validated
	foreach ($keys as $k)
	{
		$this->qb_keys[] = $this->protect_identifiers($k, FALSE, $escape);
	}

	return $this;
}
// --------------------------------------------------------------------
/**
* Get INSERT query string
*
* Compiles an insert query and returns the sql
*
* @param string the table to insert into
* @param bool TRUE: reset QB values; FALSE: leave QB values alone
* @return string
*/
public function get_compiled_insert($table = '', $reset = TRUE)
{
	// Validation also registers an explicit $table into qb_from[0]
	if ($this->_validate_insert($table) === FALSE)
	{
		return FALSE;
	}

	$target = $this->protect_identifiers($this->qb_from[0], TRUE, NULL, FALSE);
	$sql = $this->_insert($target, array_keys($this->qb_set), array_values($this->qb_set));

	if ($reset === TRUE)
	{
		$this->_reset_write();
	}

	return $sql;
}
// --------------------------------------------------------------------
/**
* Insert
*
* Compiles an insert string and runs the query
*
* @param string the table to insert data into
* @param array an associative array of insert values
* @param bool $escape Whether to escape values and identifiers
* @return object
*/
public function insert($table = '', $set = NULL, $escape = NULL)
{
	// Accept insert data directly, just like a set() call would
	($set === NULL) OR $this->set($set, '', $escape);

	if ($this->_validate_insert($table) === FALSE)
	{
		return FALSE;
	}

	$target = $this->protect_identifiers($this->qb_from[0], TRUE, $escape, FALSE);
	$sql = $this->_insert($target, array_keys($this->qb_set), array_values($this->qb_set));

	$this->_reset_write();
	return $this->query($sql);
}
// --------------------------------------------------------------------
/**
* Validate Insert
*
* This method is used by both insert() and get_compiled_insert() to
* validate that the there data is actually being set and that table
* has been chosen to be inserted into.
*
* @param string the table to insert data into
* @return string
*/
protected function _validate_insert($table = '')
{
	// Without any set() data there is nothing to insert
	if (count($this->qb_set) === 0)
	{
		return ($this->db_debug) ? $this->display_error('db_must_use_set') : FALSE;
	}

	// An explicit table overrides whatever from() registered
	if ($table !== '')
	{
		$this->qb_from[0] = $table;
		return TRUE;
	}

	if (isset($this->qb_from[0]))
	{
		return TRUE;
	}

	return ($this->db_debug) ? $this->display_error('db_must_set_table') : FALSE;
}
// --------------------------------------------------------------------
/**
* Replace
*
* Compiles an replace into string and runs the query
*
* @param string the table to replace data into
* @param array an associative array of insert values
* @return object
*/
public function replace($table = '', $set = NULL)
{
	($set === NULL) OR $this->set($set);

	if (count($this->qb_set) === 0)
	{
		return ($this->db_debug) ? $this->display_error('db_must_use_set') : FALSE;
	}

	if ($table === '')
	{
		if ( ! isset($this->qb_from[0]))
		{
			return ($this->db_debug) ? $this->display_error('db_must_set_table') : FALSE;
		}

		$table = $this->qb_from[0];
	}

	$sql = $this->_replace(
		$this->protect_identifiers($table, TRUE, NULL, FALSE),
		array_keys($this->qb_set),
		array_values($this->qb_set)
	);

	$this->_reset_write();
	return $this->query($sql);
}
// --------------------------------------------------------------------
/**
* Replace statement
*
* Generates a platform-specific replace string from the supplied data
*
* @param string the table name
* @param array the insert keys
* @param array the insert values
* @return string
*/
protected function _replace($table, $keys, $values)
{
	$columns = implode(', ', $keys);
	$data = implode(', ', $values);

	return 'REPLACE INTO '.$table.' ('.$columns.') VALUES ('.$data.')';
}
// --------------------------------------------------------------------
/**
* FROM tables
*
* Groups tables in FROM clauses if needed, so there is no confusion
* about operator precedence.
*
* Note: This is only used (and overridden) by MySQL and CUBRID.
*
* @return string
*/
protected function _from_tables()
{
	// Join all registered FROM tables; MySQL/CUBRID override this to add
	// grouping parentheses for operator-precedence safety
	return implode(', ', $this->qb_from);
}
// --------------------------------------------------------------------
/**
* Get UPDATE query string
*
* Compiles an update query and returns the sql
*
* @param string the table to update
* @param bool TRUE: reset QB values; FALSE: leave QB values alone
* @return string
*/
public function get_compiled_update($table = '', $reset = TRUE)
{
	// Combine any cached components with the current statements
	$this->_merge_cache();

	if ($this->_validate_update($table) === FALSE)
	{
		return FALSE;
	}

	$target = $this->protect_identifiers($this->qb_from[0], TRUE, NULL, FALSE);
	$sql = $this->_update($target, $this->qb_set);

	if ($reset === TRUE)
	{
		$this->_reset_write();
	}

	return $sql;
}
// --------------------------------------------------------------------
/**
* UPDATE
*
* Compiles an update string and runs the query.
*
* @param string $table
* @param array $set An associative array of update values
* @param mixed $where
* @param int $limit
* @return object
*/
public function update($table = '', $set = NULL, $where = NULL, $limit = NULL)
{
	// Combine any cached components with the current statements
	$this->_merge_cache();

	($set === NULL) OR $this->set($set);

	if ($this->_validate_update($table) === FALSE)
	{
		return FALSE;
	}

	($where === NULL) OR $this->where($where);
	empty($limit) OR $this->limit($limit);

	$target = $this->protect_identifiers($this->qb_from[0], TRUE, NULL, FALSE);
	$sql = $this->_update($target, $this->qb_set);

	$this->_reset_write();
	return $this->query($sql);
}
// --------------------------------------------------------------------
/**
* Validate Update
*
* This method is used by both update() and get_compiled_update() to
* validate that data is actually being set and that a table has been
* chosen to be update.
*
* @param string the table to update data on
* @return bool
*/
protected function _validate_update($table = '')
{
	// Without any set() data there is nothing to update
	if (count($this->qb_set) === 0)
	{
		return ($this->db_debug) ? $this->display_error('db_must_use_set') : FALSE;
	}

	// An explicit table overrides whatever from() registered
	if ($table !== '')
	{
		$this->qb_from[0] = $table;
		return TRUE;
	}

	if (isset($this->qb_from[0]))
	{
		return TRUE;
	}

	return ($this->db_debug) ? $this->display_error('db_must_set_table') : FALSE;
}
// --------------------------------------------------------------------
/**
* Update_Batch
*
* Compiles an update string and runs the query
*
* @param string the table to retrieve the results from
* @param array an associative array of update values
* @param string the where key
* @return int number of rows affected or FALSE on failure
*/
public function update_batch($table = '', $set = NULL, $index = NULL)
{
	// Combine any cached components with the current statements
	$this->_merge_cache();

	// The index column is mandatory: it drives the CASE/WHEN row matching
	if ($index === NULL)
	{
		return ($this->db_debug) ? $this->display_error('db_must_use_index') : FALSE;
	}

	if ($set !== NULL)
	{
		$this->set_update_batch($set, $index);
	}

	if (count($this->qb_set) === 0)
	{
		return ($this->db_debug) ? $this->display_error('db_must_use_set') : FALSE;
	}

	if ($table === '')
	{
		if ( ! isset($this->qb_from[0]))
		{
			return ($this->db_debug) ? $this->display_error('db_must_set_table') : FALSE;
		}

		$table = $this->qb_from[0];
	}

	// Update in chunks of 100 rows per statement
	$affected_rows = 0;
	for ($i = 0, $total = count($this->qb_set); $i < $total; $i += 100)
	{
		$this->query($this->_update_batch($this->protect_identifiers($table, TRUE, NULL, FALSE), array_slice($this->qb_set, $i, 100), $this->protect_identifiers($index)));
		$affected_rows += $this->affected_rows();
		// _update_batch() appends a WHERE ... IN() clause; clear it so the
		// next chunk starts from a clean slate
		$this->qb_where = array();
	}

	$this->_reset_write();
	return $affected_rows;
}
// --------------------------------------------------------------------
/**
* Update_Batch statement
*
* Generates a platform-specific batch update string from the supplied data
*
* @param string $table Table name
* @param array $values Update data
* @param string $index WHERE key
* @return string
*/
protected function _update_batch($table, $values, $index)
{
	$ids = array();
	// Initialize explicitly: $final was previously undefined, triggering an
	// undefined-variable warning in the foreach below when $values is empty
	$final = array();

	foreach ($values as $key => $val)
	{
		$ids[] = $val[$index];

		// Collect one CASE branch per non-index column
		foreach (array_keys($val) as $field)
		{
			if ($field !== $index)
			{
				$final[$field][] = 'WHEN '.$index.' = '.$val[$index].' THEN '.$val[$field];
			}
		}
	}

	$cases = '';
	foreach ($final as $k => $v)
	{
		$cases .= $k." = CASE \n"
			.implode("\n", $v)."\n"
			.'ELSE '.$k.' END, ';
	}

	// Constrain the UPDATE to the affected ids only
	$this->where($index.' IN('.implode(',', $ids).')', NULL, FALSE);

	return 'UPDATE '.$table.' SET '.substr($cases, 0, -2).$this->_compile_wh('qb_where');
}
// --------------------------------------------------------------------
/**
* The "set_update_batch" function. Allows key/value pairs to be set for batch updating
*
* @param array
* @param string
* @param bool
* @return CI_DB_query_builder
*/
public function set_update_batch($key, $index = '', $escape = NULL)
{
	$key = $this->_object_to_array_batch($key);

	if ( ! is_array($key))
	{
		// @todo error
		// NOTE(review): non-array input currently falls through to the
		// foreach below and will fail there - confirm intended handling
	}

	is_bool($escape) OR $escape = $this->_protect_identifiers;

	foreach ($key as $k => $v)
	{
		$index_set = FALSE;
		$clean = array();
		// Escape/protect each column of the row, remembering whether the
		// index column used for matching was present
		foreach ($v as $k2 => $v2)
		{
			if ($k2 === $index)
			{
				$index_set = TRUE;
			}

			$clean[$this->protect_identifiers($k2, FALSE, $escape)] = ($escape === FALSE) ? $v2 : $this->escape($v2);
		}

		// Every row must carry the index column
		if ($index_set === FALSE)
		{
			return $this->display_error('db_batch_missing_index');
		}

		$this->qb_set[] = $clean;
	}

	return $this;
}
// --------------------------------------------------------------------
/**
* Empty Table
*
* Compiles a delete string and runs "DELETE FROM table"
*
* @param string the table to empty
* @return object
*/
public function empty_table($table = '')
{
	if ($table === '')
	{
		if ( ! isset($this->qb_from[0]))
		{
			return ($this->db_debug) ? $this->display_error('db_must_set_table') : FALSE;
		}

		// Tables registered via from() are already protected
		$table = $this->qb_from[0];
	}
	else
	{
		$table = $this->protect_identifiers($table, TRUE, NULL, FALSE);
	}

	$sql = $this->_delete($table);
	$this->_reset_write();

	return $this->query($sql);
}
// --------------------------------------------------------------------
/**
* Truncate
*
* Compiles a truncate string and runs the query
* If the database does not support the truncate() command
* This function maps to "DELETE FROM table"
*
* @param string the table to truncate
* @return object
*/
public function truncate($table = '')
{
	if ($table === '')
	{
		if ( ! isset($this->qb_from[0]))
		{
			return ($this->db_debug) ? $this->display_error('db_must_set_table') : FALSE;
		}

		// Tables registered via from() are already protected
		$table = $this->qb_from[0];
	}
	else
	{
		$table = $this->protect_identifiers($table, TRUE, NULL, FALSE);
	}

	$sql = $this->_truncate($table);
	$this->_reset_write();

	return $this->query($sql);
}
// --------------------------------------------------------------------
/**
* Truncate statement
*
* Generates a platform-specific truncate string from the supplied data
*
* If the database does not support the truncate() command,
* then this method maps to 'DELETE FROM table'
*
* @param string the table name
* @return string
*/
protected function _truncate($table)
{
	// Drivers without TRUNCATE support override this to emit DELETE FROM
	return 'TRUNCATE '.$table;
}
// --------------------------------------------------------------------
/**
* Get DELETE query string
*
* Compiles a delete query string and returns the sql
*
* @param string the table to delete from
* @param bool TRUE: reset QB values; FALSE: leave QB values alone
* @return string
*/
public function get_compiled_delete($table = '', $reset = TRUE)
{
	// Temporarily switch delete() into SQL-returning mode
	$this->return_delete_sql = TRUE;
	$sql = $this->delete($table, '', NULL, $reset);
	$this->return_delete_sql = FALSE;

	return $sql;
}
// --------------------------------------------------------------------
/**
* Delete
*
* Compiles a delete string and runs the query
*
* @param mixed the table(s) to delete from. String or array
* @param mixed the where clause
* @param mixed the limit clause
* @param bool
* @return mixed
*/
public function delete($table = '', $where = '', $limit = NULL, $reset_data = TRUE)
{
	// Combine any cached components with the current statements
	$this->_merge_cache();

	if ($table === '')
	{
		if ( ! isset($this->qb_from[0]))
		{
			return ($this->db_debug) ? $this->display_error('db_must_set_table') : FALSE;
		}

		$table = $this->qb_from[0];
	}
	elseif (is_array($table))
	{
		// Recurse once per table; note that no combined result is returned
		// in this case (the method returns NULL after the loop)
		foreach ($table as $single_table)
		{
			$this->delete($single_table, $where, $limit, $reset_data);
		}

		return;
	}
	else
	{
		$table = $this->protect_identifiers($table, TRUE, NULL, FALSE);
	}

	if ($where !== '')
	{
		$this->where($where);
	}

	if ( ! empty($limit))
	{
		$this->limit($limit);
	}

	// Deleting without any WHERE clause is refused as a safety measure
	if (count($this->qb_where) === 0)
	{
		return ($this->db_debug) ? $this->display_error('db_del_must_use_where') : FALSE;
	}

	$sql = $this->_delete($table);

	if ($reset_data)
	{
		$this->_reset_write();
	}

	// get_compiled_delete() flips return_delete_sql to obtain the SQL string
	return ($this->return_delete_sql === TRUE) ? $sql : $this->query($sql);
}
// --------------------------------------------------------------------
/**
* Delete statement
*
* Generates a platform-specific delete string from the supplied data
*
* @param string the table name
* @return string
*/
protected function _delete($table)
{
	// Append LIMIT only when one was actually set
	$limit = $this->qb_limit ? ' LIMIT '.$this->qb_limit : '';

	return 'DELETE FROM '.$table.$this->_compile_wh('qb_where').$limit;
}
// --------------------------------------------------------------------
/**
* DB Prefix
*
* Prepends a database prefix if one exists in configuration
*
* @param string the table
* @return string
*/
public function dbprefix($table = '')
{
	// An empty table name is a programming error
	if ($table === '')
	{
		$this->display_error('db_table_name_required');
	}

	return $this->dbprefix.$table;
}
// --------------------------------------------------------------------
/**
* Set DB Prefix
*
* Set's the DB Prefix to something new without needing to reconnect
*
* @param string the prefix
* @return string
*/
public function set_dbprefix($prefix = '')
{
	// Takes effect immediately; no reconnect is needed
	return $this->dbprefix = $prefix;
}
// --------------------------------------------------------------------
/**
* Track Aliases
*
* Used to track SQL statements written with aliased tables.
*
* @param string The table to inspect
* @return string
*/
protected function _track_aliases($table)
{
	// Arrays are handled element by element
	if (is_array($table))
	{
		foreach ($table as $t)
		{
			$this->_track_aliases($t);
		}
		return;
	}

	// Does the string contain a comma? If so, we need to separate
	// the string into discreet statements
	if (strpos($table, ',') !== FALSE)
	{
		return $this->_track_aliases(explode(',', $table));
	}

	// if a table alias is used we can recognize it by a space
	if (strpos($table, ' ') !== FALSE)
	{
		// if the alias is written with the AS keyword, remove it
		$table = preg_replace('/\s+AS\s+/i', ' ', $table);

		// Grab the alias (everything after the last space)
		$table = trim(strrchr($table, ' '));

		// Store the alias, if it doesn't already exist
		if ( ! in_array($table, $this->qb_aliased_tables))
		{
			$this->qb_aliased_tables[] = $table;
		}
	}
}
// --------------------------------------------------------------------
/**
* Compile the SELECT statement
*
* Generates a query string based on which functions were used.
* Should not be called directly.
*
* @param bool $select_override
* @return string
*/
protected function _compile_select($select_override = FALSE)
{
	// Combine any cached components with the current statements
	$this->_merge_cache();

	// Write the "select" portion of the query
	if ($select_override !== FALSE)
	{
		// Used by count_all_results() to substitute a COUNT(*) projection
		$sql = $select_override;
	}
	else
	{
		$sql = ( ! $this->qb_distinct) ? 'SELECT ' : 'SELECT DISTINCT ';

		if (count($this->qb_select) === 0)
		{
			$sql .= '*';
		}
		else
		{
			// Cycle through the "select" portion of the query and prep each column name.
			// The reason we protect identifiers here rather then in the select() function
			// is because until the user calls the from() function we don't know if there are aliases
			foreach ($this->qb_select as $key => $val)
			{
				$no_escape = isset($this->qb_no_escape[$key]) ? $this->qb_no_escape[$key] : NULL;
				$this->qb_select[$key] = $this->protect_identifiers($val, FALSE, $no_escape);
			}

			$sql .= implode(', ', $this->qb_select);
		}
	}

	// Write the "FROM" portion of the query
	if (count($this->qb_from) > 0)
	{
		$sql .= "\nFROM ".$this->_from_tables();
	}

	// Write the "JOIN" portion of the query
	if (count($this->qb_join) > 0)
	{
		$sql .= "\n".implode("\n", $this->qb_join);
	}

	// WHERE, GROUP BY, HAVING and ORDER BY are compiled here (lazily) so
	// that table aliases registered by from()/join() are already known
	$sql .= $this->_compile_wh('qb_where')
		.$this->_compile_group_by()
		.$this->_compile_wh('qb_having')
		.$this->_compile_order_by(); // ORDER BY

	// LIMIT
	if ($this->qb_limit)
	{
		return $this->_limit($sql."\n");
	}

	return $sql;
}
// --------------------------------------------------------------------
/**
* Compile WHERE, HAVING statements
*
* Escapes identifiers in WHERE and HAVING statements at execution time.
*
* Required so that aliases are tracked properly, regardless of wether
* where(), or_where(), having(), or_having are called prior to from(),
* join() and dbprefix is added only if needed.
*
* @param string $qb_key 'qb_where' or 'qb_having'
* @return string SQL statement
*/
protected function _compile_wh($qb_key)
{
	if (count($this->$qb_key) > 0)
	{
		for ($i = 0, $c = count($this->$qb_key); $i < $c; $i++)
		{
			// Is this condition already compiled?
			if (is_string($this->{$qb_key}[$i]))
			{
				continue;
			}
			elseif ($this->{$qb_key}[$i]['escape'] === FALSE)
			{
				// Escaping was explicitly disabled - keep the raw condition
				$this->{$qb_key}[$i] = $this->{$qb_key}[$i]['condition'];
				continue;
			}

			// Split multiple conditions on AND/OR, keeping the connectives
			$conditions = preg_split(
				'/(\s*AND\s+|\s*OR\s+)/i',
				$this->{$qb_key}[$i]['condition'],
				-1,
				PREG_SPLIT_DELIM_CAPTURE | PREG_SPLIT_NO_EMPTY
			);

			for ($ci = 0, $cc = count($conditions); $ci < $cc; $ci++)
			{
				// Skip fragments with no recognizable operator (e.g. the
				// captured AND/OR connectives themselves)
				if (($op = $this->_get_operator($conditions[$ci])) === FALSE
					OR ! preg_match('/^(\(?)(.*)('.preg_quote($op, '/').')\s*(.*(?<!\)))?(\)?)$/i', $conditions[$ci], $matches))
				{
					continue;
				}

				// $matches = array(
				//	0 => '(test <= foo)',	/* the whole thing */
				//	1 => '(',		/* optional */
				//	2 => 'test',		/* the field name */
				//	3 => ' <= ',		/* $op */
				//	4 => 'foo',		/* optional, if $op is e.g. 'IS NULL' */
				//	5 => ')'		/* optional */
				// );

				if ( ! empty($matches[4]))
				{
					// Protect the right-hand side unless it is a literal
					$this->_is_literal($matches[4]) OR $matches[4] = $this->protect_identifiers(trim($matches[4]));
					$matches[4] = ' '.$matches[4];
				}

				$conditions[$ci] = $matches[1].$this->protect_identifiers(trim($matches[2]))
					.' '.trim($matches[3]).$matches[4].$matches[5];
			}

			// Cache the compiled string back into the array
			$this->{$qb_key}[$i] = implode('', $conditions);
		}

		return ($qb_key === 'qb_having' ? "\nHAVING " : "\nWHERE ")
			.implode("\n", $this->$qb_key);
	}

	return '';
}
// --------------------------------------------------------------------
/**
* Compile GROUP BY
*
* Escapes identifiers in GROUP BY statements at execution time.
*
* Required so that aliases are tracked properly, regardless of wether
* group_by() is called prior to from(), join() and dbprefix is added
* only if needed.
*
* @return string SQL statement
*/
protected function _compile_group_by()
{
	if (count($this->qb_groupby) === 0)
	{
		return '';
	}

	for ($i = 0, $c = count($this->qb_groupby); $i < $c; $i++)
	{
		// Entries compiled to strings on an earlier run pass through
		if (is_string($this->qb_groupby[$i]))
		{
			continue;
		}

		$field = $this->qb_groupby[$i]['field'];
		$this->qb_groupby[$i] = ($this->qb_groupby[$i]['escape'] === FALSE OR $this->_is_literal($field))
			? $field
			: $this->protect_identifiers($field);
	}

	return "\nGROUP BY ".implode(', ', $this->qb_groupby);
}
// --------------------------------------------------------------------
/**
* Compile ORDER BY
*
* Escapes identifiers in ORDER BY statements at execution time.
*
* Required so that aliases are tracked properly, regardless of wether
* order_by() is called prior to from(), join() and dbprefix is added
* only if needed.
*
* @return string SQL statement
*/
protected function _compile_order_by()
{
	if (is_array($this->qb_orderby) && count($this->qb_orderby) > 0)
	{
		for ($i = 0, $c = count($this->qb_orderby); $i < $c; $i++)
		{
			// Literals and no-escape fields pass through untouched
			if ($this->qb_orderby[$i]['escape'] !== FALSE && ! $this->_is_literal($this->qb_orderby[$i]['field']))
			{
				$this->qb_orderby[$i]['field'] = $this->protect_identifiers($this->qb_orderby[$i]['field']);
			}

			$this->qb_orderby[$i] = $this->qb_orderby[$i]['field'].$this->qb_orderby[$i]['direction'];
		}

		// The compiled clause replaces the array, so subsequent calls hit
		// the is_string() branch below and return it unchanged
		return $this->qb_orderby = "\nORDER BY ".implode(', ', $this->qb_orderby);
	}
	elseif (is_string($this->qb_orderby))
	{
		// Already compiled on a previous call
		return $this->qb_orderby;
	}

	return '';
}
// --------------------------------------------------------------------
/**
* Object to Array
*
* Takes an object as input and converts the class variables to array key/vals
*
* @param object
* @return array
*/
protected function _object_to_array($object)
{
	// Non-objects pass through unchanged
	if ( ! is_object($object))
	{
		return $object;
	}

	$array = array();
	foreach (get_object_vars($object) as $key => $val)
	{
		// Skip nested structures and the builder's internal back-reference
		if (is_object($val) OR is_array($val) OR $key === '_parent_name')
		{
			continue;
		}

		$array[$key] = $val;
	}

	return $array;
}
// --------------------------------------------------------------------
/**
* Object to Array
*
* Takes an object as input and converts the class variables to array key/vals
*
* @param object
* @return array
*/
protected function _object_to_array_batch($object)
{
	if ( ! is_object($object))
	{
		return $object;
	}

	$array = array();
	$out = get_object_vars($object);
	$fields = array_keys($out);

	foreach ($fields as $val)
	{
		// There are some built in keys we need to ignore for this conversion
		if ($val !== '_parent_name')
		{
			// Transpose: each property is expected to hold a list of values
			// (one per row); rebuild them as row-indexed associative arrays
			$i = 0;
			foreach ($out[$val] as $data)
			{
				$array[$i++][$val] = $data;
			}
		}
	}

	return $array;
}
// --------------------------------------------------------------------
/**
* Start Cache
*
* Starts QB caching
*
* @return CI_DB_query_builder
*/
public function start_cache()
{
	// Subsequent QB calls will be recorded into the qb_cache_* arrays too
	$this->qb_caching = TRUE;
	return $this;
}
// --------------------------------------------------------------------
/**
* Stop Cache
*
* Stops QB caching
*
* @return CI_DB_query_builder
*/
public function stop_cache()
{
	// Stops recording; cached components remain until flush_cache()
	$this->qb_caching = FALSE;
	return $this;
}
// --------------------------------------------------------------------
/**
* Flush Cache
*
* Empties the QB cache
*
* @return CI_DB_query_builder
*/
public function flush_cache()
{
	// Reset every cache array back to empty
	$this->_reset_run(array(
		'qb_cache_select'	=> array(),
		'qb_cache_from'		=> array(),
		'qb_cache_join'		=> array(),
		'qb_cache_where'	=> array(),
		'qb_cache_groupby'	=> array(),
		'qb_cache_having'	=> array(),
		'qb_cache_orderby'	=> array(),
		'qb_cache_set'		=> array(),
		'qb_cache_exists'	=> array(),
		'qb_cache_no_escape'	=> array()
	));

	return $this;
}
// --------------------------------------------------------------------
/**
* Merge Cache
*
* When called, this function merges any cached QB arrays with
* locally called ones.
*
* @return void
*/
protected function _merge_cache()
{
	// Nothing cached - nothing to merge
	if (count($this->qb_cache_exists) === 0)
	{
		return;
	}
	elseif (in_array('select', $this->qb_cache_exists, TRUE))
	{
		// Start the no-escape list from the cached one so indexes line up
		$qb_no_escape = $this->qb_cache_no_escape;
	}

	foreach (array_unique($this->qb_cache_exists) as $val) // select, from, etc.
	{
		$qb_variable	= 'qb_'.$val;
		$qb_cache_var	= 'qb_cache_'.$val;
		$qb_new		= $this->$qb_cache_var;

		// Append current statements that are not already in the cache
		for ($i = 0, $c = count($this->$qb_variable); $i < $c; $i++)
		{
			if ( ! in_array($this->{$qb_variable}[$i], $qb_new, TRUE))
			{
				$qb_new[] = $this->{$qb_variable}[$i];
				if ($val === 'select')
				{
					// Keep the parallel no-escape flags in sync with select
					$qb_no_escape[] = $this->qb_no_escape[$i];
				}
			}
		}

		$this->$qb_variable = $qb_new;
		if ($val === 'select')
		{
			$this->qb_no_escape = $qb_no_escape;
		}
	}

	// If we are "protecting identifiers" we need to examine the "from"
	// portion of the query to determine if there are any aliases
	if ($this->_protect_identifiers === TRUE && count($this->qb_cache_from) > 0)
	{
		$this->_track_aliases($this->qb_from);
	}
}
// --------------------------------------------------------------------
/**
* Is literal
*
* Determines if a string represents a literal value or a field name
*
* @param string $str
* @return bool
*/
protected function _is_literal($str)
{
	$str = trim($str);

	// Numbers (integer or float-formatted) and the boolean keywords are literals
	if (empty($str) OR ctype_digit($str) OR (string) (float) $str === $str OR in_array(strtoupper($str), array('TRUE', 'FALSE'), TRUE))
	{
		return TRUE;
	}

	// Accepted quote characters depend on the driver's escape character;
	// computed once per request via the static cache
	static $_str;

	if (empty($_str))
	{
		$_str = ($this->_escape_char !== '"')
			? array('"', "'") : array("'");
	}

	// Anything starting with a quote character is a string literal
	return in_array($str[0], $_str, TRUE);
}
// --------------------------------------------------------------------
/**
* Reset Query Builder values.
*
* Publicly-visible method to reset the QB values.
*
* @return CI_DB_query_builder
*/
public function reset_query()
{
	// Clear both the read (SELECT) and write (INSERT/UPDATE/DELETE) state
	$this->_reset_select();
	$this->_reset_write();
	return $this;
}
// --------------------------------------------------------------------
/**
* Resets the query builder values. Called by the get() function
*
* @param array An array of fields to reset
* @return void
*/
protected function _reset_run($qb_reset_items)
{
	// Restore each listed property to the default value supplied for it
	foreach ($qb_reset_items as $item => $default_value)
	{
		$this->$item = $default_value;
	}
}
// --------------------------------------------------------------------
/**
* Resets the query builder values. Called by the get() function
*
* @return void
*/
protected function _reset_select()
{
	// Defaults for all SELECT-related builder state
	$this->_reset_run(array(
		'qb_select'		=> array(),
		'qb_from'		=> array(),
		'qb_join'		=> array(),
		'qb_where'		=> array(),
		'qb_groupby'		=> array(),
		'qb_having'		=> array(),
		'qb_orderby'		=> array(),
		'qb_aliased_tables'	=> array(),
		'qb_no_escape'		=> array(),
		'qb_distinct'		=> FALSE,
		'qb_limit'		=> FALSE,
		'qb_offset'		=> FALSE
	));
}
// --------------------------------------------------------------------
/**
* Resets the query builder "write" values.
*
* Called by the insert() update() insert_batch() update_batch() and delete() functions
*
* @return void
*/
protected function _reset_write()
{
	// Defaults for all write-related (INSERT/UPDATE/DELETE) builder state
	$this->_reset_run(array(
		'qb_set'	=> array(),
		'qb_from'	=> array(),
		'qb_join'	=> array(),
		'qb_where'	=> array(),
		'qb_orderby'	=> array(),
		'qb_keys'	=> array(),
		'qb_limit'	=> FALSE
	));
}
}
| readerself/readerself | system/database/DB_query_builder.php | PHP | bsd-3-clause | 61,969 |
<?php

// View template for a single News record: renders the title, the
// update/delete action buttons and a DetailView of the model's attributes.

use yii\helpers\Html;
use yii\widgets\DetailView;

/* @var $this yii\web\View */
/* @var $model frontend\models\News */

// Page title is the record id; breadcrumbs lead back to the News index
$this->title = $model->id;
$this->params['breadcrumbs'][] = ['label' => Yii::t('app', 'News'), 'url' => ['index']];
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="news-view">

    <h1><?= Html::encode($this->title) ?></h1>

    <p>
        <?= Html::a(Yii::t('app', 'Update'), ['update', 'id' => $model->id], ['class' => 'btn btn-primary']) ?>
        <?= Html::a(Yii::t('app', 'Delete'), ['delete', 'id' => $model->id], [
            'class' => 'btn btn-danger',
            'data' => [
                'confirm' => Yii::t('app', 'Are you sure you want to delete this item?'),
                'method' => 'post',
            ],
        ]) ?>
    </p>

    <?= DetailView::widget([
        'model' => $model,
        'attributes' => [
            'id',
            'author_id',
            'preview',
            'news:ntext',
            'created_at',
            'updated_at',
        ],
    ]) ?>

</div>
| marfarin/test | frontend/views/news/view.php | PHP | bsd-3-clause | 1,051 |
package tcplog
import (
"net"
"strings"
"sync"
"testing"
"github.com/stretchr/testify/assert"
"github.com/vektra/cypress"
"github.com/vektra/neko"
)
// TestFormatter is a minimal formatter used by these tests; it renders a
// cypress.Message via its key/value string form.
type TestFormatter struct{}

// Format implements the formatter interface expected by NewLogger.
func (tf *TestFormatter) Format(m *cypress.Message) ([]byte, error) {
	kv := m.KVString()
	return []byte(kv), nil
}
// TestRead verifies that Logger.Read accepts byte slices, strings and
// cypress.Messages, and rejects unsupported types such as int.
func TestRead(t *testing.T) {
	n := neko.Start(t)

	var l *Logger

	n.Setup(func() {
		// Empty address: nothing is dialed during these cases
		l = NewLogger("", false, &TestFormatter{})
	})

	n.It("reads a byte slice", func() {
		ok := l.Read([]byte("This is a long line"))
		assert.NoError(t, ok)
	})

	n.It("reads a string", func() {
		ok := l.Read("This is a long line")
		assert.NoError(t, ok)
	})

	n.It("reads a cypress.Message", func() {
		// NewMessage is a test helper defined elsewhere in this package
		message := NewMessage(t)
		ok := l.Read(message)
		assert.NoError(t, ok)
	})

	n.It("does not read an int", func() {
		ok := l.Read(1)
		assert.Error(t, ok)
	})

	n.Meow()
}
// TestWrite exercises Logger.write: queueing lines on the Pump channel,
// emitting a drop notice after lines were dropped, and incrementing the
// drop counter when the pump is full.
func TestWrite(t *testing.T) {
	n := neko.Start(t)

	var (
		l    *Logger
		line = []byte("This is a log line")
	)

	n.Setup(func() {
		l = NewLogger("", false, &TestFormatter{})
	})

	n.It("adds a log line to the pump", func() {
		l.write(line)

		select {
		case pumpLine := <-l.Pump:
			assert.Equal(t, line, pumpLine)

			// Nothing was dropped on a successful write
			var zero uint64 = 0
			assert.Equal(t, zero, l.PumpDropped)
		default:
			t.Fail()
		}
	})

	n.It("adds an error line to the pump if lines were dropped", func() {
		l.PumpDropped = 1
		l.write(line)

		select {
		case <-l.Pump:
			// The first receive is discarded; the second entry is expected
			// to contain the drop notice. NOTE(review): this assumes write()
			// queues two entries in that order - confirm against Logger.write.
			expected := "The tcplog pump dropped 1 log lines"
			actual := <-l.Pump
			assert.True(t, strings.Index(string(actual), expected) != -1)

			// The drop counter is reset once the notice has been queued
			var zero uint64 = 0
			assert.Equal(t, zero, l.PumpDropped)
		default:
			t.Fail()
		}
	})

	n.It("does not add a log line and increments dropped counter if pump is full ", func() {
		// Zero-capacity channel: every non-blocking write overflows immediately
		l.Pump = make(chan []byte, 0)
		l.write(line)

		select {
		case <-l.Pump:
			t.Fail()
		default:
			var one uint64 = 1
			assert.Equal(t, one, l.PumpDropped)
		}
	})

	n.Meow()
}
// TestDial verifies that Logger.dial establishes a TCP connection to the
// configured server address and that the returned value is a net.Conn.
func TestDial(t *testing.T) {
	s := NewTcpServer()
	go s.Run("127.0.0.1")

	l := NewLogger(<-s.Address, false, &TestFormatter{})

	// Check the dial error instead of discarding it: the original deferred
	// conn.Close() before knowing dial succeeded, so a failed dial would
	// panic on a nil connection and mask the real failure.
	conn, err := l.dial()
	assert.NoError(t, err)
	if err != nil {
		return
	}
	defer conn.Close()

	_, ok := conn.(net.Conn)
	assert.True(t, ok, "returns a connection")
}
// TestSendLogs verifies the end-to-end path: a line placed on the pump is
// delivered by sendLogs over TCP to the test server.
func TestSendLogs(t *testing.T) {
	n := neko.Start(t)

	var (
		s    *TcpServer
		l    *Logger
		line = []byte("This is a log line")
		wg   sync.WaitGroup
	)

	n.Setup(func() {
		s = NewTcpServer()

		wg.Add(1)
		go func() {
			defer wg.Done()
			s.Run("127.0.0.1")
		}()

		// Block until the server reports the address it bound to.
		l = NewLogger(<-s.Address, false, &TestFormatter{})

		wg.Add(1)
		go func() {
			defer wg.Done()
			l.sendLogs()
		}()
	})

	n.It("sends line from pipe to tcp server", func() {
		l.Pump <- line
		// Closing the pump lets sendLogs drain and exit, after which both
		// goroutines finish and the server has received the message.
		close(l.Pump)
		wg.Wait()

		select {
		case message := <-s.Messages:
			assert.Equal(t, string(line), string(message))
		default:
			t.Fail()
		}
	})

	n.Meow()
}
| vektra/addons | lib/tcplog/tcplog_test.go | GO | bsd-3-clause | 2,811 |
''' Renderers for various kinds of annotations that can be added to
Bokeh plots
'''
from __future__ import absolute_import
from six import string_types
from ..core.enums import (AngleUnits, Dimension, FontStyle, LegendClickPolicy, LegendLocation,
Orientation, RenderMode, SpatialUnits, VerticalAlign, TextAlign)
from ..core.has_props import abstract
from ..core.properties import (Angle, AngleSpec, Auto, Bool, ColorSpec, Datetime, Dict, DistanceSpec, Either,
Enum, Float, FontSizeSpec, Include, Instance, Int, List, NumberSpec, Override,
Seq, String, StringSpec, Tuple, value)
from ..core.property_mixins import FillProps, LineProps, TextProps
from ..core.validation import error
from ..core.validation.errors import BAD_COLUMN_NAME, NON_MATCHING_DATA_SOURCES_ON_LEGEND_ITEM_RENDERERS
from ..model import Model
from ..util.serialization import convert_datetime_type
from .formatters import BasicTickFormatter, TickFormatter
from .mappers import ContinuousColorMapper
from .renderers import GlyphRenderer, Renderer
from .sources import ColumnDataSource, DataSource
from .tickers import BasicTicker, Ticker
@abstract
class Annotation(Renderer):
    ''' Base class for all annotation models.

    '''

    plot = Instance(".models.plots.Plot", help="""
    The plot to which this annotation is attached.
    """)

    # Annotations draw on the "annotation" render level by default,
    # i.e. above glyphs.
    level = Override(default="annotation")
@abstract
class TextAnnotation(Annotation):
    ''' Base class for text annotation models such as labels and titles.

    '''
class LegendItem(Model):
    ''' A single entry in a ``Legend``: a label together with the glyph
    renderers it describes.

    '''
    def __init__(self, *args, **kwargs):
        super(LegendItem, self).__init__(*args, **kwargs)
        if isinstance(self.label, string_types):
            # Allow convenience of setting label as a string
            self.label = value(self.label)

    label = StringSpec(default=None, help="""
    A label for this legend. Can be a string, or a column of a
    ColumnDataSource. If ``label`` is a field, then it must
    be in the renderers' data_source.
    """)

    renderers = List(Instance(GlyphRenderer), help="""
    A list of the glyph renderers to draw in the legend. If ``label`` is a field,
    then all data_sources of renderers must be the same.
    """)

    # Validation: a field-based label is looked up per-renderer, so all
    # renderers must share one data source.
    @error(NON_MATCHING_DATA_SOURCES_ON_LEGEND_ITEM_RENDERERS)
    def _check_data_sources_on_renderers(self):
        if self.label and 'field' in self.label:
            if len({r.data_source for r in self.renderers}) != 1:
                return str(self)

    # Validation: a field-based label must name a column that actually
    # exists in the (first) renderer's data source.
    @error(BAD_COLUMN_NAME)
    def _check_field_label_on_data_source(self):
        if self.label and 'field' in self.label:
            if len(self.renderers) < 1:
                return str(self)
            source = self.renderers[0].data_source
            if self.label.get('field') not in source.column_names:
                return str(self)
class Legend(Annotation):
    ''' Render informational legends for a plot.

    '''

    location = Either(Enum(LegendLocation), Tuple(Float, Float), default="top_right", help="""
    The location where the legend should draw itself. It's either one of
    ``bokeh.core.enums.LegendLocation``'s enumerated values, or a ``(x, y)``
    tuple indicating an absolute location in screen coordinates (pixels
    from the bottom-left corner).
    """)

    orientation = Enum(Orientation, default="vertical", help="""
    Whether the legend entries should be placed vertically or horizontally
    when they are drawn.
    """)

    border_props = Include(LineProps, help="""
    The %s for the legend border outline.
    """)

    border_line_color = Override(default="#e5e5e5")

    border_line_alpha = Override(default=0.5)

    background_props = Include(FillProps, help="""
    The %s for the legend background style.
    """)

    inactive_props = Include(FillProps, help="""
    The %s for the legend background style when inactive.
    """)

    click_policy = Enum(LegendClickPolicy, default="none", help="""
    Defines what happens when a legend's item is clicked.
    """)

    background_fill_color = Override(default="#ffffff")

    background_fill_alpha = Override(default=0.95)

    inactive_fill_color = Override(default="white")

    inactive_fill_alpha = Override(default=0.9)

    label_props = Include(TextProps, help="""
    The %s for the legend labels.
    """)

    label_text_baseline = Override(default='middle')

    label_text_font_size = Override(default={'value': '10pt'})

    label_standoff = Int(5, help="""
    The distance (in pixels) to separate the label from its associated glyph.
    """)

    label_height = Int(20, help="""
    The minimum height (in pixels) of the area that legend labels should occupy.
    """)

    label_width = Int(20, help="""
    The minimum width (in pixels) of the area that legend labels should occupy.
    """)

    glyph_height = Int(20, help="""
    The height (in pixels) that the rendered legend glyph should occupy.
    """)

    glyph_width = Int(20, help="""
    The width (in pixels) that the rendered legend glyph should occupy.
    """)

    margin = Int(10, help="""
    Amount of margin around the legend.
    """)

    padding = Int(10, help="""
    Amount of padding around the contents of the legend. Only applicable
    when border is visible, otherwise collapses to 0.
    """)

    spacing = Int(3, help="""
    Amount of spacing (in pixels) between legend entries.
    """)

    items = List(Instance(LegendItem), help="""
    A list of :class:`~bokeh.model.annotations.LegendItem` instances to be
    rendered in the legend.

    This can be specified explicitly, for instance:

    .. code-block:: python

        legend = Legend(items=[
            LegendItem(label="sin(x)"   , renderers=[r0, r1]),
            LegendItem(label="2*sin(x)" , renderers=[r2]),
            LegendItem(label="3*sin(x)" , renderers=[r3, r4])
        ])

    But as a convenience, can also be given more compactly as a list of tuples:

    .. code-block:: python

        legend = Legend(items=[
            ("sin(x)"   , [r0, r1]),
            ("2*sin(x)" , [r2]),
            ("3*sin(x)" , [r3, r4])
        ])

    where each tuple is of the form: *(label, renderers)*.

    """).accepts(List(Tuple(String, List(Instance(GlyphRenderer)))), lambda items: [LegendItem(label=item[0], renderers=item[1]) for item in items])
class ColorBar(Annotation):
    ''' Render a color bar based on a color mapper.

    '''

    location = Either(Enum(LegendLocation), Tuple(Float, Float),
        default="top_right", help="""
    The location where the color bar should draw itself. It's either one of
    ``bokeh.core.enums.LegendLocation``'s enumerated values, or a ``(x, y)``
    tuple indicating an absolute location in screen coordinates (pixels
    from the bottom-left corner).

    .. warning::
        If the color bar is placed in a side panel, the location will likely
        have to be set to `(0,0)`.
    """)

    orientation = Enum(Orientation, default="vertical", help="""
    Whether the color bar should be oriented vertically or horizontally.
    """)

    height = Either(Auto, Int(), help="""
    The height (in pixels) that the color scale should occupy.
    """)

    width = Either(Auto, Int(), help="""
    The width (in pixels) that the color scale should occupy.
    """)

    scale_alpha = Float(1.0, help="""
    The alpha with which to render the color scale.
    """)

    title = String(help="""
    The title text to render.
    """)

    title_props = Include(TextProps, help="""
    The %s values for the title text.
    """)

    title_text_font_size = Override(default={'value': "10pt"})

    title_text_font_style = Override(default="italic")

    title_standoff = Int(2, help="""
    The distance (in pixels) to separate the title from the color bar.
    """)

    ticker = Instance(Ticker, default=lambda: BasicTicker(), help="""
    A Ticker to use for computing locations of axis components.
    """)

    formatter = Instance(TickFormatter, default=lambda: BasicTickFormatter(), help="""
    A TickFormatter to use for formatting the visual appearance of ticks.
    """)

    major_label_overrides = Dict(Either(Float, String), String, default={}, help="""
    Provide explicit tick label values for specific tick locations that
    override normal formatting.
    """)

    color_mapper = Instance(ContinuousColorMapper, help="""
    A continuous color mapper containing a color palette to render.

    .. warning::
        If the `low` and `high` attributes of the ColorMapper aren't set, ticks
        and tick labels won't be rendered. Additionally, if a LogTicker is
        passed to the `ticker` argument and either or both of the logarithms
        of `low` and `high` values of the color_mapper are non-numeric
        (i.e. `low=0`), the tick and tick labels won't be rendered.
    """)

    margin = Int(30, help="""
    Amount of margin (in pixels) around the outside of the color bar.
    """)

    padding = Int(10, help="""
    Amount of padding (in pixels) between the color scale and color bar border.
    """)

    major_label_props = Include(TextProps, help="""
    The %s of the major tick labels.
    """)

    major_label_text_align = Override(default="center")

    major_label_text_baseline = Override(default="middle")

    major_label_text_font_size = Override(default={'value': "8pt"})

    label_standoff = Int(5, help="""
    The distance (in pixels) to separate the tick labels from the color bar.
    """)

    major_tick_props = Include(LineProps, help="""
    The %s of the major ticks.
    """)

    major_tick_line_color = Override(default="#ffffff")

    major_tick_in = Int(default=5, help="""
    The distance (in pixels) that major ticks should extend into the
    main plot area.
    """)

    major_tick_out = Int(default=0, help="""
    The distance (in pixels) that major ticks should extend out of the
    main plot area.
    """)

    minor_tick_props = Include(LineProps, help="""
    The %s of the minor ticks.
    """)

    minor_tick_line_color = Override(default=None)

    minor_tick_in = Int(default=0, help="""
    The distance (in pixels) that minor ticks should extend into the
    main plot area.
    """)

    minor_tick_out = Int(default=0, help="""
    The distance (in pixels) that minor ticks should extend out of the
    main plot area.
    """)

    bar_props = Include(LineProps, help="""
    The %s for the color scale bar outline.
    """)

    bar_line_color = Override(default=None)

    border_props = Include(LineProps, help="""
    The %s for the color bar border outline.
    """)

    border_line_color = Override(default=None)

    background_props = Include(FillProps, help="""
    The %s for the color bar background style.
    """)

    background_fill_color = Override(default="#ffffff")

    background_fill_alpha = Override(default=0.95)
# This only exists to prevent a circular import.
def _DEFAULT_ARROW():
    # Deferred import: arrow_heads imports from this module at load time.
    from .arrow_heads import OpenHead
    return OpenHead()
class Arrow(Annotation):
    ''' Render an arrow as an annotation.

    '''

    x_start = NumberSpec(help="""
    The x-coordinates to locate the start of the arrows.
    """)

    y_start = NumberSpec(help="""
    The y-coordinates to locate the start of the arrows.
    """)

    start_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the x_start and y_start attributes. Interpreted as "data
    space" units by default.
    """)

    start = Instance('.models.arrow_heads.ArrowHead', default=None, help="""
    Instance of ArrowHead.
    """)

    x_end = NumberSpec(help="""
    The x-coordinates to locate the end of the arrows.
    """)

    y_end = NumberSpec(help="""
    The y-coordinates to locate the end of the arrows.
    """)

    end_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the x_end and y_end attributes. Interpreted as "data
    space" units by default.
    """)

    end = Instance('.models.arrow_heads.ArrowHead', default=_DEFAULT_ARROW, help="""
    Instance of ArrowHead.
    """)

    body_props = Include(LineProps, use_prefix=False, help="""
    The %s values for the arrow body.
    """)

    source = Instance(DataSource, help="""
    Local data source to use when rendering annotations on the plot.
    """)

    x_range_name = String('default', help="""
    A particular (named) x-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default x-range.
    """)

    y_range_name = String('default', help="""
    A particular (named) y-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default y-range.
    """)
class BoxAnnotation(Annotation):
    ''' Render a shaded rectangular region as an annotation.

    '''

    # NOTE(review): each edge defaults to None — presumably the box then
    # extends to the edge of the plot area in that direction; confirm
    # against the BokehJS renderer.
    left = Either(Auto, NumberSpec(), default=None, help="""
    The x-coordinates of the left edge of the box annotation.

    Datetime values are also accepted, but note that they are immediately
    converted to milliseconds-since-epoch.
    """)

    left_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the left attribute. Interpreted as "data space" units
    by default.
    """)

    right = Either(Auto, NumberSpec(), default=None, help="""
    The x-coordinates of the right edge of the box annotation.

    Datetime values are also accepted, but note that they are immediately
    converted to milliseconds-since-epoch.
    """)

    right_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the right attribute. Interpreted as "data space" units
    by default.
    """)

    bottom = Either(Auto, NumberSpec(), default=None, help="""
    The y-coordinates of the bottom edge of the box annotation.

    Datetime values are also accepted, but note that they are immediately
    converted to milliseconds-since-epoch.
    """)

    bottom_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the bottom attribute. Interpreted as "data space" units
    by default.
    """)

    top = Either(Auto, NumberSpec(), default=None, help="""
    The y-coordinates of the top edge of the box annotation.

    Datetime values are also accepted, but note that they are immediately
    converted to milliseconds-since-epoch.
    """)

    top_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the top attribute. Interpreted as "data space" units
    by default.
    """)

    x_range_name = String('default', help="""
    A particular (named) x-range to use for computing screen locations when
    rendering box annotations on the plot. If unset, use the default x-range.
    """)

    y_range_name = String('default', help="""
    A particular (named) y-range to use for computing screen locations when
    rendering box annotations on the plot. If unset, use the default y-range.
    """)

    line_props = Include(LineProps, use_prefix=False, help="""
    The %s values for the box.
    """)

    line_alpha = Override(default=0.3)

    line_color = Override(default="#cccccc")

    fill_props = Include(FillProps, use_prefix=False, help="""
    The %s values for the box.
    """)

    fill_alpha = Override(default=0.4)

    fill_color = Override(default="#fff9ba")

    render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the box is rendered as a canvas element or as an
    css element overlaid on the canvas. The default mode is "canvas".

    .. warning::
        The line_dash and line_dash_offset attributes aren't supported if
        the render_mode is set to "css"
    """)
class Band(Annotation):
    ''' Render a filled area band along a dimension.

    '''

    lower = DistanceSpec(help="""
    The coordinates of the lower portion of the filled area band.
    """)

    upper = DistanceSpec(help="""
    The coordinates of the upper portion of the filled area band.
    """)

    base = DistanceSpec(help="""
    The orthogonal coordinates of the upper and lower values.
    """)

    dimension = Enum(Dimension, default='height', help="""
    The direction of the band.
    """)

    source = Instance(DataSource, default=lambda: ColumnDataSource(), help="""
    Local data source to use when rendering annotations on the plot.
    """)

    x_range_name = String('default', help="""
    A particular (named) x-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default x-range.
    """)

    y_range_name = String('default', help="""
    A particular (named) y-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default y-range.
    """)

    line_props = Include(LineProps, use_prefix=False, help="""
    The %s values for the band.
    """)

    line_alpha = Override(default=0.3)

    line_color = Override(default="#cccccc")

    fill_props = Include(FillProps, use_prefix=False, help="""
    The %s values for the band.
    """)

    fill_alpha = Override(default=0.4)

    fill_color = Override(default="#fff9ba")
class Label(TextAnnotation):
    ''' Render a single text label as an annotation.

    ``Label`` will render a single text label at given ``x`` and ``y``
    coordinates, which can be in either screen (pixel) space, or data (axis
    range) space.

    The label can also be configured with a screen space offset from ``x`` and
    ``y``, by using the ``x_offset`` and ``y_offset`` properties.

    Additionally, the label can be rotated with the ``angle`` property.

    There are also standard text, fill, and line properties to control the
    appearance of the text, its background, as well as the rectangular bounding
    box border.

    '''

    x = Float(help="""
    The x-coordinate in screen coordinates to locate the text anchors.

    Datetime values are also accepted, but note that they are immediately
    converted to milliseconds-since-epoch.
    """).accepts(Datetime, convert_datetime_type)

    x_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the x attribute. Interpreted as "data space" units
    by default.
    """)

    y = Float(help="""
    The y-coordinate in screen coordinates to locate the text anchors.

    Datetime values are also accepted, but note that they are immediately
    converted to milliseconds-since-epoch.
    """).accepts(Datetime, convert_datetime_type)

    y_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the y attribute. Interpreted as "data space" units
    by default.
    """)

    text = String(help="""
    The text value to render.
    """)

    angle = Angle(default=0, help="""
    The angle to rotate the text, as measured from the horizontal.

    .. warning::
        The center of rotation for canvas and css render_modes is different.
        For `render_mode="canvas"` the label is rotated from the top-left
        corner of the annotation, while for `render_mode="css"` the annotation
        is rotated around its center.
    """)

    angle_units = Enum(AngleUnits, default='rad', help="""
    Acceptable values for units are ``"rad"`` and ``"deg"``
    """)

    x_offset = Float(default=0, help="""
    Offset value to apply to the x-coordinate.

    This is useful, for instance, if it is desired to "float" text a fixed
    distance in screen units from a given data position.
    """)

    y_offset = Float(default=0, help="""
    Offset value to apply to the y-coordinate.

    This is useful, for instance, if it is desired to "float" text a fixed
    distance in screen units from a given data position.
    """)

    # TODO (bev) these should probably not be dataspec properties
    text_props = Include(TextProps, use_prefix=False, help="""
    The %s values for the text.
    """)

    # TODO (bev) these should probably not be dataspec properties
    background_props = Include(FillProps, use_prefix=True, help="""
    The %s values for the text bounding box.
    """)

    background_fill_color = Override(default=None)

    # TODO (bev) these should probably not be dataspec properties
    border_props = Include(LineProps, use_prefix=True, help="""
    The %s values for the text bounding box.
    """)

    border_line_color = Override(default=None)

    x_range_name = String('default', help="""
    A particular (named) x-range to use for computing screen location when
    rendering an annotation on the plot. If unset, use the default x-range.
    """)

    y_range_name = String('default', help="""
    A particular (named) y-range to use for computing screen location when
    rendering an annotation on the plot. If unset, use the default y-range.
    """)

    render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the text is rendered as a canvas element or as an
    css element overlaid on the canvas. The default mode is "canvas".

    .. note::
        The CSS labels won't be present in the output using the "save" tool.

    .. warning::
        Not all visual styling properties are supported if the render_mode is
        set to "css". The border_line_dash property isn't fully supported and
        border_line_dash_offset isn't supported at all. Setting text_alpha will
        modify the opacity of the entire background box and border in addition
        to the text. Finally, clipping Label annotations inside of the plot
        area isn't supported in "css" mode.
    """)
class LabelSet(TextAnnotation):
    ''' Render multiple text labels as annotations.

    ``LabelSet`` will render multiple text labels at given ``x`` and ``y``
    coordinates, which can be in either screen (pixel) space, or data (axis
    range) space. In this case (as opposed to the single ``Label`` model),
    ``x`` and ``y`` can also be the name of a column from a
    :class:`~bokeh.models.sources.ColumnDataSource`, in which case the labels
    will be "vectorized" using coordinate values from the specified columns.

    The label can also be configured with a screen space offset from ``x`` and
    ``y``, by using the ``x_offset`` and ``y_offset`` properties. These offsets
    may be vectorized by giving the name of a data source column.

    Additionally, the label can be rotated with the ``angle`` property (which
    may also be a column name.)

    There are also standard text, fill, and line properties to control the
    appearance of the text, its background, as well as the rectangular bounding
    box border.

    The data source is provided by setting the ``source`` property.

    '''

    x = NumberSpec(help="""
    The x-coordinates to locate the text anchors.
    """)

    x_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the xs attribute. Interpreted as "data space" units
    by default.
    """)

    y = NumberSpec(help="""
    The y-coordinates to locate the text anchors.
    """)

    y_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the ys attribute. Interpreted as "data space" units
    by default.
    """)

    text = StringSpec("text", help="""
    The text values to render.
    """)

    angle = AngleSpec(default=0, help="""
    The angles to rotate the text, as measured from the horizontal.

    .. warning::
        The center of rotation for canvas and css render_modes is different.
        For `render_mode="canvas"` the label is rotated from the top-left
        corner of the annotation, while for `render_mode="css"` the annotation
        is rotated around its center.
    """)

    x_offset = NumberSpec(default=0, help="""
    Offset values to apply to the x-coordinates.

    This is useful, for instance, if it is desired to "float" text a fixed
    distance in screen units from a given data position.
    """)

    y_offset = NumberSpec(default=0, help="""
    Offset values to apply to the y-coordinates.

    This is useful, for instance, if it is desired to "float" text a fixed
    distance in screen units from a given data position.
    """)

    text_props = Include(TextProps, use_prefix=False, help="""
    The %s values for the text.
    """)

    background_props = Include(FillProps, use_prefix=True, help="""
    The %s values for the text bounding box.
    """)

    background_fill_color = Override(default=None)

    border_props = Include(LineProps, use_prefix=True, help="""
    The %s values for the text bounding box.
    """)

    border_line_color = Override(default=None)

    source = Instance(DataSource, default=lambda: ColumnDataSource(), help="""
    Local data source to use when rendering annotations on the plot.
    """)

    x_range_name = String('default', help="""
    A particular (named) x-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default x-range.
    """)

    y_range_name = String('default', help="""
    A particular (named) y-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default y-range.
    """)

    render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the text is rendered as a canvas element or as an
    css element overlaid on the canvas. The default mode is "canvas".

    .. note::
        The CSS labels won't be present in the output using the "save" tool.

    .. warning::
        Not all visual styling properties are supported if the render_mode is
        set to "css". The border_line_dash property isn't fully supported and
        border_line_dash_offset isn't supported at all. Setting text_alpha will
        modify the opacity of the entire background box and border in addition
        to the text. Finally, clipping Label annotations inside of the plot
        area isn't supported in "css" mode.
    """)
class PolyAnnotation(Annotation):
    ''' Render a shaded polygonal region as an annotation.

    '''

    xs = Seq(Float, default=[], help="""
    The x-coordinates of the region to draw.
    """)

    xs_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the xs attribute. Interpreted as "data space" units
    by default.
    """)

    ys = Seq(Float, default=[], help="""
    The y-coordinates of the region to draw.
    """)

    ys_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the ys attribute. Interpreted as "data space" units
    by default.
    """)

    x_range_name = String('default', help="""
    A particular (named) x-range to use for computing screen locations when
    rendering box annotations on the plot. If unset, use the default x-range.
    """)

    y_range_name = String('default', help="""
    A particular (named) y-range to use for computing screen locations when
    rendering box annotations on the plot. If unset, use the default y-range.
    """)

    line_props = Include(LineProps, use_prefix=False, help="""
    The %s values for the polygon.
    """)

    line_alpha = Override(default=0.3)

    line_color = Override(default="#cccccc")

    fill_props = Include(FillProps, use_prefix=False, help="""
    The %s values for the polygon.
    """)

    # Defaults match BoxAnnotation's pale-yellow translucent styling.
    fill_alpha = Override(default=0.4)

    fill_color = Override(default="#fff9ba")
class Span(Annotation):
    """ Render a horizontal or vertical line span.

    """

    location = Float(help="""
    The location of the span, along ``dimension``.
    """)

    location_units = Enum(SpatialUnits, default='data', help="""
    The unit type for the location attribute. Interpreted as "data space"
    units by default.
    """)

    dimension = Enum(Dimension, default='width', help="""
    The direction of the span.
    """)

    x_range_name = String('default', help="""
    A particular (named) x-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default x-range.
    """)

    y_range_name = String('default', help="""
    A particular (named) y-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default y-range.
    """)

    render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the span is rendered as a canvas element or as an
    css element overlaid on the canvas. The default mode is "canvas".

    .. warning::
        The line_dash and line_dash_offset attributes aren't supported if
        the render_mode is set to "css"
    """)

    line_props = Include(LineProps, use_prefix=False, help="""
    The %s values for the span.
    """)
class Title(TextAnnotation):
    ''' Render a single title box as an annotation.

    '''

    text = String(help="""
    The text value to render.
    """)

    vertical_align = Enum(VerticalAlign, default='bottom', help="""
    Alignment of the text in its enclosing space, *across* the direction of the text.
    """)

    align = Enum(TextAlign, default='left', help="""
    Alignment of the text in its enclosing space, *along* the direction of the text.
    """)

    offset = Float(default=0, help="""
    Offset the text by a number of pixels (can be positive or negative). Shifts the text in
    different directions based on the location of the title:

        * above: shifts title right
        * right: shifts title down
        * below: shifts title right
        * left: shifts title up

    """)

    text_font = String(default="helvetica", help="""
    Name of a font to use for rendering text, e.g., ``'times'``,
    ``'helvetica'``.

    """)

    text_font_size = FontSizeSpec(default=value("10pt"))

    text_font_style = Enum(FontStyle, default="bold", help="""
    A style to use for rendering text.

    Acceptable values are:

    - ``'normal'`` normal text
    - ``'italic'`` *italic text*
    - ``'bold'`` **bold text**

    """)

    text_color = ColorSpec(default="#444444", help="""
    A color to use to fill text with.

    Acceptable values are:

    - any of the 147 named `CSS colors`_, e.g ``'green'``, ``'indigo'``
    - an RGB(A) hex value, e.g., ``'#FF0000'``, ``'#44444444'``
    - a 3-tuple of integers (r,g,b) between 0 and 255
    - a 4-tuple of (r,g,b,a) where r,g,b are integers between 0..255 and a is between 0..1

    .. _CSS colors: http://www.w3schools.com/cssref/css_colornames.asp

    """)

    text_alpha = NumberSpec(default=1.0, help="""
    An alpha value to use to fill text with.

    Acceptable values are floating point numbers between 0 (transparent)
    and 1 (opaque).

    """)

    background_props = Include(FillProps, use_prefix=True, help="""
    The %s values for the text bounding box.
    """)

    background_fill_color = Override(default=None)

    border_props = Include(LineProps, use_prefix=True, help="""
    The %s values for the text bounding box.
    """)

    border_line_color = Override(default=None)

    render_mode = Enum(RenderMode, default="canvas", help="""
    Specifies whether the text is rendered as a canvas element or as an
    css element overlaid on the canvas. The default mode is "canvas".

    .. note::
        The CSS labels won't be present in the output using the "save" tool.

    .. warning::
        Not all visual styling properties are supported if the render_mode is
        set to "css". The border_line_dash property isn't fully supported and
        border_line_dash_offset isn't supported at all. Setting text_alpha will
        modify the opacity of the entire background box and border in addition
        to the text. Finally, clipping Label annotations inside of the plot
        area isn't supported in "css" mode.
    """)
class Tooltip(Annotation):
    ''' Render a tooltip.

    .. note::
        This model is currently managed by BokehJS and is not useful
        directly from python.

    '''

    # Tooltips always draw on top of everything else.
    level = Override(default="overlay")

    attachment = Enum("horizontal", "vertical", "left", "right", "above", "below", help="""
    Whether the tooltip should display to the left or right off the cursor
    position or above or below it, or if it should be automatically placed
    in the horizontal or vertical dimension.
    """)

    inner_only = Bool(default=True, help="""
    Whether to display outside a central plot frame area.
    """)

    show_arrow = Bool(default=True, help="""
    Whether tooltip's arrow should be shown.
    """)
# This only exists to prevent a circular import.
def _DEFAULT_TEE():
    # Deferred import: arrow_heads imports from this module at load time.
    from .arrow_heads import TeeHead
    return TeeHead(level="underlay", size=10)
class Whisker(Annotation):
    ''' Render a whisker along a dimension.

    '''

    lower = DistanceSpec(help="""
    The coordinates of the lower end of the whiskers.
    """)

    lower_head = Instance('.models.arrow_heads.ArrowHead', default=_DEFAULT_TEE, help="""
    Instance of ArrowHead.
    """)

    upper = DistanceSpec(help="""
    The coordinates of the upper end of the whiskers.
    """)

    upper_head = Instance('.models.arrow_heads.ArrowHead', default=_DEFAULT_TEE, help="""
    Instance of ArrowHead.
    """)

    base = DistanceSpec(help="""
    The orthogonal coordinates of the upper and lower values.
    """)

    dimension = Enum(Dimension, default='height', help="""
    The direction of the whisker.
    """)

    source = Instance(DataSource, default=lambda: ColumnDataSource(), help="""
    Local data source to use when rendering annotations on the plot.
    """)

    x_range_name = String('default', help="""
    A particular (named) x-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default x-range.
    """)

    y_range_name = String('default', help="""
    A particular (named) y-range to use for computing screen locations when
    rendering annotations on the plot. If unset, use the default y-range.
    """)

    line_props = Include(LineProps, use_prefix=False, help="""
    The %s values for the whisker body.
    """)

    level = Override(default="underlay")
class ToolbarPanel(Annotation): # TODO: this shouldn't be an annotation
    ''' An annotation that hosts a toolbar inside the plot area. '''

    toolbar = Instance(".models.tools.Toolbar", help="""
    A toolbar to display.
    """)
| rs2/bokeh | bokeh/models/annotations.py | Python | bsd-3-clause | 34,179 |
<?php

use yii\helpers\Html;

/* @var $this yii\web\View */
/* @var $model app\models\Patrol */

// Drop the generator's stray '' . ' ' concatenation, which rendered a
// double space between the label and the model name in the title.
$this->title = 'Update Patrol: ' . $model->name;
$this->params['breadcrumbs'][] = ['label' => 'Patrols', 'url' => ['index']];
$this->params['breadcrumbs'][] = ['label' => $model->name, 'url' => ['view', 'id' => $model->id]];
$this->params['breadcrumbs'][] = 'Update';
?>
<div class="patrol-update">

    <h1><?= Html::encode($this->title) ?></h1>

    <?= $this->render('_form', [
        'model' => $model,
    ]) ?>

</div>
| thehern-troop15/pinestraw | views/patrol/update.php | PHP | bsd-3-clause | 529 |
#include "ace/config-all.h"
#include "ace/Log_Msg.h"
#include "ace/OS_Memory.h"
#include "ace/OS_NS_string.h"
#include "Options.h"
#include "Protocol_Manager.h"
// Returns a pointer to the Drwho_Node associated with HOST_NAME (if
// it exists, otherwise a new node is created.  Note that if a
// Drwho_Node is found it is moved to the front of the list so that
// subsequent finds are faster (i.e., self-organizing!)
//
// NOTE(review): despite the comment above, the match is performed against
// get_login_name(), not a host name -- confirm the intent in the header.

Drwho_Node *
Protocol_Manager::get_drwho_node (char *key_name, Drwho_Node *&head)
{
  // Walk the list with a pointer-to-pointer so that the matching link can
  // later be spliced out without tracking a separate "previous" pointer.
  Drwho_Node **temp = &head;

  for (; *temp != 0; temp = &(*temp)->next_)
    if (ACE_OS::strcmp (key_name,
                        (*temp)->get_login_name ()) == 0)
      break;

  if (*temp == 0)
    // Not found: allocate a fresh node as the new head (the Drwho_Node
    // constructor presumably links the old head as its successor -- its
    // definition is not visible here).  Returns 0 on allocation failure.
    ACE_NEW_RETURN (head,
                    Drwho_Node (key_name, head),
                    0);
  else
    {
      // Found: unlink the node from its current position and move it to
      // the front of the list (the self-organizing step).
      Drwho_Node *t = *temp;
      *temp = (*temp)->next_;
      t->next_ = head;
      head = t;
    }

  // Either way, the node of interest is now the head of the list.
  return head;
}
// Starts with an empty system: no users counted yet.
Protocol_Manager::Protocol_Manager (void)
  : total_users (0)
{
}

Protocol_Manager::~Protocol_Manager (void)
{
  // Trace destruction only when debugging output was requested.
  if (Options::get_opt (Options::DEBUGGING))
    ACE_DEBUG ((LM_DEBUG,
                "disposing Protocol_Manager\n"));
}
// Returns the next friend in the sequence of sorted friends.

Protocol_Record *
Protocol_Manager::get_next_friend (void)
{
  // Delegate straight to the underlying sorted-sequence member.
  Protocol_Record *entry = this->ss->get_next_entry ();
  return entry;
}

// Returns each friend in turn, without the sorted-iteration semantics.

Protocol_Record *
Protocol_Manager::get_each_friend (void)
{
  Protocol_Record *entry = this->ss->get_each_entry ();
  return entry;
}

// Returns the number of friends.

int
Protocol_Manager::friend_count (void)
{
  int count = this->ss->n_elems ();
  return count;
}
// Returns total number of users logged in throughout the system.
int
Protocol_Manager::get_total_users (void)
{
return Protocol_Manager::total_users;
}
void
Protocol_Manager::increment_total_users (int remote_users)
{
Protocol_Manager::total_users += remote_users;
}
| wfnex/openbras | src/ace/ACE_wrappers/apps/drwho/Protocol_Manager.cpp | C++ | bsd-3-clause | 1,829 |
<?php
namespace Music\Controller;
use Zend\Mvc\Controller\AbstractActionController;
use Zend\View\Model\ViewModel;
use Music\Model\Music;
use Music\Form\MusicForm;
class MusicController extends AbstractActionController
{
    /** @var \Music\Model\MusicTable|null Lazily-resolved table gateway. */
    protected $musicTable;

    /**
     * Lists every music record.
     *
     * @return ViewModel
     */
    public function indexAction()
    {
        return new ViewModel(array(
            'musics' => $this->getMusicTable()->fetchAll(),
        ));
    }

    /**
     * Lazily fetches the MusicTable service from the service locator and
     * caches it for subsequent calls.
     *
     * @return \Music\Model\MusicTable
     */
    public function getMusicTable()
    {
        if (!$this->musicTable) {
            $sm = $this->getServiceLocator();
            $this->musicTable = $sm->get('Music\Model\MusicTable');
        }
        return $this->musicTable;
    }

    /**
     * Renders the "add music" form on GET; validates and persists on POST,
     * then redirects back to the music index.
     *
     * @return array|\Zend\Http\Response
     */
    public function addAction()
    {
        $form = new MusicForm();
        $form->get('submit')->setValue('Add');
        $form->prepare();

        $request = $this->getRequest();
        if ($request->isPost()) {
            $music = new Music();
            // Merge regular POST fields with uploaded-file data so file
            // inputs pass through the same input filter as the rest.
            $post = array_merge_recursive($request->getPost()->toArray(), $request->getFiles()->toArray());
            $form->setInputFilter($music->getInputFilter());
            $form->setData($post);
            if ($form->isValid()) {
                $music->exchangeArray($form->getData());
                // Fixed casing (was getmusicTable()/savemusic(); it only
                // worked because PHP method names are case-insensitive).
                $this->getMusicTable()->saveMusic($music);
                return $this->redirect()->toRoute('music');
            }
        }
        return array('form' => $form);
    }
}
?> | zocuxtech/smsandemail | module/Music/src/Music/Controller/MusicController.php | PHP | bsd-3-clause | 1,423 |
package org.mafagafogigante.dungeon.game;
import org.mafagafogigante.dungeon.util.Percentage;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
 * A recipe from which concrete Locations are built: identity, light level,
 * blob size, creature spawners, item probabilities, and blocked entrances.
 */
public final class LocationPreset {

  private final Id id;
  private final Type type;
  private final Name name;
  private final BlockedEntrances blockedEntrances = new BlockedEntrances();
  private final List<SpawnerPreset> spawners = new ArrayList<>();
  private final Map<Id, Percentage> items = new HashMap<>();
  private Percentage lightPermittivity;
  private int blobSize;
  private LocationDescription description;

  LocationPreset(Id id, Type type, Name name) {
    this.id = id;
    this.type = type;
    this.name = name;
  }

  public Id getId() {
    return this.id;
  }

  public Type getType() {
    return this.type;
  }

  public Name getName() {
    return this.name;
  }

  public LocationDescription getDescription() {
    return this.description;
  }

  public void setDescription(LocationDescription description) {
    this.description = description;
  }

  public List<SpawnerPreset> getSpawners() {
    return this.spawners;
  }

  /**
   * Registers a creature Spawner recipe for this Location.
   *
   * @param preset the SpawnerPreset
   */
  public void addSpawner(SpawnerPreset preset) {
    this.spawners.add(preset);
  }

  public Set<Entry<Id, Percentage>> getItems() {
    return this.items.entrySet();
  }

  /**
   * Registers an item that may appear in this Location.
   *
   * @param id the ID string of the item
   * @param probability the probability of the item appearing
   */
  public void addItem(String id, Double probability) {
    Id itemId = new Id(id);
    this.items.put(itemId, new Percentage(probability));
  }

  public BlockedEntrances getBlockedEntrances() {
    // Defensive copy: callers must not mutate the preset's internal state.
    return new BlockedEntrances(this.blockedEntrances);
  }

  /**
   * Blocks exiting and entering into the location by a given direction.
   *
   * @param direction a Direction to be blocked.
   */
  public void block(Direction direction) {
    this.blockedEntrances.block(direction);
  }

  public Percentage getLightPermittivity() {
    return this.lightPermittivity;
  }

  public void setLightPermittivity(double lightPermittivity) {
    this.lightPermittivity = new Percentage(lightPermittivity);
  }

  public int getBlobSize() {
    return this.blobSize;
  }

  public void setBlobSize(int blobSize) {
    this.blobSize = blobSize;
  }

  public enum Type {HOME, RIVER, BRIDGE, DUNGEON_ENTRANCE, DUNGEON_STAIRWAY, DUNGEON_ROOM, DUNGEON_CORRIDOR, LAND}

}
| ffurkanhas/dungeon | src/main/java/org/mafagafogigante/dungeon/game/LocationPreset.java | Java | bsd-3-clause | 2,633 |
// Copyright 2017 Yanni Coroneos. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"../../embedded"
"fmt"
"runtime"
"syscall"
)
/*
 * This is the entry point of GERT. dont try anything fancy
 */

// Entry is jumped to before any Go runtime services exist, so it must not
// trigger a stack split -- hence the nosplit pragma directly below (it must
// stay immediately adjacent to the func declaration).
//
//go:nosplit
func Entry() {
	// NOTE(review): Armhackmode is a field added to the project's patched
	// runtime; presumably it flags bare-metal ARM mode -- confirm there.
	runtime.Armhackmode = 1
	// Hand control to the runtime, which eventually calls main().
	runtime.Runtime_main()
}
//the runtime calls main after it's done setting up
func main() {
	//test things like channels and whatnot
	fmt.Printf("self tests ...")
	self_tests()
	fmt.Printf("done!\n")

	//print out some warnings for myself so I dont forget possibly sketchy things I have done
	fmt.Printf("warnings ...")
	self_warnings()
	fmt.Printf("done!\n")

	//init the GIC and turn on interrupts
	fmt.Printf("pre-init ...")
	pre_init()
	// Default the timezone to UTC before user code runs.
	syscall.Setenv("TZ", "UTC")
	// NOTE(review): Booted is another patched-runtime field; presumably it
	// signals that boot is complete -- confirm in the runtime sources.
	runtime.Booted = 1
	fmt.Printf("done!\n")

	//user-provided init code
	fmt.Printf("user init ...")
	user_init()
	fmt.Printf("done!\n")

	//user main loop
	for {
		user_loop()
	}
	// Unreachable: the loop above never exits normally.
	panic("user loop broke out")
}
// self_tests exercises runtime primitives (printing, buffered channels,
// goroutine scheduling) that are critical for basic functionality.
// Add things here if you think they are critical.
func self_tests() {
	fmt.Println("Hi from fmt")

	msgs := make(chan string, 1)
	msgs <- "channel test pass"
	fmt.Println(<-msgs)

	go func(resp chan string) {
		fmt.Println("print from inside goroutine")
		resp <- "send channel from inside a goroutine"
	}(msgs)
	// Blocks until the goroutine above has sent its message.
	fmt.Println(<-msgs)
}
//I never read the git logs. Now I dont have to
// self_warnings is a placeholder for reminders about known-sketchy hacks;
// currently everything is commented out, so it is a no-op.
func self_warnings() {
	//fmt.Println("REMEMBER THAT SKETCHY THING YOU DID WITH MAPPING AN EXTRA PAGE IN MAP_REGION")
}
//If a user doesnt want IRQs then they should never enable one. The GIC will just be ON but do nothing
// pre_init wires up interrupt handling and SMP before user code runs.
func pre_init() {

	//enable GIC
	embedded.GIC_init(false)

	//set IRQ callback function
	runtime.SetIRQcallback(irq)

	//Release spinning cpus
	// NOTE(review): the argument is presumably the number of secondary
	// CPUs to release -- confirm against the patched runtime.
	runtime.Release(3)

	//unmap the first page
	// Removing the identity mapping at 0x0 turns nil dereferences into faults.
	runtime.Unmap_region(0x0, 0x0, 0x100000)
}
| ycoroneos/G.E.R.T | gert/armv7a/programs/gopher/kernel.go | GO | bsd-3-clause | 1,928 |
// Copyright 2012 Jimmy Zelinskie. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package geddit
import (
"fmt"
)
// Submission represents an individual post from the perspective
// of a subreddit. Remember to check for nil pointers before
// using any pointer fields.
type Submission struct {
	Author       string  `json:"author"`
	Title        string  `json:"title"`
	URL          string  `json:"url"`
	Domain       string  `json:"domain"`
	Subreddit    string  `json:"subreddit"`
	SubredditID  string  `json:"subreddit_id"`
	// FullID is populated from the API's "name" field -- presumably the
	// type-prefixed fullname (e.g. "t3_..."); confirm against the reddit API.
	FullID       string  `json:"name"`
	ID           string  `json:"id"`
	Permalink    string  `json:"permalink"`
	Selftext     string  `json:"selftext"`
	ThumbnailURL string  `json:"thumbnail"`
	// DateCreated comes from "created_utc" (a Unix timestamp as a float).
	DateCreated  float64 `json:"created_utc"`
	NumComments  int     `json:"num_comments"`
	Score        int     `json:"score"`
	Ups          int     `json:"ups"`
	Downs        int     `json:"downs"`
	IsNSFW       bool    `json:"over_18"`
	IsSelf       bool    `json:"is_self"`
	WasClicked   bool    `json:"clicked"`
	IsSaved      bool    `json:"saved"`
	// BannedBy is a pointer so "not banned" (null) is distinguishable.
	BannedBy     *string `json:"banned_by"`
}
// voteID returns the identifier used when voting on this submission.
func (h Submission) voteID() string {
	return h.FullID
}

// deleteID returns the identifier used when deleting this submission.
func (h Submission) deleteID() string {
	return h.FullID
}

// replyID returns the identifier used when replying to this submission.
func (h Submission) replyID() string {
	return h.FullID
}
// FullPermalink returns the full URL of a submission.
func (h *Submission) FullPermalink() string {
	const base = "https://reddit.com"
	return base + h.Permalink
}
// String returns the string representation of a submission,
// e.g. "42 - Some Title (3 comments)".
func (h *Submission) String() string {
	suffix := "s"
	if h.NumComments == 1 {
		suffix = ""
	}
	comments := fmt.Sprintf("%d comment%s", h.NumComments, suffix)
	return fmt.Sprintf("%d - %s (%s)", h.Score, h.Title, comments)
}
| aggrolite/geddit | submission.go | GO | bsd-3-clause | 1,804 |
package com.flash3388.flashlib.vision;
import com.castle.util.throwables.ThrowableHandler;
/**
 * A Runnable that pulls one value from a {@link Source} and pushes it into a
 * {@link Pipeline} each time it is run; any error (including unchecked
 * throwables) is routed to the supplied {@link ThrowableHandler}.
 *
 * @param <T> the type of data moved from source to pipeline
 */
public class SourcePoller<T> implements Runnable {

    private final Source<? extends T> mSource;
    private final Pipeline<? super T> mPipeline;
    private final ThrowableHandler mThrowableHandler;

    public SourcePoller(Source<? extends T> source,
                        Pipeline<? super T> pipeline,
                        ThrowableHandler throwableHandler) {
        this.mSource = source;
        this.mPipeline = pipeline;
        this.mThrowableHandler = throwableHandler;
    }

    @Override
    public void run() {
        try {
            // Fetch and forward a single value per invocation.
            mPipeline.process(mSource.get());
        } catch (Throwable t) {
            // Deliberately broad: polling must never kill the executor thread.
            mThrowableHandler.handle(t);
        }
    }
}
| Flash3388/FlashLib | flashlib.vision.core/src/main/java/com/flash3388/flashlib/vision/SourcePoller.java | Java | bsd-3-clause | 788 |
<?php
/**
* Zend Framework (http://framework.zend.com/)
*
* @link http://github.com/zendframework/zf2 for the canonical source repository
* @copyright Copyright (c) 2005-2014 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
namespace Zend\Db\Sql;
use Zend\Db\Adapter\AdapterInterface;
use Zend\Db\Adapter\ParameterContainer;
use Zend\Db\Adapter\Platform\PlatformInterface;
use Zend\Db\Adapter\Platform\Sql92;
use Zend\Db\Adapter\StatementContainerInterface;
use Zend\Stdlib\PriorityList;
/**
 * Builder for SQL UPDATE statements (UPDATE ... SET ... WHERE ...).
 *
 * @property Where $where Read-only access to the WHERE predicate (via __get()).
 */
class Update extends AbstractSql implements SqlInterface, PreparableSqlInterface
{
    /**#@+
     * @const
     */
    const SPECIFICATION_UPDATE = 'update';
    const SPECIFICATION_WHERE = 'where';

    const VALUES_MERGE = 'merge';
    const VALUES_SET   = 'set';
    /**#@-*/

    // sprintf() templates keyed by the SPECIFICATION_* constants above.
    protected $specifications = array(
        self::SPECIFICATION_UPDATE => 'UPDATE %1$s SET %2$s',
        self::SPECIFICATION_WHERE => 'WHERE %1$s'
    );

    /**
     * @var string|TableIdentifier
     */
    protected $table = '';

    /**
     * @var bool
     */
    protected $emptyWhereProtection = true;

    /**
     * Column => value pairs to SET, kept in insertion (FIFO) order.
     *
     * @var PriorityList
     */
    protected $set;

    /**
     * @var string|Where
     */
    protected $where = null;

    /**
     * Constructor
     *
     * @param  null|string|TableIdentifier $table
     */
    public function __construct($table = null)
    {
        if ($table) {
            $this->table($table);
        }
        $this->where = new Where();
        $this->set = new PriorityList();
        // FIFO: columns render in the order they were added.
        $this->set->isLIFO(false);
    }

    /**
     * Specify table for statement
     *
     * @param  string|TableIdentifier $table
     * @return Update
     */
    public function table($table)
    {
        $this->table = $table;
        return $this;
    }

    /**
     * Set key/value pairs to update
     *
     * @param  array $values Associative array of key values
     * @param  string $flag One of the VALUES_* constants
     * @throws Exception\InvalidArgumentException
     * @return Update
     */
    public function set(array $values, $flag = self::VALUES_SET)
    {
        // NOTE: loose == also rejects an empty array, not just null.
        if ($values == null) {
            throw new Exception\InvalidArgumentException('set() expects an array of values');
        }

        // VALUES_SET replaces any previously registered pairs.
        if ($flag == self::VALUES_SET) {
            $this->set->clear();
        }
        // A numeric $flag doubles as the priority for the inserted pairs.
        $priority = is_numeric($flag) ? $flag : 0;
        foreach ($values as $k => $v) {
            if (!is_string($k)) {
                throw new Exception\InvalidArgumentException('set() expects a string for the value key');
            }
            $this->set->insert($k, $v, $priority);
        }

        return $this;
    }

    /**
     * Create where clause
     *
     * @param  Where|\Closure|string|array $predicate
     * @param  string $combination One of the OP_* constants from Predicate\PredicateSet
     * @throws Exception\InvalidArgumentException
     * @return Update
     */
    public function where($predicate, $combination = Predicate\PredicateSet::OP_AND)
    {
        // A Where instance replaces the current clause wholesale; anything
        // else is appended to the existing predicate set.
        if ($predicate instanceof Where) {
            $this->where = $predicate;
        } else {
            $this->where->addPredicates($predicate, $combination);
        }
        return $this;
    }

    /**
     * Returns the builder's internal state (or one element of it).
     *
     * @param  null|string $key One of 'emptyWhereProtection', 'table', 'set', 'where'
     * @return mixed
     */
    public function getRawState($key = null)
    {
        $rawState = array(
            'emptyWhereProtection' => $this->emptyWhereProtection,
            'table' => $this->table,
            'set' => $this->set->toArray(),
            'where' => $this->where
        );
        return (isset($key) && array_key_exists($key, $rawState)) ? $rawState[$key] : $rawState;
    }

    /**
     * Prepare statement
     *
     * @param  AdapterInterface $adapter
     * @param  StatementContainerInterface $statementContainer
     * @return void
     */
    public function prepareStatement(AdapterInterface $adapter, StatementContainerInterface $statementContainer)
    {
        $driver = $adapter->getDriver();
        $platform = $adapter->getPlatform();
        $parameterContainer = $statementContainer->getParameterContainer();

        if (!$parameterContainer instanceof ParameterContainer) {
            $parameterContainer = new ParameterContainer();
            $statementContainer->setParameterContainer($parameterContainer);
        }

        $table = $this->table;
        $schema = null;

        // create quoted table name to use in update processing
        if ($table instanceof TableIdentifier) {
            list($table, $schema) = $table->getTableAndSchema();
        }

        $table = $platform->quoteIdentifier($table);

        if ($schema) {
            $table = $platform->quoteIdentifier($schema) . $platform->getIdentifierSeparator() . $table;
        }

        // Expressions are inlined with their own parameters; plain values
        // become named placeholders keyed by column name.
        // NOTE(review): this checks `instanceof Expression` while
        // getSqlString() checks `instanceof ExpressionInterface` -- confirm
        // whether non-Expression implementations should also be handled here.
        $setSql = array();
        foreach ($this->set as $column => $value) {
            if ($value instanceof Expression) {
                $exprData = $this->processExpression($value, $platform, $driver);
                $setSql[] = $platform->quoteIdentifier($column) . ' = ' . $exprData->getSql();
                $parameterContainer->merge($exprData->getParameterContainer());
            } else {
                $setSql[] = $platform->quoteIdentifier($column) . ' = ' . $driver->formatParameterName($column);
                $parameterContainer->offsetSet($column, $value);
            }
        }
        $set = implode(', ', $setSql);

        $sql = sprintf($this->specifications[static::SPECIFICATION_UPDATE], $table, $set);

        // process where
        if ($this->where->count() > 0) {
            $whereParts = $this->processExpression($this->where, $platform, $driver, 'where');
            $parameterContainer->merge($whereParts->getParameterContainer());
            $sql .= ' ' . sprintf($this->specifications[static::SPECIFICATION_WHERE], $whereParts->getSql());
        }
        $statementContainer->setSql($sql);
    }

    /**
     * Get SQL string for statement
     *
     * @param  null|PlatformInterface $adapterPlatform If null, defaults to Sql92
     * @return string
     */
    public function getSqlString(PlatformInterface $adapterPlatform = null)
    {
        $adapterPlatform = ($adapterPlatform) ?: new Sql92;
        $table = $this->table;
        $schema = null;

        // create quoted table name to use in update processing
        if ($table instanceof TableIdentifier) {
            list($table, $schema) = $table->getTableAndSchema();
        }

        $table = $adapterPlatform->quoteIdentifier($table);

        if ($schema) {
            $table = $adapterPlatform->quoteIdentifier($schema) . $adapterPlatform->getIdentifierSeparator() . $table;
        }

        // Unlike prepareStatement(), values are quoted inline here; NULL is
        // rendered literally rather than as a quoted value.
        $setSql = array();
        foreach ($this->set as $column => $value) {
            if ($value instanceof ExpressionInterface) {
                $exprData = $this->processExpression($value, $adapterPlatform);
                $setSql[] = $adapterPlatform->quoteIdentifier($column) . ' = ' . $exprData->getSql();
            } elseif ($value === null) {
                $setSql[] = $adapterPlatform->quoteIdentifier($column) . ' = NULL';
            } else {
                $setSql[] = $adapterPlatform->quoteIdentifier($column) . ' = ' . $adapterPlatform->quoteValue($value);
            }
        }
        $set = implode(', ', $setSql);

        $sql = sprintf($this->specifications[static::SPECIFICATION_UPDATE], $table, $set);
        if ($this->where->count() > 0) {
            $whereParts = $this->processExpression($this->where, $adapterPlatform, null, 'where');
            $sql .= ' ' . sprintf($this->specifications[static::SPECIFICATION_WHERE], $whereParts->getSql());
        }

        return $sql;
    }

    /**
     * Variable overloading
     *
     * Proxies to "where" only; any other name yields null implicitly.
     *
     * @param  string $name
     * @return mixed
     */
    public function __get($name)
    {
        switch (strtolower($name)) {
            case 'where':
                return $this->where;
        }
    }

    /**
     * __clone
     *
     * Resets the where object each time the Update is cloned.
     *
     * @return void
     */
    public function __clone()
    {
        $this->where = clone $this->where;
        $this->set = clone $this->set;
    }
}
| ishvaram/email-app | vendor/ZF2/library/Zend/Db/Sql/Update.php | PHP | bsd-3-clause | 8,578 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/spdy/hpack_encoding_context.h"
#include <cstddef>
#include "base/logging.h"
#include "base/macros.h"
#include "net/spdy/hpack_constants.h"
#include "net/spdy/hpack_entry.h"
namespace net {
using base::StringPiece;
namespace {
// An entry in the static table. Must be a POD in order to avoid
// static initializers, i.e. no user-defined constructors or
// destructors.
struct StaticEntry {
  const char* const name;
  const size_t name_len;
  const char* const value;
  const size_t value_len;
};

// The "constructor" for a StaticEntry that computes the lengths at
// compile time.
#define STATIC_ENTRY(name, value) \
  { name, arraysize(name) - 1, value, arraysize(value) - 1 }

// The HPACK static header table. Entries are addressed 1-based; callers
// offset indices past the dynamic table before indexing in here (see
// GetNameAt()/GetValueAt()).
const StaticEntry kStaticTable[] = {
  STATIC_ENTRY(":authority"                  , ""),             // 1
  STATIC_ENTRY(":method"                     , "GET"),          // 2
  STATIC_ENTRY(":method"                     , "POST"),         // 3
  STATIC_ENTRY(":path"                       , "/"),            // 4
  STATIC_ENTRY(":path"                       , "/index.html"),  // 5
  STATIC_ENTRY(":scheme"                     , "http"),         // 6
  STATIC_ENTRY(":scheme"                     , "https"),        // 7
  STATIC_ENTRY(":status"                     , "200"),          // 8
  STATIC_ENTRY(":status"                     , "500"),          // 9
  STATIC_ENTRY(":status"                     , "404"),          // 10
  STATIC_ENTRY(":status"                     , "403"),          // 11
  STATIC_ENTRY(":status"                     , "400"),          // 12
  STATIC_ENTRY(":status"                     , "401"),          // 13
  STATIC_ENTRY("accept-charset"              , ""),             // 14
  STATIC_ENTRY("accept-encoding"             , ""),             // 15
  STATIC_ENTRY("accept-language"             , ""),             // 16
  STATIC_ENTRY("accept-ranges"               , ""),             // 17
  STATIC_ENTRY("accept"                      , ""),             // 18
  STATIC_ENTRY("access-control-allow-origin" , ""),             // 19
  STATIC_ENTRY("age"                         , ""),             // 20
  STATIC_ENTRY("allow"                       , ""),             // 21
  STATIC_ENTRY("authorization"               , ""),             // 22
  STATIC_ENTRY("cache-control"               , ""),             // 23
  STATIC_ENTRY("content-disposition"         , ""),             // 24
  STATIC_ENTRY("content-encoding"            , ""),             // 25
  STATIC_ENTRY("content-language"            , ""),             // 26
  STATIC_ENTRY("content-length"              , ""),             // 27
  STATIC_ENTRY("content-location"            , ""),             // 28
  STATIC_ENTRY("content-range"               , ""),             // 29
  STATIC_ENTRY("content-type"                , ""),             // 30
  STATIC_ENTRY("cookie"                      , ""),             // 31
  STATIC_ENTRY("date"                        , ""),             // 32
  STATIC_ENTRY("etag"                        , ""),             // 33
  STATIC_ENTRY("expect"                      , ""),             // 34
  STATIC_ENTRY("expires"                     , ""),             // 35
  STATIC_ENTRY("from"                        , ""),             // 36
  STATIC_ENTRY("host"                        , ""),             // 37
  STATIC_ENTRY("if-match"                    , ""),             // 38
  STATIC_ENTRY("if-modified-since"           , ""),             // 39
  STATIC_ENTRY("if-none-match"               , ""),             // 40
  STATIC_ENTRY("if-range"                    , ""),             // 41
  STATIC_ENTRY("if-unmodified-since"         , ""),             // 42
  STATIC_ENTRY("last-modified"               , ""),             // 43
  STATIC_ENTRY("link"                        , ""),             // 44
  STATIC_ENTRY("location"                    , ""),             // 45
  STATIC_ENTRY("max-forwards"                , ""),             // 46
  STATIC_ENTRY("proxy-authenticate"          , ""),             // 47
  STATIC_ENTRY("proxy-authorization"         , ""),             // 48
  STATIC_ENTRY("range"                       , ""),             // 49
  STATIC_ENTRY("referer"                     , ""),             // 50
  STATIC_ENTRY("refresh"                     , ""),             // 51
  STATIC_ENTRY("retry-after"                 , ""),             // 52
  STATIC_ENTRY("server"                      , ""),             // 53
  STATIC_ENTRY("set-cookie"                  , ""),             // 54
  STATIC_ENTRY("strict-transport-security"   , ""),             // 55
  STATIC_ENTRY("transfer-encoding"           , ""),             // 56
  STATIC_ENTRY("user-agent"                  , ""),             // 57
  STATIC_ENTRY("vary"                        , ""),             // 58
  STATIC_ENTRY("via"                         , ""),             // 59
  STATIC_ENTRY("www-authenticate"            , ""),             // 60
};

#undef STATIC_ENTRY

const size_t kStaticEntryCount = arraysize(kStaticTable);
} // namespace
// Re-export the sentinel so callers need not reference HpackEntry directly.
const uint32 HpackEncodingContext::kUntouched = HpackEntry::kUntouched;

HpackEncodingContext::HpackEncodingContext()
    : settings_header_table_size_(kDefaultHeaderTableSizeSetting) {}

HpackEncodingContext::~HpackEncodingContext() {}
// Number of entries currently in the dynamic (mutable) header table.
uint32 HpackEncodingContext::GetMutableEntryCount() const {
  return header_table_.GetEntryCount();
}

// Total addressable entries: dynamic table followed by the static table.
uint32 HpackEncodingContext::GetEntryCount() const {
  return kStaticEntryCount + GetMutableEntryCount();
}
// Returns the header name at 1-based |index|. Indices beyond the dynamic
// table fall through to the static table.
StringPiece HpackEncodingContext::GetNameAt(uint32 index) const {
  CHECK_GE(index, 1u);
  CHECK_LE(index, GetEntryCount());
  const uint32 dynamic_count = header_table_.GetEntryCount();
  if (index <= dynamic_count)
    return header_table_.GetEntry(index).name();
  const StaticEntry& entry = kStaticTable[index - dynamic_count - 1];
  return StringPiece(entry.name, entry.name_len);
}

// Returns the header value at 1-based |index|, with the same dynamic/static
// split as GetNameAt().
StringPiece HpackEncodingContext::GetValueAt(uint32 index) const {
  CHECK_GE(index, 1u);
  CHECK_LE(index, GetEntryCount());
  const uint32 dynamic_count = header_table_.GetEntryCount();
  if (index <= dynamic_count)
    return header_table_.GetEntry(index).value();
  const StaticEntry& entry = kStaticTable[index - dynamic_count - 1];
  return StringPiece(entry.value, entry.value_len);
}
// True iff the entry at |index| is in the reference set. Static-table
// entries are never referenced.
bool HpackEncodingContext::IsReferencedAt(uint32 index) const {
  CHECK_GE(index, 1u);
  CHECK_LE(index, GetEntryCount());
  return index <= header_table_.GetEntryCount() &&
         header_table_.GetEntry(index).IsReferenced();
}

// Touch count of the entry at |index|; static-table entries report 0.
uint32 HpackEncodingContext::GetTouchCountAt(uint32 index) const {
  CHECK_GE(index, 1u);
  CHECK_LE(index, GetEntryCount());
  if (index > header_table_.GetEntryCount())
    return 0;
  return header_table_.GetEntry(index).TouchCount();
}

void HpackEncodingContext::SetReferencedAt(uint32 index, bool referenced) {
  HpackEntry* entry = header_table_.GetMutableEntry(index);
  entry->SetReferenced(referenced);
}

void HpackEncodingContext::AddTouchesAt(uint32 index, uint32 touch_count) {
  HpackEntry* entry = header_table_.GetMutableEntry(index);
  entry->AddTouches(touch_count);
}

void HpackEncodingContext::ClearTouchesAt(uint32 index) {
  HpackEntry* entry = header_table_.GetMutableEntry(index);
  entry->ClearTouches();
}
// Records the SETTINGS_HEADER_TABLE_SIZE value; shrinks the table
// immediately if it currently exceeds the new bound.
void HpackEncodingContext::ApplyHeaderTableSizeSetting(uint32 size) {
  settings_header_table_size_ = size;
  if (header_table_.max_size() > size) {
    // Implicit maximum-size context update.
    CHECK(ProcessContextUpdateNewMaximumSize(size));
  }
}

// Applies an encoder-signalled maximum-size update; it must not exceed the
// size allowed by settings.
bool HpackEncodingContext::ProcessContextUpdateNewMaximumSize(uint32 size) {
  if (size <= settings_header_table_size_) {
    header_table_.SetMaxSize(size);
    return true;
  }
  return false;
}
bool HpackEncodingContext::ProcessContextUpdateEmptyReferenceSet() {
for (size_t i = 1; i <= header_table_.GetEntryCount(); ++i) {
HpackEntry* entry = header_table_.GetMutableEntry(i);
if (entry->IsReferenced()) {
entry->SetReferenced(false);
}
}
return true;
}
// Handles an indexed-header representation. For a dynamic-table index this
// toggles the entry's reference-set membership; for a static-table index it
// copies the entry into the dynamic table (possibly evicting entries, whose
// indices are reported via |removed_referenced_indices|) and references it.
// |*new_index| receives the entry's index in the dynamic table, or a value
// < 1 if it did not fit.
bool HpackEncodingContext::ProcessIndexedHeader(uint32 index, uint32* new_index,
    std::vector<uint32>* removed_referenced_indices) {
  CHECK_GT(index, 0u);
  // Was CHECK_LT, which rejected index == GetEntryCount() (the last
  // static-table entry) even though GetNameAt()/GetValueAt() -- called
  // below -- accept it. Use CHECK_LE so the full 1..GetEntryCount() range
  // of valid HPACK indices is allowed.
  CHECK_LE(index, GetEntryCount());
  if (index <= header_table_.GetEntryCount()) {
    *new_index = index;
    removed_referenced_indices->clear();
    HpackEntry* entry = header_table_.GetMutableEntry(index);
    entry->SetReferenced(!entry->IsReferenced());
  } else {
    // TODO(akalin): Make HpackEntry know about owned strings and
    // non-owned strings so that it can potentially avoid copies here.
    HpackEntry entry(GetNameAt(index), GetValueAt(index));
    header_table_.TryAddEntry(entry, new_index, removed_referenced_indices);
    if (*new_index >= 1) {
      header_table_.GetMutableEntry(*new_index)->SetReferenced(true);
    }
  }
  return true;
}
// Inserts a literal name/value pair into the dynamic table and references
// it. |*index| receives the new entry's index, or a value < 1 if the entry
// was too large to fit.
bool HpackEncodingContext::ProcessLiteralHeaderWithIncrementalIndexing(
    StringPiece name,
    StringPiece value,
    uint32* index,
    std::vector<uint32>* removed_referenced_indices) {
  HpackEntry new_entry(name, value);
  header_table_.TryAddEntry(new_entry, index, removed_referenced_indices);
  // Only mark the entry referenced if it actually made it into the table.
  if (*index >= 1)
    header_table_.GetMutableEntry(*index)->SetReferenced(true);
  return true;
}
} // namespace net
| anirudhSK/chromium | net/spdy/hpack_encoding_context.cc | C++ | bsd-3-clause | 9,278 |
import threading
from ctypes import POINTER, Structure, byref, c_char, c_char_p, c_int, c_size_t
from django.contrib.gis.geos.base import GEOSBase
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory
from django.contrib.gis.geos.prototypes.errcheck import (
check_geom, check_sized_string, check_string,
)
from django.contrib.gis.geos.prototypes.geom import c_uchar_p, geos_char_p
from django.utils import six
from django.utils.encoding import force_bytes
# ### The WKB/WKT Reader/Writer structures and pointers ###

# Opaque struct types mirroring the GEOS reader/writer handles. Their fields
# are never accessed from Python; they exist only to give each handle pointer
# a distinct ctypes type.
class WKTReader_st(Structure):
    pass


class WKTWriter_st(Structure):
    pass


class WKBReader_st(Structure):
    pass


class WKBWriter_st(Structure):
    pass


WKT_READ_PTR = POINTER(WKTReader_st)
WKT_WRITE_PTR = POINTER(WKTWriter_st)
WKB_READ_PTR = POINTER(WKBReader_st)
# Fixed: was POINTER(WKBReader_st). Harmless in practice because the structs
# are opaque, but the writer pointer should be typed by the writer struct.
WKB_WRITE_PTR = POINTER(WKBWriter_st)
# WKTReader routines: create/destroy a reader handle and parse WKT text into
# a raw GEOS geometry pointer (check_geom raises on a NULL result).
wkt_reader_create = GEOSFuncFactory('GEOSWKTReader_create', restype=WKT_READ_PTR)
wkt_reader_destroy = GEOSFuncFactory('GEOSWKTReader_destroy', argtypes=[WKT_READ_PTR])

wkt_reader_read = GEOSFuncFactory(
    'GEOSWKTReader_read', argtypes=[WKT_READ_PTR, c_char_p], restype=GEOM_PTR, errcheck=check_geom
)

# WKTWriter routines: serialize a geometry pointer back to WKT, plus the
# output-dimension / trim / rounding-precision knobs exposed by GEOS.
wkt_writer_create = GEOSFuncFactory('GEOSWKTWriter_create', restype=WKT_WRITE_PTR)
wkt_writer_destroy = GEOSFuncFactory('GEOSWKTWriter_destroy', argtypes=[WKT_WRITE_PTR])

wkt_writer_write = GEOSFuncFactory(
    'GEOSWKTWriter_write', argtypes=[WKT_WRITE_PTR, GEOM_PTR], restype=geos_char_p, errcheck=check_string
)

wkt_writer_get_outdim = GEOSFuncFactory(
    'GEOSWKTWriter_getOutputDimension', argtypes=[WKT_WRITE_PTR], restype=c_int
)
wkt_writer_set_outdim = GEOSFuncFactory(
    'GEOSWKTWriter_setOutputDimension', argtypes=[WKT_WRITE_PTR, c_int]
)

wkt_writer_set_trim = GEOSFuncFactory('GEOSWKTWriter_setTrim', argtypes=[WKT_WRITE_PTR, c_char])

wkt_writer_set_precision = GEOSFuncFactory('GEOSWKTWriter_setRoundingPrecision', argtypes=[WKT_WRITE_PTR, c_int])

# WKBReader routines (the read functions themselves are defined below via
# WKBReadFunc).
wkb_reader_create = GEOSFuncFactory('GEOSWKBReader_create', restype=WKB_READ_PTR)
wkb_reader_destroy = GEOSFuncFactory('GEOSWKBReader_destroy', argtypes=[WKB_READ_PTR])
class WKBReadFunc(GEOSFuncFactory):
    # Factory for the two WKB parse entry points (binary and hex), sharing
    # the same signature and NULL-checking errcheck.
    #
    # Although the function definitions take `const unsigned char *`
    # as their parameter, we use c_char_p here so the function may
    # take Python strings directly as parameters.  Inside Python there
    # is not a difference between signed and unsigned characters, so
    # it is not a problem.
    argtypes = [WKB_READ_PTR, c_char_p, c_size_t]
    restype = GEOM_PTR
    errcheck = staticmethod(check_geom)

wkb_reader_read = WKBReadFunc('GEOSWKBReader_read')
wkb_reader_read_hex = WKBReadFunc('GEOSWKBReader_readHEX')
# WKBWriter routines: create/destroy a writer handle.
wkb_writer_create = GEOSFuncFactory('GEOSWKBWriter_create', restype=WKB_WRITE_PTR)
wkb_writer_destroy = GEOSFuncFactory('GEOSWKBWriter_destroy', argtypes=[WKB_WRITE_PTR])

# WKB Writing prototypes.
class WKBWriteFunc(GEOSFuncFactory):
    # Factory for the two WKB serializers (binary and hex); the size_t out
    # parameter receives the length of the returned buffer.
    argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)]
    restype = c_uchar_p
    errcheck = staticmethod(check_sized_string)

wkb_writer_write = WKBWriteFunc('GEOSWKBWriter_write')
wkb_writer_write_hex = WKBWriteFunc('GEOSWKBWriter_writeHEX')

# WKBWriter property getter/setter prototypes.
class WKBWriterGet(GEOSFuncFactory):
    argtypes = [WKB_WRITE_PTR]
    restype = c_int

class WKBWriterSet(GEOSFuncFactory):
    argtypes = [WKB_WRITE_PTR, c_int]

wkb_writer_get_byteorder = WKBWriterGet('GEOSWKBWriter_getByteOrder')
wkb_writer_set_byteorder = WKBWriterSet('GEOSWKBWriter_setByteOrder')
wkb_writer_get_outdim = WKBWriterGet('GEOSWKBWriter_getOutputDimension')
wkb_writer_set_outdim = WKBWriterSet('GEOSWKBWriter_setOutputDimension')
# The SRID-inclusion flag travels as a single byte, not an int.
wkb_writer_get_include_srid = WKBWriterGet('GEOSWKBWriter_getIncludeSRID', restype=c_char)
wkb_writer_set_include_srid = WKBWriterSet('GEOSWKBWriter_setIncludeSRID', argtypes=[WKB_WRITE_PTR, c_char])
# ### Base I/O Class ###
class IOBase(GEOSBase):
    "Base class for GEOS I/O objects."
    def __init__(self):
        # Getting the pointer with the constructor.
        self.ptr = self._constructor()
        # Loading the real destructor function at this point as doing it in
        # __del__ is too late (import error).
        self._destructor.func = self._destructor.get_func(
            *self._destructor.args, **self._destructor.kwargs
        )

    def __del__(self):
        # Cleaning up with the appropriate destructor.
        # NOTE(review): reads ``self._ptr`` while __init__ assigns
        # ``self.ptr`` -- presumably ``ptr`` is a GEOSBase property backed by
        # ``_ptr``; confirm in django.contrib.gis.geos.base before renaming.
        try:
            self._destructor(self._ptr)
        except (AttributeError, TypeError):
            pass  # Some part might already have been garbage collected
# ### Base WKB/WKT Reading and Writing objects ###

# Non-public WKB/WKT reader classes for internal use because
# their `read` methods return _pointers_ instead of GEOSGeometry
# objects.
class _WKTReader(IOBase):
    _constructor = wkt_reader_create
    _destructor = wkt_reader_destroy
    ptr_type = WKT_READ_PTR

    def read(self, wkt):
        "Return a pointer to a C GEOS geometry parsed from WKT input."
        if isinstance(wkt, (bytes, six.string_types)):
            return wkt_reader_read(self.ptr, force_bytes(wkt))
        raise TypeError
class _WKBReader(IOBase):
    _constructor = wkb_reader_create
    _destructor = wkb_reader_destroy
    ptr_type = WKB_READ_PTR

    def read(self, wkb):
        "Returns a _pointer_ to C GEOS Geometry object from the given WKB."
        # Raw binary arrives as a memoryview; strings are treated as HEXEWKB.
        if isinstance(wkb, six.memoryview):
            raw = bytes(wkb)
            return wkb_reader_read(self.ptr, raw, len(raw))
        if isinstance(wkb, (bytes, six.string_types)):
            return wkb_reader_read_hex(self.ptr, wkb, len(wkb))
        raise TypeError
# ### WKB/WKT Writer Classes ###
class WKTWriter(IOBase):
    _constructor = wkt_writer_create
    _destructor = wkt_writer_destroy
    ptr_type = WKT_WRITE_PTR

    # Python-side mirrors of the GEOS writer's trim/precision state; GEOS
    # exposes no getters for these, so they are tracked here.
    _trim = False
    _precision = None

    def __init__(self, dim=2, trim=False, precision=None):
        super(WKTWriter, self).__init__()
        # Only push non-default settings down to GEOS.
        if bool(trim) != self._trim:
            self.trim = trim
        if precision is not None:
            self.precision = precision
        self.outdim = dim

    def write(self, geom):
        "Returns the WKT representation of the given geometry."
        return wkt_writer_write(self.ptr, geom.ptr)

    @property
    def outdim(self):
        return wkt_writer_get_outdim(self.ptr)

    @outdim.setter
    def outdim(self, new_dim):
        if new_dim not in (2, 3):
            raise ValueError('WKT output dimension must be 2 or 3')
        wkt_writer_set_outdim(self.ptr, new_dim)

    @property
    def trim(self):
        return self._trim

    @trim.setter
    def trim(self, flag):
        flag = bool(flag)
        if flag != self._trim:
            self._trim = flag
            wkt_writer_set_trim(self.ptr, b'\x01' if flag else b'\x00')

    @property
    def precision(self):
        return self._precision

    @precision.setter
    def precision(self, precision):
        # None means "no rounding" (passed to GEOS as -1).
        if precision is not None and (not isinstance(precision, int) or precision < 0):
            raise AttributeError('WKT output rounding precision must be non-negative integer or None.')
        if precision != self._precision:
            self._precision = precision
            wkt_writer_set_precision(self.ptr, -1 if precision is None else precision)
class WKBWriter(IOBase):
    _constructor = wkb_writer_create
    _destructor = wkb_writer_destroy
    ptr_type = WKB_WRITE_PTR

    def __init__(self, dim=2):
        super(WKBWriter, self).__init__()
        self.outdim = dim

    def _handle_empty_point(self, geom):
        # GEOS cannot represent an empty point in WKB; substitute the
        # PostGIS NaN-coordinate convention when an SRID is included,
        # otherwise refuse.
        from django.contrib.gis.geos import Point
        if isinstance(geom, Point) and geom.empty:
            if self.srid:
                # PostGIS uses POINT(NaN NaN) for WKB representation of empty
                # points. Use it for EWKB as it's a PostGIS specific format.
                # https://trac.osgeo.org/postgis/ticket/3181
                geom = Point(float('NaN'), float('NaN'), srid=geom.srid)
            else:
                raise ValueError('Empty point is not representable in WKB.')
        return geom

    def write(self, geom):
        "Returns the WKB representation of the given geometry."
        from django.contrib.gis.geos import Polygon
        geom = self._handle_empty_point(geom)
        wkb = wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t()))
        if isinstance(geom, Polygon) and geom.empty:
            # Fix GEOS output for empty polygon.
            # See https://trac.osgeo.org/geos/ticket/680.
            # (Drops the spurious trailing ring and zeroes the ring count.)
            wkb = wkb[:-8] + b'\0' * 4
        return six.memoryview(wkb)

    def write_hex(self, geom):
        "Returns the HEXEWKB representation of the given geometry."
        from django.contrib.gis.geos.polygon import Polygon
        geom = self._handle_empty_point(geom)
        wkb = wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t()))
        if isinstance(geom, Polygon) and geom.empty:
            # Same empty-polygon fix as write(), in hex (2 chars per byte).
            wkb = wkb[:-16] + b'0' * 8
        return wkb

    # ### WKBWriter Properties ###

    # Property for getting/setting the byteorder.
    def _get_byteorder(self):
        return wkb_writer_get_byteorder(self.ptr)

    def _set_byteorder(self, order):
        if order not in (0, 1):
            raise ValueError('Byte order parameter must be 0 (Big Endian) or 1 (Little Endian).')
        wkb_writer_set_byteorder(self.ptr, order)

    byteorder = property(_get_byteorder, _set_byteorder)

    # Property for getting/setting the output dimension.
    @property
    def outdim(self):
        return wkb_writer_get_outdim(self.ptr)

    @outdim.setter
    def outdim(self, new_dim):
        if new_dim not in (2, 3):
            raise ValueError('WKB output dimension must be 2 or 3')
        wkb_writer_set_outdim(self.ptr, new_dim)

    # Property for getting/setting the include srid flag.
    # (GEOS stores the flag as a single byte, hence the ord()/bytes dance.)
    @property
    def srid(self):
        return bool(ord(wkb_writer_get_include_srid(self.ptr)))

    @srid.setter
    def srid(self, include):
        if include:
            flag = b'\x01'
        else:
            flag = b'\x00'
        wkb_writer_set_include_srid(self.ptr, flag)
# `ThreadLocalIO` object holds instances of the WKT and WKB reader/writer
# objects that are local to the thread. The `GEOSGeometry` internals
# access these instances by calling the module-level functions, defined
# below.
class ThreadLocalIO(threading.local):
    # Lazily-populated, per-thread caches: each attribute holds at most one
    # reader/writer instance, created on first use by the accessors below.
    wkt_r = None
    wkt_w = None
    wkb_r = None
    wkb_w = None
    ewkb_w = None
thread_context = ThreadLocalIO()
# These module-level routines return the I/O object that is local to the
# thread. If the I/O object does not exist yet it will be initialized.
def wkt_r():
    """Return the thread-local WKT reader, creating it on first use."""
    reader = thread_context.wkt_r
    if not reader:
        reader = _WKTReader()
        thread_context.wkt_r = reader
    return reader
def wkt_w(dim=2, trim=False, precision=None):
    """Return the thread-local WKT writer, configured with the given options.

    On first call the writer is constructed; afterwards the cached instance
    is reconfigured (outdim/trim/precision) and reused.
    """
    writer = thread_context.wkt_w
    if not writer:
        writer = WKTWriter(dim=dim, trim=trim, precision=precision)
        thread_context.wkt_w = writer
    else:
        writer.outdim = dim
        writer.trim = trim
        writer.precision = precision
    return writer
def wkb_r():
    """Return the thread-local WKB reader, creating it on first use."""
    reader = thread_context.wkb_r
    if not reader:
        reader = _WKBReader()
        thread_context.wkb_r = reader
    return reader
def wkb_w(dim=2):
    """Return the thread-local WKB writer, set to the requested dimension."""
    writer = thread_context.wkb_w
    if not writer:
        writer = WKBWriter(dim=dim)
        thread_context.wkb_w = writer
    else:
        writer.outdim = dim
    return writer
def ewkb_w(dim=2):
    """Return the thread-local EWKB writer (WKB with the SRID embedded)."""
    writer = thread_context.ewkb_w
    if not writer:
        writer = WKBWriter(dim=dim)
        writer.srid = True
        thread_context.ewkb_w = writer
    else:
        writer.outdim = dim
    return writer
| erikr/django | django/contrib/gis/geos/prototypes/io.py | Python | bsd-3-clause | 11,671 |
<?php
// Builds a JSON report of circulation checkout statistics for two library
// branches: the selected month vs. the same month a year earlier, plus
// year-to-date totals for both years, keyed per circulation group.
include "classes/db.class.php";
$db = db::getInstance();
$returnData = array();
$compairMonth = ""; // NOTE(review): never read again ($compaireMonth below is a different variable) — dead code?
$currentMonth = "";
date_default_timezone_set( 'America/Chicago' );
// NOTE(review): is_numeric() also accepts floats/exponent forms ("1e2").
// The values are interpolated into SQL strings below, so a stricter check
// (ctype_digit) — or better, parameterized queries — would be safer.
if(is_numeric($_POST["year"])) $year = $_POST["year"];else die("Bad Year");
if(is_numeric($_POST["month"])) $month = $_POST["month"];else die("Bad Month");
$month = $month-1; // shift to the month before the one posted — TODO confirm this offset is intended
$compMonth = $month; // NOTE(review): assigned but never used below
$month = str_pad($month,2,"0",STR_PAD_LEFT);
// First-of-month date strings used in the ReportMonth equality filters.
$currentMonth = $year."-".str_pad($month,2,"0",STR_PAD_LEFT)."-01"; // second str_pad is redundant but harmless
$compaireMonth = ($year-1)."-".$month."-01"; // same month, previous year
$curYear = $year;
$compYear = ($year-1);
$returnData["month"] = $month."-01-".$year;
$returnData["branches"] = array("Thomas Library","Hageman Library");
// Thomas Library: current month, prior-year month, and both YTD aggregates.
$tlCur = $db->Query("SELECT CSC.ReportMonth AS ReportMonth1, CSC.Group AS Group1, SumOfCheckouts AS SumOfCheckouts1 FROM CircStatsCategorized AS CSC WHERE CSC.ReportMonth='".$currentMonth."' AND CSC.Branch='Thomas Library' ORDER BY CSC.Group;","FALSE","assoc_array");
$tlComp = $db->Query("SELECT CSC.ReportMonth AS ReportMonth2, CSC.Group AS Group1, SumOfCheckouts AS SumOfCheckouts2 FROM CircStatsCategorized AS CSC WHERE CSC.ReportMonth='".$compaireMonth."' AND CSC.Branch='Thomas Library' ORDER BY CSC.Group;","FALSE","assoc_array");
$tlCurYTD = $db->Query("SELECT CSC.Group AS Group1,SUM(CSC.SumOfCheckouts) AS YTD1 FROM CircStatsCategorized AS CSC WHERE ( YEAR(CSC.ReportMonth)='".$curYear."' AND CSC.Branch = 'Thomas Library' AND MONTH(CSC.ReportMonth) BETWEEN 01 AND '".$month."') GROUP BY Group1 ORDER BY Group1;","FALSE","assoc_array");
$tlCompYTD = $db->Query("SELECT CSC.Group AS Group1,SUM(CSC.SumOfCheckouts) AS YTD2 FROM CircStatsCategorized AS CSC WHERE ( YEAR(CSC.ReportMonth)='".$compYear."' AND CSC.Branch = 'Thomas Library' AND MONTH(CSC.ReportMonth) BETWEEN 01 AND '".$month."') GROUP BY Group1 ORDER BY Group1;","FALSE","assoc_array");
$returnData["Thomas Library"] = array_merge($tlCur,$tlComp, $tlCurYTD, $tlCompYTD);
// Hageman Library: same four queries against the other branch.
$hlCur = $db->Query("SELECT CSC.ReportMonth AS ReportMonth1, CSC.Group AS Group1, SumOfCheckouts AS SumOfCheckouts1 FROM CircStatsCategorized AS CSC WHERE CSC.ReportMonth='".$currentMonth."' AND CSC.Branch='Hageman Library' ORDER BY CSC.Group;","FALSE","assoc_array");
$hlComp = $db->Query("SELECT CSC.ReportMonth AS ReportMonth2, CSC.Group AS Group1, SumOfCheckouts AS SumOfCheckouts2 FROM CircStatsCategorized AS CSC WHERE CSC.ReportMonth='".$compaireMonth."' AND CSC.Branch='Hageman Library' ORDER BY CSC.Group;","FALSE","assoc_array");
$hlCurYTD = $db->Query("SELECT CSC.Group AS Group1,SUM(CSC.SumOfCheckouts) AS YTD1 FROM CircStatsCategorized AS CSC WHERE ( YEAR(CSC.ReportMonth)='".$curYear."' AND CSC.Branch = 'Hageman Library' AND MONTH(CSC.ReportMonth) BETWEEN 01 AND '".$month."') GROUP BY Group1 ORDER BY Group1;","FALSE","assoc_array");
$hlCompYTD = $db->Query("SELECT CSC.Group AS Group1,SUM(CSC.SumOfCheckouts) AS YTD2 FROM CircStatsCategorized AS CSC WHERE ( YEAR(CSC.ReportMonth)='".$compYear."' AND CSC.Branch = 'Hageman Library' AND MONTH(CSC.ReportMonth) BETWEEN 01 AND '".$month."') GROUP BY Group1 ORDER BY Group1;","FALSE","assoc_array");
$returnData["Hageman Library"] = array_merge($hlCur,$hlComp, $hlCurYTD, $hlCompYTD);
//$returnData["Thomas Library"]["alt"]=array();
// Collapse the merged row list into one record per circulation group.
foreach($returnData["Thomas Library"] as $v){
	if(isset($v["ReportMonth1"])){
		$tempDataT[$v["Group1"]]["month"]=$v["ReportMonth1"];
	}
	if(isset($v["SumOfCheckouts1"])){
		$tempDataT[$v["Group1"]]["SOC1"]=$v["SumOfCheckouts1"];
	}
	if(isset($v["SumOfCheckouts2"])){
		$tempDataT[$v["Group1"]]["SOC2"]=$v["SumOfCheckouts2"];
	}
	// NOTE(review): the YTD keys are cross-assigned below (query's YTD2 is
	// stored as YTD1 and vice versa). Confirm this swap is intentional and
	// not a copy/paste bug.
	if(isset($v["YTD2"])){
		$tempDataT[$v["Group1"]]["YTD1"]=$v["YTD2"];
	}
	if(isset($v["YTD1"])){
		$tempDataT[$v["Group1"]]["YTD2"]=$v["YTD1"];
	}
}
$returnData["Thomas Library"] = $tempDataT;
// Same collapse for the second branch (same YTD swap caveat as above).
foreach($returnData["Hageman Library"] as $v){
	if(isset($v["ReportMonth1"])){
		$tempDataH[$v["Group1"]]["month"]=$v["ReportMonth1"];
	}
	if(isset($v["SumOfCheckouts1"])){
		$tempDataH[$v["Group1"]]["SOC1"]=$v["SumOfCheckouts1"];
	}
	if(isset($v["SumOfCheckouts2"])){
		$tempDataH[$v["Group1"]]["SOC2"]=$v["SumOfCheckouts2"];
	}
	if(isset($v["YTD2"])){
		$tempDataH[$v["Group1"]]["YTD1"]=$v["YTD2"];
	}
	if(isset($v["YTD1"])){
		$tempDataH[$v["Group1"]]["YTD2"]=$v["YTD1"];
	}
}
$returnData["Hageman Library"] = $tempDataH;
echo json_encode($returnData);
?>
| WestchesterPublicLibrary/WPLreports | app/php/mcr.php | PHP | bsd-3-clause | 4,487 |
/*
* Copyright (C) 1999 Lars Knoll (knoll@kde.org)
* Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Apple Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#include "sky/engine/core/css/resolver/StyleResolverState.h"
#include "sky/engine/core/animation/css/CSSAnimations.h"
#include "sky/engine/core/dom/Node.h"
#include "sky/engine/core/dom/NodeRenderStyle.h"
#include "sky/engine/core/frame/FrameHost.h"
namespace blink {
// Captures everything style resolution needs for one element: the element
// context, the document, and the parent style to inherit from.
StyleResolverState::StyleResolverState(Document& document, Element* element, RenderStyle* parentStyle)
    : m_elementContext(element ? ElementResolveContext(*element) : ElementResolveContext(document))
    , m_document(document)
    , m_style(nullptr)
    , m_cssToLengthConversionData(0, document.renderView())
    , m_parentStyle(parentStyle)
    , m_lineHeightValue(nullptr)
    , m_styleMap(*this, m_elementStyleResources)
{
    // When no explicit parent style is supplied, fall back to the parent
    // node's computed render style (if there is a parent node).
    if (!parentStyle && m_elementContext.parentNode())
        m_parentStyle = m_elementContext.parentNode()->renderStyle();
    ASSERT(document.isActive());
    m_elementStyleResources.setDeviceScaleFactor(document.frameHost()->deviceScaleFactor());
}
// Out-of-line so members with incomplete types in the header destruct here.
StyleResolverState::~StyleResolverState()
{
}
// Takes ownership of the pending CSS animation update for this resolution.
void StyleResolverState::setAnimationUpdate(PassOwnPtr<CSSAnimationUpdate> update)
{
    m_animationUpdate = update;
}
// Releases ownership of the stored animation update to the caller; the
// state's pointer is null afterwards.
PassOwnPtr<CSSAnimationUpdate> StyleResolverState::takeAnimationUpdate()
{
    return m_animationUpdate.release();
}
} // namespace blink
| chinmaygarde/mojo | sky/engine/core/css/resolver/StyleResolverState.cpp | C++ | bsd-3-clause | 2,199 |
module Seek
  # Aggregates "create" and "download" activity-log counts for SEEK assets
  # over several time periods. Counters are exposed via dynamically defined
  # readers such as +daily_sops_created+ or +yearly_models_downloaded+.
  class ActivityStats
    # The time periods to be tested.
    # NOTE(review): these cutoffs are evaluated once at class-load time, so
    # they drift relative to when an instance is built — confirm whether
    # that staleness is acceptable or the hash should be built per-instance.
    PERIODS={"daily"=>1.day.ago,"weekly"=>1.week.ago,"monthly"=>1.month.ago,"six_monthly"=>6.month.ago,"yearly"=>1.year.ago}
    # The item types to include in the statistics.
    INCLUDED_TYPES=["Sop","Model","Publication","DataFile","Assay","Study","Investigation","Presentation"]

    # Loads all create/download log entries and tallies them into the
    # per-period, per-type instance-variable counters.
    def initialize
      create_attributes
      logs = ActivityLog.find(:all,:conditions=>["(action = ? or action = ?)","create","download"])
      logs.each do |log|
        next unless INCLUDED_TYPES.include?(log.activity_loggable_type)
        action = case log.action
                 when "create" then "created"
                 when "download" then "downloaded"
                 else ""
                 end
        PERIODS.each_pair do |period_key, cutoff|
          if log.created_at > cutoff
            # Increment the matching counter directly; avoids the string
            # eval the original used (safer and faster).
            ivar = "@#{period_key}_#{log.activity_loggable_type.downcase.pluralize}_#{action}"
            instance_variable_set(ivar, instance_variable_get(ivar) + 1)
          end
        end
      end
    end

    # Distinct users active in the last six months.
    def six_monthly_users
      distinct_culprits_since 6.month.ago
    end

    # Distinct users active in the last month.
    def monthly_users
      distinct_culprits_since 1.month.ago
    end

    # Distinct users active in the last week.
    def weekly_users
      distinct_culprits_since 1.week.ago
    end

    # Distinct users over the whole recorded history.
    def alltime_users
      distinct_culprits_since
    end

    # Distinct users active in the last day.
    def daily_users
      distinct_culprits_since 1.day.ago
    end

    # Distinct users active in the last year.
    def yearly_users
      distinct_culprits_since 1.year.ago
    end

    private

    # Defines an attr_accessor for every period/type/action combination
    # (e.g. daily_sops_created) and initialises each counter to zero.
    def create_attributes
      ["created","downloaded"].each do |action|
        PERIODS.keys.each do |period|
          INCLUDED_TYPES.each do |type|
            attribute="#{period}_#{type.downcase.pluralize}_#{action}"
            self.class.class_eval { attr_accessor attribute.intern }
            instance_variable_set "@#{attribute}".intern, 0
          end
        end
      end
    end

    # Number of distinct users (culprits) with any activity since +time+.
    # The 500-year default effectively means "all time".
    def distinct_culprits_since time=500.years.ago
      ActivityLog.count(:all,:select=>"distinct culprit_id",:conditions=>["created_at > ?",time])
    end
  end
end
var url = document.URL;
var array = url.split("/");
var base = array[3];
// On a local dev server the app lives under an extra "/<base>" path segment.
// NOTE(review): array[2] is the host[:port] portion, so this only matches a
// bare "localhost" with no port — confirm that is intended.
var staticurl = (array[2] == 'localhost') ?
    '/' + base + '/client/dashboard/reporting' :
    '/client/dashboard/reporting';
$(document).ready(function(){
    // "Basic info" menu: push the link's hash into the URL, then refresh
    // the left navigation state (leftNavigation is defined elsewhere).
    $('.basic_info_menu').click(function(){
        // NOTE(review): $url is assigned without var/let — implicit global.
        $url = $(this).find('a').attr("href");
        var res = $url.split("#");
        var hash = '#'+res[1];
        window.location.hash = hash;
        leftNavigation();
        // now scroll to element with that id
    });
    // "Select all" checkbox: tick every permission box and disable further
    // clicks on them via pointer-events while select-all is active.
    $('#selectall').click(function(){
        var select = $("#selectall").is(":checked");
        if(select)
        {
            $('.permission_check').prop('checked', true);
            $('.permission_check').css("pointer-events", "none");
        }
        else
        {
            $('.permission_check').prop('checked', false);
            $('.permission_check').css("pointer-events", "auto");
        }
    });
    // Variant of the handler above for a differently-ID'd select-all box.
    // NOTE(review): near-duplicate of the #selectall handler except that it
    // first removes the permission_check class from itself — consider
    // consolidating the two into one shared handler.
    $('#select_all_0').click(function(){
        var select = $("#select_all_0").is(":checked");
        if(select)
        {
            $('#select_all_0').removeClass('permission_check');
            $('.permission_check').prop('checked', true);
            $('.permission_check').css("pointer-events", "none");
        }
        else
        {
            $('.permission_check').prop('checked', false);
            $('.permission_check').css("pointer-events", "auto");
        }
    });
    // Benefit-plan menu: same hash-then-refresh pattern as the basic info
    // menu, delegating to benefitNavigation (defined elsewhere).
    $('.benefit_plan_info').click(function(){
        $url = $(this).find('a').attr("href");
        var res = $url.split("#");
        var hash = '#'+res[1];
        window.location.hash = hash;
        benefitNavigation();
        // now scroll to element with that id
    });
});
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "google_apis/gaia/gaia_auth_util.h"
#include <vector>
#include "base/logging.h"
#include "base/string_util.h"
#include "base/strings/string_split.h"
#include "google_apis/gaia/gaia_urls.h"
#include "googleurl/src/gurl.h"
namespace gaia {
namespace {
const char kGmailDomain[] = "gmail.com";
}
// Canonical form: lower-cased; for @gmail.com addresses the dots in the
// local part are also stripped (they are not significant for Gmail).
std::string CanonicalizeEmail(const std::string& email_address) {
  std::vector<std::string> parts;
  char at = '@';
  base::SplitString(email_address, at, &parts);
  // NOTE(review): NOTREACHED() is a no-op in release builds, so a malformed
  // address (zero or multiple '@') falls through and is simply lower-cased
  // with its parts rejoined — confirm callers tolerate that.
  if (parts.size() != 2U)
    NOTREACHED() << "expecting exactly one @, but got " << parts.size();
  else if (parts[1] == kGmailDomain)  // only strip '.' for gmail accounts.
    RemoveChars(parts[0], ".", &parts[0]);
  std::string new_email = StringToLowerASCII(JoinString(parts, at));
  // NOTE(review): this logs raw email addresses (PII) at VLOG(1).
  VLOG(1) << "Canonicalized " << email_address << " to " << new_email;
  return new_email;
}
std::string CanonicalizeDomain(const std::string& domain) {
  // Canonicalization of domain names means lower-casing them. Make sure to
  // update this function in sync with Canonicalize if this ever changes.
  return StringToLowerASCII(domain);
}
// Ensures the address has a domain part, appending the default
// "@gmail.com" when the input contains no '@'.
std::string SanitizeEmail(const std::string& email_address) {
  if (email_address.find('@') != std::string::npos)
    return email_address;
  std::string sanitized = email_address;
  sanitized += '@';
  sanitized += kGmailDomain;
  return sanitized;
}
// Two addresses match when their sanitized (default-domain applied) and
// canonicalized forms compare equal.
bool AreEmailsSame(const std::string& email1, const std::string& email2) {
  return gaia::CanonicalizeEmail(gaia::SanitizeEmail(email1)) ==
      gaia::CanonicalizeEmail(gaia::SanitizeEmail(email2));
}
// Returns the (canonicalized) text after the '@'; empty string when the
// input has no non-empty domain part.
std::string ExtractDomainName(const std::string& email_address) {
  // First canonicalize which will also verify we have proper domain part.
  std::string email = CanonicalizeEmail(email_address);
  size_t separator_pos = email.find('@');
  if (separator_pos != email.npos && separator_pos < email.length() - 1)
    return email.substr(separator_pos + 1);
  else
    NOTREACHED() << "Not a proper email address: " << email;
  // Reached in release builds for malformed input (NOTREACHED is a no-op).
  return std::string();
}
// A URL is the GAIA sign-on realm only if it is HTTPS and exactly equal to
// the configured GAIA origin URL.
bool IsGaiaSignonRealm(const GURL& url) {
  if (!url.SchemeIsSecure())
    return false;
  return url == GaiaUrls::GetInstance()->gaia_url();
}
} // namespace gaia
| loopCM/chromium | google_apis/gaia/gaia_auth_util.cc | C++ | bsd-3-clause | 2,370 |
"""
Methods to characterize image textures.
"""
import numpy as np
from ._texture import _glcm_loop, _local_binary_pattern
def greycomatrix(image, distances, angles, levels=256, symmetric=False,
                 normed=False):
    """Calculate the grey-level co-occurrence matrix.
    A grey level co-occurence matrix is a histogram of co-occuring
    greyscale values at a given offset over an image.
    Parameters
    ----------
    image : array_like of uint8
        Integer typed input image. The image will be cast to uint8, so
        the maximum value must be less than 256.
    distances : array_like
        List of pixel pair distance offsets.
    angles : array_like
        List of pixel pair angles in radians.
    levels : int, optional
        The input image should contain integers in [0, levels-1],
        where levels indicate the number of grey-levels counted
        (typically 256 for an 8-bit image). The maximum value is
        256.
    symmetric : bool, optional
        If True, the output matrix `P[:, :, d, theta]` is symmetric. This
        is accomplished by ignoring the order of value pairs, so both
        (i, j) and (j, i) are accumulated when (i, j) is encountered
        for a given offset. The default is False.
    normed : bool, optional
        If True, normalize each matrix `P[:, :, d, theta]` by dividing
        by the total number of accumulated co-occurrences for the given
        offset. The elements of the resulting matrix sum to 1. The
        default is False.
    Returns
    -------
    P : 4-D ndarray
        The grey-level co-occurrence histogram. The value
        `P[i,j,d,theta]` is the number of times that grey-level `j`
        occurs at a distance `d` and at an angle `theta` from
        grey-level `i`. If `normed` is `False`, the output is of
        type uint32, otherwise it is float64.
    References
    ----------
    .. [1] The GLCM Tutorial Home Page,
           http://www.fp.ucalgary.ca/mhallbey/tutorial.htm
    .. [2] Pattern Recognition Engineering, Morton Nadler & Eric P.
           Smith
    .. [3] Wikipedia, http://en.wikipedia.org/wiki/Co-occurrence_matrix
    Examples
    --------
    Compute 2 GLCMs: One for a 1-pixel offset to the right, and one
    for a 1-pixel offset upwards.
    >>> image = np.array([[0, 0, 1, 1],
    ...                   [0, 0, 1, 1],
    ...                   [0, 2, 2, 2],
    ...                   [2, 2, 3, 3]], dtype=np.uint8)
    >>> result = greycomatrix(image, [1], [0, np.pi/4, np.pi/2, 3*np.pi/4], levels=4)
    >>> result[:, :, 0, 0]
    array([[2, 2, 1, 0],
           [0, 2, 0, 0],
           [0, 0, 3, 1],
           [0, 0, 0, 1]], dtype=uint32)
    >>> result[:, :, 0, 1]
    array([[1, 1, 3, 0],
           [0, 1, 1, 0],
           [0, 0, 0, 2],
           [0, 0, 0, 0]], dtype=uint32)
    >>> result[:, :, 0, 2]
    array([[3, 0, 2, 0],
           [0, 2, 2, 0],
           [0, 0, 1, 2],
           [0, 0, 0, 0]], dtype=uint32)
    >>> result[:, :, 0, 3]
    array([[2, 0, 0, 0],
           [1, 1, 2, 0],
           [0, 0, 2, 1],
           [0, 0, 0, 0]], dtype=uint32)
    """
    # Input validation via assert: raises AssertionError (not ValueError)
    # on bad input; kept as-is since callers may rely on that.
    assert levels <= 256
    image = np.ascontiguousarray(image)
    assert image.ndim == 2
    assert image.min() >= 0
    assert image.max() < levels
    image = image.astype(np.uint8)
    distances = np.ascontiguousarray(distances, dtype=np.float64)
    angles = np.ascontiguousarray(angles, dtype=np.float64)
    assert distances.ndim == 1
    assert angles.ndim == 1
    # Output histogram, filled in-place by the Cython inner loop.
    P = np.zeros((levels, levels, len(distances), len(angles)),
                 dtype=np.uint32, order='C')
    # count co-occurences
    _glcm_loop(image, distances, angles, levels, P)
    # make each GLMC symmetric
    if symmetric:
        Pt = np.transpose(P, (1, 0, 2, 3))
        P = P + Pt
    # normalize each GLMC
    if normed:
        P = P.astype(np.float64)
        glcm_sums = np.apply_over_axes(np.sum, P, axes=(0, 1))
        # Avoid division by zero for offsets with no accumulated pairs.
        glcm_sums[glcm_sums == 0] = 1
        P /= glcm_sums
    return P
def greycoprops(P, prop='contrast'):
    """Calculate texture properties of a GLCM.
    Compute a feature of a grey level co-occurrence matrix to serve as
    a compact summary of the matrix. The properties are computed as
    follows:
    - 'contrast': :math:`\\sum_{i,j=0}^{levels-1} P_{i,j}(i-j)^2`
    - 'dissimilarity': :math:`\\sum_{i,j=0}^{levels-1}P_{i,j}|i-j|`
    - 'homogeneity': :math:`\\sum_{i,j=0}^{levels-1}\\frac{P_{i,j}}{1+(i-j)^2}`
    - 'ASM': :math:`\\sum_{i,j=0}^{levels-1} P_{i,j}^2`
    - 'energy': :math:`\\sqrt{ASM}`
    - 'correlation':
        .. math:: \\sum_{i,j=0}^{levels-1} P_{i,j}\\left[\\frac{(i-\\mu_i) \\
                  (j-\\mu_j)}{\\sqrt{(\\sigma_i^2)(\\sigma_j^2)}}\\right]
    Parameters
    ----------
    P : ndarray
        Input array. `P` is the grey-level co-occurrence histogram
        for which to compute the specified property. The value
        `P[i,j,d,theta]` is the number of times that grey-level j
        occurs at a distance d and at an angle theta from
        grey-level i.
    prop : {'contrast', 'dissimilarity', 'homogeneity', 'energy', \
            'correlation', 'ASM'}, optional
        The property of the GLCM to compute. The default is 'contrast'.
    Returns
    -------
    results : 2-D ndarray
        2-dimensional array. `results[d, a]` is the property 'prop' for
        the d'th distance and the a'th angle.
    Raises
    ------
    ValueError
        If `prop` is not one of the supported property names.
    References
    ----------
    .. [1] The GLCM Tutorial Home Page,
           http://www.fp.ucalgary.ca/mhallbey/tutorial.htm
    Examples
    --------
    Compute the contrast for GLCMs with distances [1, 2] and angles
    [0 degrees, 90 degrees]
    >>> image = np.array([[0, 0, 1, 1],
    ...                   [0, 0, 1, 1],
    ...                   [0, 2, 2, 2],
    ...                   [2, 2, 3, 3]], dtype=np.uint8)
    >>> g = greycomatrix(image, [1, 2], [0, np.pi/2], levels=4,
    ...                  normed=True, symmetric=True)
    >>> contrast = greycoprops(g, 'contrast')
    >>> contrast
    array([[ 0.58333333,  1.       ],
           [ 1.25      ,  2.75     ]])
    """
    assert P.ndim == 4
    (num_level, num_level2, num_dist, num_angle) = P.shape
    assert num_level == num_level2
    assert num_dist > 0
    assert num_angle > 0
    # create weights for specified property
    I, J = np.ogrid[0:num_level, 0:num_level]
    if prop == 'contrast':
        weights = (I - J) ** 2
    elif prop == 'dissimilarity':
        weights = np.abs(I - J)
    elif prop == 'homogeneity':
        weights = 1. / (1. + (I - J) ** 2)
    elif prop in ['ASM', 'energy', 'correlation']:
        # These properties are not simple weighted sums; handled below.
        pass
    else:
        raise ValueError('%s is an invalid property' % (prop))
    # compute property for each GLCM
    if prop == 'energy':
        asm = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]
        results = np.sqrt(asm)
    elif prop == 'ASM':
        results = np.apply_over_axes(np.sum, (P ** 2), axes=(0, 1))[0, 0]
    elif prop == 'correlation':
        results = np.zeros((num_dist, num_angle), dtype=np.float64)
        # Broadcastable grey-level index grids; np.arange is the idiomatic
        # (and faster) equivalent of np.array(range(...)).
        I = np.arange(num_level).reshape((num_level, 1, 1, 1))
        J = np.arange(num_level).reshape((1, num_level, 1, 1))
        diff_i = I - np.apply_over_axes(np.sum, (I * P), axes=(0, 1))[0, 0]
        diff_j = J - np.apply_over_axes(np.sum, (J * P), axes=(0, 1))[0, 0]
        std_i = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_i) ** 2),
                                           axes=(0, 1))[0, 0])
        std_j = np.sqrt(np.apply_over_axes(np.sum, (P * (diff_j) ** 2),
                                           axes=(0, 1))[0, 0])
        cov = np.apply_over_axes(np.sum, (P * (diff_i * diff_j)),
                                 axes=(0, 1))[0, 0]
        # handle the special case of standard deviations near zero
        mask_0 = std_i < 1e-15
        mask_0[std_j < 1e-15] = True
        results[mask_0] = 1
        # handle the standard case (boolean negation rather than the
        # fragile `mask_0 == False` comparison)
        mask_1 = ~mask_0
        results[mask_1] = cov[mask_1] / (std_i[mask_1] * std_j[mask_1])
    elif prop in ['contrast', 'dissimilarity', 'homogeneity']:
        weights = weights.reshape((num_level, num_level, 1, 1))
        results = np.apply_over_axes(np.sum, (P * weights), axes=(0, 1))[0, 0]
    return results
def local_binary_pattern(image, P, R, method='default'):
    """Gray scale and rotation invariant LBP (Local Binary Patterns).

    LBP is an invariant descriptor that can be used for texture
    classification.

    Parameters
    ----------
    image : (N, M) array
        Graylevel image.
    P : int
        Number of circularly symmetric neighbour set points (quantization
        of the angular space).
    R : float
        Radius of circle (spatial resolution of the operator).
    method : {'default', 'ror', 'uniform', 'nri_uniform', 'var'}
        Method to determine the pattern:
        * 'default': original LBP; gray-scale but not rotation invariant.
        * 'ror': gray-scale and rotation invariant extension.
        * 'uniform': uniform patterns with finer angular quantization;
          gray-scale and rotation invariant.
        * 'nri_uniform': non rotation-invariant uniform-pattern variant;
          gray-scale invariant only [2].
        * 'var': rotation-invariant variance measure of local contrast;
          rotation but not gray-scale invariant.

    Returns
    -------
    output : (N, M) array
        LBP image.

    References
    ----------
    .. [1] Ojala, Pietikainen, Maenpaa. "Multiresolution Gray-Scale and
           Rotation Invariant Texture Classification with Local Binary
           Patterns", 2002.
    .. [2] Ahonen, Hadid, Pietikainen. "Face recognition with local binary
           patterns", 2004.
    """
    # Single-character codes understood by the Cython implementation.
    method_codes = {
        'default': ord('D'),
        'ror': ord('R'),
        'uniform': ord('U'),
        'nri_uniform': ord('N'),
        'var': ord('V'),
    }
    img = np.ascontiguousarray(image, dtype=np.double)
    return _local_binary_pattern(img, P, R, method_codes[method.lower()])
| SamHames/scikit-image | skimage/feature/texture.py | Python | bsd-3-clause | 10,468 |
package org.newdawn.slick;
import org.newdawn.slick.muffin.FileMuffin;
import org.newdawn.slick.muffin.Muffin;
import org.newdawn.slick.muffin.WebstartMuffin;
import org.newdawn.slick.util.Log;
import javax.jnlp.ServiceManager;
import java.io.IOException;
import java.util.HashMap;
/**
 * A utility to allow game setup/state to be stored locally. This utility will
 * adapt to the current environment (webstart muffin storage or plain files).
 * Note that this will not currently work in an applet.
 *
 * @author kappaOne
 */
public class SavedState {
    /** Base file name under which the state will be saved. */
    private String fileName;
    /** Storage back-end appropriate for the current environment. */
    private Muffin muffin;
    /** Numeric values keyed by field name (generified from raw HashMap). */
    private HashMap<String, Double> numericData = new HashMap<String, Double>();
    /** String values keyed by field name (generified from raw HashMap). */
    private HashMap<String, String> stringData = new HashMap<String, String>();

    /**
     * Detect whether the app is running as webstart or a local app and select
     * the appropriate muffin type, then load any previously saved state.
     *
     * @param fileName name of muffin where data will be saved
     * @throws SlickException Indicates a failure to load the stored state
     */
    public SavedState(String fileName) throws SlickException {
        this.fileName = fileName;
        if (isWebstartAvailable()) {
            muffin = new WebstartMuffin();
        } else {
            muffin = new FileMuffin();
        }
        try {
            load();
        } catch (IOException e) {
            throw new SlickException("Failed to load state on startup", e);
        }
    }

    /**
     * Get number stored at given location (0 if never set).
     *
     * @param nameOfField The name of the number to retrieve
     * @return The number saved at this location
     */
    public double getNumber(String nameOfField) {
        return getNumber(nameOfField, 0);
    }

    /**
     * Get number stored at given location.
     *
     * @param nameOfField  The name of the number to retrieve
     * @param defaultValue The value to return if the specified value hasn't been set
     * @return The number saved at this location
     */
    public double getNumber(String nameOfField, double defaultValue) {
        Double value = numericData.get(nameOfField);
        return (value == null) ? defaultValue : value.doubleValue();
    }

    /**
     * Save the given value at the given location; overwrites any previous
     * value at this location.
     *
     * @param nameOfField The name to store the value against
     * @param value       The value to store
     */
    public void setNumber(String nameOfField, double value) {
        // Double.valueOf instead of the deprecated new Double(...).
        numericData.put(nameOfField, Double.valueOf(value));
    }

    /**
     * Get the String at the given location (null if never set).
     *
     * @param nameOfField location of string
     * @return String stored at the location given
     */
    public String getString(String nameOfField) {
        return getString(nameOfField, null);
    }

    /**
     * Get the String at the given location.
     *
     * @param nameOfField  location of string
     * @param defaultValue The value to return if the specified value hasn't been set
     * @return String stored at the location given
     */
    public String getString(String nameOfField, String defaultValue) {
        String value = stringData.get(nameOfField);
        return (value == null) ? defaultValue : value;
    }

    /**
     * Save the given value at the given location; overwrites any previous
     * value at this location.
     *
     * @param nameOfField location to store the string
     * @param value       The value to store
     */
    public void setString(String nameOfField, String value) {
        stringData.put(nameOfField, value);
    }

    /**
     * Save the stored data to file/muffin.
     *
     * @throws IOException Indicates it wasn't possible to store the state
     */
    public void save() throws IOException {
        muffin.saveFile(numericData, fileName + "_Number");
        muffin.saveFile(stringData, fileName + "_String");
    }

    /**
     * Load the data from file/muffin.
     *
     * @throws IOException Indicates it wasn't possible to load the state
     */
    @SuppressWarnings("unchecked")
    public void load() throws IOException {
        numericData = (HashMap<String, Double>) muffin.loadFile(fileName + "_Number");
        stringData = (HashMap<String, String>) muffin.loadFile(fileName + "_String");
    }

    /**
     * Delete all currently held state.
     */
    public void clear() {
        numericData.clear();
        stringData.clear();
    }

    /**
     * Quick test to see if running through Java webstart.
     *
     * @return True if jws running
     */
    private boolean isWebstartAvailable() {
        try {
            Class.forName("javax.jnlp.ServiceManager");
            // this causes to go and see if the service is available
            ServiceManager.lookup("javax.jnlp.PersistenceService");
            Log.info("Webstart detected using Muffins");
        } catch (Exception e) {
            Log.info("Using Local File System");
            return false;
        }
        return true;
    }
}
| emabrey/SleekSlick2D | slick/src/main/java/org/newdawn/slick/SavedState.java | Java | bsd-3-clause | 5,505 |
//= require ../store
(function () {
"use strict";

// Flux-style store tracking the job/process list for a single app,
// identified by this.id.appId. Registered with the global dispatcher.
var AppJobs = FlynnDashboard.Stores.AppJobs = FlynnDashboard.Store.createClass({
	displayName: "Stores.AppJobs",

	getState: function () {
		return this.state;
	},

	willInitialize: function () {
		this.props = {
			appId: this.id.appId
		};
	},

	didInitialize: function () {},

	// Fetch the job list lazily, only once the store has an observer.
	didBecomeActive: function () {
		this.__fetchJobs();
	},

	getInitialState: function () {
		return {
			processes: []
		};
	},

	// No dispatcher events are handled yet; registration keeps the store
	// wired for future actions.
	handleEvent: function () {
	},

	__fetchJobs: function () {
		FlynnDashboard.client.getAppJobs(this.props.appId).then(function (args) {
			var res = args[0];
			this.setState({
				processes: res.map(function (item) {
					// Normalize the API's capitalized "State" key to
					// lowercase "state" for consumers.
					if (item.hasOwnProperty("State")) {
						item.state = item.State;
					}
					return item;
				})
			});
		}.bind(this));
	}

}, Marbles.State);

AppJobs.registerWithDispatcher(FlynnDashboard.Dispatcher);

})();
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/extensions/api/declarative_content/chrome_content_rules_registry.h"
#include "base/bind.h"
#include "chrome/browser/chrome_notification_types.h"
#include "chrome/browser/extensions/api/declarative_content/content_constants.h"
#include "chrome/browser/extensions/extension_util.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_iterator.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "content/public/browser/navigation_details.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/notification_source.h"
#include "content/public/browser/web_contents.h"
#include "extensions/browser/api/declarative/rules_registry_service.h"
#include "extensions/browser/extension_registry.h"
#include "extensions/browser/extension_system.h"
namespace extensions {
//
// EvaluationScope
//
// Used to coalesce multiple requests for evaluation into a zero or one actual
// evaluations (depending on the EvaluationDisposition). This is required for
// correctness when multiple trackers respond to the same event. Otherwise,
// executing the request from the first tracker will be done before the tracked
// state has been updated for the other trackers.
class ChromeContentRulesRegistry::EvaluationScope {
 public:
  // Default disposition is DEFER_REQUESTS (see the delegating constructor).
  explicit EvaluationScope(ChromeContentRulesRegistry* registry);
  EvaluationScope(ChromeContentRulesRegistry* registry,
                  EvaluationDisposition disposition);
  ~EvaluationScope();
 private:
  ChromeContentRulesRegistry* const registry_;
  // Disposition to restore when this scope is destroyed (scopes can nest).
  const EvaluationDisposition previous_disposition_;
  DISALLOW_COPY_AND_ASSIGN(EvaluationScope);
};
// Convenience constructor: defers evaluation requests until scope exit.
ChromeContentRulesRegistry::EvaluationScope::EvaluationScope(
    ChromeContentRulesRegistry* registry)
    : EvaluationScope(registry, DEFER_REQUESTS) {}
// Saves the registry's current disposition and installs the new one.
// EVALUATE_REQUESTS is not allowed here: it is only ever restored by the
// destructor when the outermost scope unwinds.
ChromeContentRulesRegistry::EvaluationScope::EvaluationScope(
    ChromeContentRulesRegistry* registry,
    EvaluationDisposition disposition)
    : registry_(registry),
      previous_disposition_(registry_->evaluation_disposition_) {
  DCHECK_NE(EVALUATE_REQUESTS, disposition);
  registry_->evaluation_disposition_ = disposition;
}
// Restores the previous disposition; if that returns the registry to
// EVALUATE_REQUESTS, flush all evaluation requests coalesced while any
// scope was active.
ChromeContentRulesRegistry::EvaluationScope::~EvaluationScope() {
  registry_->evaluation_disposition_ = previous_disposition_;
  if (registry_->evaluation_disposition_ == EVALUATE_REQUESTS) {
    for (content::WebContents* tab : registry_->evaluation_pending_)
      registry_->EvaluateConditionsForTab(tab);
    registry_->evaluation_pending_.clear();
  }
}
//
// ChromeContentRulesRegistry
//
ChromeContentRulesRegistry::ChromeContentRulesRegistry(
    content::BrowserContext* browser_context,
    RulesCacheDelegate* cache_delegate,
    const PredicateEvaluatorsFactory& evaluators_factory)
    : ContentRulesRegistry(browser_context,
                           declarative_content_constants::kOnPageChanged,
                           content::BrowserThread::UI,
                           cache_delegate,
                           RulesRegistryService::kDefaultRulesRegistryID),
      evaluators_(evaluators_factory.Run(this)),
      evaluation_disposition_(EVALUATE_REQUESTS) {
  // Observe WebContents destruction across all contexts so stale entries
  // can be pruned from active_rules_ (see Observe()).
  registrar_.Add(this,
                 content::NOTIFICATION_WEB_CONTENTS_DESTROYED,
                 content::NotificationService::AllBrowserContextsAndSources());
}
void ChromeContentRulesRegistry::Observe(
    int type,
    const content::NotificationSource& source,
    const content::NotificationDetails& details) {
  // The registrar (see the constructor) only subscribes this object to
  // NOTIFICATION_WEB_CONTENTS_DESTROYED, so that is the sole case handled.
  switch (type) {
    case content::NOTIFICATION_WEB_CONTENTS_DESTROYED: {
      content::WebContents* tab =
          content::Source<content::WebContents>(source).ptr();
      // Note that neither non-tab WebContents nor tabs from other browser
      // contexts will be in the map.
      active_rules_.erase(tab);
      break;
    }
  }
}
// Handles an evaluation request according to the current disposition:
// evaluate immediately, queue for when the active EvaluationScope closes, or
// drop the request entirely.
void ChromeContentRulesRegistry::RequestEvaluation(
    content::WebContents* contents) {
  switch (evaluation_disposition_) {
    case EVALUATE_REQUESTS:
      EvaluateConditionsForTab(contents);
      break;
    case DEFER_REQUESTS:
      // Flushed by ~EvaluationScope once the disposition returns to
      // EVALUATE_REQUESTS.
      evaluation_pending_.insert(contents);
      break;
    case IGNORE_REQUESTS:
      break;
  }
}
// Thin public wrapper over ManagingRulesForBrowserContext().
bool ChromeContentRulesRegistry::ShouldManageConditionsForBrowserContext(
    content::BrowserContext* context) {
  return ManagingRulesForBrowserContext(context);
}
// Starts tracking |contents|: seeds active_rules_ with an empty rule set for
// it, then asks each evaluator to begin tracking, deferring any evaluation
// requests the evaluators issue until all of them have been notified.
void ChromeContentRulesRegistry::MonitorWebContentsForRuleEvaluation(
    content::WebContents* contents) {
  // We rely on active_rules_ to have a key-value pair for |contents| to know
  // which WebContents we are working with.
  active_rules_[contents] = std::set<const ContentRule*>();
  EvaluationScope evaluation_scope(this);
  for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_)
    evaluator->TrackForWebContents(contents);
}
// Forwards a committed main-frame navigation to every evaluator, but only for
// WebContents this registry is monitoring. Evaluation requests raised by the
// evaluators are deferred until all have seen the navigation.
void ChromeContentRulesRegistry::DidNavigateMainFrame(
    content::WebContents* contents,
    const content::LoadCommittedDetails& details,
    const content::FrameNavigateParams& params) {
  if (ContainsKey(active_rules_, contents)) {
    EvaluationScope evaluation_scope(this);
    for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_)
      evaluator->OnWebContentsNavigation(contents, details, params);
  }
}
// Plain data holder: a rule owns its conditions and actions; |extension| is
// not owned.
ChromeContentRulesRegistry::ContentRule::ContentRule(
    const Extension* extension,
    std::vector<scoped_ptr<const ContentCondition>> conditions,
    std::vector<scoped_ptr<const ContentAction>> actions,
    int priority)
    : extension(extension),
      conditions(std::move(conditions)),
      actions(std::move(actions)),
      priority(priority) {}
ChromeContentRulesRegistry::ContentRule::~ContentRule() {}
// Translates an API-level rule into a ContentRule. On any condition or action
// parse failure, |error| is set and a null pointer is returned; on success
// |error| is left empty.
scoped_ptr<const ChromeContentRulesRegistry::ContentRule>
ChromeContentRulesRegistry::CreateRule(
    const Extension* extension,
    const std::map<std::string, ContentPredicateFactory*>& predicate_factories,
    const api::events::Rule& api_rule,
    std::string* error) {
  std::vector<scoped_ptr<const ContentCondition>> conditions;
  for (const linked_ptr<base::Value>& value : api_rule.conditions) {
    conditions.push_back(
        CreateContentCondition(extension, predicate_factories, *value, error));
    if (!error->empty())
      return scoped_ptr<ContentRule>();
  }
  std::vector<scoped_ptr<const ContentAction>> actions;
  for (const linked_ptr<base::Value>& value : api_rule.actions) {
    actions.push_back(ContentAction::Create(browser_context(), extension,
                                            *value, error));
    if (!error->empty())
      return scoped_ptr<ContentRule>();
  }
  // Note: |api_rule| may contain tags, but these are ignored.
  return make_scoped_ptr(new ContentRule(extension, std::move(conditions),
                                         std::move(actions),
                                         *api_rule.priority));
}
// True when |context| belongs to the same profile family (original or its
// incognito counterpart) as this registry's browser context.
bool ChromeContentRulesRegistry::ManagingRulesForBrowserContext(
    content::BrowserContext* context) {
  // Manage both the normal context and incognito contexts associated with it.
  return Profile::FromBrowserContext(context)->GetOriginalProfile() ==
      Profile::FromBrowserContext(browser_context());
}
// static
// A condition matches only if every non-null, non-ignored predicate in it
// evaluates to true for |tab| (logical AND; an empty predicate list matches).
bool ChromeContentRulesRegistry::EvaluateConditionForTab(
    const ContentCondition* condition,
    content::WebContents* tab) {
  for (const scoped_ptr<const ContentPredicate>& predicate :
       condition->predicates) {
    if (predicate && !predicate->IsIgnored() &&
        !predicate->GetEvaluator()->EvaluatePredicate(predicate.get(), tab)) {
      return false;
    }
  }
  return true;
}
// Returns the set of rules whose conditions match |tab|. A rule matches if
// ANY of its conditions matches (conditions are OR-ed; predicates within a
// condition are AND-ed). Incognito tabs skip rules from extensions whose
// incognito mode does not apply to this registry.
std::set<const ChromeContentRulesRegistry::ContentRule*>
ChromeContentRulesRegistry::GetMatchingRules(content::WebContents* tab) const {
  const bool is_incognito_tab = tab->GetBrowserContext()->IsOffTheRecord();
  std::set<const ContentRule*> matching_rules;
  for (const RulesMap::value_type& rule_id_rule_pair : content_rules_) {
    const ContentRule* rule = rule_id_rule_pair.second.get();
    if (is_incognito_tab &&
        !ShouldEvaluateExtensionRulesForIncognitoRenderer(rule->extension))
      continue;
    for (const scoped_ptr<const ContentCondition>& condition :
         rule->conditions) {
      // Set insertion is idempotent, so evaluating further conditions after a
      // match is harmless.
      if (EvaluateConditionForTab(condition.get(), tab))
        matching_rules.insert(rule);
    }
  }
  return matching_rules;
}
// Creates ContentRules from |api_rules| for |extension_id|, registers their
// predicates with the evaluators, and re-evaluates all monitored tabs.
// Returns an empty string on success or an error message (with no rules
// added) on the first failed rule. Evaluation requests raised while rules are
// being installed are deferred until this function returns.
std::string ChromeContentRulesRegistry::AddRulesImpl(
    const std::string& extension_id,
    const std::vector<linked_ptr<api::events::Rule>>& api_rules) {
  EvaluationScope evaluation_scope(this);
  const Extension* extension = ExtensionRegistry::Get(browser_context())
      ->GetInstalledExtension(extension_id);
  DCHECK(extension);
  std::string error;
  RulesMap new_rules;
  // Predicates grouped first by owning evaluator, then by rule pointer (the
  // rule pointer doubles as the opaque group key passed to the evaluators).
  std::map<ContentPredicateEvaluator*,
           std::map<const void*, std::vector<const ContentPredicate*>>>
      new_predicates;
  // Map each API attribute name to the evaluator that can build predicates
  // for it.
  std::map<std::string, ContentPredicateFactory*> predicate_factories;
  for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_) {
    predicate_factories[evaluator->GetPredicateApiAttributeName()] =
        evaluator.get();
  }
  for (const linked_ptr<api::events::Rule>& api_rule : api_rules) {
    ExtensionIdRuleIdPair rule_id(extension_id, *api_rule->id);
    DCHECK(content_rules_.find(rule_id) == content_rules_.end());
    scoped_ptr<const ContentRule> rule(
        CreateRule(extension, predicate_factories, *api_rule, &error));
    if (!error.empty()) {
      // Notify evaluators that none of the created predicates will be tracked
      // after all.
      for (const scoped_ptr<ContentPredicateEvaluator>& evaluator :
           evaluators_) {
        if (!new_predicates[evaluator.get()].empty()) {
          evaluator->TrackPredicates(
              std::map<const void*, std::vector<const ContentPredicate*>>());
        }
      }
      return error;
    }
    DCHECK(rule);
    // Group predicates by evaluator and rule, so we can later notify the
    // evaluators that they have new predicates to manage.
    for (const scoped_ptr<const ContentCondition>& condition :
         rule->conditions) {
      for (const scoped_ptr<const ContentPredicate>& predicate :
           condition->predicates) {
        if (predicate.get()) {
          new_predicates[predicate->GetEvaluator()][rule.get()].push_back(
              predicate.get());
        }
      }
    }
    new_rules[rule_id] = make_linked_ptr(rule.release());
  }
  // Notify the evaluators about their new predicates.
  for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_)
    evaluator->TrackPredicates(new_predicates[evaluator.get()]);
  // Wohoo, everything worked fine.
  content_rules_.insert(new_rules.begin(), new_rules.end());
  // Request evaluation for all WebContents, under the assumption that a
  // non-empty condition has been added.
  for (const auto& web_contents_rules_pair : active_rules_)
    RequestEvaluation(web_contents_rules_pair.first);
  return std::string();
}
// Removes the identified rules for |extension_id|: reverts their actions on
// every tab where they are active, tells the evaluators to stop tracking
// their predicate groups, and erases them from content_rules_. Unknown rule
// identifiers are silently skipped. Always returns an empty string.
std::string ChromeContentRulesRegistry::RemoveRulesImpl(
    const std::string& extension_id,
    const std::vector<std::string>& rule_identifiers) {
  // Ignore evaluation requests in this function because it reverts actions on
  // any active rules itself. Otherwise, we run the risk of reverting the same
  // rule multiple times.
  EvaluationScope evaluation_scope(this, IGNORE_REQUESTS);
  std::vector<RulesMap::iterator> rules_to_erase;
  // The rule pointer is the predicate group key used at TrackPredicates()
  // time in AddRulesImpl().
  std::vector<const void*> predicate_groups_to_stop_tracking;
  for (const std::string& id : rule_identifiers) {
    // Skip unknown rules.
    RulesMap::iterator content_rules_entry =
        content_rules_.find(std::make_pair(extension_id, id));
    if (content_rules_entry == content_rules_.end())
      continue;
    const ContentRule* rule = content_rules_entry->second.get();
    // Remove the ContentRule from active_rules_.
    for (auto& tab_rules_pair : active_rules_) {
      if (ContainsKey(tab_rules_pair.second, rule)) {
        ContentAction::ApplyInfo apply_info =
            {rule->extension, browser_context(), tab_rules_pair.first,
             rule->priority};
        for (const auto& action : rule->actions)
          action->Revert(apply_info);
        tab_rules_pair.second.erase(rule);
      }
    }
    rules_to_erase.push_back(content_rules_entry);
    predicate_groups_to_stop_tracking.push_back(rule);
  }
  // Notify the evaluators to stop tracking the predicates that will be removed.
  for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_)
    evaluator->StopTrackingPredicates(predicate_groups_to_stop_tracking);
  // Remove the rules.
  for (RulesMap::iterator it : rules_to_erase)
    content_rules_.erase(it);
  return std::string();
}
// Collects every rule id registered by |extension_id| and delegates to
// RemoveRulesImpl() for the actual removal.
std::string ChromeContentRulesRegistry::RemoveAllRulesImpl(
    const std::string& extension_id) {
  // Search all identifiers of rules that belong to extension |extension_id|.
  std::vector<std::string> rule_identifiers;
  for (const RulesMap::value_type& id_rule_pair : content_rules_) {
    const ExtensionIdRuleIdPair& extension_id_rule_id_pair = id_rule_pair.first;
    if (extension_id_rule_id_pair.first == extension_id)
      rule_identifiers.push_back(extension_id_rule_id_pair.second);
  }
  return RemoveRulesImpl(extension_id, rule_identifiers);
}
// Recomputes the matching rules for |tab| and diffs against the previously
// active set: newly matching rules get Apply(), still-matching rules get
// Reapply(), and rules that no longer match get Revert(). Finally records the
// new matching set as the tab's active rules.
void ChromeContentRulesRegistry::EvaluateConditionsForTab(
    content::WebContents* tab) {
  std::set<const ContentRule*> matching_rules = GetMatchingRules(tab);
  // Nothing to do if nothing matches and the tab was never monitored; the
  // early return also avoids creating an active_rules_ entry via operator[].
  if (matching_rules.empty() && !ContainsKey(active_rules_, tab))
    return;
  std::set<const ContentRule*>& prev_matching_rules = active_rules_[tab];
  for (const ContentRule* rule : matching_rules) {
    ContentAction::ApplyInfo apply_info =
        {rule->extension, browser_context(), tab, rule->priority};
    if (!ContainsKey(prev_matching_rules, rule)) {
      for (const scoped_ptr<const ContentAction>& action : rule->actions)
        action->Apply(apply_info);
    } else {
      for (const scoped_ptr<const ContentAction>& action : rule->actions)
        action->Reapply(apply_info);
    }
  }
  for (const ContentRule* rule : prev_matching_rules) {
    if (!ContainsKey(matching_rules, rule)) {
      ContentAction::ApplyInfo apply_info =
          {rule->extension, browser_context(), tab, rule->priority};
      for (const scoped_ptr<const ContentAction>& action : rule->actions)
        action->Revert(apply_info);
    }
  }
  // Store the new matching set; swap avoids copying when it is non-empty.
  if (matching_rules.empty())
    active_rules_[tab].clear();
  else
    swap(matching_rules, prev_matching_rules);
}
// Decides whether this registry should evaluate |extension|'s rules for an
// incognito renderer, based on the extension's incognito mode (spanning vs.
// split) and whether this registry itself is OffTheRecord.
bool
ChromeContentRulesRegistry::ShouldEvaluateExtensionRulesForIncognitoRenderer(
    const Extension* extension) const {
  if (!util::IsIncognitoEnabled(extension->id(), browser_context()))
    return false;
  // Split-mode incognito extensions register their rules with separate
  // RulesRegistries per Original/OffTheRecord browser contexts, whereas
  // spanning-mode extensions share the Original browser context.
  if (util::CanCrossIncognito(extension, browser_context())) {
    // The extension uses spanning mode incognito. No rules should have been
    // registered for the extension in the OffTheRecord registry so
    // execution for that registry should never reach this point.
    CHECK(!browser_context()->IsOffTheRecord());
  } else {
    // The extension uses split mode incognito. Both the Original and
    // OffTheRecord registries may have (separate) rules for this extension.
    // We are looking at an incognito renderer, so only the OffTheRecord
    // registry should process its rules.
    if (!browser_context()->IsOffTheRecord())
      return false;
  }
  return true;
}
// Test helper: total number of (tab, rule) active pairs across all tabs.
size_t ChromeContentRulesRegistry::GetActiveRulesCountForTesting() {
  size_t count = 0;
  for (const auto& web_contents_rules_pair : active_rules_)
    count += web_contents_rules_pair.second.size();
  return count;
}
ChromeContentRulesRegistry::~ChromeContentRulesRegistry() {
}
} // namespace extensions
| Workday/OpenFrame | chrome/browser/extensions/api/declarative_content/chrome_content_rules_registry.cc | C++ | bsd-3-clause | 16,123 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
# Minimal Django app set needed for South to introspect djangocms_inherit.
INSTALLED_APPS = [
    'django.contrib.contenttypes',
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    'mptt',
    'cms',
    'menus',
    'djangocms_inherit',
    'south',
]
# Throwaway in-memory database; migrations are only generated, never applied.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}
TEMPLATE_CONTEXT_PROCESSORS = [
    'django.core.context_processors.auth',
    'django.core.context_processors.i18n',
    'django.core.context_processors.request',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'cms.context_processors.media',
    'sekizai.context_processors.sekizai',
]
ROOT_URLCONF = 'cms.urls'
def schemamigration():
    # Turn ``schemamigration.py --initial`` into
    # ``manage.py schemamigration djangocms_inherit --initial`` and set up a
    # minimal in-memory Django environment for South to run against.
    from django.conf import settings
    from django.core.management import ManagementUtility
    settings.configure(
        INSTALLED_APPS=INSTALLED_APPS,
        ROOT_URLCONF=ROOT_URLCONF,
        DATABASES=DATABASES,
        TEMPLATE_CONTEXT_PROCESSORS=TEMPLATE_CONTEXT_PROCESSORS
    )
    # Splice the management sub-command and target app into the original CLI
    # arguments so any extra flags (e.g. --initial) are passed through.
    argv = list(sys.argv)
    argv.insert(1, 'schemamigration')
    argv.insert(2, 'djangocms_inherit')
    utility = ManagementUtility(argv)
    utility.execute()
if __name__ == "__main__":
    schemamigration()
| divio/djangocms-inherit | schemamigration.py | Python | bsd-3-clause | 1,455 |
/**
* Copyright (c) 2017, Gluon
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Gluon, any associated website, nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL GLUON BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.gluonhq.spring.motd.client.service;
import com.gluonhq.cloudlink.client.data.DataClient;
import com.gluonhq.cloudlink.client.data.DataClientBuilder;
import com.gluonhq.cloudlink.client.data.OperationMode;
import com.gluonhq.cloudlink.client.data.SyncFlag;
import com.gluonhq.connect.GluonObservableObject;
import com.gluonhq.connect.provider.DataProvider;
import javax.annotation.PostConstruct;
/**
 * Client-side service that retrieves the "Message of the Day" from Gluon
 * CloudLink using a cloud-first {@link DataClient}.
 */
public class Service {
    // Identifier of the CloudLink object that stores the message.
    private static final String MOTD = "spring-motd-v1";
    private DataClient dataClient;
    /**
     * Builds the data client after dependency injection; CLOUD_FIRST means
     * remote data is preferred over any locally cached copy.
     */
    @PostConstruct
    public void postConstruct() {
        dataClient = DataClientBuilder.create()
                .operationMode(OperationMode.CLOUD_FIRST)
                .build();
    }
    /**
     * Asynchronously retrieves the message of the day.
     *
     * @return an observable string that is seeded with a default message the
     *         first time the remote object turns out to be empty
     */
    public GluonObservableObject<String> retrieveMOTD() {
        GluonObservableObject<String> motd = DataProvider
                .retrieveObject(dataClient.createObjectDataReader(MOTD, String.class, SyncFlag.OBJECT_READ_THROUGH));
        motd.initializedProperty().addListener((obs, ov, nv) -> {
            // Once initialization completes with no stored value, publish a
            // default message so first-time users see something.
            if (nv && motd.get() == null) {
                motd.set("This is the first Message of the Day!");
            }
        });
        return motd;
    }
}
| erwin1/gluon-samples | spring-motd/client/src/main/java/com/gluonhq/spring/motd/client/service/Service.java | Java | bsd-3-clause | 2,745 |
# Copyright (c) 2016, the GPyOpt Authors
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from ...models import GPModel
import numpy as np
class CostModel(object):
    """
    Class to handle the cost of evaluating the function.

    param cost_withGradients: function that returns the cost of evaluating the function and its gradient. By default
    no cost is used. Options are:
        - cost_withGradients is some pre-defined cost function. Should return numpy array as outputs.
        - cost_withGradients = 'evaluation_time'.

    .. Note:: if cost_withGradients = 'evaluation time' the evaluation time of the function is used to model a GP whose
    mean is used as cost.
    """
    def __init__(self, cost_withGradients):
        super(CostModel, self).__init__()
        self.cost_type = cost_withGradients
        # --- Set-up evaluation cost
        if self.cost_type is None:
            # No cost supplied: fall back to the constant cost = 1 function.
            self.cost_withGradients = constant_cost_withGradients
            self.cost_type = 'Constant cost'
        elif self.cost_type == 'evaluation_time':
            # Model log-evaluation-time with a GP; predictions are
            # exponentiated back in _cost_gp/_cost_gp_withGradients.
            self.cost_model = GPModel()
            self.cost_withGradients = self._cost_gp_withGradients
            self.num_updates = 0
        else:
            # Any other callable is used as-is.
            self.cost_withGradients = cost_withGradients
            self.cost_type = 'User defined cost'
    def _cost_gp(self,x):
        """
        Predicts the time cost of evaluating the function at x.
        """
        # GP is fit on log(cost), so exponentiate the predicted mean.
        m, _, _, _ = self.cost_model.predict_withGradients(x)
        return np.exp(m)
    def _cost_gp_withGradients(self,x):
        """
        Predicts the time cost and its gradient of evaluating the function at x.
        """
        # d/dx exp(m(x)) = exp(m(x)) * dm/dx by the chain rule.
        m, _, dmdx, _= self.cost_model.predict_withGradients(x)
        return np.exp(m), np.exp(m)*dmdx
    def update_cost_model(self, x, cost_x):
        """
        Updates the GP used to handle the cost.

        param x: input of the GP for the cost model.
        param x_cost: values of the time cost at the input locations.
        """
        # Only the 'evaluation_time' mode maintains a GP; other cost types
        # are static and need no updating.
        if self.cost_type == 'evaluation_time':
            cost_evals = np.log(np.atleast_2d(np.asarray(cost_x)).T)
            if self.num_updates == 0:
                X_all = x
                costs_all = cost_evals
            else:
                # Append the new observations to the data already in the GP.
                X_all = np.vstack((self.cost_model.model.X,x))
                costs_all = np.vstack((self.cost_model.model.Y,cost_evals))
            self.num_updates += 1
            self.cost_model.updateModel(X_all, costs_all, None, None)
def constant_cost_withGradients(x):
    """
    Default cost function: every evaluation costs 1 and the gradient of the
    cost with respect to the inputs is identically zero.

    param x: 2d numpy array of input locations (one row per point).
    returns: tuple (costs, gradients) where costs has shape (n, 1) filled
        with ones and gradients has the same shape as x, filled with zeros.
    """
    n_points = x.shape[0]
    unit_costs = np.ones((n_points, 1))
    zero_gradients = np.zeros(x.shape)
    return unit_costs, zero_gradients
| SheffieldML/GPyOpt | GPyOpt/core/task/cost.py | Python | bsd-3-clause | 2,686 |
<?php
namespace EstimateOption\Controller\Factory;
use Zend\ServiceManager\ServiceLocatorInterface;
use EstimateOption\Controller\DeleteController;
class DeleteControllerFactory
{
    /**
     * Builds a DeleteController wired with the four services it depends on.
     *
     * @param ServiceLocatorInterface $serviceLocator controller manager locator
     * @return \EstimateOption\Controller\DeleteController
     */
    public function __invoke(ServiceLocatorInterface $serviceLocator)
    {
        $parentLocator = $serviceLocator->getServiceLocator();

        return new DeleteController(
            $parentLocator->get('Client\Service\ClientServiceInterface'),
            $parentLocator->get('Estimate\Service\EstimateServiceInterface'),
            $parentLocator->get('EstimateOption\Service\OptionServiceInterface'),
            $parentLocator->get('EstimateOptionItem\Service\ItemServiceInterface')
        );
    }
}
| pacificnm/pnm | module/EstimateOption/src/EstimateOption/Controller/Factory/DeleteControllerFactory.php | PHP | bsd-3-clause | 1,019 |
<?php
namespace lukisongroup\purchasing\models\warehouse;
use Yii;
use yii\base\Model;
use yii\data\ActiveDataProvider;
use lukisongroup\purchasing\models\warehouse\HeaderDetailRelease;
/**
* HeaderDetailReleaseSearch represents the model behind the search form about `lukisongroup\purchasing\models\warehouse\HeaderDetailRelease`.
*/
class HeaderDetailReleaseSearch extends HeaderDetailRelease
{
    /**
     * Validation rules for the search form: ID is integer, monetary and
     * quantity fields are numeric, everything else is accepted as-is.
     *
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['ID'], 'integer'],
            [['TYPE','TGL', 'KD_SJ', 'KD_SO', 'KD_INVOICE', 'KD_FP', 'ETD', 'ETA', 'KD_BARANG', 'NM_BARANG', 'NOTE', 'CREATE_BY', 'CREATE_AT', 'UPDATE_BY', 'UPDATE_AT'], 'safe'],
            [['QTY_UNIT', 'QTY_PCS', 'HARGA', 'DISCOUNT', 'PAJAK', 'DELIVERY_COST'], 'number'],
        ];
    }
    /**
     * @inheritdoc
     */
    public function scenarios()
    {
        // bypass scenarios() implementation in the parent class
        return Model::scenarios();
    }
    /**
     * Creates data provider instance with search query applied.
     * Exact-match filters are applied to numeric/date columns and
     * LIKE filters to free-text columns.
     *
     * @param array $params
     *
     * @return ActiveDataProvider
     */
    public function search($params)
    {
        $query = HeaderDetailRelease::find();
        // add conditions that should always apply here
        $dataProvider = new ActiveDataProvider([
            'query' => $query,
        ]);
        $this->load($params);
        if (!$this->validate()) {
            // uncomment the following line if you do not want to return any records when validation fails
            // $query->where('0=1');
            return $dataProvider;
        }
        // grid filtering conditions
        $query->andFilterWhere([
            'ID' => $this->ID,
            'TGL' => $this->TGL,
            'ETD' => $this->ETD,
            'ETA' => $this->ETA,
            'QTY_UNIT' => $this->QTY_UNIT,
            'QTY_PCS' => $this->QTY_PCS,
            'HARGA' => $this->HARGA,
            'DISCOUNT' => $this->DISCOUNT,
            'PAJAK' => $this->PAJAK,
            'DELIVERY_COST' => $this->DELIVERY_COST,
            'CREATE_AT' => $this->CREATE_AT,
            'UPDATE_AT' => $this->UPDATE_AT,
        ]);
        $query->andFilterWhere(['like', 'KD_SJ', $this->KD_SJ])
            ->andFilterWhere(['like', 'KD_SO', $this->KD_SO])
            ->andFilterWhere(['like', 'KD_INVOICE', $this->KD_INVOICE])
            ->andFilterWhere(['like', 'KD_FP', $this->KD_FP])
            ->andFilterWhere(['like', 'KD_BARANG', $this->KD_BARANG])
            ->andFilterWhere(['like', 'NM_BARANG', $this->NM_BARANG])
            ->andFilterWhere(['like', 'NOTE', $this->NOTE])
            ->andFilterWhere(['like', 'CREATE_BY', $this->CREATE_BY])
            ->andFilterWhere(['like', 'UPDATE_BY', $this->UPDATE_BY]);
        return $dataProvider;
    }
}
| adem-team/advanced | lukisongroup/purchasing/models/warehouse/HeaderDetailReleaseSearch.php | PHP | bsd-3-clause | 2,840 |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2015, MuChu Hsu
Contributed by Muchu Hsu (muchu1983@gmail.com)
This file is part of BSD license
<https://opensource.org/licenses/BSD-3-Clause>
"""
from selenium import webdriver
import os
import time
import logging
import re
import random
from cameo.utility import Utility
from cameo.localdb import LocalDbForTECHORANGE
"""
抓取 科技報橘 html 存放到 source_html
"""
class SpiderForTECHORANGE:
    # Constructor: set up folder paths, the sub-command dispatch table and the
    # utility/DB helpers. The selenium driver is created lazily.
    def __init__(self):
        self.SOURCE_HTML_BASE_FOLDER_PATH = u"cameo_res\\source_html"
        self.PARSED_RESULT_BASE_FOLDER_PATH = u"cameo_res\\parsed_result"
        self.strWebsiteDomain = u"http://buzzorange.com/techorange"
        self.dicSubCommandHandler = {
            "index":self.downloadIndexPage,
            "tag":self.downloadTagPag,
            "news":self.downloadNewsPage
        }
        self.utility = Utility()
        self.db = LocalDbForTECHORANGE()
        self.driver = None
    # Return the spider's usage message.
    def getUseageMessage(self):
        return ("- TECHORANGE -\n"
                "useage:\n"
                "index - download entry page of TECHORANGE \n"
                "tag - download not obtained tag page \n"
                "news [tag] - download not obtained news [of given tag] \n")
    # Create a selenium Chrome driver object.
    def getDriver(self):
        chromeDriverExeFilePath = "cameo_res\\chromedriver.exe"
        driver = webdriver.Chrome(chromeDriverExeFilePath)
        return driver
    # Initialize the selenium driver object (no-op if already created).
    def initDriver(self):
        if self.driver is None:
            self.driver = self.getDriver()
    # Quit and discard the selenium driver object.
    def quitDriver(self):
        self.driver.quit()
        self.driver = None
    # Run the spider: dispatch lstSubcommand[0] through dicSubCommandHandler,
    # passing the optional second element as its argument.
    def runSpider(self, lstSubcommand=None):
        strSubcommand = lstSubcommand[0]
        strArg1 = None
        if len(lstSubcommand) == 2:
            strArg1 = lstSubcommand[1]
        self.initDriver() #init selenium driver
        self.dicSubCommandHandler[strSubcommand](strArg1)
        self.quitDriver() #quit selenium driver
    # Download the index page to source_html/TECHORANGE/index.html.
    def downloadIndexPage(self, uselessArg1=None):
        logging.info("download index page")
        strIndexHtmlFolderPath = self.SOURCE_HTML_BASE_FOLDER_PATH + u"\\TECHORANGE"
        if not os.path.exists(strIndexHtmlFolderPath):
            os.mkdir(strIndexHtmlFolderPath) #mkdir source_html/TECHORANGE/
        # TECHORANGE front page
        self.driver.get("https://buzzorange.com/techorange/")
        # save the html
        strIndexHtmlFilePath = strIndexHtmlFolderPath + u"\\index.html"
        self.utility.overwriteSaveAs(strFilePath=strIndexHtmlFilePath, unicodeData=self.driver.page_source)
    # Download all tag pages that the local DB marks as not yet obtained,
    # following "next page" links, and mark each tag as obtained when done.
    def downloadTagPag(self, uselessArg1=None):
        logging.info("download tag page")
        strTagHtmlFolderPath = self.SOURCE_HTML_BASE_FOLDER_PATH + u"\\TECHORANGE\\tag"
        if not os.path.exists(strTagHtmlFolderPath):
            os.mkdir(strTagHtmlFolderPath) #mkdir source_html/TECHORANGE/tag/
        strTagWebsiteDomain = self.strWebsiteDomain + u"/tag"
        # fetch the tag names not yet obtained from the DB
        lstStrNotObtainedTagName = self.db.fetchallNotObtainedTagName()
        for strNotObtainedTagName in lstStrNotObtainedTagName:
            # skip tags whose name is too long
            if len(strNotObtainedTagName) > 60:
                continue
            strTagUrl = strTagWebsiteDomain + u"/" + strNotObtainedTagName
            # page 0 of the tag
            intPageNum = 0
            time.sleep(random.randint(2,5)) #sleep random time
            self.driver.get(strTagUrl)
            # save the html
            strTagHtmlFilePath = strTagHtmlFolderPath + u"\\%d_%s_tag.html"%(intPageNum, strNotObtainedTagName)
            self.utility.overwriteSaveAs(strFilePath=strTagHtmlFilePath, unicodeData=self.driver.page_source)
            # next page of the tag
            elesNextPageA = self.driver.find_elements_by_css_selector("div.nav-links a.next.page-numbers")
            while len(elesNextPageA) != 0:
                time.sleep(random.randint(2,5)) #sleep random time
                intPageNum = intPageNum+1
                strTagUrl = elesNextPageA[0].get_attribute("href")
                self.driver.get(strTagUrl)
                # save the html
                strTagHtmlFilePath = strTagHtmlFolderPath + u"\\%d_%s_tag.html"%(intPageNum, strNotObtainedTagName)
                self.utility.overwriteSaveAs(strFilePath=strTagHtmlFilePath, unicodeData=self.driver.page_source)
                # look for yet another next page
                elesNextPageA = self.driver.find_elements_by_css_selector("div.nav-links a.next.page-numbers")
            # mark the tag as obtained in the DB (isGot = 1)
            self.db.updateTagStatusIsGot(strTagName=strNotObtainedTagName)
            logging.info("got tag %s"%strNotObtainedTagName)
    # Cap a string at 128 characters (127 chars + "_" marker when truncated).
    def limitStrLessThen128Char(self, strStr=None):
        if len(strStr) > 128:
            logging.info("limit str less then 128 char")
            return strStr[:127] + u"_"
        else:
            return strStr
    # Download news pages. When strTagName is None, iterate over all tags whose
    # download completed. NOTE(review): if the tag parser has not run yet, no
    # news URLs exist in the DB even for completed tags, so nothing downloads.
    def downloadNewsPage(self, strTagName=None):
        if strTagName is None:
            # no tag given: process every completely obtained tag
            lstStrObtainedTagName = self.db.fetchallCompletedObtainedTagName()
            for strObtainedTagName in lstStrObtainedTagName:
                self.downloadNewsPageWithGivenTagName(strTagName=strObtainedTagName)
        else:
            # a tag name was given
            self.downloadNewsPageWithGivenTagName(strTagName=strTagName)
    # Download all not-yet-obtained news pages recorded for the given tag,
    # logging the time every 10 downloads and marking each URL as obtained.
    def downloadNewsPageWithGivenTagName(self, strTagName=None):
        logging.info("download news page with tag %s"%strTagName)
        strNewsHtmlFolderPath = self.SOURCE_HTML_BASE_FOLDER_PATH + u"\\TECHORANGE\\news"
        if not os.path.exists(strNewsHtmlFolderPath):
            os.mkdir(strNewsHtmlFolderPath) #mkdir source_html/TECHORANGE/news/
        # fetch the news urls recorded in the DB for strTagName
        lstStrNewsUrl = self.db.fetchallNewsUrlByTagName(strTagName=strTagName)
        intDownloadedNewsCount = 0 # number of news pages downloaded so far
        timeStart = time.time() # timing start point
        timeEnd = None # timing end point
        for strNewsUrl in lstStrNewsUrl:
            # skip urls already downloaded
            if not self.db.checkNewsIsGot(strNewsUrl=strNewsUrl):
                if intDownloadedNewsCount%10 == 0: # log time per 10 news pages
                    timeEnd = time.time()
                    timeCost = timeEnd - timeStart
                    logging.info("download 10 news cost %f sec"%timeCost)
                    timeStart = timeEnd
                intDownloadedNewsCount = intDownloadedNewsCount+1
                time.sleep(random.randint(2,5)) #sleep random time
                self.driver.get(strNewsUrl)
                # save the html; file name is the url slug, capped at 128 chars
                strNewsName = re.match("^https://buzzorange.com/techorange/[\d]{4}/[\d]{2}/[\d]{2}/(.*)/$", strNewsUrl).group(1)
                strNewsName = self.limitStrLessThen128Char(strStr=strNewsName)
                strNewsHtmlFilePath = strNewsHtmlFolderPath + u"\\%s_news.html"%strNewsName
                self.utility.overwriteSaveAs(strFilePath=strNewsHtmlFilePath, unicodeData=self.driver.page_source)
                # mark the news url as obtained in the DB (isGot = 1)
                self.db.updateNewsStatusIsGot(strNewsUrl=strNewsUrl)
| muchu1983/104_cameo | cameo/spiderForTECHORANGE.py | Python | bsd-3-clause | 7,806 |
<?php
// Demo: materialise one million integers eagerly to show the memory cost of
// a full array (compare with a generator-based range, which stays constant).
$data = range(0, 1000000);
echo sprintf('%02.2f', (memory_get_usage() / 1048576))." MB of memory used\n";
// output: 137.92 MB of memory used
foreach ($data as $key => $val) {
    //echo "key: ".$key." value: ".$val."\n";
}
| nbari/my-sandbox | php/generators/practical_error.php | PHP | bsd-3-clause | 234 |
<?php
/**
* Copyright (c) 2016-present Ganbaro Digital Ltd
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* * Neither the names of the copyright holders nor the names of his
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* @category Libraries
* @package HttpStatus/StatusProviders
* @author Stuart Herbert <stuherbert@ganbarodigital.com>
* @copyright 2016-present Ganbaro Digital Ltd www.ganbarodigital.com
* @license http://www.opensource.org/licenses/bsd-license.php BSD License
* @link http://ganbarodigital.github.io/php-http-status
*/
namespace GanbaroDigitalTest\HttpStatus\StatusProviders\RequestError;
use GanbaroDigital\HttpStatus\Interfaces\HttpStatusProvider;
use GanbaroDigital\HttpStatus\StatusProviders\RequestError\UnsupportedMediaTypeStatusProvider;
use GanbaroDigital\HttpStatus\StatusValues\RequestError\UnsupportedMediaTypeStatus;
use PHPUnit_Framework_TestCase;
/**
* @coversDefaultClass GanbaroDigital\HttpStatus\StatusProviders\RequestError\UnsupportedMediaTypeStatusProvider
*/
class UnsupportedMediaTypeStatusProviderTest extends PHPUnit_Framework_TestCase
{
    /**
     * Sanity check: the helper class that mixes in the trait under test can
     * be instantiated and really uses the trait.
     *
     * @coversNothing
     */
    public function testCanInstantiateClassThatUsesThisTrait()
    {
        // ----------------------------------------------------------------
        // setup your test

        // ----------------------------------------------------------------
        // perform the change
        $unit = new UnsupportedMediaTypeStatusProviderTestHelper;

        // ----------------------------------------------------------------
        // test the results

        // make sure the class could instantiate
        $this->assertInstanceOf(UnsupportedMediaTypeStatusProviderTestHelper::class, $unit);

        // make sure our test helper does use the trait we're trying to test
        $traits = class_uses($unit);
        $this->assertArrayHasKey(UnsupportedMediaTypeStatusProvider::class, $traits);
    }
    /**
     * The trait's getHttpStatus() must return an UnsupportedMediaTypeStatus
     * (HTTP 415) value object.
     *
     * @covers ::getHttpStatus
     */
    public function testReturnsUnsupportedMediaTypeStatus()
    {
        // ----------------------------------------------------------------
        // setup your test
        $expectedType = UnsupportedMediaTypeStatus::class;
        $unit = new UnsupportedMediaTypeStatusProviderTestHelper;

        // ----------------------------------------------------------------
        // perform the change
        $actualType = $unit->getHttpStatus();

        // ----------------------------------------------------------------
        // test the results
        $this->assertInstanceOf($expectedType, $actualType);
    }
}
// Minimal concrete host for the trait under test; has no behaviour of its own.
class UnsupportedMediaTypeStatusProviderTestHelper
{
    use UnsupportedMediaTypeStatusProvider;
}
| ganbarodigital/php-http-status | tests/StatusProviders/RequestError/UnsupportedMediaTypeStatusProviderTest.php | PHP | bsd-3-clause | 4,126 |
package de.intarsys.tools.locator;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import de.intarsys.tools.randomaccess.IRandomAccess;
import de.intarsys.tools.stream.StreamTools;
/**
* ! not yet functional !
*
* An {@link ILocator} into a zip file.
*
*/
public class ZipFileLocator extends CommonLocator {
	// Wraps the locator pointing at the zip archive and lazily reads its
	// entry list once, caching it for subsequent lookups.
	static class ZipFile {
		final private ILocator zipLocator;
		// Lazily-built entry cache; null until first requested.
		private List<ZipEntry> entries;
		public ZipFile(ILocator zipLocator) {
			super();
			this.zipLocator = zipLocator;
		}
		// Streams through the archive once and collects every entry.
		protected List<ZipEntry> createEntries() throws IOException {
			List<ZipEntry> tempEntries = new ArrayList<ZipEntry>();
			InputStream is = zipLocator.getInputStream();
			try {
				ZipInputStream zis = new ZipInputStream(is);
				ZipEntry entry;
				while ((entry = zis.getNextEntry()) != null) {
					tempEntries.add(entry);
				}
			} finally {
				StreamTools.close(is);
			}
			return tempEntries;
		}
		// Returns the cached entry list, building it on first access.
		synchronized protected List<ZipEntry> getEntries() throws IOException {
			if (entries == null) {
				entries = createEntries();
			}
			return entries;
		}
	}
final private ZipFile zipFile;
final private String path;
public ZipFileLocator(ILocator zipLocator, String path) {
super();
this.zipFile = new ZipFile(zipLocator);
this.path = path;
}
protected ZipFileLocator(ZipFile zipFile, String path) {
super();
this.zipFile = zipFile;
this.path = path;
}
public boolean exists() {
return false;
}
protected ZipEntry findEntry(String tempPath) throws IOException {
for (ZipEntry entry : zipFile.getEntries()) {
if (entry.getName().equals(path)) {
return entry;
}
}
return null;
}
public ILocator getChild(String name) {
String tempPath = path + "/" + name;
return new ZipFileLocator(this, tempPath);
}
public String getFullName() {
return null;
}
public InputStream getInputStream() throws IOException {
return null;
}
public String getLocalName() {
return null;
}
public OutputStream getOutputStream() throws IOException {
return null;
}
public ILocator getParent() {
return null;
}
public IRandomAccess getRandomAccess() throws IOException {
return null;
}
public Reader getReader() throws IOException {
return null;
}
public Reader getReader(String encoding) throws IOException {
return null;
}
public String getType() {
return null;
}
public String getTypedName() {
return null;
}
public Writer getWriter() throws IOException {
return null;
}
public Writer getWriter(String encoding) throws IOException {
return null;
}
public boolean isDirectory() {
return false;
}
public boolean isOutOfSynch() {
return false;
}
public ILocator[] listLocators(ILocatorNameFilter filter)
throws IOException {
return null;
}
public void synch() {
}
public URL toURL() {
return null;
}
}
| intarsys/runtime | src/de/intarsys/tools/locator/ZipFileLocator.java | Java | bsd-3-clause | 3,058 |
import itertools
import os
import re
from abc import ABC, abstractmethod
from glob import glob
from pathlib import Path
import numpy as np
import torch
from PIL import Image
from ..io.image import _read_png_16
from .utils import verify_str_arg
from .vision import VisionDataset
__all__ = (
"KittiFlow",
"Sintel",
"FlyingThings3D",
"FlyingChairs",
"HD1K",
)
class FlowDataset(ABC, VisionDataset):
    # Some datasets (e.g. Kitti) ship a built-in valid_flow_mask telling which
    # flow values are valid. Those return (img1, img2, flow, valid_flow_mask);
    # the rest return (img1, img2, flow) and leave the mask up to the consumer.
    _has_builtin_flow_mask = False

    def __init__(self, root, transforms=None):
        super().__init__(root=root)
        self.transforms = transforms
        self._flow_list = []
        self._image_list = []

    def _read_img(self, file_name):
        # Always hand back an RGB image, converting when necessary.
        image = Image.open(file_name)
        return image if image.mode == "RGB" else image.convert("RGB")

    @abstractmethod
    def _read_flow(self, file_name):
        # Return the flow, or (flow, valid_flow_mask) when _has_builtin_flow_mask is True
        pass

    def __getitem__(self, index):
        first_path, second_path = self._image_list[index]
        img1 = self._read_img(first_path)
        img2 = self._read_img(second_path)

        flow = valid_flow_mask = None
        if self._flow_list:  # empty for the "test" split of some datasets
            flow = self._read_flow(self._flow_list[index])
            if self._has_builtin_flow_mask:
                flow, valid_flow_mask = flow

        if self.transforms is not None:
            img1, img2, flow, valid_flow_mask = self.transforms(img1, img2, flow, valid_flow_mask)

        if self._has_builtin_flow_mask or valid_flow_mask is not None:
            # the mask may also have been generated inside the transforms
            return img1, img2, flow, valid_flow_mask
        return img1, img2, flow

    def __len__(self):
        return len(self._image_list)

    def __rmul__(self, v):
        return torch.utils.data.ConcatDataset([self] * v)
class Sintel(FlowDataset):
    """`Sintel <http://sintel.is.tue.mpg.de/>`_ Dataset for optical flow.
    The dataset is expected to have the following structure: ::
        root
            Sintel
                testing
                    clean
                        scene_1
                        scene_2
                        ...
                    final
                        scene_1
                        scene_2
                        ...
                training
                    clean
                        scene_1
                        scene_2
                        ...
                    final
                        scene_1
                        scene_2
                        ...
                    flow
                        scene_1
                        scene_2
                        ...
    Args:
        root (string): Root directory of the Sintel Dataset.
        split (string, optional): The dataset split, either "train" (default) or "test"
        pass_name (string, optional): The pass to use, either "clean" (default), "final", or "both". See link above for
            details on the different passes.
        transforms (callable, optional): A function/transform that takes in
            ``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
            ``valid_flow_mask`` is expected for consistency with other datasets which
            return a built-in valid mask, such as :class:`~torchvision.datasets.KittiFlow`.
    """
    def __init__(self, root, split="train", pass_name="clean", transforms=None):
        super().__init__(root=root, transforms=transforms)
        verify_str_arg(split, "split", valid_values=("train", "test"))
        verify_str_arg(pass_name, "pass_name", valid_values=("clean", "final", "both"))
        passes = ["clean", "final"] if pass_name == "both" else [pass_name]
        root = Path(root) / "Sintel"
        flow_root = root / "training" / "flow"
        for pass_name in passes:  # NOTE: shadows the pass_name argument from here on
            split_dir = "training" if split == "train" else split
            image_root = root / split_dir / pass_name
            for scene in os.listdir(image_root):
                image_list = sorted(glob(str(image_root / scene / "*.png")))
                # consecutive frames of a scene form the (img1, img2) pairs
                for i in range(len(image_list) - 1):
                    self._image_list += [[image_list[i], image_list[i + 1]]]
                # ground-truth .flo files only exist for the training split
                if split == "train":
                    self._flow_list += sorted(glob(str(flow_root / scene / "*.flo")))
    def __getitem__(self, index):
        """Return example at given index.
        Args:
            index(int): The index of the example to retrieve
        Returns:
            tuple: A 3-tuple with ``(img1, img2, flow)``.
            The flow is a numpy array of shape (2, H, W) and the images are PIL images.
            ``flow`` is None if ``split="test"``.
            If a valid flow mask is generated within the ``transforms`` parameter,
            a 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` is returned.
        """
        return super().__getitem__(index)
    def _read_flow(self, file_name):
        # Sintel ground truth is stored as Middlebury .flo files
        return _read_flo(file_name)
class KittiFlow(FlowDataset):
    """`KITTI <http://www.cvlibs.net/datasets/kitti/eval_scene_flow.php?benchmark=flow>`__ dataset for optical flow (2015).
    The dataset is expected to have the following structure: ::
        root
            KittiFlow
                testing
                    image_2
                training
                    image_2
                    flow_occ
    Args:
        root (string): Root directory of the KittiFlow Dataset.
        split (string, optional): The dataset split, either "train" (default) or "test"
        transforms (callable, optional): A function/transform that takes in
            ``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
    """
    _has_builtin_flow_mask = True
    def __init__(self, root, split="train", transforms=None):
        super().__init__(root=root, transforms=transforms)
        verify_str_arg(split, "split", valid_values=("train", "test"))
        root = Path(root) / "KittiFlow" / (split + "ing")
        # each example pairs the *_10.png frame with the matching *_11.png frame
        images1 = sorted(glob(str(root / "image_2" / "*_10.png")))
        images2 = sorted(glob(str(root / "image_2" / "*_11.png")))
        if not images1 or not images2:
            raise FileNotFoundError(
                "Could not find the Kitti flow images. Please make sure the directory structure is correct."
            )
        for img1, img2 in zip(images1, images2):
            self._image_list += [[img1, img2]]
        if split == "train":
            # ground truth is named after the first frame of each pair
            self._flow_list = sorted(glob(str(root / "flow_occ" / "*_10.png")))
    def __getitem__(self, index):
        """Return example at given index.
        Args:
            index(int): The index of the example to retrieve
        Returns:
            tuple: A 4-tuple with ``(img1, img2, flow, valid_flow_mask)``
            where ``valid_flow_mask`` is a numpy boolean mask of shape (H, W)
            indicating which flow values are valid. The flow is a numpy array of
            shape (2, H, W) and the images are PIL images. ``flow`` and ``valid_flow_mask`` are None if
            ``split="test"``.
        """
        return super().__getitem__(index)
    def _read_flow(self, file_name):
        # 16-bit PNG encoding: channels 0-1 carry the flow, channel 2 the valid mask
        return _read_16bits_png_with_flow_and_valid_mask(file_name)
class FlyingChairs(FlowDataset):
    """`FlyingChairs <https://lmb.informatik.uni-freiburg.de/resources/datasets/FlyingChairs.en.html#flyingchairs>`_ Dataset for optical flow.
    You will also need to download the FlyingChairs_train_val.txt file from the dataset page.
    The dataset is expected to have the following structure: ::
        root
            FlyingChairs
                data
                    00001_flow.flo
                    00001_img1.ppm
                    00001_img2.ppm
                    ...
                FlyingChairs_train_val.txt
    Args:
        root (string): Root directory of the FlyingChairs Dataset.
        split (string, optional): The dataset split, either "train" (default) or "val"
        transforms (callable, optional): A function/transform that takes in
            ``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
            ``valid_flow_mask`` is expected for consistency with other datasets which
            return a built-in valid mask, such as :class:`~torchvision.datasets.KittiFlow`.
    """
    def __init__(self, root, split="train", transforms=None):
        super().__init__(root=root, transforms=transforms)
        verify_str_arg(split, "split", valid_values=("train", "val"))
        root = Path(root) / "FlyingChairs"
        images = sorted(glob(str(root / "data" / "*.ppm")))
        flows = sorted(glob(str(root / "data" / "*.flo")))
        split_file_name = "FlyingChairs_train_val.txt"
        if not os.path.exists(root / split_file_name):
            raise FileNotFoundError(
                "The FlyingChairs_train_val.txt file was not found - please download it from the dataset page (see docstring)."
            )
        # one label per sample: 1 selects "train", 2 selects "val"
        split_list = np.loadtxt(str(root / split_file_name), dtype=np.int32)
        for i in range(len(flows)):
            split_id = split_list[i]
            if (split == "train" and split_id == 1) or (split == "val" and split_id == 2):
                self._flow_list += [flows[i]]
                # sample i uses images 2i (img1) and 2i+1 (img2)
                self._image_list += [[images[2 * i], images[2 * i + 1]]]
    def __getitem__(self, index):
        """Return example at given index.
        Args:
            index(int): The index of the example to retrieve
        Returns:
            tuple: A 3-tuple with ``(img1, img2, flow)``.
            The flow is a numpy array of shape (2, H, W) and the images are PIL images.
            Ground-truth flow is collected for both the "train" and "val" splits
            (see ``__init__``), so ``flow`` is not None.
            If a valid flow mask is generated within the ``transforms`` parameter,
            a 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` is returned.
        """
        return super().__getitem__(index)
    def _read_flow(self, file_name):
        return _read_flo(file_name)
class FlyingThings3D(FlowDataset):
    """`FlyingThings3D <https://lmb.informatik.uni-freiburg.de/resources/datasets/SceneFlowDatasets.en.html>`_ dataset for optical flow.
    The dataset is expected to have the following structure: ::
        root
            FlyingThings3D
                frames_cleanpass
                    TEST
                    TRAIN
                frames_finalpass
                    TEST
                    TRAIN
                optical_flow
                    TEST
                    TRAIN
    Args:
        root (string): Root directory of the intel FlyingThings3D Dataset.
        split (string, optional): The dataset split, either "train" (default) or "test"
        pass_name (string, optional): The pass to use, either "clean" (default) or "final" or "both". See link above for
            details on the different passes.
        camera (string, optional): Which camera to return images from. Can be either "left" (default) or "right" or "both".
        transforms (callable, optional): A function/transform that takes in
            ``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
            ``valid_flow_mask`` is expected for consistency with other datasets which
            return a built-in valid mask, such as :class:`~torchvision.datasets.KittiFlow`.
    """
    def __init__(self, root, split="train", pass_name="clean", camera="left", transforms=None):
        super().__init__(root=root, transforms=transforms)
        verify_str_arg(split, "split", valid_values=("train", "test"))
        split = split.upper()
        verify_str_arg(pass_name, "pass_name", valid_values=("clean", "final", "both"))
        passes = {
            "clean": ["frames_cleanpass"],
            "final": ["frames_finalpass"],
            "both": ["frames_cleanpass", "frames_finalpass"],
        }[pass_name]
        verify_str_arg(camera, "camera", valid_values=("left", "right", "both"))
        cameras = ["left", "right"] if camera == "both" else [camera]
        root = Path(root) / "FlyingThings3D"
        # flow is provided both forward ("into_future") and backward ("into_past") in time
        directions = ("into_future", "into_past")
        for pass_name, camera, direction in itertools.product(passes, cameras, directions):
            image_dirs = sorted(glob(str(root / pass_name / split / "*/*")))
            image_dirs = sorted(Path(image_dir) / camera for image_dir in image_dirs)
            flow_dirs = sorted(glob(str(root / "optical_flow" / split / "*/*")))
            flow_dirs = sorted(Path(flow_dir) / direction / camera for flow_dir in flow_dirs)
            if not image_dirs or not flow_dirs:
                raise FileNotFoundError(
                    "Could not find the FlyingThings3D flow images. "
                    "Please make sure the directory structure is correct."
                )
            for image_dir, flow_dir in zip(image_dirs, flow_dirs):
                images = sorted(glob(str(image_dir / "*.png")))
                flows = sorted(glob(str(flow_dir / "*.pfm")))
                for i in range(len(flows) - 1):
                    if direction == "into_future":
                        # forward flow: frame i -> frame i + 1
                        self._image_list += [[images[i], images[i + 1]]]
                        self._flow_list += [flows[i]]
                    elif direction == "into_past":
                        # backward flow: frame i + 1 -> frame i
                        self._image_list += [[images[i + 1], images[i]]]
                        self._flow_list += [flows[i + 1]]
    def __getitem__(self, index):
        """Return example at given index.
        Args:
            index(int): The index of the example to retrieve
        Returns:
            tuple: A 3-tuple with ``(img1, img2, flow)``.
            The flow is a numpy array of shape (2, H, W) and the images are PIL images.
            ``flow`` is None if ``split="test"``.
            If a valid flow mask is generated within the ``transforms`` parameter,
            a 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` is returned.
        """
        return super().__getitem__(index)
    def _read_flow(self, file_name):
        return _read_pfm(file_name)
class HD1K(FlowDataset):
    """`HD1K <http://hci-benchmark.iwr.uni-heidelberg.de/>`__ dataset for optical flow.
    The dataset is expected to have the following structure: ::
        root
            hd1k
                hd1k_challenge
                    image_2
                hd1k_flow_gt
                    flow_occ
                hd1k_input
                    image_2
    Args:
        root (string): Root directory of the HD1K Dataset.
        split (string, optional): The dataset split, either "train" (default) or "test"
        transforms (callable, optional): A function/transform that takes in
            ``img1, img2, flow, valid_flow_mask`` and returns a transformed version.
    """
    _has_builtin_flow_mask = True
    def __init__(self, root, split="train", transforms=None):
        super().__init__(root=root, transforms=transforms)
        verify_str_arg(split, "split", valid_values=("train", "test"))
        root = Path(root) / "hd1k"
        if split == "train":
            # There are 36 "sequences" and we don't want seq i to overlap with seq i + 1, so we need this for loop
            for seq_idx in range(36):
                flows = sorted(glob(str(root / "hd1k_flow_gt" / "flow_occ" / f"{seq_idx:06d}_*.png")))
                images = sorted(glob(str(root / "hd1k_input" / "image_2" / f"{seq_idx:06d}_*.png")))
                for i in range(len(flows) - 1):
                    self._flow_list += [flows[i]]
                    self._image_list += [[images[i], images[i + 1]]]
        else:
            # test pairs are the *10.png / *11.png frames of the challenge set
            images1 = sorted(glob(str(root / "hd1k_challenge" / "image_2" / "*10.png")))
            images2 = sorted(glob(str(root / "hd1k_challenge" / "image_2" / "*11.png")))
            for image1, image2 in zip(images1, images2):
                self._image_list += [[image1, image2]]
        if not self._image_list:
            raise FileNotFoundError(
                "Could not find the HD1K images. Please make sure the directory structure is correct."
            )
    def _read_flow(self, file_name):
        # same 16-bit PNG encoding as KittiFlow (flow channels + validity channel)
        return _read_16bits_png_with_flow_and_valid_mask(file_name)
    def __getitem__(self, index):
        """Return example at given index.
        Args:
            index(int): The index of the example to retrieve
        Returns:
            tuple: A 4-tuple with ``(img1, img2, flow, valid_flow_mask)`` where ``valid_flow_mask``
            is a numpy boolean mask of shape (H, W)
            indicating which flow values are valid. The flow is a numpy array of
            shape (2, H, W) and the images are PIL images. ``flow`` and ``valid_flow_mask`` are None if
            ``split="test"``.
        """
        return super().__getitem__(index)
def _read_flo(file_name):
"""Read .flo file in Middlebury format"""
# Code adapted from:
# http://stackoverflow.com/questions/28013200/reading-middlebury-flow-files-with-python-bytes-array-numpy
# Everything needs to be in little Endian according to
# https://vision.middlebury.edu/flow/code/flow-code/README.txt
with open(file_name, "rb") as f:
magic = np.fromfile(f, "c", count=4).tobytes()
if magic != b"PIEH":
raise ValueError("Magic number incorrect. Invalid .flo file")
w = int(np.fromfile(f, "<i4", count=1))
h = int(np.fromfile(f, "<i4", count=1))
data = np.fromfile(f, "<f4", count=2 * w * h)
return data.reshape(h, w, 2).transpose(2, 0, 1)
def _read_16bits_png_with_flow_and_valid_mask(file_name):
    # Kitti/HD1K-style encoding in a 16-bit PNG: channels 0-1 hold the flow,
    # channel 2 is treated as the per-pixel validity mask.
    flow_and_valid = _read_png_16(file_name).to(torch.float32)
    flow, valid_flow_mask = flow_and_valid[:2, :, :], flow_and_valid[2, :, :]
    flow = (flow - 2 ** 15) / 64  # This conversion is explained somewhere on the kitti archive
    valid_flow_mask = valid_flow_mask.bool()
    # For consistency with other datasets, we convert to numpy
    return flow.numpy(), valid_flow_mask.numpy()
def _read_pfm(file_name):
"""Read flow in .pfm format"""
with open(file_name, "rb") as f:
header = f.readline().rstrip()
if header != b"PF":
raise ValueError("Invalid PFM file")
dim_match = re.match(rb"^(\d+)\s(\d+)\s$", f.readline())
if not dim_match:
raise Exception("Malformed PFM header.")
w, h = (int(dim) for dim in dim_match.groups())
scale = float(f.readline().rstrip())
if scale < 0: # little-endian
endian = "<"
scale = -scale
else:
endian = ">" # big-endian
data = np.fromfile(f, dtype=endian + "f")
data = data.reshape(h, w, 3).transpose(2, 0, 1)
data = np.flip(data, axis=1) # flip on h dimension
data = data[:2, :, :]
return data.astype(np.float32)
| pytorch/vision | torchvision/datasets/_optical_flow.py | Python | bsd-3-clause | 19,330 |
<?php
/**
* Created by PhpStorm.
* User: Administrator
* Date: 2015-9-2
* Time: 13:47
*/
namespace app\controllers;
use yii;
use yii\web\Controller;
use yii\base\Exception;
use app\models\Issue;
use conquer\services\WsdlGenerator;
use app\models\HelpWebservice;
use yii\validators\DateValidator;
class TestController extends Controller{
// Copies the sample archive via the shell; `cp -n` refuses to overwrite an
// existing target. NOTE(review): shell_exec() may return null on failure,
// which prints as an empty string here - confirm that is acceptable.
public function actionCopy(){
$output =shell_exec('cp -n /udiska/upload/test.7z /udiska/upload');
print_r($output.'<br/>');
}
// Manual smoke test: submits a domain audit request (with three image
// attachments built from one local jpg) to the remote SOAP service and dumps
// the raw response.
// NOTE(review): SOAP credentials ("huyi1" / sha1 of "123456"), the request
// payload and the file path are hard-coded - this action must not ship as-is.
function actionIndex(){
//$webservice = new WsdlGenerator();
//$webservice->bindingStyle = WsdlGenerator::STYLE_RPC;
//$aa = $webservice->generateWsdl('app\controllers\AuditServiceController','http://admin.audit.com/audit-service/soap');
//$fb = fopen('audit.wsdl','w');
//fwrite($fb,$aa);
//fclose($fb);
//var_dump($aa);die;
$soap_url = 'http://admin.audit.com/audit.wsdl';
try {
try{
$arr = [
'registrarId' => '12345',
'domain' => '格润丝.商标',
'contactId' => '1481s6qf5e8y',
'registorType'=>2,
'contactCode'=>'1',
'contactValue'=>'412725198811193425',
'org'=>1234561212,
'orgCode'=>1234561212,
'businessLicense'=>1234561212122,
'registeredYears'=>10,
'parseUrl'=>'http://www.huyi.com',
'trademarkRegNo'=>'156257383927',
'tmClassType'=>'第一类,第二类',
'tmIssuingCountry'=>'CN',
'tmProofExpiresDate'=>'2020-12-03',
'otherParam'=>'',
];
// read the sample image once and reuse it for all three attachment types
$fp12 = fopen(Yii::$app->basePath.'/20150210161931893.jpg', 'rb',0);
$file12 = fread($fp12, filesize(Yii::$app->basePath.'/20150210161931893.jpg')); // binary data
fclose($fp12);
$file1 = ['dataHandler'=>base64_encode($file12),'fileName'=>'id.jpg','type'=>1,'fileSuffix'=>'jpg'];
$file2 = ['dataHandler'=>base64_encode($file12),'fileName'=>'org.jpg','type'=>2,'fileSuffix'=>'jpg'];
$file3 = ['dataHandler'=>base64_encode($file12),'fileName'=>'business.jpg','type'=>3,'fileSuffix'=>'jpg'];
$files = [$file1,$file2,$file3];
$arr['files'] = $files;
// WS-Security UsernameToken header injected as raw XML
$xml = '<wsse:Security xmlns:wsse="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd" ><wsse:UsernameToken xmlns:wsu="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd"><wsse:Username>huyi1</wsse:Username><wsse:Password Type="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText">' . sha1('123456') . '</wsse:Password></wsse:UsernameToken></wsse:Security>';
$header = new \SoapHeader('http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd', 'CallbackHandler', new \SoapVar($xml, XSD_ANYXML), true);
$opt = array('trace'=>1,'encoding'=>'utf-8', 'exceptions' => 1,'uri'=> "http://service.nightwish.gtld.knet.cn/", 'soap_version' => SOAP_1_2);
$client = new \SoapClient($soap_url,$opt);
$client->__setSoapHeaders(array($header));
$res = $client->UploadAuditData($arr);
var_dump($res);die;
}catch (\SoapFault $e) {
var_dump($e->faultstring);die;
}
}catch (Exception $e){
var_dump($e->getMessage());die;
}
}
// Manual smoke test: queries the naming-audit state of one hard-coded domain
// via SOAP and dumps the raw response.
// NOTE(review): same hard-coded WS-Security credentials as actionIndex().
public function actionStatus(){
$soap_url = 'http://admin.audit.com/aa.wsdl';
try {
try{
$arr = [
'domainName' => 'audit1.商标',
//'domain' => 'duan030.商标',
//'contactId' => '1481s6qf5e8y',
];
$xml = '<wsse:Security xmlns:wsse="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd" ><wsse:UsernameToken xmlns:wsu="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd"><wsse:Username>huyi1</wsse:Username><wsse:Password Type="http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText">' . sha1('123456') . '</wsse:Password></wsse:UsernameToken></wsse:Security>';
$header = new \SoapHeader('http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd', 'CallbackHandler', new \SoapVar($xml, XSD_ANYXML), true);
$opt = array('trace'=>1,'encoding'=>'utf-8', 'exceptions' => 1,'uri'=> "http://service.nightwish.gtld.knet.cn/", 'soap_version' => SOAP_1_2);
$client = new \SoapClient($soap_url,$opt);
$client->__setSoapHeaders(array($header));
$res = $client->GetNamingAuditStateByDomainName($arr);
var_dump($res);die;
}catch (\SoapFault $e) {
var_dump($e->faultstring);die;
}
}catch (Exception $e){
var_dump($e->getMessage());die;
}
}
} | duanduan2288/vr | controllers/TestController.php | PHP | bsd-3-clause | 5,202 |
<?php
namespace User\Mapper;
use Application\Mapper\AbstractMapper;
/**
 * Data mapper for the user-role relation; all behaviour currently comes
 * from Application\Mapper\AbstractMapper.
 */
class UserRole extends AbstractMapper
{
} | Afinogen/zf2-docs | module/User/src/User/Mapper/UserRole.php | PHP | bsd-3-clause | 115
/**
Copyright (c) 2017, Philip Deegan.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Philip Deegan nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _KUL_DEFS_HPP_
#define _KUL_DEFS_HPP_
// Stringification helpers: KTOSTRING expands macro arguments before quoting.
#define KSTRINGIFY(x) #x
#define KTOSTRING(x) KSTRINGIFY(x)
// Symbol visibility decoration for shared-library builds:
// - on Windows/Cygwin, KUL_PUBLISH is dllexport while building the library
//   (KUL_EXPORT defined) and dllimport while consuming it;
// - on GCC >= 4 elsewhere, ELF default/hidden visibility is used instead.
#ifdef KUL_SHARED
#if defined _WIN32 || defined __CYGWIN__
#ifdef KUL_EXPORT
#ifdef __GNUC__
#define KUL_PUBLISH __attribute__((dllexport))
#else
#define KUL_PUBLISH __declspec(dllexport)
#endif
#else
#ifdef __GNUC__
#define KUL_PUBLISH __attribute__((dllimport))
#else
#define KUL_PUBLISH __declspec(dllimport)
#endif
#endif
#else
#if __GNUC__ >= 4
#define KUL_PUBLISH __attribute__((visibility("default")))
#define KUL_PRIVATE __attribute__((visibility("hidden")))
#endif
#endif
#endif // KUL_SHARED
// Fall back to empty decorations for static builds / other compilers.
#ifndef KUL_PUBLISH
#define KUL_PUBLISH
#endif
#ifndef KUL_PRIVATE
#define KUL_PRIVATE
#endif
// Platform detection: each of KUL_IS_WIN / KUL_IS_BSD / KUL_IS_NIX ends up
// defined as 0 or 1 (macOS and the BSDs are grouped under KUL_IS_BSD).
#if defined(__APPLE__) || defined(__NetBSD__) || defined(__FreeBSD__)
#define KUL_IS_BSD 1
#endif
#if defined(_WIN32)
#define KUL_IS_WIN 1
#endif
#ifndef KUL_IS_WIN
#define KUL_IS_WIN 0
#endif
#ifndef KUL_IS_BSD
#define KUL_IS_BSD 0
#endif
#if !KUL_IS_WIN && !KUL_IS_BSD
#define KUL_IS_NIX 1
#endif
#ifndef KUL_IS_NIX
#define KUL_IS_NIX 0
#endif
// KUL_DEBUG_DO compiles its arguments only in debug builds (or when
// KUL_FORCE_DEBUG_DO is defined); KUL_DEBUG_DO_ELSE is its complement.
#if !defined(NDEBUG) || defined(KUL_FORCE_DEBUG_DO)
#define KUL_DEBUG_DO(...) __VA_ARGS__
#define KUL_DEBUG_DO_ELSE(...)
#else
#define KUL_DEBUG_DO(...)
#define KUL_DEBUG_DO_ELSE(...) __VA_ARGS__
#endif
#include "kul/os/def.hpp"
#endif /* _KUL_DEFS_HPP_ */
| mkn/mkn.kul | inc/kul/defs.hpp | C++ | bsd-3-clause | 2,810 |
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
# Canonical version string of this package.
__version__ = "1.2.0.11"
| STIXProject/python-stix | stix/version.py | Python | bsd-3-clause | 130 |
/*
* Account.cpp
*
* Created on: 20 March 2015
* Author: cyosp
*/
#include <com/cyosp/mpa/core/Account.hpp>
namespace mpa
{
bool Account::isAccountAlreadyExisting( string accountName )
{
	// TODO : try to use BDD facilities
	// Scan every stored account and stop at the first name match.
	vector<mpapo::Account> accounts = all();
	for (vector<mpapo::Account>::iterator it = accounts.begin(); it != accounts.end(); ++it)
	{
		if ((*it).name == accountName)
			return true;
	}
	return false;
}
// Create and persist a new account with the given name and a zero balance,
// returning a reference to the freshly stored record.
// NOTE(review): the object is heap-allocated and returned by reference;
// nothing here ever deletes it - confirm ownership with the callers.
mpapo::Account & Account::add( string accountName )
{
mpapo::Account * ret = NULL;
ret = new mpapo::Account( MPA::getInstance()->getMPAPO() );
ret->setName( accountName );
//sleep(6);
ret->balance = 0;
ret->initializeVersion();
ret->store();
// NOTE(review): if id.value() yields a number, "literal" + number performs
// pointer arithmetic on the string literal, not concatenation - verify.
MPA_LOG_TRIVIAL(trace,"Account added, id=" + (* ret).id.value());
return * ret;
}
// Fetch every account row from the persistence layer, unordered.
vector<mpapo::Account> Account::all()
{
return select<mpapo::Account>( MPA::getInstance()->getMPAPO() ).all(); //.orderBy(mpapo::Account::Name).all();
}
// Delete the account identified by id, guarded by optimistic locking: the
// caller-supplied version must match the stored one.
// Returns true when the account was actually deleted.
// Throws mpa_exception::MsgNotTranslated when the account does not exist,
// the version is stale, or dependent data still references the account.
bool Account::del(int id , int version )
{
bool ret = false;
MPA_LOG_TRIVIAL(trace,"Account to delete:" + StrUtil::int2string( id )+" with version: " + StrUtil::int2string(version ));
try
{
mpapo::Account accountToDel = get( id );
if( accountToDel.isCorrectVersion( version ) )
{
MPA_LOG_TRIVIAL(trace,"Account found");
// An account may only be removed once nothing references it anymore.
if( accountToDel.operations().get().all().size() > 0 ) throw mpa_exception::MsgNotTranslated( IMPOSSIBLE_REMOVE_THERE_ARE_AGAIN_OPERATIONS );
if( accountToDel.providers().get().all().size() > 0 ) throw mpa_exception::MsgNotTranslated( IMPOSSIBLE_REMOVE_THERE_ARE_AGAIN_PROVIDERS );
if( accountToDel.categories().get().all().size() > 0 ) throw mpa_exception::MsgNotTranslated( IMPOSSIBLE_REMOVE_THERE_ARE_AGAIN_CATEGORIES );
accountToDel.del();
// Bug fix: report success - ret was previously never set to true.
ret = true;
}
else throw mpa_exception::MsgNotTranslated( OPERATION_IMPOSSIBLE_BECAUSE_DATA_HAVE_CHANGED );
}
catch (NotFound & e)
{
throw mpa_exception::MsgNotTranslated( ACCOUNT_DOESNT_EXIST );
}
return ret;
}
// Get account by ID; throws NotFound when no row matches (callers translate
// this into ACCOUNT_DOESNT_EXIST).
mpapo::Account Account::get( int id )
{
// BOOST_LOG_TRIVIAL(trace) << "Account retrieved" << std::endl;
return select<mpapo::Account>( MPA::getInstance()->getMPAPO() , mpapo::Account::Id == id ).one();
}
// Rename account.
// Optimistic locking: the rename only happens when the supplied version
// matches the stored one; a stale version or missing account raises
// MsgNotTranslated. Returns the updated record.
mpapo::Account Account::renameAccount( int accountId , int accountVersionToRename , string newAccountName )
{
//MPA_LOG_TRIVIAL( trace , "Start" );
try
{
mpapo::Account account = get( accountId );
if( account.isCorrectVersion( accountVersionToRename ) )
{
account.setName( newAccountName );
account.store();
//MPA_LOG_TRIVIAL( trace , "End" );
// Return is here because there is no empty constructor for mpapo::Account::Account()
return account;
}
else throw mpa_exception::MsgNotTranslated( OPERATION_IMPOSSIBLE_BECAUSE_DATA_HAVE_CHANGED );
}
catch (NotFound & e)
{
throw mpa_exception::MsgNotTranslated( ACCOUNT_DOESNT_EXIST );
}
}
// Nothing to release explicitly.
Account::~Account()
{
}
} /* namespace mpa */
| cyosp/MPA | src/com/cyosp/mpa/core/Account.cpp | C++ | bsd-3-clause | 3,000 |
// This is a generated file. Not intended for manual editing.
package io.v.vdl.psi.impl;
import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.util.PsiTreeUtil;
import static io.v.vdl.psi.VdlTypes.*;
import io.v.vdl.psi.VdlCompositeElementImpl;
import io.v.vdl.psi.*;
// PSI node implementation for VDL "tag" elements.
// Generated code (see the file header above) - do not edit manually.
public class VdlTagImpl extends VdlCompositeElementImpl implements VdlTag {
public VdlTagImpl(ASTNode node) {
super(node);
}
// Double-dispatch entry point for VDL-aware visitors.
public void accept(@NotNull VdlVisitor visitor) {
visitor.visitTag(this);
}
public void accept(@NotNull PsiElementVisitor visitor) {
if (visitor instanceof VdlVisitor) accept((VdlVisitor)visitor);
else super.accept(visitor);
}
@Override
@NotNull
public VdlStringLiteral getStringLiteral() {
return findNotNullChildByClass(VdlStringLiteral.class);
}
}
| vanadium/intellij-vdl-plugin | gen/io/v/vdl/psi/impl/VdlTagImpl.java | Java | bsd-3-clause | 933 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/frame_host/interstitial_page_impl.h"
#include <vector>
#include "base/bind.h"
#include "base/compiler_specific.h"
#include "base/message_loop/message_loop.h"
#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "base/threading/thread.h"
#include "content/browser/dom_storage/dom_storage_context_wrapper.h"
#include "content/browser/dom_storage/session_storage_namespace_impl.h"
#include "content/browser/frame_host/interstitial_page_navigator_impl.h"
#include "content/browser/frame_host/navigation_controller_impl.h"
#include "content/browser/frame_host/navigation_entry_impl.h"
#include "content/browser/loader/resource_dispatcher_host_impl.h"
#include "content/browser/renderer_host/render_process_host_impl.h"
#include "content/browser/renderer_host/render_view_host_delegate_view.h"
#include "content/browser/renderer_host/render_view_host_factory.h"
#include "content/browser/renderer_host/render_view_host_impl.h"
#include "content/browser/renderer_host/render_widget_host_view_base.h"
#include "content/browser/site_instance_impl.h"
#include "content/browser/web_contents/web_contents_impl.h"
#include "content/browser/web_contents/web_contents_view.h"
#include "content/common/frame_messages.h"
#include "content/common/view_messages.h"
#include "content/public/browser/browser_context.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/content_browser_client.h"
#include "content/public/browser/dom_operation_notification_details.h"
#include "content/public/browser/interstitial_page_delegate.h"
#include "content/public/browser/invalidate_type.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/notification_source.h"
#include "content/public/browser/storage_partition.h"
#include "content/public/browser/user_metrics.h"
#include "content/public/browser/web_contents_delegate.h"
#include "content/public/common/bindings_policy.h"
#include "net/base/escape.h"
#include "net/url_request/url_request_context_getter.h"
#include "ui/base/page_transition_types.h"
using blink::WebDragOperation;
using blink::WebDragOperationsMask;
namespace content {
namespace {
// Forwards |action| for all in-flight resource requests belonging to the
// given render view route to the ResourceDispatcherHostImpl.
void ResourceRequestHelper(ResourceDispatcherHostImpl* rdh,
                           int process_id,
                           int render_view_host_id,
                           ResourceRequestAction action) {
  switch (action) {
    case BLOCK:
      rdh->BlockRequestsForRoute(process_id, render_view_host_id);
      break;
    case RESUME:
      rdh->ResumeBlockedRequestsForRoute(process_id, render_view_host_id);
      break;
    case CANCEL:
      rdh->CancelBlockedRequestsForRoute(process_id, render_view_host_id);
      break;
    default:
      NOTREACHED();
  }
}
} // namespace
// RenderViewHostDelegateView for the interstitial's RenderViewHost: receives
// view-level events (popup menus, drag & drop, focus, find replies) on
// behalf of the owning InterstitialPageImpl.
class InterstitialPageImpl::InterstitialPageRVHDelegateView
    : public RenderViewHostDelegateView {
 public:
  explicit InterstitialPageRVHDelegateView(InterstitialPageImpl* page);
  // RenderViewHostDelegateView implementation:
#if defined(OS_MACOSX) || defined(OS_ANDROID)
  void ShowPopupMenu(RenderFrameHost* render_frame_host,
                     const gfx::Rect& bounds,
                     int item_height,
                     double item_font_size,
                     int selected_item,
                     const std::vector<MenuItem>& items,
                     bool right_aligned,
                     bool allow_multiple_selection) override;
  void HidePopupMenu() override;
#endif
  void StartDragging(const DropData& drop_data,
                     WebDragOperationsMask operations_allowed,
                     const gfx::ImageSkia& image,
                     const gfx::Vector2d& image_offset,
                     const DragEventSourceInfo& event_info) override;
  void UpdateDragCursor(WebDragOperation operation) override;
  void GotFocus() override;
  void TakeFocus(bool reverse) override;
  virtual void OnFindReply(int request_id,
                           int number_of_matches,
                           const gfx::Rect& selection_rect,
                           int active_match_ordinal,
                           bool final_update);
 private:
  // NOTE(review): raw back-pointer to the owner - presumably outlives this
  // view; confirm the ownership relationship.
  InterstitialPageImpl* interstitial_page_;
  DISALLOW_COPY_AND_ASSIGN(InterstitialPageRVHDelegateView);
};
// We keep a map of the various blocking pages shown as the UI tests need to
// be able to retrieve them.
typedef std::map<WebContents*, InterstitialPageImpl*> InterstitialPageMap;
static InterstitialPageMap* g_web_contents_to_interstitial_page;

// Lazily initializes g_web_contents_to_interstitial_page. Should be called
// before accessing g_web_contents_to_interstitial_page. NOTE(review): this
// is a plain check-then-allocate, not actually thread-safe; presumably all
// callers are on the UI thread — confirm before relying on it elsewhere.
static void InitInterstitialPageMap() {
  if (!g_web_contents_to_interstitial_page)
    g_web_contents_to_interstitial_page = new InterstitialPageMap;
}
// Factory for the public InterstitialPage interface; the WebContents also
// serves as the RenderWidgetHostDelegate for the interstitial's widget.
InterstitialPage* InterstitialPage::Create(WebContents* web_contents,
                                           bool new_navigation,
                                           const GURL& url,
                                           InterstitialPageDelegate* delegate) {
  return new InterstitialPageImpl(
      web_contents,
      static_cast<RenderWidgetHostDelegate*>(
          static_cast<WebContentsImpl*>(web_contents)),
      new_navigation, url, delegate);
}

// Returns the interstitial currently registered for |web_contents|, or NULL
// if none is showing. Used by UI tests via the global map.
InterstitialPage* InterstitialPage::GetInterstitialPage(
    WebContents* web_contents) {
  InitInterstitialPageMap();
  InterstitialPageMap::const_iterator iter =
      g_web_contents_to_interstitial_page->find(web_contents);
  if (iter == g_web_contents_to_interstitial_page->end())
    return NULL;

  return iter->second;
}
// Constructs an interstitial for |web_contents|. |new_navigation| is true
// when the interstitial was triggered by a main-frame navigation (a
// transient NavigationEntry will be created in Show()); it is false when
// triggered by a sub-resource of the current page. Takes ownership of
// |delegate| (held in the scoped delegate_ member).
InterstitialPageImpl::InterstitialPageImpl(
    WebContents* web_contents,
    RenderWidgetHostDelegate* render_widget_host_delegate,
    bool new_navigation,
    const GURL& url,
    InterstitialPageDelegate* delegate)
    : underlying_content_observer_(web_contents, this),
      web_contents_(web_contents),
      controller_(static_cast<NavigationControllerImpl*>(
          &web_contents->GetController())),
      render_widget_host_delegate_(render_widget_host_delegate),
      url_(url),
      new_navigation_(new_navigation),
      should_discard_pending_nav_entry_(new_navigation),
      reload_on_dont_proceed_(false),
      enabled_(true),
      action_taken_(NO_ACTION),
      render_view_host_(NULL),
      // TODO(nasko): The InterstitialPageImpl will need to provide its own
      // NavigationControllerImpl to the Navigator, which is separate from
      // the WebContents one, so we can enforce no navigation policy here.
      // While we get the code to a point to do this, pass NULL for it.
      // TODO(creis): We will also need to pass delegates for the RVHM as we
      // start to use it.
      frame_tree_(new InterstitialPageNavigatorImpl(this, controller_),
                  this, this, this,
                  static_cast<WebContentsImpl*>(web_contents)),
      original_child_id_(web_contents->GetRenderProcessHost()->GetID()),
      original_rvh_id_(web_contents->GetRenderViewHost()->GetRoutingID()),
      should_revert_web_contents_title_(false),
      web_contents_was_loading_(false),
      resource_dispatcher_host_notified_(false),
      rvh_delegate_view_(new InterstitialPageRVHDelegateView(this)),
      create_view_(true),
      delegate_(delegate),
      weak_ptr_factory_(this) {
  InitInterstitialPageMap();
  // It would be inconsistent to create an interstitial with no new navigation
  // (which is the case when the interstitial was triggered by a sub-resource on
  // a page) when we have a pending entry (in the process of loading a new top
  // frame).
  DCHECK(new_navigation || !web_contents->GetController().GetPendingEntry());
}

// Deletion happens via Shutdown(), posted asynchronously from Hide().
InterstitialPageImpl::~InterstitialPageImpl() {
}
// Displays the interstitial: dismisses any interstitial already registered
// for the WebContents, blocks resource requests for the hidden page,
// registers in the global map, optionally creates a transient navigation
// entry, then creates the interstitial's RenderViewHost/view and navigates
// it to a data: URL holding the delegate's HTML.
void InterstitialPageImpl::Show() {
  if (!enabled())
    return;

  // If an interstitial is already showing or about to be shown, close it before
  // showing the new one.
  // Be careful not to take an action on the old interstitial more than once.
  InterstitialPageMap::const_iterator iter =
      g_web_contents_to_interstitial_page->find(web_contents_);
  if (iter != g_web_contents_to_interstitial_page->end()) {
    InterstitialPageImpl* interstitial = iter->second;
    if (interstitial->action_taken_ != NO_ACTION) {
      interstitial->Hide();
    } else {
      // If we are currently showing an interstitial page for which we created
      // a transient entry and a new interstitial is shown as the result of a
      // new browser initiated navigation, then that transient entry has already
      // been discarded and a new pending navigation entry created.
      // So we should not discard that new pending navigation entry.
      // See http://crbug.com/9791
      if (new_navigation_ && interstitial->new_navigation_)
        interstitial->should_discard_pending_nav_entry_= false;
      interstitial->DontProceed();
    }
  }

  // Block the resource requests for the render view host while it is hidden.
  TakeActionOnResourceDispatcher(BLOCK);
  // We need to be notified when the RenderViewHost is destroyed so we can
  // cancel the blocked requests. We cannot do that on
  // NOTIFY_WEB_CONTENTS_DESTROYED as at that point the RenderViewHost has
  // already been destroyed.
  notification_registrar_.Add(
      this, NOTIFICATION_RENDER_WIDGET_HOST_DESTROYED,
      Source<RenderWidgetHost>(controller_->delegate()->GetRenderViewHost()));

  // Update the g_web_contents_to_interstitial_page map. Any previous entry
  // must already have been removed by the Hide()/DontProceed() above.
  iter = g_web_contents_to_interstitial_page->find(web_contents_);
  DCHECK(iter == g_web_contents_to_interstitial_page->end());
  (*g_web_contents_to_interstitial_page)[web_contents_] = this;

  if (new_navigation_) {
    NavigationEntryImpl* entry = new NavigationEntryImpl;
    entry->SetURL(url_);
    entry->SetVirtualURL(url_);
    entry->set_page_type(PAGE_TYPE_INTERSTITIAL);

    // Give delegates a chance to set some states on the navigation entry.
    delegate_->OverrideEntry(entry);

    // The controller takes ownership of |entry|.
    controller_->SetTransientEntry(entry);
  }

  DCHECK(!render_view_host_);
  render_view_host_ = CreateRenderViewHost();
  render_view_host_->AttachToFrameTree();
  CreateWebContentsView();

  // The interstitial's content is served from an inline data: URL so no
  // network request is involved.
  std::string data_url = "data:text/html;charset=utf-8," +
                         net::EscapePath(delegate_->GetHTMLContents());
  frame_tree_.root()->current_frame_host()->NavigateToURL(GURL(data_url));

  notification_registrar_.Add(this, NOTIFICATION_NAV_ENTRY_PENDING,
                              Source<NavigationController>(controller_));
}
// Tears the interstitial down: restores the original page's view and focus,
// schedules asynchronous self-deletion, detaches from the WebContents,
// restores the original title if needed, and unregisters from the global
// map. Safe to call after Disable(); a second call is a no-op because
// render_view_host_ is nulled out below.
void InterstitialPageImpl::Hide() {
  // We may have already been hidden, and are just waiting to be deleted.
  // We can't check for enabled() here, because some callers have already
  // called Disable.
  if (!render_view_host_)
    return;

  Disable();

  RenderWidgetHostView* old_view =
      controller_->delegate()->GetRenderViewHost()->GetView();
  if (controller_->delegate()->GetInterstitialPage() == this &&
      old_view &&
      !old_view->IsShowing() &&
      !controller_->delegate()->IsHidden()) {
    // Show the original RVH since we're going away. Note it might not exist if
    // the renderer crashed while the interstitial was showing.
    // Note that it is important that we don't call Show() if the view is
    // already showing. That would result in bad things (unparented HWND on
    // Windows for example) happening.
    old_view->Show();
  }

  // If the focus was on the interstitial, let's keep it to the page.
  // (Note that in unit-tests the RVH may not have a view).
  if (render_view_host_->GetView() &&
      render_view_host_->GetView()->HasFocus() &&
      controller_->delegate()->GetRenderViewHost()->GetView()) {
    controller_->delegate()->GetRenderViewHost()->GetView()->Focus();
  }

  // Delete this and call Shutdown on the RVH asynchronously, as we may have
  // been called from a RVH delegate method, and we can't delete the RVH out
  // from under itself.
  base::MessageLoop::current()->PostNonNestableTask(
      FROM_HERE,
      base::Bind(&InterstitialPageImpl::Shutdown,
                 weak_ptr_factory_.GetWeakPtr()));
  render_view_host_ = NULL;
  frame_tree_.ResetForMainFrameSwap();
  controller_->delegate()->DetachInterstitialPage();
  // Let's revert to the original title if necessary.
  NavigationEntry* entry = controller_->GetVisibleEntry();
  if (entry && !new_navigation_ && should_revert_web_contents_title_) {
    entry->SetTitle(original_web_contents_title_);
    controller_->delegate()->NotifyNavigationStateChanged(
        INVALIDATE_TYPE_TITLE);
  }

  InterstitialPageMap::iterator iter =
      g_web_contents_to_interstitial_page->find(web_contents_);
  DCHECK(iter != g_web_contents_to_interstitial_page->end());
  if (iter != g_web_contents_to_interstitial_page->end())
    g_web_contents_to_interstitial_page->erase(iter);

  // Clear the WebContents pointer, because it may now be deleted.
  // This signifies that we are in the process of shutting down.
  web_contents_ = NULL;
}
// NotificationObserver implementation. Handles the two notifications
// registered in Show(): a pending navigation away from the interstitial,
// and destruction of the original page's render widget.
void InterstitialPageImpl::Observe(
    int type,
    const NotificationSource& source,
    const NotificationDetails& details) {
  switch (type) {
    case NOTIFICATION_NAV_ENTRY_PENDING:
      // We are navigating away from the interstitial (the user has typed a URL
      // in the location bar or clicked a bookmark). Make sure clicking on the
      // interstitial will have no effect. Also cancel any blocked requests
      // on the ResourceDispatcherHost. Note that when we get this notification
      // the RenderViewHost has not yet navigated so we'll unblock the
      // RenderViewHost before the resource request for the new page we are
      // navigating arrives in the ResourceDispatcherHost. This ensures that
      // request won't be blocked if the same RenderViewHost was used for the
      // new navigation.
      Disable();
      TakeActionOnResourceDispatcher(CANCEL);
      break;
    case NOTIFICATION_RENDER_WIDGET_HOST_DESTROYED:
      if (action_taken_ == NO_ACTION) {
        // The RenderViewHost is being destroyed (as part of the tab being
        // closed); make sure we clear the blocked requests.
        RenderViewHost* rvh = static_cast<RenderViewHost*>(
            static_cast<RenderViewHostImpl*>(
                RenderWidgetHostImpl::From(
                    Source<RenderWidgetHost>(source).ptr())));
        // The destroyed widget must be the original page's RVH that we
        // registered for in Show().
        DCHECK(rvh->GetProcess()->GetID() == original_child_id_ &&
               rvh->GetRoutingID() == original_rvh_id_);
        TakeActionOnResourceDispatcher(CANCEL);
      }
      break;
    default:
      NOTREACHED();
  }
}
// IPC dispatch for messages from the interstitial's frames. Only the DOM
// automation response (used by tests and delegate commands) is handled.
bool InterstitialPageImpl::OnMessageReceived(RenderFrameHost* render_frame_host,
                                             const IPC::Message& message) {
  if (render_frame_host->GetRenderViewHost() != render_view_host_) {
    // A mismatch is only expected while shutting down (render_view_host_
    // already cleared); an interstitial never hosts more than one RVH.
    DCHECK(!render_view_host_)
        << "We expect an interstitial page to have only a single RVH";
    return false;
  }

  bool handled = true;
  IPC_BEGIN_MESSAGE_MAP(InterstitialPageImpl, message)
    IPC_MESSAGE_HANDLER(FrameHostMsg_DomOperationResponse,
                        OnDomOperationResponse)
    IPC_MESSAGE_UNHANDLED(handled = false)
  IPC_END_MESSAGE_MAP()
  return handled;
}

// View-level messages are intentionally ignored.
bool InterstitialPageImpl::OnMessageReceived(RenderViewHost* render_view_host,
                                             const IPC::Message& message) {
  return false;
}

void InterstitialPageImpl::RenderFrameCreated(
    RenderFrameHost* render_frame_host) {
  // Note this is only for subframes in the interstitial, the notification for
  // the main frame happens in RenderViewCreated.
  controller_->delegate()->RenderFrameForInterstitialPageCreated(
      render_frame_host);
}
// Applies the interstitial document's title to the visible navigation entry,
// remembering the original title first so Hide() can restore it when the
// interstitial was shown over an existing entry.
void InterstitialPageImpl::UpdateTitle(
    RenderFrameHost* render_frame_host,
    int32 page_id,
    const base::string16& title,
    base::i18n::TextDirection title_direction) {
  if (!enabled())
    return;

  RenderViewHost* render_view_host = render_frame_host->GetRenderViewHost();
  DCHECK(render_view_host == render_view_host_);
  NavigationEntry* entry = controller_->GetVisibleEntry();
  if (!entry) {
    // There may be no visible entry if no URL has committed (e.g., after
    // window.open("")). InterstitialPages with the new_navigation flag create
    // a transient NavigationEntry and thus have a visible entry. However,
    // interstitials can still be created when there is no visible entry. For
    // example, the opener window may inject content into the initial blank
    // page, which might trigger a SafeBrowsingBlockingPage.
    return;
  }

  // If this interstitial is shown on an existing navigation entry, we'll need
  // to remember its title so we can revert to it when hidden.
  if (!new_navigation_ && !should_revert_web_contents_title_) {
    original_web_contents_title_ = entry->GetTitle();
    should_revert_web_contents_title_ = true;
  }
  // TODO(evan): make use of title_direction.
  // http://code.google.com/p/chromium/issues/detail?id=27094
  entry->SetTitle(title);
  controller_->delegate()->NotifyNavigationStateChanged(INVALIDATE_TYPE_TITLE);
}
// Mirrors the underlying WebContents' accessibility mode; reports "off"
// once web_contents_ has been cleared during shutdown.
AccessibilityMode InterstitialPageImpl::GetAccessibilityMode() const {
  if (web_contents_)
    return static_cast<WebContentsImpl*>(web_contents_)->GetAccessibilityMode();
  else
    return AccessibilityModeOff;
}

RenderViewHostDelegateView* InterstitialPageImpl::GetDelegateView() {
  return rvh_delegate_view_.get();
}

// The interstitial's "committed" URL is simply the URL it was created for.
const GURL& InterstitialPageImpl::GetMainFrameLastCommittedURL() const {
  return url_;
}

void InterstitialPageImpl::RenderViewTerminated(
    RenderViewHost* render_view_host,
    base::TerminationStatus status,
    int error_code) {
  // Our renderer died. This should not happen in normal cases.
  // If we haven't already started shutdown, just dismiss the interstitial.
  // We cannot check for enabled() here, because we may have called Disable
  // without calling Hide.
  if (render_view_host_)
    DontProceed();
}
// Called when the interstitial's data: URL has committed. Swaps the views:
// shows the interstitial's view, attaches it to the WebContents, hides the
// original page's view, and stops the throbber.
void InterstitialPageImpl::DidNavigate(
    RenderViewHost* render_view_host,
    const FrameHostMsg_DidCommitProvisionalLoad_Params& params) {
  // A fast user could have navigated away from the page that triggered the
  // interstitial while the interstitial was loading, that would have disabled
  // us. In that case we can dismiss ourselves.
  if (!enabled()) {
    DontProceed();
    return;
  }
  if (ui::PageTransitionCoreTypeIs(params.transition,
                                   ui::PAGE_TRANSITION_AUTO_SUBFRAME)) {
    // No need to handle navigate message from iframe in the interstitial page.
    return;
  }

  // The RenderViewHost has loaded its contents, we can show it now.
  if (!controller_->delegate()->IsHidden())
    render_view_host_->GetView()->Show();
  controller_->delegate()->AttachInterstitialPage(this);

  RenderWidgetHostView* rwh_view =
      controller_->delegate()->GetRenderViewHost()->GetView();

  // The RenderViewHost may already have crashed before we even get here.
  if (rwh_view) {
    // If the page has focus, focus the interstitial.
    if (rwh_view->HasFocus())
      Focus();

    // Hide the original RVH since we're showing the interstitial instead.
    rwh_view->Hide();
  }

  // Notify the tab we are not loading so the throbber is stopped. It also
  // causes a WebContentsObserver::DidStopLoading callback that the
  // AutomationProvider (used by the UI tests) expects to consider a navigation
  // as complete. Without this, navigating in a UI test to a URL that triggers
  // an interstitial would hang.
  web_contents_was_loading_ = controller_->delegate()->IsLoading();
  controller_->delegate()->SetIsLoading(false, true, NULL);
}
// Returns renderer preferences for the interstitial's renderer, letting the
// delegate override the cached defaults first.
RendererPreferences InterstitialPageImpl::GetRendererPrefs(
    BrowserContext* browser_context) const {
  delegate_->OverrideRendererPrefs(&renderer_preferences_);
  return renderer_preferences_;
}

void InterstitialPageImpl::RenderWidgetDeleted(
    RenderWidgetHostImpl* render_widget_host) {
  // TODO(creis): Remove this method once we verify the shutdown path is sane.
  // The widget must only be deleted after Hide() cleared web_contents_.
  CHECK(!web_contents_);
}

// Keyboard events are forwarded to the WebContents' delegate chain so
// browser shortcuts keep working while the interstitial is up.
bool InterstitialPageImpl::PreHandleKeyboardEvent(
    const NativeWebKeyboardEvent& event,
    bool* is_keyboard_shortcut) {
  if (!enabled())
    return false;
  return render_widget_host_delegate_->PreHandleKeyboardEvent(
      event, is_keyboard_shortcut);
}

void InterstitialPageImpl::HandleKeyboardEvent(
    const NativeWebKeyboardEvent& event) {
  if (enabled())
    render_widget_host_delegate_->HandleKeyboardEvent(event);
}

#if defined(OS_WIN)
// Accessibility parent is delegated to the underlying WebContents; NULL
// once shutdown has cleared web_contents_.
gfx::NativeViewAccessible
InterstitialPageImpl::GetParentNativeViewAccessible() {
  if (web_contents_) {
    WebContentsImpl* wci = static_cast<WebContentsImpl*>(web_contents_);
    return wci->GetParentNativeViewAccessible();
  }
  return NULL;
}
#endif

WebContents* InterstitialPageImpl::web_contents() const {
  return web_contents_;
}
// Creates the RenderViewHost for the interstitial in a fresh SiteInstance
// with its own session storage namespace, via the interstitial's own
// FrameTree. Returns NULL when the interstitial is already disabled.
RenderViewHostImpl* InterstitialPageImpl::CreateRenderViewHost() {
  if (!enabled())
    return NULL;

  // Interstitial pages don't want to share the session storage so we mint a
  // new one.
  BrowserContext* browser_context = web_contents()->GetBrowserContext();
  scoped_refptr<SiteInstance> site_instance =
      SiteInstance::Create(browser_context);
  DOMStorageContextWrapper* dom_storage_context =
      static_cast<DOMStorageContextWrapper*>(
          BrowserContext::GetStoragePartition(
              browser_context, site_instance.get())->GetDOMStorageContext());
  session_storage_namespace_ =
      new SessionStorageNamespaceImpl(dom_storage_context);

  // Use the RenderViewHost from our FrameTree.
  frame_tree_.root()->render_manager()->Init(
      browser_context, site_instance.get(), MSG_ROUTING_NONE, MSG_ROUTING_NONE);
  return frame_tree_.root()->current_frame_host()->render_view_host();
}

// Creates the platform view for the interstitial's RVH and the renderer-side
// RenderView, granting DOM automation bindings (used by delegates/tests).
// The view starts hidden; DidNavigate() shows it once content commits.
// Returns NULL when disabled or when view creation is suppressed for tests.
WebContentsView* InterstitialPageImpl::CreateWebContentsView() {
  if (!enabled() || !create_view_)
    return NULL;
  WebContentsView* wcv =
      static_cast<WebContentsImpl*>(web_contents())->GetView();
  RenderWidgetHostViewBase* view =
      wcv->CreateViewForWidget(render_view_host_, false);
  render_view_host_->SetView(view);
  render_view_host_->AllowBindings(BINDINGS_POLICY_DOM_AUTOMATION);

  int32 max_page_id = web_contents()->
      GetMaxPageIDForSiteInstance(render_view_host_->GetSiteInstance());
  render_view_host_->CreateRenderView(base::string16(),
                                      MSG_ROUTING_NONE,
                                      MSG_ROUTING_NONE,
                                      max_page_id,
                                      false);
  controller_->delegate()->RenderFrameForInterstitialPageCreated(
      frame_tree_.root()->current_frame_host());
  view->SetSize(web_contents()->GetContainerBounds().size());
  // Don't show the interstitial until we have navigated to it.
  view->Hide();
  return wcv;
}
// The user chose to continue past the interstitial. Resumes or cancels the
// blocked requests depending on how the interstitial was triggered, then
// notifies the delegate. For sub-resource interstitials the page is hidden
// immediately; for new navigations it is hidden later, when the navigation
// commits.
void InterstitialPageImpl::Proceed() {
  // Don't repeat this if we are already shutting down. We cannot check for
  // enabled() here, because we may have called Disable without calling Hide.
  if (!render_view_host_)
    return;

  if (action_taken_ != NO_ACTION) {
    NOTREACHED();
    return;
  }
  Disable();
  action_taken_ = PROCEED_ACTION;

  // Resumes the throbber, if applicable.
  if (web_contents_was_loading_)
    controller_->delegate()->SetIsLoading(true, true, NULL);

  // If this is a new navigation, the old page is going away, so we cancel any
  // blocked requests for it. If it is not a new navigation, then it means the
  // interstitial was shown as a result of a resource loading in the page.
  // Since the user wants to proceed, we'll let any blocked request go through.
  if (new_navigation_)
    TakeActionOnResourceDispatcher(CANCEL);
  else
    TakeActionOnResourceDispatcher(RESUME);

  // No need to hide if we are a new navigation, we'll get hidden when the
  // navigation is committed.
  if (!new_navigation_) {
    Hide();
    delegate_->OnProceed();
    return;
  }

  delegate_->OnProceed();
}
// The user declined (or the interstitial is being dismissed programmatically).
// Resumes/cancels blocked requests, discards the transient/pending entries
// created for a new navigation, optionally reloads, hides the interstitial,
// and notifies the delegate.
void InterstitialPageImpl::DontProceed() {
  // Don't repeat this if we are already shutting down. We cannot check for
  // enabled() here, because we may have called Disable without calling Hide.
  if (!render_view_host_)
    return;
  DCHECK(action_taken_ != DONT_PROCEED_ACTION);

  Disable();
  action_taken_ = DONT_PROCEED_ACTION;

  // If this is a new navigation, we are returning to the original page, so we
  // resume blocked requests for it. If it is not a new navigation, then it
  // means the interstitial was shown as a result of a resource loading in the
  // page and we won't return to the original page, so we cancel blocked
  // requests in that case.
  if (new_navigation_)
    TakeActionOnResourceDispatcher(RESUME);
  else
    TakeActionOnResourceDispatcher(CANCEL);

  if (should_discard_pending_nav_entry_) {
    // Since no navigation happens we have to discard the transient entry
    // explicitly. Note that by calling DiscardNonCommittedEntries() we also
    // discard the pending entry, which is what we want, since the navigation is
    // cancelled.
    controller_->DiscardNonCommittedEntries();
  }

  if (reload_on_dont_proceed_)
    controller_->Reload(true);

  Hide();
  delegate_->OnDontProceed();
}
// The user is trying to navigate away. Unblocks the renderer and disables
// the interstitial, but keeps it visible until the new navigation completes.
void InterstitialPageImpl::CancelForNavigation() {
  // The user is trying to navigate away. We should unblock the renderer and
  // disable the interstitial, but keep it visible until the navigation
  // completes.
  Disable();

  // If this interstitial was shown for a new navigation, allow any navigations
  // on the original page to resume (e.g., subresource requests, XHRs, etc).
  // Otherwise, cancel the pending, possibly dangerous navigations.
  if (new_navigation_)
    TakeActionOnResourceDispatcher(RESUME);
  else
    TakeActionOnResourceDispatcher(CANCEL);
}

// Resizes the interstitial's view to match the container.
void InterstitialPageImpl::SetSize(const gfx::Size& size) {
  if (!enabled())
    return;
#if !defined(OS_MACOSX)
  // When a tab is closed, we might be resized after our view was NULLed
  // (typically if there was an info-bar).
  if (render_view_host_->GetView())
    render_view_host_->GetView()->SetSize(size);
#else
  // TODO(port): Does Mac need to SetSize?
  NOTIMPLEMENTED();
#endif
}
// Gives keyboard focus to the interstitial's view.
void InterstitialPageImpl::Focus() {
  // Focus the native window.
  if (!enabled())
    return;
  render_view_host_->GetView()->Focus();
}

// Forwards tab-traversal focus (Tab / Shift+Tab) into the interstitial page.
void InterstitialPageImpl::FocusThroughTabTraversal(bool reverse) {
  if (!enabled())
    return;
  render_view_host_->SetInitialFocus(reverse);
}

RenderWidgetHostView* InterstitialPageImpl::GetView() {
  return render_view_host_->GetView();
}

RenderFrameHost* InterstitialPageImpl::GetMainFrame() const {
  return render_view_host_->GetMainFrame();
}

InterstitialPageDelegate* InterstitialPageImpl::GetDelegateForTesting() {
  return delegate_.get();
}

// Test hook: skip platform view creation in CreateWebContentsView().
void InterstitialPageImpl::DontCreateViewForTesting() {
  create_view_ = false;
}

// Interstitials never reserve a resizer corner.
gfx::Rect InterstitialPageImpl::GetRootWindowResizerRect() const {
  return gfx::Rect();
}
// Window/widget creation is deliberately unsupported while an interstitial
// is showing; all of the following are hard failures.
void InterstitialPageImpl::CreateNewWindow(
    int render_process_id,
    int route_id,
    int main_frame_route_id,
    const ViewHostMsg_CreateWindow_Params& params,
    SessionStorageNamespace* session_storage_namespace) {
  NOTREACHED() << "InterstitialPage does not support showing popups yet.";
}

void InterstitialPageImpl::CreateNewWidget(int render_process_id,
                                           int route_id,
                                           blink::WebPopupType popup_type) {
  NOTREACHED() << "InterstitialPage does not support showing drop-downs yet.";
}

void InterstitialPageImpl::CreateNewFullscreenWidget(int render_process_id,
                                                     int route_id) {
  NOTREACHED()
      << "InterstitialPage does not support showing full screen popups.";
}

void InterstitialPageImpl::ShowCreatedWindow(int route_id,
                                             WindowOpenDisposition disposition,
                                             const gfx::Rect& initial_rect,
                                             bool user_gesture) {
  NOTREACHED() << "InterstitialPage does not support showing popups yet.";
}

void InterstitialPageImpl::ShowCreatedWidget(int route_id,
                                             const gfx::Rect& initial_rect) {
  NOTREACHED() << "InterstitialPage does not support showing drop-downs yet.";
}

void InterstitialPageImpl::ShowCreatedFullscreenWidget(int route_id) {
  NOTREACHED()
      << "InterstitialPage does not support showing full screen popups.";
}
// Returns the private session storage namespace minted in
// CreateRenderViewHost(); interstitials never share the page's storage.
SessionStorageNamespace* InterstitialPageImpl::GetSessionStorageNamespace(
    SiteInstance* instance) {
  return session_storage_namespace_.get();
}

FrameTree* InterstitialPageImpl::GetFrameTree() {
  return &frame_tree_;
}

// Makes all user-visible actions on the interstitial no-ops; teardown still
// proceeds via Hide().
void InterstitialPageImpl::Disable() {
  enabled_ = false;
}

// Posted by Hide(); final self-deletion.
void InterstitialPageImpl::Shutdown() {
  delete this;
}

// Entry point used by UnderlyingContentObserver when the underlying page
// commits a navigation or the WebContents is destroyed.
void InterstitialPageImpl::OnNavigatingAwayOrTabClosing() {
  if (action_taken_ == NO_ACTION) {
    // We are navigating away from the interstitial or closing a tab with an
    // interstitial. Default to DontProceed(). We don't just call Hide as
    // subclasses will almost certainly override DontProceed to do some work
    // (ex: close pending connections).
    DontProceed();
  } else {
    // User decided to proceed and either the navigation was committed or
    // the tab was closed before that.
    Hide();
  }
}
// Posts |action| (BLOCK/RESUME/CANCEL) for the original page's route to the
// ResourceDispatcherHost on the IO thread. CANCEL/RESUME are only ever
// delivered once (resource_dispatcher_host_notified_ guard).
void InterstitialPageImpl::TakeActionOnResourceDispatcher(
    ResourceRequestAction action) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  if (action == CANCEL || action == RESUME) {
    if (resource_dispatcher_host_notified_)
      return;
    resource_dispatcher_host_notified_ = true;
  }

  // The tab might not have a render_view_host if it was closed (in which case,
  // we have taken care of the blocked requests when processing
  // NOTIFY_RENDER_WIDGET_HOST_DESTROYED).
  // Also we need to test there is a ResourceDispatcherHostImpl, as when unit-
  // tests we don't have one.
  RenderViewHostImpl* rvh = RenderViewHostImpl::FromID(original_child_id_,
                                                       original_rvh_id_);
  if (!rvh || !ResourceDispatcherHostImpl::Get())
    return;

  BrowserThread::PostTask(
      BrowserThread::IO,
      FROM_HERE,
      base::Bind(
          &ResourceRequestHelper,
          ResourceDispatcherHostImpl::Get(),
          original_child_id_,
          original_rvh_id_,
          action));
}

// Handles FrameHostMsg_DomOperationResponse from the interstitial's page:
// broadcasts a notification for tests, then forwards the payload to the
// delegate unless the interstitial has been disabled.
void InterstitialPageImpl::OnDomOperationResponse(
    const std::string& json_string,
    int automation_id) {
  // Needed by test code.
  DomOperationNotificationDetails details(json_string, automation_id);
  NotificationService::current()->Notify(
      NOTIFICATION_DOM_OPERATION_RESPONSE,
      Source<WebContents>(web_contents()),
      Details<DomOperationNotificationDetails>(&details));

  if (!enabled())
    return;
  delegate_->CommandReceived(details.json);
}
InterstitialPageImpl::InterstitialPageRVHDelegateView::
    InterstitialPageRVHDelegateView(InterstitialPageImpl* page)
    : interstitial_page_(page) {
}

#if defined(OS_MACOSX) || defined(OS_ANDROID)
// Popup menus are unsupported while an interstitial is showing.
void InterstitialPageImpl::InterstitialPageRVHDelegateView::ShowPopupMenu(
    RenderFrameHost* render_frame_host,
    const gfx::Rect& bounds,
    int item_height,
    double item_font_size,
    int selected_item,
    const std::vector<MenuItem>& items,
    bool right_aligned,
    bool allow_multiple_selection) {
  NOTREACHED() << "InterstitialPage does not support showing popup menus.";
}

void InterstitialPageImpl::InterstitialPageRVHDelegateView::HidePopupMenu() {
  NOTREACHED() << "InterstitialPage does not support showing popup menus.";
}
#endif

// Drag & drop is unsupported; immediately end the renderer-initiated drag so
// the renderer is not left waiting for drag events.
void InterstitialPageImpl::InterstitialPageRVHDelegateView::StartDragging(
    const DropData& drop_data,
    WebDragOperationsMask allowed_operations,
    const gfx::ImageSkia& image,
    const gfx::Vector2d& image_offset,
    const DragEventSourceInfo& event_info) {
  interstitial_page_->render_view_host_->DragSourceSystemDragEnded();
  DVLOG(1) << "InterstitialPage does not support dragging yet.";
}

void InterstitialPageImpl::InterstitialPageRVHDelegateView::UpdateDragCursor(
    WebDragOperation) {
  NOTREACHED() << "InterstitialPage does not support dragging yet.";
}

// Propagate focus gained by the interstitial to the underlying WebContents
// so observers (e.g. the browser UI) stay in sync.
void InterstitialPageImpl::InterstitialPageRVHDelegateView::GotFocus() {
  WebContents* web_contents = interstitial_page_->web_contents();
  if (web_contents)
    static_cast<WebContentsImpl*>(web_contents)->NotifyWebContentsFocused();
}

// Hand focus back to the browser chrome when the user tabs out of the page.
void InterstitialPageImpl::InterstitialPageRVHDelegateView::TakeFocus(
    bool reverse) {
  if (!interstitial_page_->web_contents())
    return;
  WebContentsImpl* web_contents =
      static_cast<WebContentsImpl*>(interstitial_page_->web_contents());
  if (!web_contents->GetDelegateView())
    return;
  web_contents->GetDelegateView()->TakeFocus(reverse);
}

// Find-in-page results are intentionally ignored for interstitials.
void InterstitialPageImpl::InterstitialPageRVHDelegateView::OnFindReply(
    int request_id, int number_of_matches, const gfx::Rect& selection_rect,
    int active_match_ordinal, bool final_update) {
}

InterstitialPageImpl::UnderlyingContentObserver::UnderlyingContentObserver(
    WebContents* web_contents,
    InterstitialPageImpl* interstitial)
    : WebContentsObserver(web_contents), interstitial_(interstitial) {
}

// A commit in the underlying page means the user navigated away.
void InterstitialPageImpl::UnderlyingContentObserver::NavigationEntryCommitted(
    const LoadCommittedDetails& load_details) {
  interstitial_->OnNavigatingAwayOrTabClosing();
}

void InterstitialPageImpl::UnderlyingContentObserver::WebContentsDestroyed() {
  interstitial_->OnNavigatingAwayOrTabClosing();
}
} // namespace content
| ltilve/chromium | content/browser/frame_host/interstitial_page_impl.cc | C++ | bsd-3-clause | 33,808 |
/*******************************************************
* Copyright (c) 2014, ArrayFire
* All rights reserved.
*
* This file is distributed under 3-clause BSD license.
* The complete license agreement can be obtained at:
* http://arrayfire.com/licenses/BSD-3-Clause
********************************************************/
#include <af/dim4.hpp>
#include <af/defines.h>
#include <af/blas.h>
#include <af/data.h>
#include <af/arith.h>
#include <err_common.hpp>
#include <handle.hpp>
#include <backend.hpp>
#include <transpose.hpp>
using af::dim4;
using namespace detail;
// Transposes the typed backend array behind |in| (conjugating each element
// when |conjugate| is true for complex types) and wraps the result in a new
// af_array handle.
template<typename T>
static inline af_array trs(const af_array in, const bool conjugate)
{
    return getHandle<T>(detail::transpose<T>(getArray<T>(in), conjugate));
}
// C API entry point: matrix (conjugate-)transpose of the first two
// dimensions of |in|, batched over dims 2 and 3. Empty arrays are returned
// as-is (retained); vectors take a metadata-only fast path via moddims.
af_err af_transpose(af_array *out, af_array in, const bool conjugate)
{
    try {
        const ArrayInfo& info = getInfo(in);
        af_dtype type = info.getType();
        af::dim4 dims = info.dims();

        // Nothing to do for empty arrays; just hand back a retained handle.
        if (dims.elements() == 0) {
            return af_retain_array(out, in);
        }

        if (dims[0]==1 || dims[1]==1) {
            af::dim4 outDims(dims[1],dims[0],dims[2],dims[3]);
            if(conjugate) {
                // Conjugate first, then swap the first two dims via moddims.
                af_array temp = 0;
                AF_CHECK(af_conjg(&temp, in));
                AF_CHECK(af_moddims(out, temp, outDims.ndims(), outDims.get()));
                AF_CHECK(af_release_array(temp));
                return AF_SUCCESS;
            } else {
                // for a vector OR a batch of vectors
                // we can use modDims to transpose
                AF_CHECK(af_moddims(out, in, outDims.ndims(), outDims.get()));
                return AF_SUCCESS;
            }
        }

        // General case: dispatch to the typed backend kernel.
        af_array output;
        switch(type) {
            case f32: output = trs<float> (in, conjugate); break;
            case c32: output = trs<cfloat> (in, conjugate); break;
            case f64: output = trs<double> (in, conjugate); break;
            case c64: output = trs<cdouble>(in, conjugate); break;
            case b8 : output = trs<char> (in, conjugate); break;
            case s32: output = trs<int> (in, conjugate); break;
            case u32: output = trs<uint> (in, conjugate); break;
            case u8 : output = trs<uchar> (in, conjugate); break;
            case s64: output = trs<intl> (in, conjugate); break;
            case u64: output = trs<uintl> (in, conjugate); break;
            case s16: output = trs<short> (in, conjugate); break;
            case u16: output = trs<ushort> (in, conjugate); break;
            default : TYPE_ERROR(1, type);
        }
        std::swap(*out,output);
    }
    CATCHALL;

    return AF_SUCCESS;
}
// Transposes the typed backend array behind |in| in place (square matrices
// only; enforced by the caller below).
template<typename T>
static inline void transpose_inplace(af_array in, const bool conjugate)
{
    return detail::transpose_inplace<T>(getWritableArray<T>(in), conjugate);
}

// C API entry point: in-place (conjugate-)transpose. Only square matrices
// (per batch) are supported; 1x1 is a trivial no-op.
af_err af_transpose_inplace(af_array in, const bool conjugate)
{
    try {
        const ArrayInfo& info = getInfo(in);
        af_dtype type = info.getType();
        af::dim4 dims = info.dims();

        // InPlace only works on square matrices
        DIM_ASSERT(0, dims[0] == dims[1]);

        // If singleton element
        if(dims[0] == 1)
            return AF_SUCCESS;

        switch(type) {
            case f32: transpose_inplace<float> (in, conjugate); break;
            case c32: transpose_inplace<cfloat> (in, conjugate); break;
            case f64: transpose_inplace<double> (in, conjugate); break;
            case c64: transpose_inplace<cdouble>(in, conjugate); break;
            case b8 : transpose_inplace<char> (in, conjugate); break;
            case s32: transpose_inplace<int> (in, conjugate); break;
            case u32: transpose_inplace<uint> (in, conjugate); break;
            case u8 : transpose_inplace<uchar> (in, conjugate); break;
            case s64: transpose_inplace<intl> (in, conjugate); break;
            case u64: transpose_inplace<uintl> (in, conjugate); break;
            case s16: transpose_inplace<short> (in, conjugate); break;
            case u16: transpose_inplace<ushort> (in, conjugate); break;
            default : TYPE_ERROR(1, type);
        }
    }
    CATCHALL;

    return AF_SUCCESS;
}
| shehzan10/arrayfire | src/api/c/transpose.cpp | C++ | bsd-3-clause | 4,289 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
class BadOption(Exception):
    """Raised when the HTTP API receives incorrect arguments."""


class RenderError(Exception):
    """Raised when rendering a page fails."""


class InternalError(Exception):
    """Raised for unhandled internal errors."""


class GlobalTimeoutError(Exception):
    """Raised when the overall rendering timeout is exceeded."""


class UnsupportedContentType(Exception):
    """Raised when the request Content-Type is not supported."""


class ExpiredArguments(Exception):
    """Raised when arguments stored with ``save_args`` have expired."""


class ScriptError(BadOption):
    """Raised when executing a Lua script fails.

    The constants below classify where/how the failure happened; their
    string values are part of the public API and must not change.
    """

    LUA_INIT_ERROR = 'LUA_INIT_ERROR'  # error happened before coroutine starts
    LUA_ERROR = 'LUA_ERROR'  # lua error() is called from the coroutine
    LUA_CONVERT_ERROR = 'LUA_CONVERT_ERROR'  # result can't be converted to Python
    SPLASH_LUA_ERROR = 'SPLASH_LUA_ERROR'  # custom error raised by Splash
    BAD_MAIN_ERROR = 'BAD_MAIN_ERROR'  # main() definition is incorrect
    MAIN_NOT_FOUND_ERROR = 'MAIN_NOT_FOUND_ERROR'  # main() is not found
    SYNTAX_ERROR = 'SYNTAX_ERROR'  # XXX: unused; reported as INIT_ERROR now
    JS_ERROR = 'JS_ERROR'  # error in a wrapped JS function
    UNKNOWN_ERROR = 'UNKNOWN_ERROR'
class JsError(Exception):
    """Raised when an error occurred in JavaScript code."""
class OneShotCallbackError(Exception):
    """Raised when a one-shot callback is invoked more than once."""
| Youwotma/splash | splash/exceptions.py | Python | bsd-3-clause | 1,502 |
require File.expand_path('../spec_helper', File.dirname(__FILE__))
describe 'search faceting' do
def self.test_field_type(name, attribute, field, *args)
clazz, value1, value2 =
if args.length == 2
[Post, args.first, args.last]
else
args
end
context "with field of type #{name}" do
before :all do
Sunspot.remove_all
2.times do
Sunspot.index(clazz.new(attribute => value1))
end
Sunspot.index(clazz.new(attribute => value2))
Sunspot.commit
end
before :each do
@search = Sunspot.search(clazz) do
facet(field)
end
end
it "should return value #{value1.inspect} with count 2" do
row = @search.facet(field).rows[0]
expect(row.value).to eq(value1)
expect(row.count).to eq(2)
end
it "should return value #{value2.inspect} with count 1" do
row = @search.facet(field).rows[1]
expect(row.value).to eq(value2)
expect(row.count).to eq(1)
end
end
end
test_field_type('String', :title, :title, 'Title 1', 'Title 2')
test_field_type('Integer', :blog_id, :blog_id, 3, 4)
test_field_type('Float', :ratings_average, :average_rating, 2.2, 1.1)
test_field_type('Time', :published_at, :published_at, Time.mktime(2008, 02, 17, 17, 45, 04),
Time.mktime(2008, 07, 02, 03, 56, 22))
test_field_type('Trie Integer', :size, :size, Photo, 3, 4)
test_field_type('Float', :average_rating, :average_rating, Photo, 2.2, 1.1)
test_field_type('Time', :created_at, :created_at, Photo, Time.mktime(2008, 02, 17, 17, 45, 04),
Time.mktime(2008, 07, 02, 03, 56, 22))
test_field_type('Boolean', :featured, :featured, true, false)
context 'facet options' do
before :all do
Sunspot.remove_all
facet_values = %w(zero one two three four)
facet_values.each_with_index do |value, i|
i.times { Sunspot.index(Post.new(:title => value, :blog_id => 1)) }
end
Sunspot.index(Post.new(:blog_id => 1))
Sunspot.index(Post.new(:title => 'zero', :blog_id => 2))
Sunspot.commit
end
it 'should limit the number of facet rows' do
search = Sunspot.search(Post) do
facet :title, :limit => 3
end
expect(search.facet(:title).rows.size).to eq(3)
end
it 'should not return zeros by default' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title
end
expect(search.facet(:title).rows.map { |row| row.value }).not_to include('zero')
end
it 'should return zeros when specified' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title, :zeros => true
end
expect(search.facet(:title).rows.map { |row| row.value }).to include('zero')
end
it 'should return facet rows from an offset' do
search = Sunspot.search(Post) do
facet :title, :offset => 3
end
expect(search.facet(:title).rows.map { |row| row.value }).to eq(%w(one zero))
end
it 'should return a specified minimum count' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title, :minimum_count => 2
end
expect(search.facet(:title).rows.map { |row| row.value }).to eq(%w(four three two))
end
it 'should order facets lexically' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title, :sort => :index
end
expect(search.facet(:title).rows.map { |row| row.value }).to eq(%w(four one three two))
end
it 'should order facets by count' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title, :sort => :count
end
expect(search.facet(:title).rows.map { |row| row.value }).to eq(%w(four three two one))
end
it 'should limit facet values by prefix' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title, :prefix => 't'
end
expect(search.facet(:title).rows.map { |row| row.value }.sort).to eq(%w(three two))
end
it 'should return :all facet' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title, :extra => :any
end
expect(search.facet(:title).rows.first.value).to eq(:any)
expect(search.facet(:title).rows.first.count).to eq(10)
end
it 'should return :none facet' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title, :extra => :none
end
expect(search.facet(:title).rows.first.value).to eq(:none)
expect(search.facet(:title).rows.first.count).to eq(1)
end
it 'gives correct facet count when group == true and truncate == true' do
search = Sunspot.search(Post) do
group :title do
truncate
end
facet :title, :extra => :any
end
# Should be 5 instead of 11
expect(search.facet(:title).rows.first.count).to eq(5)
end
end
context 'json facet options' do
before :all do
Sunspot.remove_all
facet_values = %w(zero one two three four)
facet_values.each_with_index do |value, i|
i.times { Sunspot.index(Post.new(:title => value, :blog_id => 1)) }
end
Sunspot.index(Post.new(:blog_id => 1))
Sunspot.index(Post.new(:title => 'zero', :blog_id => 2))
Sunspot.commit
end
it 'should return indexed elements' do
search = Sunspot.search(Post) do
json_facet(:title)
end
expect(search.facet(:title).rows.size).to eq(5)
end
it 'should limit the number of facet rows' do
search = Sunspot.search(Post) do
json_facet :title, :limit => 3
end
expect(search.facet(:title).rows.size).to eq(3)
end
it 'should not return zeros by default' do
search = Sunspot.search(Post) do
with :blog_id, 1
json_facet :title
end
expect(search.facet(:title).rows.map { |row| row.value }).not_to include('zero')
end
it 'should return a specified minimum count' do
search = Sunspot.search(Post) do
with :blog_id, 1
json_facet :title, :minimum_count => 2
end
expect(search.facet(:title).rows.map { |row| row.value }).to eq(%w(four three two))
end
it 'should order facets lexically' do
search = Sunspot.search(Post) do
with :blog_id, 1
json_facet :title, :sort => :index
end
expect(search.facet(:title).rows.map { |row| row.value }).to eq(%w(four one three two))
end
it 'should order facets by count' do
search = Sunspot.search(Post) do
with :blog_id, 1
json_facet :title, :sort => :count
end
expect(search.facet(:title).rows.map { |row| row.value }).to eq(%w(four three two one))
end
it 'should limit facet values by prefix' do
search = Sunspot.search(Post) do
with :blog_id, 1
json_facet :title, :prefix => 't'
end
expect(search.facet(:title).rows.map { |row| row.value }.sort).to eq(%w(three two))
end
end
context 'nested json facet' do
before :all do
Sunspot.remove_all
facet_values = %w(zero one two three four)
nested_facet_values = %w(alfa bravo charlie delta)
facet_values.each do |value|
nested_facet_values.each do |v2|
Sunspot.index(Post.new(:title => value, :author_name => v2, :blog_id => 1))
end
end
0.upto(9) { |i| Sunspot.index(Post.new(:title => 'zero', :author_name => "another#{i}", :blog_id => 1)) }
Sunspot.commit
end
it 'should get nested' do
search = Sunspot.search(Post) do
json_facet(:title, nested: { field: :author_name } )
end
expect(search.facet(:title).rows.first.nested.size).to eq(4)
end
it 'without limit take the first 10' do
search = Sunspot.search(Post) do
json_facet(:title, nested: { field: :author_name } )
end
expect(search.facet(:title).rows.last.nested.size).to eq(10)
end
it 'without limit' do
search = Sunspot.search(Post) do
json_facet(:title, nested: { field: :author_name, limit: -1 } )
end
expect(search.facet(:title).rows.last.nested.size).to eq(14)
end
it 'works with distinct' do
search = Sunspot.search(Post) do
json_facet(:title, nested: { field: :author_name, distinct: { strategy: :unique } } )
end
expect(search.facet(:title).rows.first.nested.map(&:count).uniq.size).to eq(1)
end
it 'should limit the nested facet' do
search = Sunspot.search(Post) do
json_facet(:title, nested: { field: :author_name, limit: 2 } )
end
expect(search.facet(:title).rows.first.nested.size).to eq(2)
end
it 'should work nested of nested' do
search = Sunspot.search(Post) do
json_facet(:title, nested: { field: :author_name, nested: { field: :title } } )
end
expect(search.facet(:title).rows.first.nested.first.nested.size).to eq(1)
expect(search.facet(:title).rows.first.nested.first.nested.first.nested).to eq(nil)
end
end
context 'prefix escaping' do
before do
Sunspot.remove_all
["title1", "title2", "title with spaces 1", "title with spaces 2", "title/with/slashes/1", "title/with/slashes/2"].each do |value|
Sunspot.index(Post.new(:title => value, :blog_id => 1))
end
Sunspot.commit
end
it 'should limit facet values by a prefix with spaces' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title, :prefix => 'title '
end
expect(search.facet(:title).rows.map { |row| row.value }.sort).to eq(["title with spaces 1", "title with spaces 2"])
end
it 'should limit facet values by a prefix with slashes' do
search = Sunspot.search(Post) do
with :blog_id, 1
facet :title, :prefix => 'title/'
end
expect(search.facet(:title).rows.map { |row| row.value }.sort).to eq(["title/with/slashes/1", "title/with/slashes/2"])
end
end
context 'multiselect faceting' do
before do
Sunspot.remove_all
Sunspot.index!(
Post.new(:blog_id => 1, :category_ids => [1]),
Post.new(:blog_id => 1, :category_ids => [2]),
Post.new(:blog_id => 3, :category_ids => [3])
)
end
context 'field faceting' do
it 'should exclude filter from faceting' do
search = Sunspot.search(Post) do
with(:blog_id, 1)
category_filter = with(:category_ids, 1)
facet(:category_ids, :exclude => category_filter)
end
expect(search.facet(:category_ids).rows.map { |row| row.value }.to_set).to eq(Set[1, 2])
end
it 'should use facet keys to facet more than once with different exclusions' do
search = Sunspot.search(Post) do
with(:blog_id, 1)
category_filter = with(:category_ids, 1)
facet(:category_ids)
facet(:category_ids, :exclude => category_filter, :name => :all_category_ids)
end
expect(search.facet(:category_ids).rows.map { |row| row.value }).to eq([1])
expect(search.facet(:all_category_ids).rows.map { |row| row.value }.to_set).to eq(Set[1, 2])
end
end
context 'query faceting' do
it 'should exclude filter from faceting' do
search = Sunspot.search(Post) do
with(:blog_id, 1)
category_filter = with(:category_ids, 1)
facet :category_ids, :exclude => category_filter do
row(:category_1) do
with(:category_ids, 1)
end
row(:category_2) do
with(:category_ids, 2)
end
end
end
expect(search.facet(:category_ids).rows.map { |row| [row.value, row.count] }.to_set).to eq(Set[[:category_1, 1], [:category_2, 1]])
end
it 'should use facet keys to facet more than once with different exclusions' do
search = Sunspot.search(Post) do
with(:blog_id, 1)
category_filter = with(:category_ids, 1)
facet :category_ids do
row(:category_1) do
with(:category_ids, 1)
end
row(:category_2) do
with(:category_ids, 2)
end
end
facet :all_category_ids, :exclude => category_filter do
row(:category_1) do
with(:category_ids, 1)
end
row(:category_2) do
with(:category_ids, 2)
end
end
end
expect(search.facet(:category_ids).rows.map { |row| [row.value, row.count] }.to_set).to eq(Set[[:category_1, 1]])
expect(search.facet(:all_category_ids).rows.map { |row| [row.value, row.count] }.to_set).to eq(Set[[:category_1, 1], [:category_2, 1]])
end
end
end
context 'distinct field facets' do
before :all do
Sunspot.remove_all
Sunspot.index!(
(0..5).map { |i| Post.new(:blog_id => i, :title => 'title') }
)
0.upto(3) { |i| Sunspot.index(Post.new(:blog_id => i, :title => 'title')) }
Sunspot.index!(Post.new(:blog_id => 4, :title => 'other title'))
Sunspot.index!(Post.new(:blog_id => 5, :title => 'other title'))
Sunspot.index!(Post.new(:blog_id => 40, :title => 'title'))
Sunspot.index!(Post.new(:blog_id => 40, :title => 'title'))
Sunspot.index!(Post.new(:blog_id => 40, :title => 'other title'))
Sunspot.index!(Post.new(:blog_id => 40, :title => 'other title'))
end
it 'should return unique indexed elements for a field' do
search = Sunspot.search(Post) do
json_facet(:blog_id, distinct: { strategy: :unique })
end
expect(search.facet(:blog_id).rows.size).to eq(7)
expect(search.facet(:blog_id).rows.map(&:count).uniq.size).to eq(1)
end
it 'should return unique indexed elements for a field and facet on a field' do
search = Sunspot.search(Post) do
json_facet(:blog_id, distinct: { group_by: :title, strategy: :unique })
end
expect(search.facet(:blog_id).rows.size).to eq(2)
expect(search.facet(:blog_id).rows[0].count).to eq(3)
expect(search.facet(:blog_id).rows[1].count).to eq(7)
end
it 'should return unique indexed elements for a field and facet on a field with hll' do
search = Sunspot.search(Post) do
json_facet(:blog_id, distinct: { group_by: :title, strategy: :hll })
end
expect(search.facet(:blog_id).rows.size).to eq(2)
expect(search.facet(:blog_id).rows[0].count).to eq(3)
expect(search.facet(:blog_id).rows[1].count).to eq(7)
end
end
context 'date facets' do
before :all do
Sunspot.remove_all
time = Time.utc(2009, 7, 8)
Sunspot.index!(
(0..2).map { |i| Post.new(:published_at => time + i*60*60*16) }
)
end
it 'should return time ranges' do
time = Time.utc(2009, 7, 8)
search = Sunspot.search(Post) do
facet :published_at, :time_range => time..(time + 60*60*24*2), :sort => :count
end
expect(search.facet(:published_at).rows.first.value).to eq(time..(time + 60*60*24))
expect(search.facet(:published_at).rows.first.count).to eq(2)
expect(search.facet(:published_at).rows.last.value).to eq((time + 60*60*24)..(time + 60*60*24*2))
expect(search.facet(:published_at).rows.last.count).to eq(1)
end
it 'json facet should return time ranges' do
days_diff = 15
time_from = Time.utc(2009, 7, 8)
time_to = Time.utc(2009, 7, 8 + days_diff)
search = Sunspot.search(Post) do
json_facet(
:published_at,
:time_range => time_from..time_to
)
end
expect(search.facet(:published_at).rows.size).to eq(days_diff)
expect(search.facet(:published_at).rows[0].count).to eq(2)
expect(search.facet(:published_at).rows[1].count).to eq(1)
end
it 'json facet should return time ranges with custom gap' do
days_diff = 10
time_from = Time.utc(2009, 7, 8)
time_to = Time.utc(2009, 7, 8 + days_diff)
search = Sunspot.search(Post) do
json_facet(
:published_at,
:time_range => time_from..time_to,
gap: 60*60*24*2
)
end
expect(search.facet(:published_at).rows.size).to eq(days_diff / 2)
expect(search.facet(:published_at).rows[0].count).to eq(3)
end
end
context 'class facets' do
before :all do
Sunspot.remove_all
Sunspot.index!(Post.new, Post.new, Namespaced::Comment.new)
end
it 'should return classes' do
search = Sunspot.search(Post, Namespaced::Comment) do
facet(:class, :sort => :count)
end
expect(search.facet(:class).rows.first.value).to eq(Post)
expect(search.facet(:class).rows.first.count).to eq(2)
expect(search.facet(:class).rows.last.value).to eq(Namespaced::Comment)
expect(search.facet(:class).rows.last.count).to eq(1)
end
end
context 'query facets' do
before :all do
Sunspot.remove_all
Sunspot.index!(
[1.1, 1.2, 3.2, 3.4, 3.9, 4.1].map do |rating|
Post.new(:ratings_average => rating)
end
)
end
it 'should return specified facets' do
search = Sunspot.search(Post) do
facet :rating_range, :sort => :count do
for rating in [1.0, 2.0, 3.0, 4.0]
range = rating..(rating + 1.0)
row range do
with :average_rating, rating..(rating + 1.0)
end
end
end
end
facet = search.facet(:rating_range)
expect(facet.rows[0].value).to eq(3.0..4.0)
expect(facet.rows[0].count).to eq(3)
expect(facet.rows[1].value).to eq(1.0..2.0)
expect(facet.rows[1].count).to eq(2)
expect(facet.rows[2].value).to eq(4.0..5.0)
expect(facet.rows[2].count).to eq(1)
end
end
end
| hafeild/alice | vendor/bundle/ruby/2.3.0/gems/sunspot-2.3.0/spec/integration/faceting_spec.rb | Ruby | bsd-3-clause | 18,069 |
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// UNSUPPORTED: c++98, c++03, c++11, c++14
// UNSUPPORTED: c++filesystem-disabled
// <fstream>
// template <class charT, class traits = char_traits<charT> >
// class basic_fstream
// void open(const filesystem::path& s, ios_base::openmode mode = ios_base::in|ios_base::out);
#include <fstream>
#include <filesystem>
#include <cassert>
#include "test_macros.h"
#include "platform_support.h"
// Opens a read/write stream of the given type via
// open(const filesystem::path&, openmode), then round-trips a value through
// it to confirm both that the stream opened and that data survives.
template <class Stream>
static void check_open_roundtrip(const std::filesystem::path& p) {
  Stream stream;
  assert(!stream.is_open());
  stream.open(p,
              std::ios_base::in | std::ios_base::out | std::ios_base::trunc);
  assert(stream.is_open());
  double x = 0;
  stream << 3.25;
  stream.seekg(0);
  stream >> x;
  assert(x == 3.25);
}

int main(int, char**) {
  std::filesystem::path p = get_temp_file_name();
  // Narrow-character stream.
  check_open_roundtrip<std::fstream>(p);
  std::remove(p.c_str());
  // Wide-character stream.
  check_open_roundtrip<std::wfstream>(p);
  std::remove(p.c_str());
  return 0;
}
| endlessm/chromium-browser | third_party/llvm/libcxx/test/std/input.output/file.streams/fstreams/fstream.members/open_path.pass.cpp | C++ | bsd-3-clause | 1,488 |
#!/usr/bin/php -q
<?php
// mirror.php MySQLicious implementation
// Mirrors Delicious bookmarks into a local MySQL table.
// v1.01 - 8/6/2006
// MySQL configuration.
$MySQL_Host = "localhost"; // Address of your MySQL server.
$MySQL_Database = "db"; // Name of the MySQL database you want to use.
$MySQL_Table = "delicious"; // Name of the MySQL table you want to put the Delicious bookmarks in.
$MySQL_Username = "username"; // MySQL username.
$MySQL_Password = "password"; // MySQL password.
// Delicious configuration.
$delicious_Username = "username"; // Delicious username.
$delicious_Password = "password"; // Delicious password.
$delicious_TagFilter = ""; // Tag to mirror. If left blank, all bookmarks will be mirrored.
// ---------------------------------------------------------------
// You shouldn't need to change anything below here.
// ---------------------------------------------------------------
// Import the MySQLicious code from the directory this script lives in.
$currentDir = dirname(__FILE__)."/";
require $currentDir."MySQLicious.php";
// Initialize MySQLicious with the MySQL connection settings above.
$delicious = new MySQLicious($MySQL_Host, $MySQL_Database, $MySQL_Username, $MySQL_Password);
// Un-comment the following line to turn on XML logging.
// This should only be necessary as a debugging measure.
//$delicious->logXml = true;
// Perform the mirroring: fetch bookmarks for $delicious_Username (optionally
// restricted to $delicious_TagFilter) and write them into $MySQL_Table.
$delicious->mirror($delicious_Username, $delicious_Password, $MySQL_Table, $delicious_TagFilter);
?>
"Yang/Wu's OEP implementation, in PyQuante."
from math import sqrt
import settings
from PyQuante.NumWrap import zeros,matrixmultiply,transpose,dot,identity,\
array,solve
from PyQuante.Ints import getbasis, getints, getJ,get2JmK,getK
from PyQuante.LA2 import geigh,mkdens,trace2,simx
from PyQuante.hartree_fock import get_fock
from PyQuante.CGBF import three_center
from PyQuante.optimize import fminBFGS
from PyQuante.fermi_dirac import get_efermi, get_fermi_occs,mkdens_occs,\
get_entropy,mkdens_fermi
import logging
logger = logging.getLogger("pyquante")
gradcall=0
class EXXSolver:
    """Closed-shell exact-exchange (EXX/OEP) solver, following Yang/Wu.

    Wraps a converged HF or DFT calculation and optimizes the expansion
    coefficients ``b`` of the effective potential in the orbital basis
    via BFGS minimization of the HF energy expression.
    """
    def __init__(self,solver):
        # Solver is a pointer to a HF or a DFT calculation that has
        # already converged
        self.solver = solver
        self.bfs = self.solver.bfs
        self.nbf = len(self.bfs)
        self.S = self.solver.S
        self.h = self.solver.h
        self.Ints = self.solver.Ints
        self.molecule = self.solver.molecule
        self.nel = self.molecule.get_nel()
        self.nclosed, self.nopen = self.molecule.get_closedopen()
        self.Enuke = self.molecule.get_enuke()
        self.norb = self.nbf
        self.orbs = self.solver.orbs
        self.orbe = self.solver.orbe
        # Precompute the three-center integrals <i|g|j> for every potential
        # basis function g; N^3 storage, filled symmetrically (j <= i).
        self.Gij = []
        for g in xrange(self.nbf):
            gmat = zeros((self.nbf,self.nbf),'d')
            self.Gij.append(gmat)
            gbf = self.bfs[g]
            for i in xrange(self.nbf):
                ibf = self.bfs[i]
                for j in xrange(i+1):
                    jbf = self.bfs[j]
                    gij = three_center(ibf,gbf,jbf)
                    gmat[i,j] = gij
                    gmat[j,i] = gij
        # Fermi-Amaldi potential built from the converged Coulomb matrix;
        # H0 is the fixed part of the effective Hamiltonian.
        D0 = mkdens(self.orbs,0,self.nclosed)
        J0 = getJ(self.Ints,D0)
        Vfa = (2.0*(self.nel-1.0)/self.nel)*J0
        self.H0 = self.h + Vfa
        self.b = zeros(self.nbf,'d')
        return
    def iterate(self,**kwargs):
        # Minimize the energy with respect to the potential coefficients b.
        self.iter = 0
        self.etemp = kwargs.get("etemp",settings.DFTElectronTemperature)
        logging.debug("iter Energy <b|b>")
        logging.debug("---- ------ -----")
        self.b = fminBFGS(self.get_energy,self.b,self.get_gradient,logger=logging)
        return
    def get_energy(self,b):
        # Objective function: HF energy evaluated with orbitals generated by
        # the effective Hamiltonian H0 + sum_g b[g]*Gij[g].
        self.iter += 1
        self.Hoep = get_Hoep(b,self.H0,self.Gij)
        self.orbe,self.orbs = geigh(self.Hoep,self.S)
        if self.etemp:
            # Finite-temperature (Fermi-Dirac) occupations plus entropy term.
            self.D,self.entropy = mkdens_fermi(self.nel,self.orbe,self.orbs,
                                               self.etemp)
        else:
            self.D = mkdens(self.orbs,0,self.nclosed)
            self.entropy=0
        self.F = get_fock(self.D,self.Ints,self.h)
        self.energy = trace2(self.h+self.F,self.D)+self.Enuke + self.entropy
        if self.iter == 1 or self.iter % 10 == 0:
            logging.debug("%4d %10.5f %10.5f" % (self.iter,self.energy,dot(b,b)))
        return self.energy
    def get_gradient(self,b):
        # dE/db via occupied-virtual Fock elements in the MO basis.
        energy = self.get_energy(b)
        Fmo = simx(self.F,self.orbs)
        bp = zeros(self.nbf,'d')
        for g in xrange(self.nbf):
            # Transform Gij[g] to MOs. This is done over the whole
            # space rather than just the parts we need. I can speed
            # this up later by only forming the i,a elements required
            Gmo = simx(self.Gij[g],self.orbs)
            # Now sum the appropriate terms to get the b gradient
            for i in xrange(self.nclosed):
                for a in xrange(self.nclosed,self.norb):
                    bp[g] = bp[g] + Fmo[i,a]*Gmo[i,a]/(self.orbe[i]-self.orbe[a])
        #logging.debug("EXX Grad: %10.5f" % (sqrt(dot(bp,bp))))
        return bp
class UEXXSolver:
"EXXSolver(solver)"
def __init__(self,solver):
# Solver is a pointer to a UHF calculation that has
# already converged
self.solver = solver
self.bfs = self.solver.bfs
self.nbf = len(self.bfs)
self.S = self.solver.S
self.h = self.solver.h
self.Ints = self.solver.Ints
self.molecule = self.solver.molecule
self.nel = self.molecule.get_nel()
self.nalpha, self.nbeta = self.molecule.get_alphabeta()
self.Enuke = self.molecule.get_enuke()
self.norb = self.nbf
self.orbsa = self.solver.orbsa
self.orbsb = self.solver.orbsb
self.orbea = self.solver.orbea
self.orbeb = self.solver.orbeb
self.Gij = []
for g in xrange(self.nbf):
gmat = zeros((self.nbf,self.nbf),'d')
self.Gij.append(gmat)
gbf = self.bfs[g]
for i in xrange(self.nbf):
ibf = self.bfs[i]
for j in xrange(i+1):
jbf = self.bfs[j]
gij = three_center(ibf,gbf,jbf)
gmat[i,j] = gij
gmat[j,i] = gij
D0 = mkdens(self.orbsa,0,self.nalpha)+mkdens(self.orbsb,0,self.nbeta)
J0 = getJ(self.Ints,D0)
Vfa = ((self.nel-1.)/self.nel)*J0
self.H0 = self.h + Vfa
self.b = zeros(2*self.nbf,'d')
return
def iterate(self,**kwargs):
self.etemp = kwargs.get("etemp",settings.DFTElectronTemperature)
self.iter = 0
logging.debug("iter Energy <b|b>")
logging.debug("---- ------ -----")
self.b = fminBFGS(self.get_energy,self.b,self.get_gradient,logger=logging)
return
def get_energy(self,b):
self.iter += 1
ba = b[:self.nbf]
bb = b[self.nbf:]
self.Hoepa = get_Hoep(ba,self.H0,self.Gij)
self.Hoepb = get_Hoep(bb,self.H0,self.Gij)
self.orbea,self.orbsa = geigh(self.Hoepa,self.S)
self.orbeb,self.orbsb = geigh(self.Hoepb,self.S)
if self.etemp:
self.Da,entropya = mkdens_fermi(2*self.nalpha,self.orbea,self.orbsa,
self.etemp)
self.Db,entropyb = mkdens_fermi(2*self.nbeta,self.orbeb,self.orbsb,
self.etemp)
self.entropy = 0.5*(entropya+entropyb)
else:
self.Da = mkdens(self.orbsa,0,self.nalpha)
self.Db = mkdens(self.orbsb,0,self.nbeta)
self.entropy=0
J = getJ(self.Ints,self.Da+self.Db)
Ka = getK(self.Ints,self.Da)
Kb = getK(self.Ints,self.Db)
self.Fa = self.h + J - Ka
self.Fb = self.h + J - Kb
self.energy = 0.5*(trace2(self.h+self.Fa,self.Da) +
trace2(self.h+self.Fb,self.Db))\
+ self.Enuke + self.entropy
if self.iter == 1 or self.iter % 10 == 0:
logging.debug("%4d %10.5f %10.5f" % (self.iter,self.energy,dot(b,b)))
return self.energy
def get_gradient(self,b):
energy = self.get_energy(b)
Fmoa = simx(self.Fa,self.orbsa)
Fmob = simx(self.Fb,self.orbsb)
bp = zeros(2*self.nbf,'d')
for g in xrange(self.nbf):
# Transform Gij[g] to MOs. This is done over the whole
# space rather than just the parts we need. I can speed
# this up later by only forming the i,a elements required
Gmo = simx(self.Gij[g],self.orbsa)
# Now sum the appropriate terms to get the b gradient
for i in xrange(self.nalpha):
for a in xrange(self.nalpha,self.norb):
bp[g] += Fmoa[i,a]*Gmo[i,a]/(self.orbea[i]-self.orbea[a])
for g in xrange(self.nbf):
# Transform Gij[g] to MOs. This is done over the whole
# space rather than just the parts we need. I can speed
# this up later by only forming the i,a elements required
Gmo = simx(self.Gij[g],self.orbsb)
# Now sum the appropriate terms to get the b gradient
for i in xrange(self.nbeta):
for a in xrange(self.nbeta,self.norb):
bp[self.nbf+g] += Fmob[i,a]*Gmo[i,a]/(self.orbeb[i]-self.orbeb[a])
#logging.debug("EXX Grad: %10.5f" % (sqrt(dot(bp,bp))))
return bp
def exx(atoms,orbs,**kwargs):
    "Backwards-compatible alias: OEP with HF (exact) exchange; see oep_hf."
    return oep_hf(atoms,orbs,**kwargs)
def oep_hf(atoms,orbs,**kwargs):
    """oep_hf - Form the optimized effective potential for HF exchange.
    Thin wrapper that plugs the EXX energy/gradient functions into the
    generic oep driver. See notes on options and other args in oep routine.
    """
    return oep(atoms,orbs,get_exx_energy,get_exx_gradient,**kwargs)
def oep(atoms,orbs,energy_func,grad_func=None,**kwargs):
    """oep - Form the optimized effective potential for a given energy expression
    oep(atoms,orbs,energy_func,grad_func=None,**kwargs)
    atoms A Molecule object containing a list of the atoms
    orbs A matrix of guess orbitals
    energy_func The function that returns the energy for the given method
    grad_func The function that returns the force for the given method
    Returns (energy, orbe, orbs) from the final BFGS-optimized potential.
    Options
    -------
    verbose False Output terse information to stdout (default)
    True Print out additional information
    ETemp False Use ETemp value for finite temperature DFT (default)
    float Use (float) for the electron temperature
    bfs None The basis functions to use. List of CGBF's
    basis_data None The basis data to use to construct bfs
    integrals None The one- and two-electron integrals to use
    If not None, S,h,Ints
    """
    # NOTE(review): 'verbose' and 'opt_method' are read here but never used
    # in this routine.
    verbose = kwargs.get('verbose')
    ETemp = kwargs.get('ETemp',settings.DFTElectronTemperature)
    opt_method = kwargs.get('opt_method',settings.OEPOptMethod)
    bfs = getbasis(atoms,**kwargs)
    # The basis set for the potential can be set different from
    # that used for the wave function
    pbfs = kwargs.get('pbfs')
    if not pbfs: pbfs = bfs
    npbf = len(pbfs)
    S,h,Ints = getints(bfs,atoms,**kwargs)
    nel = atoms.get_nel()
    nocc,nopen = atoms.get_closedopen()
    Enuke = atoms.get_enuke()
    # Form the OEP using Yang/Wu, PRL 89 143002 (2002)
    nbf = len(bfs)
    norb = nbf
    bp = zeros(nbf,'d')
    # Optional starting guess for the potential coefficients.
    bvec = kwargs.get('bvec')
    if bvec:
        assert len(bvec) == npbf
        b = array(bvec)
    else:
        b = zeros(npbf,'d')
    # Form and store all of the three-center integrals
    # we're going to need.
    # These are <ibf|gbf|jbf> (where 'bf' indicates basis func,
    # as opposed to MO)
    # N^3 storage -- obviously you don't want to do this for
    # very large systems
    Gij = []
    for g in xrange(npbf):
        gmat = zeros((nbf,nbf),'d')
        Gij.append(gmat)
        gbf = pbfs[g]
        for i in xrange(nbf):
            ibf = bfs[i]
            for j in xrange(i+1):
                jbf = bfs[j]
                gij = three_center(ibf,gbf,jbf)
                gmat[i,j] = gij
                gmat[j,i] = gij
    # Compute the Fermi-Amaldi potential based on the LDA density.
    # We're going to form this matrix from the Coulombic matrix that
    # arises from the input orbitals. D0 and J0 refer to the density
    # matrix and corresponding Coulomb matrix
    D0 = mkdens(orbs,0,nocc)
    J0 = getJ(Ints,D0)
    Vfa = (2*(nel-1.)/nel)*J0
    H0 = h + Vfa
    # BFGS-minimize the energy with respect to b, then evaluate once more to
    # recover the final orbitals/orbital energies.
    b = fminBFGS(energy_func,b,grad_func,
                 (nbf,nel,nocc,ETemp,Enuke,S,h,Ints,H0,Gij),
                 logger=logging)
    energy,orbe,orbs = energy_func(b,nbf,nel,nocc,ETemp,Enuke,
                                   S,h,Ints,H0,Gij,return_flag=1)
    return energy,orbe,orbs
def get_exx_energy(b,nbf,nel,nocc,ETemp,Enuke,S,h,Ints,H0,Gij,**kwargs):
    """Computes the energy for the OEP/HF functional
    Options:
    return_flag 0 Just return the energy
    1 Return energy, orbe, orbs
    2 Return energy, orbe, orbs, F
    """
    return_flag = kwargs.get('return_flag')
    # Build the effective Hamiltonian for the current potential coefficients
    # and solve the generalized eigenproblem for orbitals/energies.
    Hoep = get_Hoep(b,H0,Gij)
    orbe,orbs = geigh(Hoep,S)
    if ETemp:
        # Finite-temperature occupations (Fermi-Dirac) plus entropy term.
        efermi = get_efermi(nel,orbe,ETemp)
        occs = get_fermi_occs(efermi,orbe,ETemp)
        D = mkdens_occs(orbs,occs)
        entropy = get_entropy(occs,ETemp)
    else:
        D = mkdens(orbs,0,nocc)
    F = get_fock(D,Ints,h)
    energy = trace2(h+F,D)+Enuke
    if ETemp: energy += entropy
    # HOMO-LUMO gap in kcal/mol (627.51 hartree->kcal/mol); relies on
    # Python 2 integer division for the LUMO index nel/2.
    iref = nel/2
    gap = 627.51*(orbe[iref]-orbe[iref-1])
    logging.debug("EXX Energy, B, Gap: %10.5f %10.5f %10.5f"
                  % (energy,sqrt(dot(b,b)),gap))
    #logging.debug("%s" % orbe)
    if return_flag == 1:
        return energy,orbe,orbs
    elif return_flag == 2:
        return energy,orbe,orbs,F
    return energy
def get_exx_gradient(b,nbf,nel,nocc,ETemp,Enuke,S,h,Ints,H0,Gij,**kwargs):
    """Computes the gradient for the OEP/HF functional.
    return_flag 0 Just return gradient
    1 Return energy,gradient
    2 Return energy,gradient,orbe,orbs
    """
    # Dump the gradient every 10 steps so we can restart...
    global gradcall
    gradcall += 1
    #if gradcall % 5 == 0: logging.debug("B vector:\n%s" % b)
    # Form the new potential and the new orbitals
    energy,orbe,orbs,F = get_exx_energy(b,nbf,nel,nocc,ETemp,Enuke,
                                        S,h,Ints,H0,Gij,return_flag=2)
    # Transform the Fock matrix to the MO basis.
    Fmo = matrixmultiply(transpose(orbs),matrixmultiply(F,orbs))
    norb = nbf
    bp = zeros(nbf,'d') # dE/db
    for g in xrange(nbf):
        # Transform Gij[g] to MOs. This is done over the whole
        # space rather than just the parts we need. I can speed
        # this up later by only forming the i,a elements required
        Gmo = matrixmultiply(transpose(orbs),matrixmultiply(Gij[g],orbs))
        # Now sum the appropriate terms to get the b gradient
        for i in xrange(nocc):
            for a in xrange(nocc,norb):
                bp[g] = bp[g] + Fmo[i,a]*Gmo[i,a]/(orbe[i]-orbe[a])
    #logging.debug("EXX Grad: %10.5f" % (sqrt(dot(bp,bp))))
    return_flag = kwargs.get('return_flag')
    if return_flag == 1:
        return energy,bp
    elif return_flag == 2:
        return energy,bp,orbe,orbs
    return bp
def get_Hoep(b,H0,Gij):
    """Assemble the effective Hamiltonian H0 + sum_g b[g]*Gij[g].

    b   - expansion coefficients of the potential in the gaussian basis
    H0  - fixed part of the Hamiltonian (one-electron + Fermi-Amaldi terms)
    Gij - list of three-center integral matrices <i|g|j>, one per potential
          basis function; must have at least len(b) entries

    Returns a new matrix; H0 is not modified in place.
    """
    Hoep = H0
    # Add the contributions from the gaussian potential functions
    # H[ij] += b[g]*<ibf|g|jbf>
    # range() instead of xrange() so the helper runs unchanged on both
    # Python 2 and Python 3; behavior is identical.
    for g in range(len(b)):
        Hoep = Hoep + b[g]*Gij[g]
    return Hoep
# Here's a much faster way to do this. Haven't figured out how to
# do it for more generic functions like OEP-GVB
def oep_hf_an(atoms,orbs,**kwargs):
    """oep_hf - Form the optimized effective potential for HF exchange.
    Implementation of Wu and Yang's Approximate Newton Scheme
    from J. Theor. Comp. Chem. 2, 627 (2003).
    oep_hf(atoms,orbs,**kwargs)
    atoms A Molecule object containing a list of the atoms
    orbs A matrix of guess orbitals
    Returns (energy, orbe, orbs) at convergence or after maxiter steps.
    Options
    -------
    bfs None The basis functions to use for the wfn
    pbfs None The basis functions to use for the pot
    basis_data None The basis data to use to construct bfs
    integrals None The one- and two-electron integrals to use
    If not None, S,h,Ints
    """
    maxiter = kwargs.get('maxiter',settings.OEPIters)
    tol = kwargs.get('tol',settings.OEPTolerance)
    bfs = getbasis(atoms,**kwargs)
    # The basis set for the potential can be set different from
    # that used for the wave function
    pbfs = kwargs.get('pbfs')
    if not pbfs: pbfs = bfs
    npbf = len(pbfs)
    S,h,Ints = getints(bfs,atoms)
    nel = atoms.get_nel()
    nocc,nopen = atoms.get_closedopen()
    Enuke = atoms.get_enuke()
    # Form the OEP using Yang/Wu, PRL 89 143002 (2002)
    nbf = len(bfs)
    norb = nbf
    bp = zeros(nbf,'d')
    # NOTE(review): 'bvec' is read and validated here but the guess is
    # overwritten by 'b = zeros(nbf, 'd')' below — confirm intent.
    bvec = kwargs.get('bvec')
    if bvec:
        assert len(bvec) == npbf
        b = array(bvec)
    else:
        b = zeros(npbf,'d')
    # Form and store all of the three-center integrals
    # we're going to need.
    # These are <ibf|gbf|jbf> (where 'bf' indicates basis func,
    # as opposed to MO)
    # N^3 storage -- obviously you don't want to do this for
    # very large systems
    Gij = []
    for g in xrange(npbf):
        gmat = zeros((nbf,nbf),'d')
        Gij.append(gmat)
        gbf = pbfs[g]
        for i in xrange(nbf):
            ibf = bfs[i]
            for j in xrange(i+1):
                jbf = bfs[j]
                gij = three_center(ibf,gbf,jbf)
                gmat[i,j] = gij
                gmat[j,i] = gij
    # Compute the Fermi-Amaldi potential based on the LDA density.
    # We're going to form this matrix from the Coulombic matrix that
    # arises from the input orbitals. D0 and J0 refer to the density
    # matrix and corresponding Coulomb matrix
    D0 = mkdens(orbs,0,nocc)
    J0 = getJ(Ints,D0)
    Vfa = (2*(nel-1.)/nel)*J0
    H0 = h + Vfa
    b = zeros(nbf,'d')
    eold = 0
    # Approximate-Newton iterations: solve X b = c each step (Wu/Yang 2003).
    for iter in xrange(maxiter):
        Hoep = get_Hoep(b,H0,Gij)
        orbe,orbs = geigh(Hoep,S)
        D = mkdens(orbs,0,nocc)
        Vhf = get2JmK(Ints,D)
        energy = trace2(2*h+Vhf,D)+Enuke
        if abs(energy-eold) < tol:
            break
        else:
            eold = energy
        logging.debug("OEP AN Opt: %d %f" % (iter,energy))
        # Residual potential (HF minus Fermi-Amaldi) in the MO basis.
        dV_ao = Vhf-Vfa
        dV = matrixmultiply(transpose(orbs),matrixmultiply(dV_ao,orbs))
        X = zeros((nbf,nbf),'d')
        c = zeros(nbf,'d')
        Gkt = zeros((nbf,nbf),'d')
        for k in xrange(nbf):
            # This didn't work; in fact, it made things worse:
            Gk = matrixmultiply(transpose(orbs),matrixmultiply(Gij[k],orbs))
            for i in xrange(nocc):
                for a in xrange(nocc,norb):
                    c[k] += dV[i,a]*Gk[i,a]/(orbe[i]-orbe[a])
            for l in xrange(nbf):
                Gl = matrixmultiply(transpose(orbs),matrixmultiply(Gij[l],orbs))
                for i in xrange(nocc):
                    for a in xrange(nocc,norb):
                        X[k,l] += Gk[i,a]*Gl[i,a]/(orbe[i]-orbe[a])
        # This should actually be a pseudoinverse...
        b = solve(X,c)
    logger.info("Final OEP energy = %f" % energy)
    return energy,orbe,orbs
def oep_uhf_an(atoms,orbsa,orbsb,**kwargs):
    """oep_uhf_an - Optimized effective potential for HF exchange,
    unrestricted (spin-polarized) case.

    Implementation of Wu and Yang's Approximate Newton Scheme
    from J. Theor. Comp. Chem. 2, 627 (2003).

    oep_uhf_an(atoms,orbsa,orbsb,**kwargs)

    atoms       A Molecule object containing a list of the atoms
    orbsa       A matrix of guess alpha-spin orbitals
    orbsb       A matrix of guess beta-spin orbitals

    Returns (energy,(orbea,orbeb),(orbsa,orbsb)).

    Options
    -------
    maxiter    settings.OEPIters      Maximum Newton iterations
    tol        settings.OEPTolerance  Energy convergence threshold
    ETemp      settings default       Electron temperature (0/None = off)
    bfs        None    The basis functions to use for the wfn
    pbfs       None    The basis functions to use for the pot
    basis_data None    The basis data to use to construct bfs
    integrals  None    The one- and two-electron integrals to use
                       If not None, S,h,Ints
    """
    maxiter = kwargs.get('maxiter',settings.OEPIters)
    tol = kwargs.get('tol',settings.OEPTolerance)
    ETemp = kwargs.get('ETemp',settings.DFTElectronTemperature)
    bfs = getbasis(atoms,**kwargs)

    # The basis set for the potential can be set different from
    # that used for the wave function
    pbfs = kwargs.get('pbfs')
    if not pbfs: pbfs = bfs
    npbf = len(pbfs)

    S,h,Ints = getints(bfs,atoms,**kwargs)

    nel = atoms.get_nel()
    nclosed,nopen = atoms.get_closedopen()
    nalpha,nbeta = nclosed+nopen,nclosed
    Enuke = atoms.get_enuke()

    # Form the OEP using Yang/Wu, PRL 89 143002 (2002)
    nbf = len(bfs)
    norb = nbf
    ba = zeros(npbf,'d')
    bb = zeros(npbf,'d')

    # Form and store all of the three-center integrals we're going to
    # need: <ibf|gbf|jbf> (where 'bf' indicates basis func, as opposed
    # to MO).  N^3 storage -- obviously you don't want to do this for
    # very large systems.
    Gij = []
    for g in xrange(npbf):
        gmat = zeros((nbf,nbf),'d')
        Gij.append(gmat)
        gbf = pbfs[g]
        for i in xrange(nbf):
            ibf = bfs[i]
            for j in xrange(i+1):
                jbf = bfs[j]
                gij = three_center(ibf,gbf,jbf)
                gmat[i,j] = gij
                gmat[j,i] = gij

    # Compute the Fermi-Amaldi potential based on the LDA density.
    # We're going to form this matrix from the Coulombic matrix that
    # arises from the input orbitals. D0 and J0 refer to the total
    # density matrix and the corresponding Coulomb matrix.
    D0 = mkdens(orbsa,0,nalpha)+mkdens(orbsb,0,nbeta)
    J0 = getJ(Ints,D0)
    Vfa = ((nel-1.)/nel)*J0
    H0 = h + Vfa

    eold = 0

    for iter in xrange(maxiter):
        Hoepa = get_Hoep(ba,H0,Gij)
        # BUGFIX: the beta Hamiltonian was previously built from the alpha
        # coefficient vector (ba); it must use the beta coefficients (bb).
        Hoepb = get_Hoep(bb,H0,Gij)

        orbea,orbsa = geigh(Hoepa,S)
        orbeb,orbsb = geigh(Hoepb,S)

        if ETemp:
            # Finite-temperature occupations via Fermi smearing.
            efermia = get_efermi(2*nalpha,orbea,ETemp)
            occsa = get_fermi_occs(efermia,orbea,ETemp)
            Da = mkdens_occs(orbsa,occsa)
            efermib = get_efermi(2*nbeta,orbeb,ETemp)
            occsb = get_fermi_occs(efermib,orbeb,ETemp)
            Db = mkdens_occs(orbsb,occsb)
            entropy = 0.5*(get_entropy(occsa,ETemp)+get_entropy(occsb,ETemp))
        else:
            Da = mkdens(orbsa,0,nalpha)
            Db = mkdens(orbsb,0,nbeta)

        J = getJ(Ints,Da) + getJ(Ints,Db)
        Ka = getK(Ints,Da)
        Kb = getK(Ints,Db)
        energy = (trace2(2*h+J-Ka,Da)+trace2(2*h+J-Kb,Db))/2 + Enuke
        if ETemp: energy += entropy

        if abs(energy-eold) < tol:
            break
        else:
            eold = energy

        logging.debug("OEP AN Opt: %d %f" % (iter,energy))

        # Do alpha and beta separately.
        # Alphas
        dV_ao = J-Ka-Vfa
        dV = matrixmultiply(orbsa,matrixmultiply(dV_ao,transpose(orbsa)))
        # The Newton-step matrices are indexed by the *potential* basis,
        # so they are sized npbf; the previous code used nbf, which is
        # only correct in the default case pbfs is bfs (npbf == nbf).
        X = zeros((npbf,npbf),'d')
        c = zeros(npbf,'d')
        for k in xrange(npbf):
            Gk = matrixmultiply(orbsa,matrixmultiply(Gij[k],
                                                     transpose(orbsa)))
            for i in xrange(nalpha):
                for a in xrange(nalpha,norb):
                    c[k] += dV[i,a]*Gk[i,a]/(orbea[i]-orbea[a])
            for l in xrange(npbf):
                Gl = matrixmultiply(orbsa,matrixmultiply(Gij[l],
                                                         transpose(orbsa)))
                for i in xrange(nalpha):
                    for a in xrange(nalpha,norb):
                        X[k,l] += Gk[i,a]*Gl[i,a]/(orbea[i]-orbea[a])
        # This should actually be a pseudoinverse...
        ba = solve(X,c)

        # Betas
        dV_ao = J-Kb-Vfa
        dV = matrixmultiply(orbsb,matrixmultiply(dV_ao,transpose(orbsb)))
        X = zeros((npbf,npbf),'d')
        c = zeros(npbf,'d')
        for k in xrange(npbf):
            Gk = matrixmultiply(orbsb,matrixmultiply(Gij[k],
                                                     transpose(orbsb)))
            for i in xrange(nbeta):
                for a in xrange(nbeta,norb):
                    c[k] += dV[i,a]*Gk[i,a]/(orbeb[i]-orbeb[a])
            for l in xrange(npbf):
                Gl = matrixmultiply(orbsb,matrixmultiply(Gij[l],
                                                         transpose(orbsb)))
                for i in xrange(nbeta):
                    for a in xrange(nbeta,norb):
                        X[k,l] += Gk[i,a]*Gl[i,a]/(orbeb[i]-orbeb[a])
        # This should actually be a pseudoinverse...
        bb = solve(X,c)

    logger.info("Final OEP energy = %f" % energy)
    return energy,(orbea,orbeb),(orbsa,orbsb)
def test_old():
    # Smoke test for the legacy functional interface: converge RHF on a
    # LiH molecule, then feed the converged orbitals into exx().
    # NOTE(review): uses Python 2 print statements, like the rest of this
    # module.
    from PyQuante.Molecule import Molecule
    from PyQuante.Ints import getbasis,getints
    from PyQuante.hartree_fock import rhf
    logging.basicConfig(level=logging.DEBUG,format="%(message)s")
    #mol = Molecule('HF',[('H',(0.,0.,0.)),('F',(0.,0.,0.898369))],
    #               units='Angstrom')
    mol = Molecule('LiH',[(1,(0,0,1.5)),(3,(0,0,-1.5))],units = 'Bohr')
    bfs = getbasis(mol)
    S,h,Ints = getints(bfs,mol)
    print "after integrals"
    E_hf,orbe_hf,orbs_hf = rhf(mol,bfs=bfs,integrals=(S,h,Ints),DoAveraging=True)
    print "RHF energy = ",E_hf
    E_exx,orbe_exx,orbs_exx = exx(mol,orbs_hf,bfs=bfs,integrals=(S,h,Ints))
    return
def test():
    # Smoke test for the solver-based restricted EXX interface on He:
    # converge HF and DFT references, then run the EXX/OEP solver at zero
    # and finite electron temperature.
    from PyQuante import Molecule, HFSolver, DFTSolver, UHFSolver
    logging.basicConfig(level=logging.DEBUG,format="%(message)s")
    mol = Molecule("He",[(2,(0,0,0))])
    solver = HFSolver(mol)
    solver.iterate()
    print "HF energy = ",solver.energy
    dft_solver = DFTSolver(mol)
    dft_solver.iterate()
    print "DFT energy = ",dft_solver.energy
    oep = EXXSolver(solver)
    # Testing 0 temp
    oep.iterate()
    # Testing finite temp
    oep.iterate(etemp=40000)
    return
def utest():
    # Smoke test for the unrestricted (spin-polarized) EXX solver on a Li
    # doublet: converge UHF and DFT references, then run UEXXSolver at
    # zero and finite electron temperature.
    from PyQuante import Molecule, HFSolver, DFTSolver, UHFSolver
    logging.basicConfig(level=logging.DEBUG,format="%(message)s")
    mol = Molecule("He",[(2,(0,0,0))])
    # NOTE(review): the He molecule above is immediately shadowed by the
    # Li molecule below and appears to be dead code -- confirm before
    # removing.
    mol = Molecule("Li",[(3,(0,0,0))],multiplicity=2)
    solver = UHFSolver(mol)
    solver.iterate()
    print "HF energy = ",solver.energy
    dft_solver = DFTSolver(mol)
    dft_solver.iterate()
    print "DFT energy = ",dft_solver.energy
    oep = UEXXSolver(solver)
    # Testing 0 temp
    oep.iterate()
    # Testing finite temp
    oep.iterate(etemp=10000)
    return
# Run both the restricted and unrestricted EXX/OEP smoke tests when
# executed as a script.
if __name__ == '__main__':
    test()
    utest()
| berquist/PyQuante | PyQuante/OEP.py | Python | bsd-3-clause | 25,427 |
def extractLittlebambooHomeBlog(item):
    '''
    Parser for 'littlebamboo.home.blog'
    '''
    # Decompose the release title into its numbering components.
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])

    # Skip items with no usable numbering, and skip preview posts.
    has_numbering = bool(chp or vol)
    if not has_numbering or "preview" in item['title'].lower():
        return None

    # (tag, series name, translation type) triples recognised on this feed.
    known_tags = [
        ('FW', 'Fortunate Wife', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, translation_type in known_tags:
        if tag not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, series_name, vol, chp,
                                           frag=frag, postfix=postfix,
                                           tl_type=translation_type)

    # No recognised tag: signal "not a release we track".
    return False
<?php
namespace Obsequium\Entity;
use Doctrine\ORM\Mapping as ORM;
use Zend\Stdlib\Hydrator\ClassMethods;
/**
* Escala
*
* @ORM\Table(name="escala", indexes={@ORM\Index(name="fk_escala_policial1_idx", columns={"pol_Codigo"})})
* @ORM\Entity
*/
class Escala
{
    /**
     * @var integer
     *
     * @ORM\Column(name="esc_Codigo", type="integer", nullable=false)
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="IDENTITY")
     */
    private $codigo;

    /**
     * @var string
     *
     * @ORM\Column(name="esc_tipo", type="string", length=45, nullable=false)
     */
    private $tipo;

    /**
     * @var integer
     *
     * @ORM\Column(name="esc_qtd_folgas", type="integer", nullable=false)
     */
    private $qtdfolgas;

    /**
     * @var boolean
     *
     * @ORM\Column(name="esc_escalado", type="boolean", nullable=false)
     */
    private $escalado;

    /**
     * @var \DateTime
     *
     * @ORM\Column(name="esc_data", type="datetime", nullable=false)
     */
    private $data;

    /**
     * @var boolean
     *
     * @ORM\Column(name="esc_apto", type="boolean", nullable=false)
     */
    private $apto;

    /**
     * @var string
     *
     * @ORM\Column(name="esc_equipe", type="string", length=60, nullable=true)
     */
    private $equipe;

    /**
     * @var string
     *
     * @ORM\Column(name="esc_posto", type="string", length=120, nullable=false)
     */
    private $posto;

    /**
     * @var string
     *
     * @ORM\Column(name="esc_funcao", type="string", length=125, nullable=false)
     */
    private $funcao;

    /**
     * @var string
     *
     * @ORM\Column(name="esc_uniforme", type="string", length=60, nullable=false)
     */
    private $uniforme;

    // NOTE(review): the docblock below says \Policial while the association
    // targets \Census\Entity\Policial -- confirm which one is intended.
    /**
     * @var \Policial
     *
     * @ORM\ManyToOne(targetEntity="\Census\Entity\Policial")
     * @ORM\JoinColumns({
     *   @ORM\JoinColumn(name="pol_Codigo", referencedColumnName="pol_Codigo")
     * })
     */
    private $polcodigo;

    // Hydrates the entity from an associative array using Zend's
    // ClassMethods hydrator (array keys are matched to setter names).
    public function __construct(array $data) {
        $hydrator = new ClassMethods();
        $hydrator->hydrate($data, $this);
    }

    // Extracts the entity's state back into an associative array via the
    // same ClassMethods hydrator.
    public function toArray()
    {
        $hydrator = new ClassMethods();
        return $hydrator->extract($this);
    }

    // --- Accessors; all setters are fluent (return $this). ---

    public function getCodigo(){
        return $this->codigo;
    }

    public function setCodigo($codigo){
        $this->codigo = $codigo;
        return $this;
    }

    public function getTipo(){
        return $this->tipo;
    }

    public function setTipo($tipo){
        $this->tipo = $tipo;
        return $this;
    }

    public function getQtdfolgas(){
        return $this->qtdfolgas;
    }

    public function setQtdfolgas($qtdfolgas){
        $this->qtdfolgas = $qtdfolgas;
        return $this;
    }

    public function getEscalado(){
        return $this->escalado;
    }

    public function setEscalado($escalado){
        $this->escalado = $escalado;
        return $this;
    }

    public function getData(){
        return $this->data;
    }

    // Accepts a date/time string and stores it wrapped in a \DateTime
    // instance (as required by the "datetime" column mapping).
    public function setData($data){
        $this->data = new \DateTime($data);
        return $this;
    }

    public function getApto(){
        return $this->apto;
    }

    public function setApto($apto){
        $this->apto = $apto;
        return $this;
    }

    public function getEquipe(){
        return $this->equipe;
    }

    public function setEquipe($equipe){
        $this->equipe = $equipe;
        return $this;
    }

    public function getPosto(){
        return $this->posto;
    }

    public function setPosto($posto){
        $this->posto = $posto;
        return $this;
    }

    public function getFuncao(){
        return $this->funcao;
    }

    public function setFuncao($funcao){
        $this->funcao = $funcao;
        return $this;
    }

    public function getUniforme(){
        return $this->uniforme;
    }

    public function setUniforme($uniforme){
        $this->uniforme = $uniforme;
        return $this;
    }

    public function getPolcodigo(){
        return $this->polcodigo;
    }

    public function setPolcodigo($polcodigo){
        $this->polcodigo = $polcodigo;
        return $this;
    }
}
| clauderlima/bpma | module/Obsequium/src/Obsequium/Entity/Escala.php | PHP | bsd-3-clause | 4,158 |
$( document ).ready(function() {
    // Initialize bootstrap-switch toggles; each state change is persisted
    // via an AJAX GET to the URL stored in the element's data-link
    // attribute, with the new status and the record id as query params.
    $('.switch').bootstrapSwitch({onText: 'вкл', offText: 'выкл'}).on('switchChange.bootstrapSwitch', function () {
        var checkbox = $(this);
        // Disable the control while the request is in flight so the user
        // cannot queue overlapping state changes.
        checkbox.bootstrapSwitch('disabled', true);
        $.getJSON(checkbox.data('link') + '?' + 'status=' + (checkbox.is(':checked') ? 1 : 0) + '&id=' + checkbox.data('id'), function (response) {
            if (response.result === 'error') {
                alert(response.error);
            }
            // BUGFIX: the switch previously stayed disabled forever after
            // a server-side error; always re-enable once a reply arrives.
            checkbox.bootstrapSwitch('disabled', false);
        }).fail(function () {
            // BUGFIX: also re-enable on network/HTTP failure so the user
            // can retry (previously the control was left disabled).
            checkbox.bootstrapSwitch('disabled', false);
        });
    });
});
<?php
namespace App\Http\Requests\User;
use App\User;
use Illuminate\Foundation\Http\FormRequest;
/**
 * Form request validating a user rename: the current name must exist and
 * the new name must be unique.
 */
class UserEditRequest extends FormRequest
{
    /**
     * Determine if the user is authorized to make this request.
     *
     * Any authenticated user may submit this request.
     *
     * @return bool
     */
    public function authorize()
    {
        return \Auth::check();
    }

    /**
     * Get the validation rules that apply to the request.
     *
     * 'name' must match an existing users row; 'new-name' must not
     * collide with any existing users.name value.
     *
     * @return array
     */
    public function rules()
    {
        return [
            'name' => 'required|string|exists:users',
            'new-name' => 'required|string|unique:users,name'
        ];
    }
}
| pierobot/mangapie | app/Http/Requests/User/UserEditRequest.php | PHP | bsd-3-clause | 618 |
import React from 'react';
import styled from '@emotion/styled';
import space from 'app/styles/space';
import {getListSymbolStyle, listSymbol} from './utils';
// Props accepted by the List wrapper.
type Props = {
  children: Array<React.ReactElement>;
  // Either the key of a predefined list symbol (see ./utils) or a custom
  // element to use as each item's bullet.
  symbol?: keyof typeof listSymbol | React.ReactElement;
  className?: string;
};

// Styled list container. Renders an <ol> for the numeric symbol variants
// and a <ul> otherwise; when a custom element is supplied as the symbol,
// it is cloned onto each valid child item via a `symbol` prop.
const List = styled(({children, className, symbol, ...props}: Props) => {
  const getWrapperComponent = () => {
    switch (symbol) {
      case 'numeric':
      case 'colored-numeric':
        return 'ol';
      default:
        return 'ul';
    }
  };

  const Wrapper = getWrapperComponent();

  return (
    <Wrapper className={className} {...props}>
      {!symbol || typeof symbol === 'string'
        ? children
        : React.Children.map(children, child => {
            if (!React.isValidElement(child)) {
              return child;
            }
            return React.cloneElement(child as React.ReactElement, {
              symbol,
            });
          })}
    </Wrapper>
  );
})`
  margin: 0;
  padding: 0;
  list-style: none;
  display: grid;
  grid-gap: ${space(0.5)};
  ${p =>
    typeof p.symbol === 'string' &&
    listSymbol[p.symbol] &&
    getListSymbolStyle(p.theme, p.symbol)}
`;

export default List;
| beeftornado/sentry | src/sentry/static/sentry/app/components/list/index.tsx | TypeScript | bsd-3-clause | 1,235 |
/* Copyright (c) 2006, NIF File Format Library and Tools
All rights reserved. Please see niflib.h for license. */
//-----------------------------------NOTICE----------------------------------//
// Some of this file is automatically filled in by a Python script. Only //
// add custom code in the designated areas or it will be overwritten during //
// the next update. //
//-----------------------------------NOTICE----------------------------------//
//--BEGIN FILE HEAD CUSTOM CODE--//
//--END CUSTOM CODE--//
#include "../../include/FixLink.h"
#include "../../include/ObjectRegistry.h"
#include "../../include/NIF_IO.h"
#include "../../include/obj/BSDismemberSkinInstance.h"
#include "../../include/gen/BodyPartList.h"
using namespace Niflib;
//Definition of TYPE constant
const Type BSDismemberSkinInstance::TYPE("BSDismemberSkinInstance", &NiSkinInstance::TYPE);
// Constructs an instance with an empty partition list; the skin data
// itself lives in the NiSkinInstance base class.
BSDismemberSkinInstance::BSDismemberSkinInstance() : numPartitions((int) 0)
{
	//--BEGIN CONSTRUCTOR CUSTOM CODE--//

	//--END CUSTOM CODE--//
}

BSDismemberSkinInstance::~BSDismemberSkinInstance()
{
	//--BEGIN DESTRUCTOR CUSTOM CODE--//

	//--END CUSTOM CODE--//
}

// Returns the run-time type descriptor for this class.
const Type & BSDismemberSkinInstance::GetType() const
{
	return TYPE;
}

// Factory used by the object registry to instantiate this type by name.
NiObject * BSDismemberSkinInstance::Create()
{
	return new BSDismemberSkinInstance;
}
// Deserializes the base NiSkinInstance data followed by the partition
// count and each partition's (partFlag, bodyPart) pair.
void BSDismemberSkinInstance::Read(istream& in, list<unsigned int> & link_stack, const NifInfo & info)
{
	//--BEGIN PRE-READ CUSTOM CODE--//

	//--END CUSTOM CODE--//

	NiSkinInstance::Read(in, link_stack, info);
	NifStream(numPartitions, in, info);
	partitions.resize(numPartitions);
	for(unsigned int i1 = 0; i1 < partitions.size(); i1++)
	{
		NifStream(partitions[i1].partFlag, in, info);
		NifStream(partitions[i1].bodyPart, in, info);
	};

	//--BEGIN POST-READ CUSTOM CODE--//

	//--END CUSTOM CODE--//
}
// Serializes the base NiSkinInstance data followed by the partition list.
// numPartitions is refreshed from the vector size before being written so
// the on-disk count always matches the data.
void BSDismemberSkinInstance::Write(ostream& out, const map<NiObjectRef, unsigned int> & link_map, list<NiObject *> & missing_link_stack, const NifInfo & info) const
{
	//--BEGIN PRE-WRITE CUSTOM CODE--//

	//--END CUSTOM CODE--//

	NiSkinInstance::Write(out, link_map, missing_link_stack, info);
	// NOTE(review): assigning numPartitions inside a const method implies
	// it is declared mutable in the header -- confirm.
	numPartitions = (int) (partitions.size());
	NifStream(numPartitions, out, info);
	for(unsigned int i1 = 0; i1 < partitions.size(); i1++)
	{
		NifStream(partitions[i1].partFlag, out, info);
		NifStream(partitions[i1].bodyPart, out, info);
	};

	//--BEGIN POST-WRITE CUSTOM CODE--//

	//--END CUSTOM CODE--//
}
// Returns a human-readable dump of this object's fields, appended to the
// base-class dump.
std::string BSDismemberSkinInstance::asString(bool verbose) const
{
	//--BEGIN PRE-STRING CUSTOM CODE--//

	//--END CUSTOM CODE--//

	stringstream out;
	unsigned int array_output_count = 0;
	out << NiSkinInstance::asString(verbose);
	// NOTE(review): assigning numPartitions inside a const method implies
	// it is declared mutable in the header -- confirm.
	numPartitions = (int) (partitions.size());
	out << "  Num Partitions:  " << numPartitions << endl;
	array_output_count = 0;
	for(unsigned int i1 = 0; i1 < partitions.size(); i1++)
	{
		// NOTE(review): array_output_count is never incremented in this
		// loop, so this truncation guard can never trigger here -- confirm
		// against the code generator.
		if(!verbose && (array_output_count > MAXARRAYDUMP))
		{
			out << "<Data Truncated. Use verbose mode to see complete listing.>" << endl;
			break;
		};
		out << "    Part Flag:  " << partitions[i1].partFlag << endl;
		out << "    Body Part:  " << partitions[i1].bodyPart << endl;
	};
	return out.str();

	//--BEGIN POST-STRING CUSTOM CODE--//

	//--END CUSTOM CODE--//
}
// Resolves object references after all blocks have been read.  This class
// adds no references of its own beyond the NiSkinInstance base.
void BSDismemberSkinInstance::FixLinks(const map<unsigned int, NiObjectRef> & objects, list<unsigned int> & link_stack, list<NiObjectRef> & missing_link_stack, const NifInfo & info)
{
	//--BEGIN PRE-FIXLINKS CUSTOM CODE--//

	//--END CUSTOM CODE--//

	NiSkinInstance::FixLinks(objects, link_stack, missing_link_stack, info);

	//--BEGIN POST-FIXLINKS CUSTOM CODE--//

	//--END CUSTOM CODE--//
}

// Returns the strong references held by this object (base-class only).
std::list<NiObjectRef> BSDismemberSkinInstance::GetRefs() const
{
	list<Ref<NiObject> > refs;
	refs = NiSkinInstance::GetRefs();
	return refs;
}

// Returns the raw pointers held by this object (base-class only).
std::list<NiObject *> BSDismemberSkinInstance::GetPtrs() const
{
	list<NiObject *> ptrs;
	ptrs = NiSkinInstance::GetPtrs();
	return ptrs;
}
/***Begin Example Naive Implementation****
vector<BodyPartList > BSDismemberSkinInstance::GetPartitions() const {
return partitions;
}
void BSDismemberSkinInstance::SetPartitions( const vector<BodyPartList >& value ) {
partitions = value;
}
****End Example Naive Implementation***/
//--BEGIN MISC CUSTOM CODE--//

// Accessor for the dismemberment partition list.
vector<BodyPartList > BSDismemberSkinInstance::GetPartitions() const
{
	return partitions;
}

// Replaces the dismemberment partition list; numPartitions is recomputed
// from the vector on the next Write()/asString() call.
void BSDismemberSkinInstance::SetPartitions(const vector<BodyPartList >& value)
{
	partitions = value;
}

//--END CUSTOM CODE--//
package edu.umn.cs.crisys.topsort;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
// TODO: currently only sorts the elements in the initial list,
// even if the dependencies contain other elements; this matches
// my current purpose, but may not do what you want!
public class TopologicalSort {

  /**
   * DFS visit of a single element.  Unvisited (absent-from-assoc) nodes
   * are descended into; a GRAY node indicates a back-edge, i.e. a cycle;
   * BLACK nodes have already been emitted.  Elements are appended to
   * {@code sorted} dependencies-first.
   */
  public static <T2 extends DependsOn<T2>> void topoSortElement(T2 elem, Map<T2, Color> assoc,
      List<T2> sorted) throws CyclicException {
    Color c = assoc.get(elem);
    if (c == null) {
      assoc.put(elem, Color.GRAY);
      if (elem.dependencies() != null) {
        for (T2 dep : elem.dependencies()) {
          // Don't consider elements outside of the initial set to sort.
          topoSortElement(dep, assoc, sorted);
        }
      }
      assoc.put(elem, Color.BLACK);
      sorted.add(elem);
    } else if (c == Color.GRAY) {
      throw new CyclicException("Cycle detected during topological sort.");
    }
  };

  /**
   * Topologically sorts {@code elements} (dependencies first).  The result
   * may also contain dependencies that were not in the input collection.
   */
  public static <T2 extends DependsOn<T2>> List<T2> performTopologicalSort(Collection<T2> elements)
      throws CyclicException {
    Map<T2, Color> colorAssoc = new HashMap<T2, Color>();
    List<T2> sorted = new ArrayList<T2>(elements.size());
    for (T2 e : elements) {
      topoSortElement(e, colorAssoc, sorted);
    }
    return sorted;
  }

  /**
   * Like {@link #performTopologicalSort}, but the result is filtered down
   * to only the elements that were actually passed in.
   */
  public static <T2 extends DependsOn<T2>> List<T2> performElementsOnlyTopologicalSort(Collection<T2> elements)
      throws CyclicException {
    // start from the set under consideration.
    final Set<T2> elemSet = new HashSet<T2>(elements);
    final List<T2> allTypesSorted = performTopologicalSort(elements);
    List<T2> elementsSorted = new ArrayList<T2>();
    for (T2 e : allTypesSorted) {
      if (elemSet.contains(e)) {
        elementsSorted.add(e);
      }
    }
    return elementsSorted;
  }
};
// check for duplicates...this is currently inefficient!
/*
boolean found = false;
for (T2 other: sorted) {
if (other.equals(elem)) {
found = true;
}
}
if (!found) {
sorted.add(elem);
}
*/
| smaccm/smaccm | fm-workbench/trusted-build/edu.umn.cs.crisys.tb/src/edu/umn/cs/crisys/topsort/TopologicalSort.java | Java | bsd-3-clause | 2,087 |
<?php
namespace Cliente;
// Module configuration for the Cliente module: routing, service manager,
// controllers, view manager and the Doctrine ORM annotation driver.
// Converted from the legacy array() syntax to short array syntax.
return [
    'router' => [
        'routes' => [
            'cliente' => [
                'type' => 'Literal',
                'options' => [
                    'route' => '/cliente',
                    'defaults' => [
                        '__NAMESPACE__' => 'Cliente\Controller',
                        'controller' => 'Index',
                        'action' => 'index',
                    ],
                ],
                'may_terminate' => true,
                'child_routes' => [
                    'default' => [
                        'type' => 'Segment',
                        'options' => [
                            // Optional action/id segments plus an optional
                            // /page/:page suffix for pagination.
                            'route' => '[/:action][/:id][/page/:page]',
                            'constraints' => [
                                // Any identifier except the literal "page".
                                'action' => '(?!\bpage\b)[a-zA-Z][a-zA-Z0-9_-]*',
                                'id' => '[0-9]+',
                                'page' => '[0-9]+'
                            ],
                            'defaults' => [
                                'controller' => 'Cliente\Controller\Index',
                                'action' => 'index',
                                'page' => 1
                            ],
                        ],
                    ],
                ],
            ],
        ],
    ],
    'service_manager' => [
        'abstract_factories' => [
            'Zend\Cache\Service\StorageCacheAbstractServiceFactory',
            'Zend\Log\LoggerAbstractServiceFactory',
        ],
        'aliases' => [
            'translator' => 'MvcTranslator',
        ],
    ],
    'controllers' => [
        'invokables' => [
            'Cliente\Controller\Index' => 'Cliente\Controller\IndexController'
        ],
    ],
    'view_manager' => [
        'display_not_found_reason' => true,
        'display_exceptions' => true,
        'doctype' => 'HTML5',
        'not_found_template' => 'error/404',
        'exception_template' => 'error/index',
        'template_map' => [
            'cliente/index/index' => __DIR__ . '/../view/cliente/index/index.phtml',
            'error/404' => __DIR__ . '/../view/error/404.phtml',
            'error/index' => __DIR__ . '/../view/error/index.phtml',
        ],
        'template_path_stack' => [
            __DIR__ . '/../view',
        ],
    ],
    'doctrine' => [
        'driver' => [
            __NAMESPACE__ . '_driver' => [
                'class' => 'Doctrine\ORM\Mapping\Driver\AnnotationDriver',
                'cache' => 'array',
                'paths' => [__DIR__ . '/../src/' . __NAMESPACE__ . '/Entity']
            ],
            'orm_default' => [
                'drivers' => [
                    __NAMESPACE__ . '\Entity' => __NAMESPACE__ . '_driver'
                ],
            ],
        ],
    ],
];
| junaooaks/prosoftmk | module/Cliente/config/module.config.php | PHP | bsd-3-clause | 2,935 |
// Copyright 2015 The Vanadium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package namespace
import (
"io"
"strings"
"sync"
"v.io/v23"
"v.io/v23/context"
"v.io/v23/glob"
"v.io/v23/naming"
"v.io/v23/options"
"v.io/v23/rpc"
"v.io/v23/verror"
"v.io/x/ref/lib/apilog"
)
// tracks records which (server address, glob pattern) pairs have already
// been visited during a single Glob traversal so that cycles in the mount
// graph terminate.  The mutex guards places across concurrent tasks.
type tracks struct {
	m      sync.Mutex
	places map[string]struct{}
}
// beenThereDoneThat records the (server, pattern) pairs for this request
// and reports whether any of them had been visited before.  Every pair is
// recorded even after a repeat is found, so the loop never exits early.
func (tr *tracks) beenThereDoneThat(servers []naming.MountedServer, pstr string) bool {
	tr.m.Lock()
	defer tr.m.Unlock()
	repeat := false
	for _, srv := range servers {
		key := naming.JoinAddressName(srv.Server, "") + "!" + pstr
		if _, seen := tr.places[key]; seen {
			repeat = true
		}
		tr.places[key] = struct{}{}
	}
	return repeat
}
// task is a sub-glob that has to be performed against a mount table. Tasks
// are done in parallel to speed up the glob.
type task struct {
	pattern *glob.Glob         // pattern to match
	er      *naming.GlobError  // error for that particular point in the name space
	me      *naming.MountEntry // server to match at
	error   error              // any error performing this task
	depth   int                // number of mount tables traversed recursively
}
// globAtServer performs a Glob on the servers at a mount point. It cycles
// through the set of servers until it finds one that replies.
//
// Matched entries, sub-errors, and further mount points to descend into
// are sent on replies as *task values.  Exactly one terminating message is
// sent per task (see the deferred func): nil on success, or the task
// itself carrying t.error on failure.  tr is shared across the whole Glob
// and breaks cycles in the mount graph.
func (ns *namespace) globAtServer(ctx *context.T, t *task, replies chan *task, tr *tracks, opts []rpc.CallOpt) {
	defer func() {
		if t.error == nil {
			replies <- nil
		} else {
			replies <- t
		}
	}()
	client := v23.GetClient(ctx)
	pstr := t.pattern.String()
	ctx.VI(2).Infof("globAtServer(%v, %v)", *t.me, pstr)

	// If there are no servers to call, this isn't a mount point. No sense
	// trying to call servers that aren't there.
	if len(t.me.Servers) == 0 {
		t.error = nil
		return
	}

	// If we've been there before with the same request, give up.
	if tr.beenThereDoneThat(t.me.Servers, pstr) {
		t.error = nil
		return
	}

	// t.me.Name has already been matched at this point so don't pass it to
	// the Call. Kind of sleazy to do this but it avoids making yet another
	// copy of the MountEntry.
	on := t.me.Name
	t.me.Name = ""
	timeoutCtx, cancel := withTimeout(ctx)
	defer cancel()
	call, err := client.StartCall(timeoutCtx, "", rpc.GlobMethod, []interface{}{pstr}, append(opts, options.Preresolved{t.me})...)
	t.me.Name = on
	if err != nil {
		t.error = err
		return
	}

	// At this point we're committed to the server that answered the call
	// first. Cycle through all replies from that server.
	for {
		// If the mount table returns an error, we're done. Send the task
		// to the channel including the error. This terminates the task.
		var gr naming.GlobReply
		err := call.Recv(&gr)
		if err == io.EOF {
			break
		}
		if err != nil {
			t.error = err
			return
		}
		var x *task
		switch v := gr.(type) {
		case naming.GlobReplyEntry:
			// Convert to the ever so slightly different name.MountTable
			// version of a MountEntry and add it to the list.
			x = &task{
				me: &naming.MountEntry{
					Name:             naming.Join(t.me.Name, v.Value.Name),
					Servers:          v.Value.Servers,
					ServesMountTable: v.Value.ServesMountTable,
					IsLeaf:           v.Value.IsLeaf,
				},
				depth: t.depth + 1,
			}
		case naming.GlobReplyError:
			// Pass on the error.
			x = &task{
				er:    &v.Value,
				depth: t.depth + 1,
			}
		}
		// x.depth is the number of servers we've walked through since
		// we've gone recursive (i.e. with pattern length of 0). Limit the
		// depth of globs.
		// TODO(p): return an error?
		if t.pattern.Len() == 0 {
			if x.depth > ns.maxRecursiveGlobDepth {
				continue
			}
		}
		replies <- x
	}
	t.error = call.Finish()
	return
}
// depth returns the directory depth of a given name.  It is used to pick
// off the unsatisfied part of the pattern.
func depth(name string) int {
	cleaned := strings.Trim(naming.Clean(name), "/")
	if cleaned == "" {
		return 0
	}
	return 1 + strings.Count(cleaned, "/")
}
// globLoop fires off a go routine for each server and reads back replies.
//
// It drives a parallel traversal of the name graph rooted at e, sending
// matched entries and errors on reply (closed when the traversal is done).
// prefix, when non-empty, is prepended to every returned name; tr carries
// the cycle-detection state shared with globAtServer.
func (ns *namespace) globLoop(ctx *context.T, e *naming.MountEntry, prefix string, pattern *glob.Glob, reply chan naming.GlobReply, tr *tracks, opts []rpc.CallOpt) {
	defer close(reply)

	// Provide enough buffers to avoid too much switching between the
	// readers and the writers. This size is just a guess.
	replies := make(chan *task, 100)
	defer close(replies)

	// Push the first task into the channel to start the ball rolling. This
	// task has the root of the search and the full pattern. It will be the
	// first task fired off in the for loop that follows.
	replies <- &task{me: e, pattern: pattern}
	replies <- nil
	inFlight := 1

	// Perform a parallel search of the name graph. Each task will send
	// what it learns on the replies channel. If the reply is a mount point
	// and the pattern is not completely fulfilled, a new task will be
	// fired off to handle it.
	for inFlight != 0 {
		t := <-replies

		// A nil reply represents a successfully terminated task.
		// If no tasks are running, return.
		if t == nil {
			inFlight--
			continue
		}

		// We want to output this entry if there was a real error other
		// than "not a mount table".
		//
		// An error reply is also a terminated task.
		// If no tasks are running, return.
		if t.error != nil {
			if !notAnMT(t.error) {
				reply <- &naming.GlobReplyError{Value: naming.GlobError{Name: naming.Join(prefix, t.me.Name), Error: t.error}}
			}
			inFlight--
			continue
		}

		// If this is just an error from the mount table, pass it on.
		if t.er != nil {
			x := *t.er
			x.Name = naming.Join(prefix, x.Name)
			reply <- &naming.GlobReplyError{Value: x}
			continue
		}

		// Get the pattern elements below the current path.
		suffix := pattern
		for i := depth(t.me.Name) - 1; i >= 0; i-- {
			suffix = suffix.Tail()
		}

		// If we've satisfied the request and this isn't the root,
		// reply to the caller.
		if suffix.Len() == 0 && t.depth != 0 {
			x := *t.me
			x.Name = naming.Join(prefix, x.Name)
			reply <- &naming.GlobReplyEntry{Value: x}
		}

		// If the pattern is finished (so we're only querying about the
		// root on the remote server) and the server is not another MT,
		// then we needn't send the query on since we know the server will
		// not supply a new address for the current name.
		if suffix.Empty() {
			if !t.me.ServesMountTable {
				continue
			}
		}

		// If this is restricted recursive and not a mount table, don't
		// descend into it.
		if suffix.Restricted() && suffix.Len() == 0 && !t.me.ServesMountTable {
			continue
		}

		// Perform a glob at the next server.
		inFlight++
		t.pattern = suffix
		go ns.globAtServer(ctx, t, replies, tr, opts)
	}
}
// Glob implements naming.MountTable.Glob.
//
// It resolves the root of the pattern and then delegates the traversal to
// globLoop; results are streamed on the returned channel.
func (ns *namespace) Glob(ctx *context.T, pattern string, opts ...naming.NamespaceOpt) (<-chan naming.GlobReply, error) {
	defer apilog.LogCallf(ctx, "pattern=%.10s...,opts...=%v", pattern, opts)(ctx, "") // gologcop: DO NOT EDIT, MUST BE FIRST STATEMENT
	// Root the pattern. If we have no servers to query, give up.
	e, patternWasRooted := ns.rootMountEntry(pattern)
	if len(e.Servers) == 0 {
		return nil, verror.New(naming.ErrNoMountTable, ctx)
	}

	// If the name doesn't parse, give up.
	g, err := glob.Parse(e.Name)
	if err != nil {
		return nil, err
	}

	tr := &tracks{places: make(map[string]struct{})}

	// If pattern was already rooted, make sure we tack that root
	// onto all returned names. Otherwise, just return the relative
	// name.
	var prefix string
	if patternWasRooted {
		prefix = e.Servers[0].Server
	}
	e.Name = ""
	reply := make(chan naming.GlobReply, 100)
	go ns.globLoop(ctx, e, prefix, g, reply, tr, getCallOpts(opts))
	return reply, nil
}
| vanadium/go.ref | runtime/internal/naming/namespace/glob.go | GO | bsd-3-clause | 7,783 |
package gwlpr.protocol.gameserver.outbound;
import gwlpr.protocol.serialization.GWMessage;
/**
 * Auto-generated by PacketCodeGen.
 * Will display text of hard coded string IDs in the
 * chat box
 */
public final class P455_InformationChat
    extends GWMessage
{

    // Hard-coded string ID of the text to display in the chat box.
    private short text;

    /** Wire header identifying this message type (455). */
    @Override
    public short getHeader() {
        return 455;
    }

    public void setText(short text) {
        this.text = text;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("P455_InformationChat[");
        sb.append("text=").append(this.text).append("]");
        return sb.toString();
    }

}
| GameRevision/GWLP-R | protocol/src/main/java/gwlpr/protocol/gameserver/outbound/P455_InformationChat.java | Java | bsd-3-clause | 655 |
// This is a generated file. Not intended for manual editing.
package io.v.vdl.psi.impl;
import java.util.List;
import org.jetbrains.annotations.*;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.util.PsiTreeUtil;
import static io.v.vdl.psi.VdlTypes.*;
import io.v.vdl.psi.*;
/**
 * PSI tree node implementation for a VDL function literal (generated; see
 * the file header).
 */
public class VdlFunctionLitImpl extends VdlExpressionImpl implements VdlFunctionLit {

  public VdlFunctionLitImpl(ASTNode node) {
    super(node);
  }

  /** Dispatches to the VDL-specific visitor callback for function literals. */
  public void accept(@NotNull VdlVisitor visitor) {
    visitor.visitFunctionLit(this);
  }

  public void accept(@NotNull PsiElementVisitor visitor) {
    if (visitor instanceof VdlVisitor) accept((VdlVisitor)visitor);
    else super.accept(visitor);
  }

  /** The literal's body block, or null if absent. */
  @Override
  @Nullable
  public VdlBlock getBlock() {
    return findChildByClass(VdlBlock.class);
  }

  /** The literal's signature, or null if absent. */
  @Override
  @Nullable
  public VdlSignature getSignature() {
    return findChildByClass(VdlSignature.class);
  }

  /** The mandatory 'func' keyword element. */
  @Override
  @NotNull
  public PsiElement getFunc() {
    return findNotNullChildByType(FUNC);
  }

}
| vanadium/intellij-vdl-plugin | gen/io/v/vdl/psi/impl/VdlFunctionLitImpl.java | Java | bsd-3-clause | 1,102 |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/quic/mock_encrypter.h"
#include "net/third_party/quic/core/quic_data_writer.h"
#include "net/third_party/quic/core/quic_utils.h"
#include "starboard/memory.h"
using quic::DiversificationNonce;
using quic::Perspective;
using quic::QuicPacketNumber;
using quic::QuicStringPiece;
using quic::QuicTransportVersion;
namespace net {
// A no-op "encrypter" for tests: it accepts only empty key material and
// (see EncryptPacket) copies plaintext through unchanged.
MockEncrypter::MockEncrypter(Perspective perspective) {}

// Succeeds only for an empty key, since no real keying is performed.
bool MockEncrypter::SetKey(QuicStringPiece key) {
  return key.empty();
}

// Succeeds only for an empty nonce prefix.
bool MockEncrypter::SetNoncePrefix(QuicStringPiece nonce_prefix) {
  return nonce_prefix.empty();
}

// Succeeds only for an empty IV.
bool MockEncrypter::SetIV(QuicStringPiece iv) {
  return iv.empty();
}
// "Encrypts" by copying the plaintext verbatim into |output|; the packet
// number and associated data are ignored.  Fails only when the output
// buffer is too small.
bool MockEncrypter::EncryptPacket(uint64_t /*packet_number*/,
                                  QuicStringPiece associated_data,
                                  QuicStringPiece plaintext,
                                  char* output,
                                  size_t* output_length,
                                  size_t max_output_length) {
  if (max_output_length < plaintext.size()) {
    return false;
  }
  memcpy(output, plaintext.data(), plaintext.length());
  *output_length = plaintext.size();
  return true;
}
// All key/nonce/IV material is empty for the mock, so the size getters
// return 0 and the StringPiece getters return empty views.  Because no
// tag or padding is added, ciphertext and plaintext sizes are identical.
size_t MockEncrypter::GetKeySize() const {
  return 0;
}

size_t MockEncrypter::GetNoncePrefixSize() const {
  return 0;
}

size_t MockEncrypter::GetIVSize() const {
  return 0;
}

size_t MockEncrypter::GetMaxPlaintextSize(size_t ciphertext_size) const {
  return ciphertext_size;
}

size_t MockEncrypter::GetCiphertextSize(size_t plaintext_size) const {
  return plaintext_size;
}

QuicStringPiece MockEncrypter::GetKey() const {
  return QuicStringPiece();
}

QuicStringPiece MockEncrypter::GetNoncePrefix() const {
  return QuicStringPiece();
}
} // namespace net
| youtube/cobalt | net/quic/mock_encrypter.cc | C++ | bsd-3-clause | 1,922 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Maui.Trading.Reporting
{
    /// <summary>
    /// Read-only view over tabular report data.
    /// </summary>
    public interface ITableView
    {
        /// <summary>The rows of the table.</summary>
        IEnumerable<TableRow> Rows { get; }
    }
}
| bg0jr/Maui | src/Trading/Maui.Trading/Reporting/ITableView.cs | C# | bsd-3-clause | 227 |
from __future__ import print_function, division
from sympy.core import S, sympify, cacheit, pi, I, Rational
from sympy.core.add import Add
from sympy.core.function import Function, ArgumentIndexError, _coeff_isneg
from sympy.functions.combinatorial.factorials import factorial, RisingFactorial
from sympy.functions.elementary.exponential import exp, log, match_real_imag
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.functions.elementary.integers import floor
from sympy import pi, Eq
from sympy.logic import Or, And
from sympy.core.logic import fuzzy_or, fuzzy_and, fuzzy_bool
def _rewrite_hyperbolics_as_exp(expr):
    """Return ``expr`` with every hyperbolic function rewritten via ``exp``.

    The input is sympified first; each ``HyperbolicFunction`` atom is then
    replaced by its exponential form in a single ``xreplace`` pass.
    """
    expr = sympify(expr)
    replacements = {}
    for node in expr.atoms(HyperbolicFunction):
        replacements[node] = node.rewrite(exp)
    return expr.xreplace(replacements)
###############################################################################
########################### HYPERBOLIC FUNCTIONS ##############################
###############################################################################
class HyperbolicFunction(Function):
    """
    Base class for hyperbolic functions.
    See Also
    ========
    sinh, cosh, tanh, coth
    """
    # Hyperbolic functions are single-valued on the whole complex plane,
    # so they carry no branch cuts.
    unbranched = True
def _peeloff_ipi(arg):
    """
    Split ARG into two parts, a "rest" and a multiple of I*pi/2.
    This assumes ARG to be an Add.
    The multiple of I*pi returned in the second position is always a Rational.
    Examples
    ========
    >>> from sympy.functions.elementary.hyperbolic import _peeloff_ipi as peel
    >>> from sympy import pi, I
    >>> from sympy.abc import x, y
    >>> peel(x + I*pi/2)
    (x, I*pi/2)
    >>> peel(x + I*2*pi/3 + I*pi*y)
    (x + I*pi*y + I*pi/6, I*pi/2)
    """
    ipi = S.Pi*S.ImaginaryUnit
    # Scan the summands for a rational multiple of I*pi.
    coeff = None
    for term in Add.make_args(arg):
        if term == ipi:
            coeff = S.One
            break
        if term.is_Mul:
            c, rest = term.as_two_terms()
            if rest == ipi and c.is_Rational:
                coeff = c
                break
    if coeff is None:
        # No I*pi multiple present: nothing to peel off.
        return arg, S.Zero
    # Keep the fractional part below I*pi/2 in the "rest"; return the
    # remaining half-integer multiple of I*pi separately.
    m1 = (coeff % S.Half)*ipi
    m2 = coeff*ipi - m1
    return arg - m2, m2
class sinh(HyperbolicFunction):
    r"""
    The hyperbolic sine function, `\frac{e^x - e^{-x}}{2}`.
    * sinh(x) -> Returns the hyperbolic sine of x
    See Also
    ========
    cosh, tanh, asinh
    """
    def fdiff(self, argindex=1):
        """
        Returns the first derivative of this function.
        """
        if argindex == 1:
            return cosh(self.args[0])
        else:
            raise ArgumentIndexError(self, argindex)
    def inverse(self, argindex=1):
        """
        Returns the inverse of this function.
        """
        return asinh
    @classmethod
    def eval(cls, arg):
        from sympy import sin
        arg = sympify(arg)
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Infinity
            elif arg is S.NegativeInfinity:
                return S.NegativeInfinity
            elif arg.is_zero:
                return S.Zero
            elif arg.is_negative:
                # sinh is odd: canonicalize to a positive argument.
                return -cls(-arg)
        else:
            if arg is S.ComplexInfinity:
                return S.NaN
            # sinh(I*x) -> I*sin(x): reduce a pure-imaginary argument.
            i_coeff = arg.as_coefficient(S.ImaginaryUnit)
            if i_coeff is not None:
                return S.ImaginaryUnit * sin(i_coeff)
            else:
                if _coeff_isneg(arg):
                    return -cls(-arg)
            # Peel off a half-integer multiple of I*pi and apply the
            # addition formula sinh(x + m) = sinh(m)cosh(x) + cosh(m)sinh(x).
            if arg.is_Add:
                x, m = _peeloff_ipi(arg)
                if m:
                    return sinh(m)*cosh(x) + cosh(m)*sinh(x)
            if arg.is_zero:
                return S.Zero
            # sinh of an inverse hyperbolic function reduces to an
            # algebraic expression in its argument.
            if arg.func == asinh:
                return arg.args[0]
            if arg.func == acosh:
                x = arg.args[0]
                return sqrt(x - 1) * sqrt(x + 1)
            if arg.func == atanh:
                x = arg.args[0]
                return x/sqrt(1 - x**2)
            if arg.func == acoth:
                x = arg.args[0]
                return 1/(sqrt(x - 1) * sqrt(x + 1))
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        """
        Returns the next term in the Taylor series expansion.
        """
        if n < 0 or n % 2 == 0:
            return S.Zero
        else:
            x = sympify(x)
            if len(previous_terms) > 2:
                # Reuse the previous odd term: ratio of consecutive terms
                # is x**2 / (n*(n-1)).
                p = previous_terms[-2]
                return p * x**2 / (n*(n - 1))
            else:
                return x**(n) / factorial(n)
    def _eval_conjugate(self):
        return self.func(self.args[0].conjugate())
    def as_real_imag(self, deep=True, **hints):
        """
        Returns this function as a complex coordinate.
        """
        from sympy import cos, sin
        if self.args[0].is_extended_real:
            if deep:
                hints['complex'] = False
                return (self.expand(deep, **hints), S.Zero)
            else:
                return (self, S.Zero)
        if deep:
            re, im = self.args[0].expand(deep, **hints).as_real_imag()
        else:
            re, im = self.args[0].as_real_imag()
        # sinh(a + I*b) = sinh(a)cos(b) + I*cosh(a)sin(b)
        return (sinh(re)*cos(im), cosh(re)*sin(im))
    def _eval_expand_complex(self, deep=True, **hints):
        re_part, im_part = self.as_real_imag(deep=deep, **hints)
        return re_part + im_part*S.ImaginaryUnit
    def _eval_expand_trig(self, deep=True, **hints):
        if deep:
            arg = self.args[0].expand(deep, **hints)
        else:
            arg = self.args[0]
        x = None
        if arg.is_Add: # TODO, implement more if deep stuff here
            x, y = arg.as_two_terms()
        else:
            # For an integer multiple n*x, expand as x + (n-1)*x recursively.
            coeff, terms = arg.as_coeff_Mul(rational=True)
            if coeff is not S.One and coeff.is_Integer and terms is not S.One:
                x = terms
                y = (coeff - 1)*x
        if x is not None:
            return (sinh(x)*cosh(y) + sinh(y)*cosh(x)).expand(trig=True)
        return sinh(arg)
    def _eval_rewrite_as_tractable(self, arg, **kwargs):
        return (exp(arg) - exp(-arg)) / 2
    def _eval_rewrite_as_exp(self, arg, **kwargs):
        return (exp(arg) - exp(-arg)) / 2
    def _eval_rewrite_as_cosh(self, arg, **kwargs):
        return -S.ImaginaryUnit*cosh(arg + S.Pi*S.ImaginaryUnit/2)
    def _eval_rewrite_as_tanh(self, arg, **kwargs):
        # Half-angle identity: sinh(x) = 2*t/(1 - t**2), t = tanh(x/2).
        tanh_half = tanh(S.Half*arg)
        return 2*tanh_half/(1 - tanh_half**2)
    def _eval_rewrite_as_coth(self, arg, **kwargs):
        coth_half = coth(S.Half*arg)
        return 2*coth_half/(coth_half**2 - 1)
    def _eval_as_leading_term(self, x):
        from sympy import Order
        arg = self.args[0].as_leading_term(x)
        # Near 0, sinh(arg) ~ arg.
        if x in arg.free_symbols and Order(1, x).contains(arg):
            return arg
        else:
            return self.func(arg)
    def _eval_is_real(self):
        arg = self.args[0]
        if arg.is_real:
            return True
        # if `im` is of the form n*pi
        # else, check if it is a number
        re, im = arg.as_real_imag()
        return (im%pi).is_zero
    def _eval_is_extended_real(self):
        if self.args[0].is_extended_real:
            return True
    def _eval_is_positive(self):
        # For real arguments sinh is odd and increasing, so its sign
        # matches the sign of the argument.
        if self.args[0].is_extended_real:
            return self.args[0].is_positive
    def _eval_is_negative(self):
        if self.args[0].is_extended_real:
            return self.args[0].is_negative
    def _eval_is_finite(self):
        arg = self.args[0]
        return arg.is_finite
    def _eval_is_zero(self):
        arg = self.args[0]
        if arg.is_zero:
            return True
class cosh(HyperbolicFunction):
    r"""
    The hyperbolic cosine function, `\frac{e^x + e^{-x}}{2}`.
    * cosh(x) -> Returns the hyperbolic cosine of x
    See Also
    ========
    sinh, tanh, acosh
    """
    def fdiff(self, argindex=1):
        if argindex == 1:
            return sinh(self.args[0])
        else:
            raise ArgumentIndexError(self, argindex)
    @classmethod
    def eval(cls, arg):
        from sympy import cos
        arg = sympify(arg)
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Infinity
            elif arg is S.NegativeInfinity:
                return S.Infinity
            elif arg.is_zero:
                return S.One
            elif arg.is_negative:
                # cosh is even: canonicalize to a positive argument.
                return cls(-arg)
        else:
            if arg is S.ComplexInfinity:
                return S.NaN
            # cosh(I*x) -> cos(x): reduce a pure-imaginary argument.
            i_coeff = arg.as_coefficient(S.ImaginaryUnit)
            if i_coeff is not None:
                return cos(i_coeff)
            else:
                if _coeff_isneg(arg):
                    return cls(-arg)
            # Peel off a half-integer multiple of I*pi and apply the
            # addition formula cosh(x + m) = cosh(m)cosh(x) + sinh(m)sinh(x).
            if arg.is_Add:
                x, m = _peeloff_ipi(arg)
                if m:
                    return cosh(m)*cosh(x) + sinh(m)*sinh(x)
            if arg.is_zero:
                return S.One
            # cosh of an inverse hyperbolic function reduces to an
            # algebraic expression in its argument.
            if arg.func == asinh:
                return sqrt(1 + arg.args[0]**2)
            if arg.func == acosh:
                return arg.args[0]
            if arg.func == atanh:
                return 1/sqrt(1 - arg.args[0]**2)
            if arg.func == acoth:
                x = arg.args[0]
                return x/(sqrt(x - 1) * sqrt(x + 1))
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        if n < 0 or n % 2 == 1:
            return S.Zero
        else:
            x = sympify(x)
            if len(previous_terms) > 2:
                # Ratio of consecutive even terms is x**2 / (n*(n-1)).
                p = previous_terms[-2]
                return p * x**2 / (n*(n - 1))
            else:
                return x**(n)/factorial(n)
    def _eval_conjugate(self):
        return self.func(self.args[0].conjugate())
    def as_real_imag(self, deep=True, **hints):
        from sympy import cos, sin
        if self.args[0].is_extended_real:
            if deep:
                hints['complex'] = False
                return (self.expand(deep, **hints), S.Zero)
            else:
                return (self, S.Zero)
        if deep:
            re, im = self.args[0].expand(deep, **hints).as_real_imag()
        else:
            re, im = self.args[0].as_real_imag()
        # cosh(a + I*b) = cosh(a)cos(b) + I*sinh(a)sin(b)
        return (cosh(re)*cos(im), sinh(re)*sin(im))
    def _eval_expand_complex(self, deep=True, **hints):
        re_part, im_part = self.as_real_imag(deep=deep, **hints)
        return re_part + im_part*S.ImaginaryUnit
    def _eval_expand_trig(self, deep=True, **hints):
        if deep:
            arg = self.args[0].expand(deep, **hints)
        else:
            arg = self.args[0]
        x = None
        if arg.is_Add: # TODO, implement more if deep stuff here
            x, y = arg.as_two_terms()
        else:
            # For an integer multiple n*x, expand as x + (n-1)*x recursively.
            coeff, terms = arg.as_coeff_Mul(rational=True)
            if coeff is not S.One and coeff.is_Integer and terms is not S.One:
                x = terms
                y = (coeff - 1)*x
        if x is not None:
            return (cosh(x)*cosh(y) + sinh(x)*sinh(y)).expand(trig=True)
        return cosh(arg)
    def _eval_rewrite_as_tractable(self, arg, **kwargs):
        return (exp(arg) + exp(-arg)) / 2
    def _eval_rewrite_as_exp(self, arg, **kwargs):
        return (exp(arg) + exp(-arg)) / 2
    def _eval_rewrite_as_sinh(self, arg, **kwargs):
        return -S.ImaginaryUnit*sinh(arg + S.Pi*S.ImaginaryUnit/2)
    def _eval_rewrite_as_tanh(self, arg, **kwargs):
        # Half-angle identity: cosh(x) = (1 + t)/(1 - t), t = tanh(x/2)**2.
        tanh_half = tanh(S.Half*arg)**2
        return (1 + tanh_half)/(1 - tanh_half)
    def _eval_rewrite_as_coth(self, arg, **kwargs):
        coth_half = coth(S.Half*arg)**2
        return (coth_half + 1)/(coth_half - 1)
    def _eval_as_leading_term(self, x):
        from sympy import Order
        arg = self.args[0].as_leading_term(x)
        # Near 0, cosh(arg) ~ 1.
        if x in arg.free_symbols and Order(1, x).contains(arg):
            return S.One
        else:
            return self.func(arg)
    def _eval_is_real(self):
        arg = self.args[0]
        # `cosh(x)` is real for real OR purely imaginary `x`
        if arg.is_real or arg.is_imaginary:
            return True
        # cosh(a+ib) = cos(b)*cosh(a) + i*sin(b)*sinh(a)
        # the imaginary part can be an expression like n*pi
        # if not, check if the imaginary part is a number
        re, im = arg.as_real_imag()
        return (im%pi).is_zero
    def _eval_is_positive(self):
        # cosh(x+I*y) = cos(y)*cosh(x) + I*sin(y)*sinh(x)
        # cosh(z) is positive iff it is real and the real part is positive.
        # So we need sin(y)*sinh(x) = 0 which gives x=0 or y=n*pi
        # Case 1 (y=n*pi): cosh(z) = (-1)**n * cosh(x) -> positive for n even
        # Case 2 (x=0): cosh(z) = cos(y) -> positive when cos(y) is positive
        z = self.args[0]
        x, y = z.as_real_imag()
        ymod = y % (2*pi)
        yzero = ymod.is_zero
        # shortcut if ymod is zero
        if yzero:
            return True
        xzero = x.is_zero
        # shortcut x is not zero
        if xzero is False:
            # here yzero is None or False, never True
            return yzero
        return fuzzy_or([
            # Case 1:
            yzero,
            # Case 2:
            fuzzy_and([
                xzero,
                fuzzy_or([ymod < pi/2, ymod > 3*pi/2])
            ])
        ])
    def _eval_is_nonnegative(self):
        # Mirrors _eval_is_positive, but with closed inequalities so that
        # the boundary values cos(y) == 0 are included.
        z = self.args[0]
        x, y = z.as_real_imag()
        ymod = y % (2*pi)
        yzero = ymod.is_zero
        # shortcut if ymod is zero
        if yzero:
            return True
        xzero = x.is_zero
        # shortcut x is not zero
        if xzero is False:
            return yzero
        return fuzzy_or([
            # Case 1:
            yzero,
            # Case 2:
            fuzzy_and([
                xzero,
                fuzzy_or([ymod <= pi/2, ymod >= 3*pi/2])
            ])
        ])
    def _eval_is_finite(self):
        arg = self.args[0]
        return arg.is_finite
class tanh(HyperbolicFunction):
    r"""
    The hyperbolic tangent function, `\frac{\sinh(x)}{\cosh(x)}`.
    * tanh(x) -> Returns the hyperbolic tangent of x
    See Also
    ========
    sinh, cosh, atanh
    """
    def fdiff(self, argindex=1):
        if argindex == 1:
            return S.One - tanh(self.args[0])**2
        else:
            raise ArgumentIndexError(self, argindex)
    def inverse(self, argindex=1):
        """
        Returns the inverse of this function.
        """
        return atanh
    @classmethod
    def eval(cls, arg):
        from sympy import tan
        arg = sympify(arg)
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.One
            elif arg is S.NegativeInfinity:
                return S.NegativeOne
            elif arg.is_zero:
                return S.Zero
            elif arg.is_negative:
                # tanh is odd: canonicalize to a positive argument.
                return -cls(-arg)
        else:
            if arg is S.ComplexInfinity:
                return S.NaN
            # tanh(I*x) -> I*tan(x); keep the sign outside for canonical form.
            i_coeff = arg.as_coefficient(S.ImaginaryUnit)
            if i_coeff is not None:
                if _coeff_isneg(i_coeff):
                    return -S.ImaginaryUnit * tan(-i_coeff)
                return S.ImaginaryUnit * tan(i_coeff)
            else:
                if _coeff_isneg(arg):
                    return -cls(-arg)
            # Peel off a half-integer multiple of I*pi: tanh(m) is either 0
            # or ComplexInfinity, which selects tanh(x) or coth(x).
            if arg.is_Add:
                x, m = _peeloff_ipi(arg)
                if m:
                    tanhm = tanh(m)
                    if tanhm is S.ComplexInfinity:
                        return coth(x)
                    else: # tanhm == 0
                        return tanh(x)
            if arg.is_zero:
                return S.Zero
            # tanh of an inverse hyperbolic function reduces to an
            # algebraic expression in its argument.
            if arg.func == asinh:
                x = arg.args[0]
                return x/sqrt(1 + x**2)
            if arg.func == acosh:
                x = arg.args[0]
                return sqrt(x - 1) * sqrt(x + 1) / x
            if arg.func == atanh:
                return arg.args[0]
            if arg.func == acoth:
                return 1/arg.args[0]
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        from sympy import bernoulli
        if n < 0 or n % 2 == 0:
            return S.Zero
        else:
            x = sympify(x)
            # Odd terms use Bernoulli numbers:
            # 2**(n+1) * (2**(n+1) - 1) * B(n+1) / (n+1)! * x**n
            a = 2**(n + 1)
            B = bernoulli(n + 1)
            F = factorial(n + 1)
            return a*(a - 1) * B/F * x**n
    def _eval_conjugate(self):
        return self.func(self.args[0].conjugate())
    def as_real_imag(self, deep=True, **hints):
        from sympy import cos, sin
        if self.args[0].is_extended_real:
            if deep:
                hints['complex'] = False
                return (self.expand(deep, **hints), S.Zero)
            else:
                return (self, S.Zero)
        if deep:
            re, im = self.args[0].expand(deep, **hints).as_real_imag()
        else:
            re, im = self.args[0].as_real_imag()
        # tanh(a + I*b) = (sinh(a)cosh(a) + I*sin(b)cos(b)) /
        #                 (sinh(a)**2 + cos(b)**2)
        denom = sinh(re)**2 + cos(im)**2
        return (sinh(re)*cosh(re)/denom, sin(im)*cos(im)/denom)
    def _eval_rewrite_as_tractable(self, arg, **kwargs):
        neg_exp, pos_exp = exp(-arg), exp(arg)
        return (pos_exp - neg_exp)/(pos_exp + neg_exp)
    def _eval_rewrite_as_exp(self, arg, **kwargs):
        neg_exp, pos_exp = exp(-arg), exp(arg)
        return (pos_exp - neg_exp)/(pos_exp + neg_exp)
    def _eval_rewrite_as_sinh(self, arg, **kwargs):
        return S.ImaginaryUnit*sinh(arg)/sinh(S.Pi*S.ImaginaryUnit/2 - arg)
    def _eval_rewrite_as_cosh(self, arg, **kwargs):
        return S.ImaginaryUnit*cosh(S.Pi*S.ImaginaryUnit/2 - arg)/cosh(arg)
    def _eval_rewrite_as_coth(self, arg, **kwargs):
        return 1/coth(arg)
    def _eval_as_leading_term(self, x):
        from sympy import Order
        arg = self.args[0].as_leading_term(x)
        # Near 0, tanh(arg) ~ arg.
        if x in arg.free_symbols and Order(1, x).contains(arg):
            return arg
        else:
            return self.func(arg)
    def _eval_is_real(self):
        from sympy import cos, sinh
        arg = self.args[0]
        if arg.is_real:
            return True
        re, im = arg.as_real_imag()
        # if denom = 0, tanh(arg) = zoo
        if re == 0 and im % pi == pi/2:
            return None
        # check if im is of the form n*pi/2 to make sin(2*im) = 0
        # if not, im could be a number, return False in that case
        return (im % (pi/2)).is_zero
    def _eval_is_extended_real(self):
        if self.args[0].is_extended_real:
            return True
    def _eval_is_positive(self):
        # For real arguments tanh is odd and increasing, so its sign
        # matches the sign of the argument.
        if self.args[0].is_extended_real:
            return self.args[0].is_positive
    def _eval_is_negative(self):
        if self.args[0].is_extended_real:
            return self.args[0].is_negative
    def _eval_is_finite(self):
        from sympy import sinh, cos
        arg = self.args[0]
        re, im = arg.as_real_imag()
        # tanh is finite exactly where its denominator does not vanish.
        denom = cos(im)**2 + sinh(re)**2
        if denom == 0:
            return False
        elif denom.is_number:
            return True
        if arg.is_extended_real:
            return True
    def _eval_is_zero(self):
        arg = self.args[0]
        if arg.is_zero:
            return True
class coth(HyperbolicFunction):
    r"""
    The hyperbolic cotangent function, `\frac{\cosh(x)}{\sinh(x)}`.
    * coth(x) -> Returns the hyperbolic cotangent of x
    """
    def fdiff(self, argindex=1):
        if argindex == 1:
            return -1/sinh(self.args[0])**2
        else:
            raise ArgumentIndexError(self, argindex)
    def inverse(self, argindex=1):
        """
        Returns the inverse of this function.
        """
        return acoth
    @classmethod
    def eval(cls, arg):
        from sympy import cot
        arg = sympify(arg)
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.One
            elif arg is S.NegativeInfinity:
                return S.NegativeOne
            elif arg.is_zero:
                # coth has a pole at the origin.
                return S.ComplexInfinity
            elif arg.is_negative:
                # coth is odd: canonicalize to a positive argument.
                return -cls(-arg)
        else:
            if arg is S.ComplexInfinity:
                return S.NaN
            # coth(I*x) -> -I*cot(x); keep the sign outside for canonical form.
            i_coeff = arg.as_coefficient(S.ImaginaryUnit)
            if i_coeff is not None:
                if _coeff_isneg(i_coeff):
                    return S.ImaginaryUnit * cot(-i_coeff)
                return -S.ImaginaryUnit * cot(i_coeff)
            else:
                if _coeff_isneg(arg):
                    return -cls(-arg)
            # Peel off a half-integer multiple of I*pi: coth(m) is either
            # ComplexInfinity or 0, selecting coth(x) or tanh(x).
            if arg.is_Add:
                x, m = _peeloff_ipi(arg)
                if m:
                    cothm = coth(m)
                    if cothm is S.ComplexInfinity:
                        return coth(x)
                    else: # cothm == 0
                        return tanh(x)
            if arg.is_zero:
                return S.ComplexInfinity
            # coth of an inverse hyperbolic function reduces to an
            # algebraic expression in its argument.
            if arg.func == asinh:
                x = arg.args[0]
                return sqrt(1 + x**2)/x
            if arg.func == acosh:
                x = arg.args[0]
                return x/(sqrt(x - 1) * sqrt(x + 1))
            if arg.func == atanh:
                return 1/arg.args[0]
            if arg.func == acoth:
                return arg.args[0]
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        from sympy import bernoulli
        # Laurent series about 0: leading term 1/x, then odd powers with
        # Bernoulli-number coefficients.
        if n == 0:
            return 1 / sympify(x)
        elif n < 0 or n % 2 == 0:
            return S.Zero
        else:
            x = sympify(x)
            B = bernoulli(n + 1)
            F = factorial(n + 1)
            return 2**(n + 1) * B/F * x**n
    def _eval_conjugate(self):
        return self.func(self.args[0].conjugate())
    def as_real_imag(self, deep=True, **hints):
        from sympy import cos, sin
        if self.args[0].is_extended_real:
            if deep:
                hints['complex'] = False
                return (self.expand(deep, **hints), S.Zero)
            else:
                return (self, S.Zero)
        if deep:
            re, im = self.args[0].expand(deep, **hints).as_real_imag()
        else:
            re, im = self.args[0].as_real_imag()
        # coth(a + I*b) = (sinh(a)cosh(a) - I*sin(b)cos(b)) /
        #                 (sinh(a)**2 + sin(b)**2)
        denom = sinh(re)**2 + sin(im)**2
        return (sinh(re)*cosh(re)/denom, -sin(im)*cos(im)/denom)
    def _eval_rewrite_as_tractable(self, arg, **kwargs):
        neg_exp, pos_exp = exp(-arg), exp(arg)
        return (pos_exp + neg_exp)/(pos_exp - neg_exp)
    def _eval_rewrite_as_exp(self, arg, **kwargs):
        neg_exp, pos_exp = exp(-arg), exp(arg)
        return (pos_exp + neg_exp)/(pos_exp - neg_exp)
    def _eval_rewrite_as_sinh(self, arg, **kwargs):
        return -S.ImaginaryUnit*sinh(S.Pi*S.ImaginaryUnit/2 - arg)/sinh(arg)
    def _eval_rewrite_as_cosh(self, arg, **kwargs):
        return -S.ImaginaryUnit*cosh(arg)/cosh(S.Pi*S.ImaginaryUnit/2 - arg)
    def _eval_rewrite_as_tanh(self, arg, **kwargs):
        return 1/tanh(arg)
    def _eval_is_positive(self):
        if self.args[0].is_extended_real:
            return self.args[0].is_positive
    def _eval_is_negative(self):
        if self.args[0].is_extended_real:
            return self.args[0].is_negative
    def _eval_as_leading_term(self, x):
        from sympy import Order
        arg = self.args[0].as_leading_term(x)
        # Near 0, coth(arg) ~ 1/arg.
        if x in arg.free_symbols and Order(1, x).contains(arg):
            return 1/arg
        else:
            return self.func(arg)
class ReciprocalHyperbolicFunction(HyperbolicFunction):
    """Base class for reciprocal functions of hyperbolic functions. """
    #To be defined in class
    # _reciprocal_of: the hyperbolic function this class is 1/... of.
    # _is_even / _is_odd: parity flags used to pull a minus sign out of
    # the argument in eval().
    _reciprocal_of = None
    _is_even = None
    _is_odd = None
    @classmethod
    def eval(cls, arg):
        # Canonicalize sign using the declared parity, then evaluate via
        # the underlying function and invert the result.
        if arg.could_extract_minus_sign():
            if cls._is_even:
                return cls(-arg)
            if cls._is_odd:
                return -cls(-arg)
        t = cls._reciprocal_of.eval(arg)
        if hasattr(arg, 'inverse') and arg.inverse() == cls:
            return arg.args[0]
        return 1/t if t is not None else t
    def _call_reciprocal(self, method_name, *args, **kwargs):
        # Calls method_name on _reciprocal_of
        o = self._reciprocal_of(self.args[0])
        return getattr(o, method_name)(*args, **kwargs)
    def _calculate_reciprocal(self, method_name, *args, **kwargs):
        # If calling method_name on _reciprocal_of returns a value != None
        # then return the reciprocal of that value
        t = self._call_reciprocal(method_name, *args, **kwargs)
        return 1/t if t is not None else t
    def _rewrite_reciprocal(self, method_name, arg):
        # Special handling for rewrite functions. If reciprocal rewrite returns
        # unmodified expression, then return None
        t = self._call_reciprocal(method_name, arg)
        if t is not None and t != self._reciprocal_of(arg):
            return 1/t
    def _eval_rewrite_as_exp(self, arg, **kwargs):
        return self._rewrite_reciprocal("_eval_rewrite_as_exp", arg)
    def _eval_rewrite_as_tractable(self, arg, **kwargs):
        return self._rewrite_reciprocal("_eval_rewrite_as_tractable", arg)
    def _eval_rewrite_as_tanh(self, arg, **kwargs):
        return self._rewrite_reciprocal("_eval_rewrite_as_tanh", arg)
    def _eval_rewrite_as_coth(self, arg, **kwargs):
        return self._rewrite_reciprocal("_eval_rewrite_as_coth", arg)
    def as_real_imag(self, deep = True, **hints):
        return (1 / self._reciprocal_of(self.args[0])).as_real_imag(deep, **hints)
    def _eval_conjugate(self):
        return self.func(self.args[0].conjugate())
    def _eval_expand_complex(self, deep=True, **hints):
        re_part, im_part = self.as_real_imag(deep=True, **hints)
        return re_part + S.ImaginaryUnit*im_part
    def _eval_as_leading_term(self, x):
        return (1/self._reciprocal_of(self.args[0]))._eval_as_leading_term(x)
    def _eval_is_extended_real(self):
        return self._reciprocal_of(self.args[0]).is_extended_real
    def _eval_is_finite(self):
        return (1/self._reciprocal_of(self.args[0])).is_finite
class csch(ReciprocalHyperbolicFunction):
    r"""
    The hyperbolic cosecant function, `\frac{2}{e^x - e^{-x}}`
    * csch(x) -> Returns the hyperbolic cosecant of x
    See Also
    ========
    sinh, cosh, tanh, sech, asinh, acosh
    """
    # csch = 1/sinh; odd parity.
    _reciprocal_of = sinh
    _is_odd = True
    def fdiff(self, argindex=1):
        """
        Returns the first derivative of this function
        """
        if argindex == 1:
            return -coth(self.args[0]) * csch(self.args[0])
        else:
            raise ArgumentIndexError(self, argindex)
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        """
        Returns the next term in the Taylor series expansion
        """
        from sympy import bernoulli
        # Laurent series about 0: leading term 1/x, then odd powers with
        # Bernoulli-number coefficients.
        if n == 0:
            return 1/sympify(x)
        elif n < 0 or n % 2 == 0:
            return S.Zero
        else:
            x = sympify(x)
            B = bernoulli(n + 1)
            F = factorial(n + 1)
            return 2 * (1 - 2**n) * B/F * x**n
    def _eval_rewrite_as_cosh(self, arg, **kwargs):
        return S.ImaginaryUnit / cosh(arg + S.ImaginaryUnit * S.Pi / 2)
    def _eval_is_positive(self):
        if self.args[0].is_extended_real:
            return self.args[0].is_positive
    def _eval_is_negative(self):
        if self.args[0].is_extended_real:
            return self.args[0].is_negative
    def _sage_(self):
        import sage.all as sage
        return sage.csch(self.args[0]._sage_())
class sech(ReciprocalHyperbolicFunction):
    r"""
    The hyperbolic secant function, `\frac{2}{e^x + e^{-x}}`
    * sech(x) -> Returns the hyperbolic secant of x
    See Also
    ========
    sinh, cosh, tanh, coth, csch, asinh, acosh
    """
    # sech = 1/cosh; even parity.
    _reciprocal_of = cosh
    _is_even = True
    def fdiff(self, argindex=1):
        if argindex == 1:
            return - tanh(self.args[0])*sech(self.args[0])
        else:
            raise ArgumentIndexError(self, argindex)
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        from sympy.functions.combinatorial.numbers import euler
        # Maclaurin coefficients are the Euler numbers: E(n)/n! * x**n.
        if n < 0 or n % 2 == 1:
            return S.Zero
        else:
            x = sympify(x)
            return euler(n) / factorial(n) * x**(n)
    def _eval_rewrite_as_sinh(self, arg, **kwargs):
        return S.ImaginaryUnit / sinh(arg + S.ImaginaryUnit * S.Pi /2)
    def _eval_is_positive(self):
        # sech of a real argument is always in (0, 1].
        if self.args[0].is_extended_real:
            return True
    def _sage_(self):
        import sage.all as sage
        return sage.sech(self.args[0]._sage_())
###############################################################################
############################# HYPERBOLIC INVERSES #############################
###############################################################################
class InverseHyperbolicFunction(Function):
    """Base class for inverse hyperbolic functions."""
    # Marker base class: shared behavior comes from Function itself.
    pass
class asinh(InverseHyperbolicFunction):
    """
    The inverse hyperbolic sine function.
    * asinh(x) -> Returns the inverse hyperbolic sine of x
    See Also
    ========
    acosh, atanh, sinh
    """
    def fdiff(self, argindex=1):
        if argindex == 1:
            return 1/sqrt(self.args[0]**2 + 1)
        else:
            raise ArgumentIndexError(self, argindex)
    @classmethod
    def eval(cls, arg):
        from sympy import asin
        arg = sympify(arg)
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Infinity
            elif arg is S.NegativeInfinity:
                return S.NegativeInfinity
            elif arg.is_zero:
                return S.Zero
            elif arg is S.One:
                return log(sqrt(2) + 1)
            elif arg is S.NegativeOne:
                return log(sqrt(2) - 1)
            elif arg.is_negative:
                # asinh is odd: canonicalize to a positive argument.
                return -cls(-arg)
        else:
            if arg is S.ComplexInfinity:
                return S.ComplexInfinity
            if arg.is_zero:
                return S.Zero
            # asinh(I*x) -> I*asin(x): reduce a pure-imaginary argument.
            i_coeff = arg.as_coefficient(S.ImaginaryUnit)
            if i_coeff is not None:
                return S.ImaginaryUnit * asin(i_coeff)
            else:
                if _coeff_isneg(arg):
                    return -cls(-arg)
        # asinh(sinh(z)) for numeric z: reduce z to the principal strip
        # by shifting with I*pi and flipping sign on odd shifts.
        if isinstance(arg, sinh) and arg.args[0].is_number:
            z = arg.args[0]
            if z.is_real:
                return z
            r, i = match_real_imag(z)
            if r is not None and i is not None:
                f = floor((i + pi/2)/pi)
                m = z - I*pi*f
                even = f.is_even
                if even is True:
                    return m
                elif even is False:
                    return -m
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        if n < 0 or n % 2 == 0:
            return S.Zero
        else:
            x = sympify(x)
            if len(previous_terms) >= 2 and n > 2:
                # Ratio of consecutive odd terms: -(n-2)**2/(n*(n-1)) * x**2.
                p = previous_terms[-2]
                return -p * (n - 2)**2/(n*(n - 1)) * x**2
            else:
                k = (n - 1) // 2
                R = RisingFactorial(S.Half, k)
                F = factorial(k)
                return (-1)**k * R / F * x**n / n
    def _eval_as_leading_term(self, x):
        from sympy import Order
        arg = self.args[0].as_leading_term(x)
        # Near 0, asinh(arg) ~ arg.
        if x in arg.free_symbols and Order(1, x).contains(arg):
            return arg
        else:
            return self.func(arg)
    def _eval_rewrite_as_log(self, x, **kwargs):
        return log(x + sqrt(x**2 + 1))
    def inverse(self, argindex=1):
        """
        Returns the inverse of this function.
        """
        return sinh
    def _eval_is_zero(self):
        arg = self.args[0]
        if arg.is_zero:
            return True
class acosh(InverseHyperbolicFunction):
    """
    The inverse hyperbolic cosine function.
    * acosh(x) -> Returns the inverse hyperbolic cosine of x
    See Also
    ========
    asinh, atanh, cosh
    """
    def fdiff(self, argindex=1):
        if argindex == 1:
            return 1/sqrt(self.args[0]**2 - 1)
        else:
            raise ArgumentIndexError(self, argindex)
    @classmethod
    def eval(cls, arg):
        arg = sympify(arg)
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Infinity
            elif arg is S.NegativeInfinity:
                return S.Infinity
            elif arg.is_zero:
                return S.Pi*S.ImaginaryUnit / 2
            elif arg is S.One:
                return S.Zero
            elif arg is S.NegativeOne:
                return S.Pi*S.ImaginaryUnit
        # Table of special values; for a real key the stored value is the
        # acos-angle, so it is returned multiplied by I below.
        if arg.is_number:
            cst_table = {
                S.ImaginaryUnit: log(S.ImaginaryUnit*(1 + sqrt(2))),
                -S.ImaginaryUnit: log(-S.ImaginaryUnit*(1 + sqrt(2))),
                S.Half: S.Pi/3,
                Rational(-1, 2): S.Pi*Rational(2, 3),
                sqrt(2)/2: S.Pi/4,
                -sqrt(2)/2: S.Pi*Rational(3, 4),
                1/sqrt(2): S.Pi/4,
                -1/sqrt(2): S.Pi*Rational(3, 4),
                sqrt(3)/2: S.Pi/6,
                -sqrt(3)/2: S.Pi*Rational(5, 6),
                (sqrt(3) - 1)/sqrt(2**3): S.Pi*Rational(5, 12),
                -(sqrt(3) - 1)/sqrt(2**3): S.Pi*Rational(7, 12),
                sqrt(2 + sqrt(2))/2: S.Pi/8,
                -sqrt(2 + sqrt(2))/2: S.Pi*Rational(7, 8),
                sqrt(2 - sqrt(2))/2: S.Pi*Rational(3, 8),
                -sqrt(2 - sqrt(2))/2: S.Pi*Rational(5, 8),
                (1 + sqrt(3))/(2*sqrt(2)): S.Pi/12,
                -(1 + sqrt(3))/(2*sqrt(2)): S.Pi*Rational(11, 12),
                (sqrt(5) + 1)/4: S.Pi/5,
                -(sqrt(5) + 1)/4: S.Pi*Rational(4, 5)
            }
            if arg in cst_table:
                if arg.is_extended_real:
                    return cst_table[arg]*S.ImaginaryUnit
                return cst_table[arg]
        if arg is S.ComplexInfinity:
            return S.ComplexInfinity
        if arg == S.ImaginaryUnit*S.Infinity:
            return S.Infinity + S.ImaginaryUnit*S.Pi/2
        if arg == -S.ImaginaryUnit*S.Infinity:
            return S.Infinity - S.ImaginaryUnit*S.Pi/2
        if arg.is_zero:
            return S.Pi*S.ImaginaryUnit*S.Half
        # acosh(cosh(z)) for numeric z: reduce z to the principal region,
        # fixing the sign from the parity of the I*pi shift and sign of re(z).
        if isinstance(arg, cosh) and arg.args[0].is_number:
            z = arg.args[0]
            if z.is_real:
                from sympy.functions.elementary.complexes import Abs
                return Abs(z)
            r, i = match_real_imag(z)
            if r is not None and i is not None:
                f = floor(i/pi)
                m = z - I*pi*f
                even = f.is_even
                if even is True:
                    if r.is_nonnegative:
                        return m
                    elif r.is_negative:
                        return -m
                elif even is False:
                    m -= I*pi
                    if r.is_nonpositive:
                        return -m
                    elif r.is_positive:
                        return m
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        # Series about 0 starts at I*pi/2; odd terms only thereafter.
        if n == 0:
            return S.Pi*S.ImaginaryUnit / 2
        elif n < 0 or n % 2 == 0:
            return S.Zero
        else:
            x = sympify(x)
            if len(previous_terms) >= 2 and n > 2:
                p = previous_terms[-2]
                return p * (n - 2)**2/(n*(n - 1)) * x**2
            else:
                k = (n - 1) // 2
                R = RisingFactorial(S.Half, k)
                F = factorial(k)
                return -R / F * S.ImaginaryUnit * x**n / n
    def _eval_as_leading_term(self, x):
        from sympy import Order
        arg = self.args[0].as_leading_term(x)
        # Near 0, acosh(arg) ~ I*pi/2.
        if x in arg.free_symbols and Order(1, x).contains(arg):
            return S.ImaginaryUnit*S.Pi/2
        else:
            return self.func(arg)
    def _eval_rewrite_as_log(self, x, **kwargs):
        return log(x + sqrt(x + 1) * sqrt(x - 1))
    def inverse(self, argindex=1):
        """
        Returns the inverse of this function.
        """
        return cosh
class atanh(InverseHyperbolicFunction):
    """
    The inverse hyperbolic tangent function.
    * atanh(x) -> Returns the inverse hyperbolic tangent of x
    See Also
    ========
    asinh, acosh, tanh
    """
    def fdiff(self, argindex=1):
        if argindex == 1:
            return 1/(1 - self.args[0]**2)
        else:
            raise ArgumentIndexError(self, argindex)
    @classmethod
    def eval(cls, arg):
        from sympy import atan
        arg = sympify(arg)
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg.is_zero:
                return S.Zero
            elif arg is S.One:
                # atanh has logarithmic singularities at +/-1.
                return S.Infinity
            elif arg is S.NegativeOne:
                return S.NegativeInfinity
            elif arg is S.Infinity:
                return -S.ImaginaryUnit * atan(arg)
            elif arg is S.NegativeInfinity:
                return S.ImaginaryUnit * atan(-arg)
            elif arg.is_negative:
                # atanh is odd: canonicalize to a positive argument.
                return -cls(-arg)
        else:
            if arg is S.ComplexInfinity:
                from sympy.calculus.util import AccumBounds
                return S.ImaginaryUnit*AccumBounds(-S.Pi/2, S.Pi/2)
            # atanh(I*x) -> I*atan(x): reduce a pure-imaginary argument.
            i_coeff = arg.as_coefficient(S.ImaginaryUnit)
            if i_coeff is not None:
                return S.ImaginaryUnit * atan(i_coeff)
            else:
                if _coeff_isneg(arg):
                    return -cls(-arg)
        if arg.is_zero:
            return S.Zero
        # atanh(tanh(z)) for numeric z: reduce z to the principal strip
        # by shifting with I*pi/2 steps.
        if isinstance(arg, tanh) and arg.args[0].is_number:
            z = arg.args[0]
            if z.is_real:
                return z
            r, i = match_real_imag(z)
            if r is not None and i is not None:
                f = floor(2*i/pi)
                even = f.is_even
                m = z - I*f*pi/2
                if even is True:
                    return m
                elif even is False:
                    return m - I*pi/2
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        # Maclaurin series: sum of x**n / n over odd n.
        if n < 0 or n % 2 == 0:
            return S.Zero
        else:
            x = sympify(x)
            return x**n / n
    def _eval_as_leading_term(self, x):
        from sympy import Order
        arg = self.args[0].as_leading_term(x)
        # Near 0, atanh(arg) ~ arg.
        if x in arg.free_symbols and Order(1, x).contains(arg):
            return arg
        else:
            return self.func(arg)
    def _eval_rewrite_as_log(self, x, **kwargs):
        return (log(1 + x) - log(1 - x)) / 2
    def _eval_is_zero(self):
        arg = self.args[0]
        if arg.is_zero:
            return True
    def inverse(self, argindex=1):
        """
        Returns the inverse of this function.
        """
        return tanh
class acoth(InverseHyperbolicFunction):
    """
    The inverse hyperbolic cotangent function.
    * acoth(x) -> Returns the inverse hyperbolic cotangent of x
    """
    def fdiff(self, argindex=1):
        if argindex == 1:
            return 1/(1 - self.args[0]**2)
        else:
            raise ArgumentIndexError(self, argindex)
    @classmethod
    def eval(cls, arg):
        from sympy import acot
        arg = sympify(arg)
        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Zero
            elif arg is S.NegativeInfinity:
                return S.Zero
            elif arg.is_zero:
                return S.Pi*S.ImaginaryUnit / 2
            elif arg is S.One:
                return S.Infinity
            elif arg is S.NegativeOne:
                return S.NegativeInfinity
            elif arg.is_negative:
                # acoth is odd: canonicalize to a positive argument.
                return -cls(-arg)
        else:
            if arg is S.ComplexInfinity:
                return S.Zero
            # acoth(I*x) -> -I*acot(x): reduce a pure-imaginary argument.
            i_coeff = arg.as_coefficient(S.ImaginaryUnit)
            if i_coeff is not None:
                return -S.ImaginaryUnit * acot(i_coeff)
            else:
                if _coeff_isneg(arg):
                    return -cls(-arg)
        if arg.is_zero:
            return S.Pi*S.ImaginaryUnit*S.Half
    @staticmethod
    @cacheit
    def taylor_term(n, x, *previous_terms):
        # Series about 0 starts at I*pi/2; odd terms x**n/n thereafter.
        if n == 0:
            return S.Pi*S.ImaginaryUnit / 2
        elif n < 0 or n % 2 == 0:
            return S.Zero
        else:
            x = sympify(x)
            return x**n / n
    def _eval_as_leading_term(self, x):
        from sympy import Order
        arg = self.args[0].as_leading_term(x)
        # Near 0, acoth(arg) ~ I*pi/2.
        if x in arg.free_symbols and Order(1, x).contains(arg):
            return S.ImaginaryUnit*S.Pi/2
        else:
            return self.func(arg)
    def _eval_rewrite_as_log(self, x, **kwargs):
        return (log(1 + 1/x) - log(1 - 1/x)) / 2
    def inverse(self, argindex=1):
        """
        Returns the inverse of this function.
        """
        return coth
class asech(InverseHyperbolicFunction):
"""
The inverse hyperbolic secant function.
* asech(x) -> Returns the inverse hyperbolic secant of x
Examples
========
>>> from sympy import asech, sqrt, S
>>> from sympy.abc import x
>>> asech(x).diff(x)
-1/(x*sqrt(1 - x**2))
>>> asech(1).diff(x)
0
>>> asech(1)
0
>>> asech(S(2))
I*pi/3
>>> asech(-sqrt(2))
3*I*pi/4
>>> asech((sqrt(6) - sqrt(2)))
I*pi/12
See Also
========
asinh, atanh, cosh, acoth
References
==========
.. [1] https://en.wikipedia.org/wiki/Hyperbolic_function
.. [2] http://dlmf.nist.gov/4.37
.. [3] http://functions.wolfram.com/ElementaryFunctions/ArcSech/
"""
def fdiff(self, argindex=1):
if argindex == 1:
z = self.args[0]
return -1/(z*sqrt(1 - z**2))
else:
raise ArgumentIndexError(self, argindex)
@classmethod
def eval(cls, arg):
arg = sympify(arg)
if arg.is_Number:
if arg is S.NaN:
return S.NaN
elif arg is S.Infinity:
return S.Pi*S.ImaginaryUnit / 2
elif arg is S.NegativeInfinity:
return S.Pi*S.ImaginaryUnit / 2
elif arg.is_zero:
return S.Infinity
elif arg is S.One:
return S.Zero
elif arg is S.NegativeOne:
return S.Pi*S.ImaginaryUnit
if arg.is_number:
cst_table = {
S.ImaginaryUnit: - (S.Pi*S.ImaginaryUnit / 2) + log(1 + sqrt(2)),
-S.ImaginaryUnit: (S.Pi*S.ImaginaryUnit / 2) + log(1 + sqrt(2)),
(sqrt(6) - sqrt(2)): S.Pi / 12,
(sqrt(2) - sqrt(6)): 11*S.Pi / 12,
sqrt(2 - 2/sqrt(5)): S.Pi / 10,
-sqrt(2 - 2/sqrt(5)): 9*S.Pi / 10,
2 / sqrt(2 + sqrt(2)): S.Pi / 8,
-2 / sqrt(2 + sqrt(2)): 7*S.Pi / 8,
2 / sqrt(3): S.Pi / 6,
-2 / sqrt(3): 5*S.Pi / 6,
(sqrt(5) - 1): S.Pi / 5,
(1 - sqrt(5)): 4*S.Pi / 5,
sqrt(2): S.Pi / 4,
-sqrt(2): 3*S.Pi / 4,
sqrt(2 + 2/sqrt(5)): 3*S.Pi / 10,
-sqrt(2 + 2/sqrt(5)): 7*S.Pi / 10,
S(2): S.Pi / 3,
-S(2): 2*S.Pi / 3,
sqrt(2*(2 + sqrt(2))): 3*S.Pi / 8,
-sqrt(2*(2 + sqrt(2))): 5*S.Pi / 8,
(1 + sqrt(5)): 2*S.Pi / 5,
(-1 - sqrt(5)): 3*S.Pi / 5,
(sqrt(6) + sqrt(2)): 5*S.Pi / 12,
(-sqrt(6) - sqrt(2)): 7*S.Pi / 12,
}
if arg in cst_table:
if arg.is_extended_real:
return cst_table[arg]*S.ImaginaryUnit
return cst_table[arg]
if arg is S.ComplexInfinity:
from sympy.calculus.util import AccumBounds
return S.ImaginaryUnit*AccumBounds(-S.Pi/2, S.Pi/2)
if arg.is_zero:
return S.Infinity
@staticmethod
@cacheit
def expansion_term(n, x, *previous_terms):
    # n-th term of the series expansion of asech(x) about x = 0.
    if n == 0:
        # The expansion leads with a logarithmic term.
        return log(2 / x)
    elif n < 0 or n % 2 == 1:
        # Beyond the log term only even powers of x contribute.
        return S.Zero
    else:
        x = sympify(x)
        if len(previous_terms) > 2 and n > 2:
            # Recurrence: derive this term from the term two places back.
            # NOTE(review): `//` here is floor division, which on symbolic
            # operands yields floor() objects rather than an exact ratio;
            # confirm this is intended (an exact `/` may have been meant).
            p = previous_terms[-2]
            return p * (n - 1)**2 // (n // 2)**2 * x**2 / 4
        else:
            # Closed form via the rising factorial (Pochhammer symbol).
            k = n // 2
            R = RisingFactorial(S.Half , k) * n
            # NOTE(review): same floor-division caveat applies to F below.
            F = factorial(k) * n // 2 * n // 2
            return -1 * R / F * x**n / 4
def inverse(self, argindex=1):
    """Return the function that undoes ``asech``, namely ``sech``."""
    return sech
def _eval_rewrite_as_log(self, arg, **kwargs):
    # Logarithmic form: asech(x) = log(1/x + sqrt(1/x - 1)*sqrt(1/x + 1)).
    # The product of the two square roots (rather than a single
    # sqrt(1/x**2 - 1)) keeps the branch cuts consistent.
    return log(1/arg + sqrt(1/arg - 1) * sqrt(1/arg + 1))
class acsch(InverseHyperbolicFunction):
    """
    The inverse hyperbolic cosecant function.

    * acsch(x) -> Returns the inverse hyperbolic cosecant of x

    Examples
    ========

    >>> from sympy import acsch, sqrt, S
    >>> from sympy.abc import x
    >>> acsch(x).diff(x)
    -1/(x**2*sqrt(1 + x**(-2)))
    >>> acsch(1).diff(x)
    0
    >>> acsch(1)
    log(1 + sqrt(2))
    >>> acsch(S.ImaginaryUnit)
    -I*pi/2
    >>> acsch(-2*S.ImaginaryUnit)
    I*pi/6
    >>> acsch(S.ImaginaryUnit*(sqrt(6) - sqrt(2)))
    -5*I*pi/12

    References
    ==========

    .. [1] https://en.wikipedia.org/wiki/Hyperbolic_function
    .. [2] http://dlmf.nist.gov/4.37
    .. [3] http://functions.wolfram.com/ElementaryFunctions/ArcCsch/
    """

    def fdiff(self, argindex=1):
        # d/dz acsch(z) = -1/(z**2*sqrt(1 + 1/z**2))
        if argindex == 1:
            z = self.args[0]
            return -1/(z**2*sqrt(1 + 1/z**2))
        else:
            raise ArgumentIndexError(self, argindex)

    @classmethod
    def eval(cls, arg):
        # Automatic evaluation of acsch(arg); returning None leaves the
        # expression unevaluated.
        arg = sympify(arg)

        if arg.is_Number:
            if arg is S.NaN:
                return S.NaN
            elif arg is S.Infinity:
                return S.Zero
            elif arg is S.NegativeInfinity:
                return S.Zero
            elif arg.is_zero:
                return S.ComplexInfinity
            elif arg is S.One:
                return log(1 + sqrt(2))
            elif arg is S.NegativeOne:
                return - log(1 + sqrt(2))

        if arg.is_number:
            # Each value below is multiplied by I on return; the purely
            # imaginary keys therefore map to real pi-fractions, and the
            # real key S(2) pre-divides by I (the leading -I) so that the
            # final result is the real value log((1 + sqrt(5))/2).
            cst_table = {
                S.ImaginaryUnit: -S.Pi / 2,
                S.ImaginaryUnit*(sqrt(2) + sqrt(6)): -S.Pi / 12,
                S.ImaginaryUnit*(1 + sqrt(5)): -S.Pi / 10,
                S.ImaginaryUnit*2 / sqrt(2 - sqrt(2)): -S.Pi / 8,
                S.ImaginaryUnit*2: -S.Pi / 6,
                S.ImaginaryUnit*sqrt(2 + 2/sqrt(5)): -S.Pi / 5,
                S.ImaginaryUnit*sqrt(2): -S.Pi / 4,
                S.ImaginaryUnit*(sqrt(5)-1): -3*S.Pi / 10,
                S.ImaginaryUnit*2 / sqrt(3): -S.Pi / 3,
                S.ImaginaryUnit*2 / sqrt(2 + sqrt(2)): -3*S.Pi / 8,
                S.ImaginaryUnit*sqrt(2 - 2/sqrt(5)): -2*S.Pi / 5,
                S.ImaginaryUnit*(sqrt(6) - sqrt(2)): -5*S.Pi / 12,
                S(2): -S.ImaginaryUnit*log((1+sqrt(5))/2),
            }

            if arg in cst_table:
                return cst_table[arg]*S.ImaginaryUnit

        if arg is S.ComplexInfinity:
            return S.Zero

        if arg.is_zero:
            # Symbolic zeros that are not Number instances.
            return S.ComplexInfinity

        if _coeff_isneg(arg):
            # acsch is an odd function: pull the sign out.
            return -cls(-arg)

    def inverse(self, argindex=1):
        """
        Returns the inverse of this function.
        """
        return csch

    def _eval_rewrite_as_log(self, arg, **kwargs):
        # Logarithmic form: acsch(x) = log(1/x + sqrt(1/x**2 + 1)).
        return log(1/arg + sqrt(1/arg**2 + 1))
| kaushik94/sympy | sympy/functions/elementary/hyperbolic.py | Python | bsd-3-clause | 48,081 |
package goftp
import (
"fmt"
"strings"
)
// FTP_CLIENT_CMD_HELP maps each ftp client command name to the short
// help text printed by the `help`/`?` commands.
// (Translated from the original comment: "help info for ftp client commands".)
var FTP_CLIENT_CMD_HELP = map[string]string{
	FCC_HELP:          "print local help information",
	FCC_QUESTION_MARK: "print local help information",
	FCC_CD:            "change remote working directory",
	FCC_LS:            "list contents of remote path",
	FCC_LCD:           "change local working directory",
	FCC_OPEN:          "connect to remote ftp server",
	FCC_USER:          "send new user information",
	FCC_USAGE:         "show usage of ftp command",
}
// FTP_CLIENT_CMD_USAGE maps each ftp client command name to its usage
// string; parameters wrapped in `[]` are optional.
// (Translated from the original comments: "usage of ftp client commands;
// parameters in [] are optional".)
// NOTE(review): FCC_USAGE has a help entry above but no usage entry here --
// presumably intentional, since `usage` itself takes command names; confirm.
var FTP_CLIENT_CMD_USAGE = map[string]string{
	FCC_HELP:          "help [cmd1],[cmd2],...",
	FCC_QUESTION_MARK: "? [cmd1],[cmd2],...",
	FCC_CD:            "cd remote_dir",
	FCC_LS:            "ls [remote_dir|remote_file] [local_output_file]",
	FCC_LCD:           "lcd [local_directory]",
	FCC_OPEN:          "open remote_host [port]",
	FCC_USER:          "user username [password] [account]",
}
// GoFtpClientHelp groups the ftp client's help/usage printing commands.
// It is stateless; the struct exists only to namespace its methods.
type GoFtpClientHelp struct {
}
// version prints the client's version banner to stdout.  The banner text
// is user-facing runtime output and is left exactly as authored
// (including the non-English credits line).
func (this *GoFtpClientHelp) version() {
	fmt.Println("GoFtpClient v1.0\r\n多科学堂出品\r\nhttps://github.com/jemygraw/goftp")
}
// help is currently a no-op.
// NOTE(review): looks like an unimplemented stub (a general help listing
// of all commands seems intended) -- confirm before relying on it.
func (this *GoFtpClientHelp) help() {
}
// cmdHelp prints the one-line help text for each named command
// (case-insensitively), or an error line for unknown command names.
func (this *GoFtpClientHelp) cmdHelp(cmdNames ...string) {
	for _, name := range cmdNames {
		key := strings.ToLower(name)
		doc, found := FTP_CLIENT_CMD_HELP[key]
		if !found {
			fmt.Println("?Invalid help command `", key, "'")
			continue
		}
		fmt.Println(key, "\t", doc)
	}
}
// cmdUsage prints the usage line for each named command
// (case-insensitively), or an error line for names with no usage entry.
func (this *GoFtpClientHelp) cmdUsage(cmdNames ...string) {
	for _, name := range cmdNames {
		key := strings.ToLower(name)
		if usage, found := FTP_CLIENT_CMD_USAGE[key]; found {
			fmt.Println("Usage:", usage)
		} else {
			fmt.Println("?Invalid usage command `", key, "'")
		}
	}
}
| jemygraw/goftp | src/goftp/goftphelp.go | GO | bsd-3-clause | 1,847 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/extensions/api/storage/managed_value_store_cache.h"
#include <set>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/file_util.h"
#include "base/logging.h"
#include "base/message_loop_proxy.h"
#include "chrome/browser/extensions/api/storage/policy_value_store.h"
#include "chrome/browser/extensions/api/storage/settings_storage_factory.h"
#include "chrome/browser/extensions/event_names.h"
#include "chrome/browser/extensions/extension_service.h"
#include "chrome/browser/value_store/value_store_change.h"
#include "chrome/common/extensions/extension.h"
#include "content/public/browser/browser_thread.h"
using content::BrowserThread;
namespace extensions {
// Constructed on the UI thread.  Subscribes to extension-domain policy
// updates and (when an EventRouter exists) to storage.onChanged listener
// registrations.  |weak_this_on_ui_| is captured here so that FILE-thread
// code can later post tasks back to |this| on UI safely.
ManagedValueStoreCache::ManagedValueStoreCache(
    policy::PolicyService* policy_service,
    EventRouter* event_router,
    const scoped_refptr<SettingsStorageFactory>& factory,
    const scoped_refptr<SettingsObserverList>& observers,
    const FilePath& profile_path)
    : ALLOW_THIS_IN_INITIALIZER_LIST(weak_factory_(this)),
      weak_this_on_ui_(weak_factory_.GetWeakPtr()),
      policy_service_(policy_service),
      event_router_(event_router),
      storage_factory_(factory),
      observers_(observers),
      base_path_(profile_path.AppendASCII(
          ExtensionService::kManagedSettingsDirectoryName)) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
  // |event_router| can be NULL on unit_tests.
  if (event_router_)
    event_router_->RegisterObserver(this, event_names::kOnSettingsChanged);
  policy_service_->AddObserver(policy::POLICY_DOMAIN_EXTENSIONS, this);
}
// Destroyed on the FILE thread.  ShutdownOnUI() must already have run
// (it clears |event_router_|), which the DCHECK below verifies.
ManagedValueStoreCache::~ManagedValueStoreCache() {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
  DCHECK(!event_router_);
  // Delete the PolicyValueStores on FILE.
  store_map_.clear();
}
// Tears down all UI-thread state before the object is destroyed on FILE:
// unregisters both observers, nulls the raw pointers, and invalidates
// weak pointers so no pending UI task can run on a dying object.
void ManagedValueStoreCache::ShutdownOnUI() {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
  policy_service_->RemoveObserver(policy::POLICY_DOMAIN_EXTENSIONS, this);
  policy_service_ = NULL;
  if (event_router_)
    event_router_->UnregisterObserver(this);
  event_router_ = NULL;
  weak_factory_.InvalidateWeakPtrs();
}
// Runs |callback| with the extension's PolicyValueStore (FILE thread).
// On first use the store does not exist yet; it is created and primed
// with the current policy, and this method is re-entered through
// CreateStoreFor's continuation once the store is ready.
void ManagedValueStoreCache::RunWithValueStoreForExtension(
    const StorageCallback& callback,
    scoped_refptr<const Extension> extension) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
  PolicyValueStore* store = GetStoreFor(extension->id());
  if (store) {
    callback.Run(store);
  } else {
    // First time that an extension calls storage.managed.get(). Create the
    // store and load it with the current policy, and don't send event
    // notifications.
    CreateStoreFor(
        extension->id(),
        false,
        base::Bind(&ManagedValueStoreCache::RunWithValueStoreForExtension,
                   base::Unretained(this),
                   callback,
                   extension));
  }
}
// Deletes the extension's managed storage (FILE thread).  If the store is
// not loaded but its database exists on disk, the store is first opened
// (without notifications) and this method re-entered via the continuation
// so the on-disk data is cleared too.
void ManagedValueStoreCache::DeleteStorageSoon(
    const std::string& extension_id) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
  PolicyValueStore* store = GetStoreFor(extension_id);
  if (!store) {
    // It's possible that the store exists, but hasn't been loaded yet
    // (because the extension is unloaded, for example). Open the database to
    // clear it if it exists.
    // TODO(joaodasilva): move this check to a ValueStore method.
    if (file_util::DirectoryExists(base_path_.AppendASCII(extension_id))) {
      CreateStoreFor(
          extension_id,
          false,
          base::Bind(&ManagedValueStoreCache::DeleteStorageSoon,
                     base::Unretained(this),
                     extension_id));
    }
  } else {
    store->DeleteStorage();
    store_map_.erase(extension_id);
  }
}
// policy::PolicyService::Observer (UI thread).  |component_id| is an
// extension ID in POLICY_DOMAIN_EXTENSIONS; a deep copy of the new policy
// map is handed off to the FILE thread, where the stores live.
void ManagedValueStoreCache::OnPolicyUpdated(policy::PolicyDomain domain,
                                             const std::string& component_id,
                                             const policy::PolicyMap& previous,
                                             const policy::PolicyMap& current) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
  BrowserThread::PostTask(
      BrowserThread::FILE, FROM_HERE,
      base::Bind(&ManagedValueStoreCache::UpdatePolicyOnFILE,
                 base::Unretained(this),
                 std::string(component_id),
                 base::Passed(current.DeepCopy())));
}
// FILE-thread continuation of OnPolicyUpdated(): pushes |current_policy|
// into the extension's store, firing onChanged notifications for any
// values that actually changed.
void ManagedValueStoreCache::UpdatePolicyOnFILE(
    const std::string& extension_id,
    scoped_ptr<policy::PolicyMap> current_policy) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
  PolicyValueStore* store = GetStoreFor(extension_id);
  if (!store) {
    // The extension hasn't executed any storage.managed.* calls, and isn't
    // listening for onChanged() either. Ignore this notification in that case.
    return;
  }
  // Update the policy on the backing store, and fire notifications if it
  // changed.
  store->SetCurrentPolicy(*current_policy, true);
}
// EventRouter::Observer (UI thread): an extension registered for
// storage.onChanged.  Ensure its store exists on FILE so future policy
// diffs can be detected and delivered.
void ManagedValueStoreCache::OnListenerAdded(
    const EventListenerInfo& details) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
  DCHECK_EQ(std::string(event_names::kOnSettingsChanged), details.event_name);
  // This is invoked on several occasions:
  //
  // 1. when an extension first registers to observe storage.onChanged; in this
  //    case the backend doesn't have any previous data persisted, and it won't
  //    trigger a notification.
  //
  // 2. when the browser starts up and all existing extensions re-register for
  //    the onChanged event. In this case, if the current policy differs from
  //    the persisted version then a notification will be sent.
  //
  // 3. a policy update just occurred and sent a notification, and an extension
  //    with EventPages that is observing onChanged just woke up and registed
  //    again. In this case the policy update already persisted the current
  //    policy version, and |store| already exists.
  BrowserThread::PostTask(
      BrowserThread::FILE, FROM_HERE,
      base::Bind(&ManagedValueStoreCache::CreateForExtensionOnFILE,
                 base::Unretained(this),
                 details.extension_id));
}
// FILE-thread continuation of OnListenerAdded(): creates the extension's
// store (with change notifications enabled) unless it already exists.
void ManagedValueStoreCache::CreateForExtensionOnFILE(
    const std::string& extension_id) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
  if (GetStoreFor(extension_id) != NULL)
    return;
  CreateStoreFor(extension_id, true, base::Closure());
}
// Returns the loaded PolicyValueStore for |extension_id|, or NULL when no
// store has been created yet.  FILE thread only; the map owns the stores.
PolicyValueStore* ManagedValueStoreCache::GetStoreFor(
    const std::string& extension_id) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
  PolicyValueStoreMap::iterator entry = store_map_.find(extension_id);
  return entry != store_map_.end() ? entry->second.get() : NULL;
}
// Starts creation of the store for |extension_id| (FILE thread).  The
// store must not exist yet.  Creation bounces through the UI thread,
// because the initial policy snapshot must be read from the
// PolicyService there; |continuation| resumes the caller afterwards.
// |notify_if_changed| controls whether onChanged events fire if the
// loaded policy differs from what was persisted.
void ManagedValueStoreCache::CreateStoreFor(
    const std::string& extension_id,
    bool notify_if_changed,
    const base::Closure& continuation) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
  DCHECK(!GetStoreFor(extension_id));
  // Creating or loading an existing database requires an immediate update
  // with the current policy for the corresponding extension, which must be
  // retrieved on UI.
  BrowserThread::PostTask(
      BrowserThread::UI, FROM_HERE,
      base::Bind(&ManagedValueStoreCache::GetInitialPolicy,
                 weak_this_on_ui_,
                 extension_id,
                 notify_if_changed,
                 continuation));
}
// UI-thread step of store creation: snapshots the extension's current
// policy from the PolicyService and posts a deep copy back to FILE.
// Bound through |weak_this_on_ui_|, so it is silently dropped if
// ShutdownOnUI() already ran.
void ManagedValueStoreCache::GetInitialPolicy(
    const std::string& extension_id,
    bool notify_if_changed,
    const base::Closure& continuation) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
  const policy::PolicyMap& policy = policy_service_->GetPolicies(
      policy::POLICY_DOMAIN_EXTENSIONS, extension_id);
  // Now post back to FILE to create the database.
  BrowserThread::PostTask(
      BrowserThread::FILE, FROM_HERE,
      base::Bind(&ManagedValueStoreCache::CreateStoreWithInitialPolicy,
                 base::Unretained(this),
                 extension_id,
                 notify_if_changed,
                 base::Passed(policy.DeepCopy()),
                 continuation));
}
// Final FILE-thread step of store creation: builds the PolicyValueStore
// (if a concurrent creation didn't already), seeds it with
// |initial_policy|, and runs |continuation| to resume the original
// caller.  Notifications are suppressed for a brand-new database, since
// there is no previous state to diff against.
void ManagedValueStoreCache::CreateStoreWithInitialPolicy(
    const std::string& extension_id,
    bool notify_if_changed,
    scoped_ptr<policy::PolicyMap> initial_policy,
    const base::Closure& continuation) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
  // If a 2nd call to CreateStoreFor() is issued before the 1st gets to execute
  // its UI task, then the 2nd will enter this function but the store has
  // already been created. Check for that.
  PolicyValueStore* store = GetStoreFor(extension_id);
  if (!store) {
    // Create it now.
    // If the database doesn't exist yet then this is the initial install,
    // and no notifications should be issued in that case.
    // TODO(joaodasilva): move this check to a ValueStore method.
    if (!file_util::DirectoryExists(base_path_.AppendASCII(extension_id)))
      notify_if_changed = false;
    store = new PolicyValueStore(
        extension_id,
        observers_,
        make_scoped_ptr(storage_factory_->Create(base_path_, extension_id)));
    store_map_[extension_id] = make_linked_ptr(store);
  }
  // Send the latest policy to the store.
  store->SetCurrentPolicy(*initial_policy, notify_if_changed);
  // And finally resume from where this process started.
  if (!continuation.is_null())
    continuation.Run();
}
} // namespace extensions
| nacl-webkit/chrome_deps | chrome/browser/extensions/api/storage/managed_value_store_cache.cc | C++ | bsd-3-clause | 9,636 |
// Restrict output in a codecell to a maximum length
define([
'base/js/namespace',
'jquery',
'notebook/js/outputarea',
'base/js/dialog',
'notebook/js/codecell',
'services/config',
'base/js/utils'
], function(IPython, $, oa, dialog, cc, configmod, utils) {
"use strict";
// Server base URL and the notebook-section config this extension reads.
var base_url = utils.get_body_data("baseUrl");
var config = new configmod.ConfigSection('notebook', {base_url: base_url});

// define default values for config parameters
var params = {
    // maximum number of characters the output area is allowed to print
    limit_output : 10000,
    // message to print when output is limited
    // NOTE(review): this key is misspelled ("ouput"), but it is read under
    // the same name below and may already exist in users' config files, so
    // renaming it would be a breaking change.
    limit_ouput_message : "**OUTPUT MUTED**"
};
var update_params = function() {
for (var key in params) {
if (config.data.hasOwnProperty(key) ){
params[key] = config.data[key];
}
}
};
// Classic jQuery-style numeric test: parseFloat() must yield a real
// number AND the raw value must coerce to a finite number (so "5px"
// fails even though parseFloat("5px") is 5).
function isNumber(n) {
    var parsed = parseFloat(n);
    return !isNaN(parsed) && isFinite(n);
}
// Returns a stateful character counter for terminal-style stream output.
// Libraries like TQDM don't necessarily send messages on clean line
// boundaries, so the counter keeps its state between calls and models
// '\b' (backspace), '\r' (carriage return -> rewrite the current line)
// and '\n' (commit the current line) the way a terminal would.
function makePrintCounter() {
    // Characters committed by completed ('\n'-terminated) lines.
    var committed = 0;
    // Characters on the current, possibly partial, line.
    var lineLen = 0;
    // True when the previous character was a carriage return.
    var pendingCR = false;

    return function printCounter(str) {
        for (var i = 0; i < str.length; i += 1) {
            var ch = str[i];
            if (ch === '\b') {
                pendingCR = false;
                lineLen -= 1;
            } else if (ch === '\r') {
                // May or may not be followed by '\n'; decided on the
                // next character.
                pendingCR = true;
                lineLen += 1;
            } else if (ch === '\n') {
                pendingCR = false;
                committed += lineLen + 1;
                lineLen = 0;
            } else {
                // A bare CR (not part of CRLF) restarts the line.
                lineLen = pendingCR ? 1 : lineLen + 1;
                pendingCR = false;
            }
        }
        return committed + lineLen;
    };
}
// Once the config is loaded, monkey-patch the notebook so stream output
// is counted and muted past the configured limit.
// Fix: removed two leftover debug console.log calls ("}}}}"/">>>"
// prefixes) that dumped every stream message's full text to the browser
// console on every output event.
config.loaded.then(function() {
    var MAX_CHARACTERS = params.limit_output;
    update_params();
    if (isNumber(params.limit_output)) MAX_CHARACTERS = params.limit_output;

    // Wrap OutputArea.handle_output: count streamed characters and, once
    // the limit is exceeded, truncate the offending message and drop all
    // subsequent stream text until the next execution.
    oa.OutputArea.prototype._handle_output = oa.OutputArea.prototype.handle_output;
    oa.OutputArea.prototype.handle_output = function (msg) {
        // Lazily initialize per-output-area state on first use.
        if (this.count === undefined) { this.count=0; }
        if (this.counter === undefined) { this.counter = makePrintCounter(); }
        if (this.max_count === undefined) { this.max_count = MAX_CHARACTERS; }
        if (msg.content.text !== undefined) {
            this.count = this.counter(String(msg.content.text));
            if (this.count > this.max_count) {
                // Already muted: swallow further stream output entirely.
                if (this.drop) return;
                console.log("limit_output: output exceeded", this.max_count, "characters. Further output muted.");
                msg.content.text = msg.content.text.substr(0, this.max_count) + params.limit_ouput_message;
                this.drop = true;
            }
        }
        return this._handle_output(msg);
    };

    // Wrap CodeCell.execute so re-running a cell resets the counter and
    // un-mutes its output area.
    cc.CodeCell.prototype._execute = cc.CodeCell.prototype.execute;
    cc.CodeCell.prototype.execute = function() {
        // reset counter on execution.
        this.output_area.count = 0;
        this.output_area.drop = false;
        return this._execute();
    };
});
// Notebook entry point: kick off the asynchronous config load; the
// patching registered on config.loaded runs once it resolves.
var load_ipython_extension = function() {
    config.load();
};

// Public module interface expected by the notebook extension loader.
var extension = {
    load_ipython_extension : load_ipython_extension
};

return extension;
});
| jbn/IPython-notebook-extensions | nbextensions/usability/limit_output/main.js | JavaScript | bsd-3-clause | 4,242 |
// Copyright 2016 Keybase Inc. All rights reserved.
// Use of this source code is governed by a BSD
// license that can be found in the LICENSE file.
package libdokan
import (
"strings"
"time"
"github.com/keybase/client/go/kbfs/data"
"github.com/keybase/client/go/kbfs/dokan"
"github.com/keybase/client/go/kbfs/idutil"
"github.com/keybase/client/go/kbfs/kbfsmd"
"golang.org/x/net/context"
)
// Well-known names and user-visible strings used by the libdokan mount.
const (
	// PublicName is the name of the parent of all public top-level folders.
	PublicName = "public"

	// PrivateName is the name of the parent of all private top-level folders.
	PrivateName = "private"

	// TeamName is the name of the parent of all team top-level folders.
	TeamName = "team"

	// CtxOpID is the display name for the unique operation Dokan ID tag.
	CtxOpID = "DID"

	// WrongUserErrorFileName is the name of error directory for other users.
	WrongUserErrorFileName = `kbfs.access.denied.for.other.windows.users.txt`

	// WrongUserErrorContents is the contents of the file.
	WrongUserErrorContents = `Access to KBFS is limited to the windows user (sid) running KBFS.`
)
// CtxTagKey is the type used for unique context tags
type CtxTagKey int

const (
	// CtxIDKey is the type of the tag for unique operation IDs.
	CtxIDKey CtxTagKey = iota
)
// eiToStat converts a libkbfs EntryInfo (plus a pending error) into a
// *dokan.Stat.  A non-nil error is mapped through errToDokan and returned
// instead.  Note that handling symlinks to directories requires extra
// processing not done here.
func eiToStat(ei data.EntryInfo, err error) (*dokan.Stat, error) {
	if err != nil {
		return nil, errToDokan(err)
	}
	var st dokan.Stat
	fillStat(&st, &ei)
	return &st, nil
}
// fillStat populates a dokan.Stat from a libkbfs entry: size, timestamps
// (access time mirrors the modification time) and the file-attribute
// flags derived from the entry type.  Note that handling symlinks to
// directories requires extra processing not done here.
func fillStat(st *dokan.Stat, ei *data.EntryInfo) {
	st.FileSize = int64(ei.Size)
	mtime := time.Unix(0, ei.Mtime)
	st.LastWrite = mtime
	st.LastAccess = mtime
	st.Creation = time.Unix(0, ei.Ctime)
	switch ei.Type {
	case data.File, data.Exec:
		st.FileAttributes = dokan.FileAttributeNormal
	case data.Dir:
		st.FileAttributes = dokan.FileAttributeDirectory
	case data.Sym:
		st.FileAttributes = dokan.FileAttributeReparsePoint
		st.ReparsePointTag = dokan.IOReparseTagSymlink
	}
}
// addFileAttribute ORs a file attribute into the stat struct.
// FileAttributeNormal is only valid when it stands alone, so any Normal
// flag is cleared before the new attribute is added.
func addFileAttribute(a *dokan.Stat, fa dokan.FileAttribute) {
	cleared := a.FileAttributes &^ dokan.FileAttributeNormal
	a.FileAttributes = cleared | fa
}
// errToDokan translates a few libkbfs error types into their dokan
// equivalents; all other errors (and nil) pass through unchanged.  Not
// needed in most places.
func errToDokan(err error) error {
	switch err.(type) {
	case nil:
		return nil
	case idutil.NoSuchNameError, idutil.NoSuchUserError:
		return dokan.ErrObjectNameNotFound
	case kbfsmd.ServerErrorUnauthorized:
		return dokan.ErrAccessDenied
	}
	return err
}
// defaultDirectoryInformation returns the stat for a plain directory.
func defaultDirectoryInformation() (*dokan.Stat, error) {
	return &dokan.Stat{FileAttributes: dokan.FileAttributeDirectory}, nil
}
// defaultFileInformation returns the stat for a plain file.
func defaultFileInformation() (*dokan.Stat, error) {
	return &dokan.Stat{FileAttributes: dokan.FileAttributeNormal}, nil
}
// defaultSymlinkFileInformation returns the stat for a symlink that
// points at a file.
func defaultSymlinkFileInformation() (*dokan.Stat, error) {
	return &dokan.Stat{
		FileAttributes:  dokan.FileAttributeReparsePoint,
		ReparsePointTag: dokan.IOReparseTagSymlink,
	}, nil
}
// defaultSymlinkDirInformation returns the stat for a symlink that
// points at a directory.
func defaultSymlinkDirInformation() (*dokan.Stat, error) {
	return &dokan.Stat{
		FileAttributes:  dokan.FileAttributeReparsePoint | dokan.FileAttributeDirectory,
		ReparsePointTag: dokan.IOReparseTagSymlink,
	}, nil
}
// lowerTranslateCandidate returns the lowercase form of a path component
// when the open context is marked as an uppercase path AND lowering
// actually changes the component; otherwise it returns "".
func lowerTranslateCandidate(oc *openContext, s string) string {
	if oc.isUppercasePath {
		if lowered := strings.ToLower(s); lowered != s {
			return lowered
		}
	}
	return ""
}
// stringReadFile wraps a string as a read-only dokan file.
func stringReadFile(contents string) dokan.File {
	return &stringFile{data: contents}
}
// stringFile is a read-only dokan.File backed by an in-memory string;
// all operations other than stat and read come from emptyFile.
type stringFile struct {
	emptyFile
	// data holds the full file contents.
	data string
}
// GetFileInformation does stats for dokan: a read-only file whose size is
// the backing string's length and whose timestamps are all "now".
func (s *stringFile) GetFileInformation(ctx context.Context, fi *dokan.FileInfo) (*dokan.Stat, error) {
	st, err := defaultFileInformation()
	if err != nil {
		return nil, err
	}
	st.FileAttributes |= dokan.FileAttributeReadonly
	st.FileSize = int64(len(s.data))
	now := time.Now()
	st.Creation = now
	st.LastAccess = now
	st.LastWrite = now
	return st, nil
}
// ReadFile does reads for dokan: copies as much of the string starting at
// offset as fits in bs, returning 0 (EOF) for offsets at or past the end.
func (s *stringFile) ReadFile(ctx context.Context, fi *dokan.FileInfo, bs []byte, offset int64) (int, error) {
	if offset >= int64(len(s.data)) {
		return 0, nil
	}
	remaining := s.data[int(offset):]
	return copy(bs, remaining), nil
}
| keybase/client | go/kbfs/libdokan/common.go | GO | bsd-3-clause | 5,056 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Feed
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2008 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id: ImportTest.php 8649 2008-03-07 19:49:02Z darby $
*/
/**
* Test helper
*/
require_once dirname(__FILE__) . '/../../TestHelper.php';
/**
* @see Zend_Feed
*/
require_once 'Zend/Feed.php';
/**
* @see Zend_Feed_Builder
*/
require_once 'Zend/Feed/Builder.php';
/**
* @see Zend_Http_Client_Adapter_Test
*/
require_once 'Zend/Http/Client/Adapter/Test.php';
/**
* @see Zend_Http_Client
*/
require_once 'Zend/Http/Client.php';
/**
* @category Zend
* @package Zend_Feed
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2008 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
class Zend_Feed_ImportTest extends PHPUnit_Framework_TestCase
{
    /**
     * HTTP client used by Zend_Feed, wired to the test adapter in setUp().
     *
     * @var Zend_Http_Client
     */
    protected $_client;

    /**
     * Directory containing the feed fixture files (./_files).
     *
     * @var string
     */
    protected $_feedDir;

    /**
     * HTTP client test adapter
     *
     * @var Zend_Http_Client_Adapter_Test
     */
    protected $_adapter;

    /**
     * Installs a test HTTP adapter so no real network access happens;
     * each test queues a canned response on $_adapter instead.
     */
    public function setUp()
    {
        $this->_adapter = new Zend_Http_Client_Adapter_Test();
        Zend_Feed::setHttpClient(new Zend_Http_Client(null, array('adapter' => $this->_adapter)));
        $this->_client = Zend_Feed::getHttpClient();
        $this->_feedDir = dirname(__FILE__) . '/_files';
    }

    /**
     * Test an atom feed generated by google's Blogger platform
     */
    public function testAtomGoogle()
    {
        $this->_importAtomValid('AtomTestGoogle.xml');
    }

    /**
     * Test an atom feed generated by mozillaZine.org
     */
    public function testAtomMozillazine()
    {
        $this->_importAtomValid('AtomTestMozillazine.xml');
    }

    /**
     * Test an atom feed generated by O'Reilly
     */
    public function testAtomOReilly()
    {
        $this->_importAtomValid('AtomTestOReilly.xml');
    }

    /**
     * Test an atom feed generated by PlanetPHP
     */
    public function testAtomPlanetPHP()
    {
        $this->_importAtomValid('AtomTestPlanetPHP.xml');
    }

    /**
     * Test a small atom feed
     */
    public function testAtomSample1()
    {
        $this->_importAtomValid('AtomTestSample1.xml');
    }

    /**
     * Test a small atom feed without any entries
     */
    public function testAtomSample2()
    {
        $this->_importAtomValid('AtomTestSample2.xml');
    }

    /**
     * Test an atom feed with a </entry> tag missing
     */
    public function testAtomSample3()
    {
        $this->_importInvalid('AtomTestSample3.xml');
    }

    /**
     * Test an atom feed with links within entries
     */
    public function testAtomSample4()
    {
        $this->_importAtomValid('AtomTestSample4.xml');
    }

    /**
     * Test a RSS feed generated by UserLand Frontier v9.5
     */
    public function testRssHarvardLaw()
    {
        $this->_importRssValid('RssTestHarvardLaw.xml');
    }

    /**
     * Test a RSS feed generated by PlanetPHP
     */
    public function testRssPlanetPHP()
    {
        $this->_importRssValid('RssTestPlanetPHP.xml');
    }

    /**
     * Test a RSS feed generated by Slashdot
     */
    public function testRssSlashdot()
    {
        $this->_importRssValid('RssTestSlashdot.xml');
    }

    /**
     * Test a RSS feed generated by CNN
     */
    public function testRssCNN()
    {
        $this->_importRssValid('RssTestCNN.xml');
    }

    /**
     * Test a valid RSS 0.91 sample
     */
    public function testRss091Sample1()
    {
        $this->_importRssValid('RssTest091Sample1.xml');
    }

    /**
     * Test a valid RSS 0.91 sample
     */
    public function testRss092Sample1()
    {
        $this->_importRssValid('RssTest092Sample1.xml');
    }

    /**
     * Test a valid RSS 1.0 sample
     */
    public function testRss100Sample1()
    {
        $this->_importRssValid('RssTest100Sample1.xml');
    }

    /**
     * Test a valid RSS 1.0 sample with some extensions in it
     */
    public function testRss100Sample2()
    {
        $this->_importRssValid('RssTest100Sample2.xml');
    }

    /**
     * Test a valid RSS 2.0 sample
     */
    public function testRss200Sample1()
    {
        $this->_importRssValid('RssTest200Sample1.xml');
    }

    /**
     * Test the import of a RSS feed from an array
     */
    public function testRssImportFullArray()
    {
        $feed = Zend_Feed::importArray($this->_getFullArray(), 'rss');
        $this->assertType('Zend_Feed_Rss', $feed);
    }

    /**
     * Test the import of a RSS feed from an array
     */
    public function testAtomImportFullArray()
    {
        // NOTE(review): no assertion here; the test only verifies that
        // importArray() does not throw for the atom format.
        $feed = Zend_Feed::importArray($this->_getFullArray(), 'atom');
    }

    /**
     * Test the import of a RSS feed from a builder
     */
    public function testRssImportFullBuilder()
    {
        $feed = Zend_Feed::importBuilder(new Zend_Feed_Builder($this->_getFullArray()), 'rss');
        $this->assertType('Zend_Feed_Rss', $feed);
    }

    /**
     * Test the import of a full iTunes RSS feed from a builder
     */
    public function testRssImportFulliTunesBuilder()
    {
        // Extend the base fixture with the iTunes-specific channel elements.
        $array = $this->_getFullArray();
        $array['itunes']['author'] = 'iTunes Author';
        $array['itunes']['owner'] = array('name' => 'iTunes Owner',
                                          'email' => 'itunes@example.com');
        $array['itunes']['image'] = 'http://www.example/itunes.png';
        $array['itunes']['subtitle'] = 'iTunes subtitle';
        $array['itunes']['summary'] = 'iTunes summary';
        $array['itunes']['explicit'] = 'clean';
        $array['itunes']['block'] = 'no';
        $array['itunes']['new-feed-url'] = 'http://www.example/itunes.xml';
        $feed = Zend_Feed::importBuilder(new Zend_Feed_Builder($array), 'rss');
        $this->assertType('Zend_Feed_Rss', $feed);
    }

    /**
     * Test the import of an Atom feed from a builder
     */
    public function testAtomImportFullBuilder()
    {
        // NOTE(review): no assertion here; the test only verifies that
        // importBuilder() does not throw for the atom format.
        $feed = Zend_Feed::importBuilder(new Zend_Feed_Builder($this->_getFullArray()), 'atom');
    }

    /**
     * Test the import of an Atom feed from a builder
     */
    public function testAtomImportFullBuilderValid()
    {
        // Round-trip: build the feed, serialize it, and re-import the XML.
        $feed = Zend_Feed::importBuilder(new Zend_Feed_Builder($this->_getFullArray()), 'atom');
        $feed = Zend_Feed::importString($feed->saveXml());
        $this->assertType('Zend_Feed_Atom', $feed);
    }

    /**
     * Check the validity of the builder import (rss)
     */
    public function testRssImportFullBuilderValid()
    {
        $feed = Zend_Feed::importBuilder(new Zend_Feed_Builder($this->_getFullArray()), 'rss');
        $this->assertType('Zend_Feed_Rss', $feed);
        $feed = Zend_Feed::importString($feed->saveXml());
        $this->assertType('Zend_Feed_Rss', $feed);
    }

    /**
     * Test the return of a link() call (atom)
     */
    public function testAtomGetLink()
    {
        $feed = Zend_Feed::importBuilder(new Zend_Feed_Builder($this->_getFullArray()), 'atom');
        $this->assertType('Zend_Feed_Atom', $feed);
        $feed = Zend_Feed::importString($feed->saveXml());
        $this->assertType('Zend_Feed_Atom', $feed);
        $href = $feed->link('self');
        $this->assertEquals('http://www.example.com', $href);
    }

    /**
     * Imports an invalid feed and ensure everything works as expected
     * even if XDebug is running (ZF-2590).
     */
    public function testImportInvalidIsXdebugAware()
    {
        if (!function_exists('xdebug_is_enabled')) {
            $this->markTestIncomplete('XDebug not installed');
        }

        // An empty body must produce a Zend_Feed_Exception, not a fatal
        // error, regardless of whether XDebug intercepts it.
        $response = new Zend_Http_Response(200, array(), '');
        $this->_adapter->setResponse($response);
        try {
            $feed = Zend_Feed::import('http://localhost');
            $this->fail('Expected Zend_Feed_Exception not thrown');
        } catch (Zend_Feed_Exception $e) {
            $this->assertType('Zend_Feed_Exception', $e);
            $this->assertRegExp('/(XDebug is running|Empty string)/', $e->getMessage());
        }
    }

    /**
     * Returns the array used by Zend_Feed::importArray
     * and Zend_Feed::importBuilder tests
     *
     * @return array
     */
    protected function _getFullArray()
    {
        // NOTE(review): the original indentation of this fixture (notably
        // inside the multi-line 'rating' string) was lost in extraction;
        // the string's whitespace is cosmetic PICS-label formatting.
        $array = array('title' => 'Title of the feed',
                       'link' => 'http://www.example.com',
                       'description' => 'Description of the feed',
                       'author' => 'Olivier Sirven',
                       'email' => 'olivier@elma.fr',
                       'webmaster' => 'olivier@elma.fr',
                       'charset' => 'iso-8859-15',
                       'lastUpdate' => time(),
                       'published' => strtotime('2007-02-27'),
                       'copyright' => 'Common Creative',
                       'image' => 'http://www.example/images/icon.png',
                       'language' => 'en',
                       'ttl' => 60,
                       'rating' => ' (PICS-1.1 "http://www.gcf.org/v2.5" labels
 on "1994.11.05T08:15-0500"
 exp "1995.12.31T23:59-0000"
 for "http://www.greatdocs.com/foo.html"
 by "George Sanderson, Jr."
 ratings (suds 0.5 density 0 color/hue 1))',
                       'cloud' => array('domain' => 'rpc.sys.com',
                                        'path' => '/rpc',
                                        'registerProcedure' => 'webServices.pingMe',
                                        'protocol' => 'xml-rpc'),
                       'textInput' => array('title' => 'subscribe',
                                            'description' => 'enter your email address to subscribe by mail',
                                            'name' => 'email',
                                            'link' => 'http://www.example.com/subscribe'),
                       'skipHours' => array(1, 13, 17),
                       'skipDays' => array('Saturday', 'Sunday'),
                       'itunes' => array('block' => 'no',
                                         'keywords' => 'example,itunes,podcast',
                                         'category' => array(array('main' => 'Technology',
                                                                   'sub' => 'Gadgets'),
                                                             array('main' => 'Music'))),
                       'entries' => array(array('guid' => time(),
                                                'title' => 'First article',
                                                'link' => 'http://www.example.com',
                                                'description' => 'First article description',
                                                'content' => 'First article <strong>content</strong>',
                                                'lastUpdate' => time(),
                                                'comments' => 'http://www.example.com/#comments',
                                                'commentRss' => 'http://www.example.com/comments.xml',
                                                'source' => array('title' => 'Original title',
                                                                  'url' => 'http://www.domain.com'),
                                                'category' => array(array('term' => 'test category',
                                                                          'scheme' => 'http://www.example.com/scheme'),
                                                                    array('term' => 'another category')
                                                                    ),
                                                'enclosure' => array(array('url' => 'http://www.example.com/podcast.mp3',
                                                                           'type' => 'audio/mpeg',
                                                                           'length' => '12216320'
                                                                           ),
                                                                     array('url' => 'http://www.example.com/podcast2.mp3',
                                                                           'type' => 'audio/mpeg',
                                                                           'length' => '1221632'
                                                                           )
                                                                     )
                                                ),
                                          array('title' => 'Second article',
                                                'link' => 'http://www.example.com/two',
                                                'description' => 'Second article description',
                                                'content' => 'Second article <strong>content</strong>',
                                                'lastUpdate' => time(),
                                                'comments' => 'http://www.example.com/two/#comments',
                                                'category' => array(array('term' => 'test category')),
                                                )
                                          )
                       );
        return $array;
    }

    /**
     * Import a valid atom feed and assert it parses as Zend_Feed_Atom.
     *
     * @param string $filename fixture file name under $_feedDir
     */
    protected function _importAtomValid($filename)
    {
        $response = new Zend_Http_Response(200, array(), file_get_contents("$this->_feedDir/$filename"));
        $this->_adapter->setResponse($response);
        $feed = Zend_Feed::import('http://localhost');
        $this->assertType('Zend_Feed_Atom', $feed);
    }

    /**
     * Import a valid rss feed
     *
     * @param string $filename fixture file name under $_feedDir
     */
    protected function _importRssValid($filename)
    {
        $response = new Zend_Http_Response(200, array(), file_get_contents("$this->_feedDir/$filename"));
        $this->_adapter->setResponse($response);
        $feed = Zend_Feed::import('http://localhost');
        $this->assertType('Zend_Feed_Rss', $feed);
    }

    /**
     * Imports an invalid feed and asserts Zend_Feed_Exception is raised.
     *
     * @param string $filename fixture file name under $_feedDir
     */
    protected function _importInvalid($filename)
    {
        $response = new Zend_Http_Response(200, array(), file_get_contents("$this->_feedDir/$filename"));
        $this->_adapter->setResponse($response);
        try {
            $feed = Zend_Feed::import('http://localhost');
            $this->fail('Expected Zend_Feed_Exception not thrown');
        } catch (Zend_Feed_Exception $e) {
            $this->assertType('Zend_Feed_Exception', $e);
        }
    }
}
| lortnus/zf1 | tests/Zend/Feed/ImportTest.php | PHP | bsd-3-clause | 15,203 |
#!/usr/bin/env python
"""
Single trace Analysis
"""
__author__ = "Yanlong Yin (yyin2@iit.edu)"
__version__ = "$Revision: 1.4$"
__date__ = "$Date: 02/08/2014 $"
__copyright__ = "Copyright (c) 2010-2014 SCS Lab, IIT"
__license__ = "Python"
import sys, os, string, getopt, gc, multiprocessing
from sig import *
from access import *
from accList import *
from prop import *
from util import *
def detectSignature(filename):
    """Parse one trace file, split its records into read/write access
    lists, and run I/O-signature detection on each list.

    Detected signatures are printed and written to ``sig._out_path``
    through the AccList helpers.  Depends on module-level configuration
    in ``sig`` (``_format_prop``, ``_range``, ``_out_path``, ...).

    NOTE(review): leading indentation was lost in the stored copy of
    this file; the structure below was reconstructed to mirror the
    parallel loop in generateCSVs() -- confirm against the original.
    """
    # the list contains all the accesses
    rlist = AccList()
    wlist = AccList()
    accList = AccList()  # all lines with "accList" are commentted out
    # because the figure drawing using accList
    # is replaced with rlist and wlist

    # open the trace file
    f = open(filename, 'r')

    # skip the first several lines
    # Maybe the skipped lines are table heads
    for i in range(int(sig._format_prop['skip_lines'])):
        line = f.readline()

    # scan the file and put the access item into list
    i = 0
    j = 0  # counts blank/short lines so the detection range can be shrunk below
    op_index = int(sig._format_prop['op'])  # column index of the operation name
    debugPrint ('op_index: ', op_index)
    op = ''
    # TODO: add while 1 loop here
    for i in range(sig._range):
        line = f.readline()
        if not line:
            break
        words = string.split(line)
        # there might be some blank lines
        if len(words) < 6:
            j+=1
            continue
        ## only "READ" and "WRITE" will be saved
        #if words[-1].count('READ') == 0 and words[-1].count('WRITE') == 0:
        # to test chomob, only use write
        # if words[-1].count('WRITE') == 0:
        #     j+=1
        #     continue

        ## save to list
        op = words[op_index].upper();
        acc = Access(words)
        if acc.size >= 1:
            accList.append(acc)
            if op.count('READ')>0 or op == 'R':
                debugPrint("one READ")
                rlist.append(acc)
            if op.count('WRITE')>0 or op == 'W':
                debugPrint("one WRITE")
                wlist.append(acc)

    ## close the opened file
    f.close()
    rlist.trace = filename
    wlist.trace = filename
    accList.trace = filename

    # print the time summary
    print 'Total read time: ', sig._total_read_time
    print 'Total write time: ', sig._total_write_time
    print 'Numbers of operations - ', 'Read: ', len(rlist), ' write: ', len(wlist)

    ## deal with the list
    rlist.detect_signature(0, min(sig._range-j-1, len(rlist)-1) )
    wlist.detect_signature(0, min(sig._range-j-1, len(wlist)-1) )

    ## Done with the whole process of detecting
    ## Print the whole signature
    if len(rlist.signatures) > 0 or len(wlist.signatures) > 0:
        print '----------------------------------------'
        print 'The following signatures are detected:'
    if len(rlist.signatures) > 0:
        rlist.print_signature()
        rlist.gen_protobuf(sig._out_path)
        rlist.makeup_output(sig._out_path)
    if len(wlist.signatures) > 0:
        wlist.print_signature()
        wlist.gen_protobuf(sig._out_path)
        wlist.makeup_output(sig._out_path)
    #if len(accList) > 0:
    accList.gen_iorates(sig._out_path)
def generateCSVs(single_trace_filename):
    """Generate the Read/Write Bandwidth figures.

    Reads the trace in batches of ``sig._range`` lines, accumulates
    per-operation counts/durations, and appends rate/interval/hole-size
    CSV rows plus a ``.stat.properties`` summary under ``sig._out_path``.

    NOTE(review): a write-hole-sizes CSV is never created here, only the
    read one -- confirm whether that is intentional.  Also, the final
    stats file handle ``f`` is not closed explicitly.
    NOTE(review): indentation was lost in the stored copy; structure
    reconstructed -- confirm against the original.
    """
    trace_path, trace_filename = os.path.split(single_trace_filename)

    # the list contains all the accesses
    rlist = AccList()
    wlist = AccList()
    rlistEmpty = 1  # stays 1 only if no read ever reaches rlist
    wlistEmpty = 1  # stays 1 only if no write ever reaches wlist
    total_read_count = 0
    total_write_count = 0
    total_read_time = 0.0
    total_write_time = 0.0

    # Create and empty each CSV files, write the CSV title line
    output = os.path.join(sig._out_path, trace_filename + ".read.rate.csv")
    f = open(output, 'w')
    f.write("Time,Rate\n")
    f.close()
    output = os.path.join(sig._out_path, trace_filename + ".write.rate.csv")
    f = open(output, 'w')
    f.write("Time,Rate\n")
    f.close()
    output = os.path.join(sig._out_path, trace_filename + ".read.interval.csv")
    f = open(output, 'w')
    f.write("Begin,End\n")
    f.close()
    output = os.path.join(sig._out_path, trace_filename + ".write.interval.csv")
    f = open(output, 'w')
    f.write("Begin,End\n")
    f.close()
    output = os.path.join(sig._out_path, trace_filename + ".read.hole.sizes.csv")
    f = open(output, 'w')
    f.write("Time,Size\n")
    f.close()

    # open the trace file
    f = open(single_trace_filename, 'r')

    # skip the first several lines
    # Maybe the skipped lines are table heads
    for i in range(int(sig._format_prop['skip_lines'])):
        line = f.readline()

    # scan the file and put the access item into list
    i = 0
    j = 0
    eof = 0  # reaching the EOF?
    op_index = int(sig._format_prop['op'])
    debugPrint ('op_index: ', op_index)
    op = ''
    while 1:
        # handle 5000 operations once
        for i in range(sig._range):
            line = f.readline()
            if not line:
                eof = 1
                break
            words = string.split(line)
            # there might be some blank lines
            if len(words) < 6:
                j+=1
                continue
            ## only "READ" and "WRITE" will be saved
            #if words[-1].count('READ') == 0 and words[-1].count('WRITE') == 0:
            # to test chomob, only use write
            # if words[-1].count('WRITE') == 0:
            #     j+=1
            #     continue

            ## save to list
            op = words[op_index].upper();
            acc = Access(words)
            if acc.size >= 1:
                if op.count('READ')>0 or op == 'R':
                    debugPrint("one READ")
                    rlist.append(acc)
                    total_read_count += 1
                    total_read_time += acc.endTime - acc.startTime
                if op.count('WRITE')>0 or op == 'W':
                    debugPrint("one WRITE")
                    wlist.append(acc)
                    total_write_count += 1
                    total_write_time += acc.endTime - acc.startTime

        # finish reading a batch of 5000 lines of the trace file
        # Generate all kinds of CSV files using the rlist and wlist
        # here the write operation should be "append"
        # because it's handling 5000 lines each time
        if (len(rlist) > 0):
            rlist.toIORStep(trace_filename, 'r')  # 'r' for read
            rlist.toDataAccessHoleSizes(trace_filename, 'r')
            rlistEmpty = 0
        if (len(wlist) > 0):
            wlist.toIORStep(trace_filename, 'w')  # 'w' for write
            wlistEmpty = 0

        # empty the two lists
        rlist = AccList()
        wlist = AccList()
        gc.collect()  # garbage collection

        # reached EOF? exit the "while 1" loop
        if eof == 1:
            break

    ## close the opened file
    f.close()

    # Emit a single zero row so downstream plotting has data to read
    # even when the trace contained no reads/writes at all.
    if (rlistEmpty == 1):
        readF = open( os.path.join(sig._out_path, trace_filename + ".read.rate.csv"), 'a+')
        readF.write( "{0},{1}\n".format(0, 0) )
        readF.close()
        readF = open( os.path.join(sig._out_path, trace_filename + ".read.hole.sizes.csv"), 'a+')
        readF.write( "{0},{1}\n".format(0, 0) )
        readF.close()
    if (wlistEmpty == 1):
        writeF = open( os.path.join(sig._out_path, trace_filename + ".write.rate.csv"), 'a+')
        writeF.write( "{0},{1}\n".format(0, 0) )
        writeF.close()

    # TODO: gnuplot for read and write rates
    # save the statistics information to files
    output = os.path.join(sig._out_path, trace_filename + ".stat.properties")
    f = open(output, 'a+')
    f.write("total_read_time: {0}\n".format(total_read_time))
    f.write("total_read_count: {0}\n".format(total_read_count))
    f.write("total_write_time: {0}\n".format(total_write_time))
    f.write("total_write_count: {0}\n".format(total_write_count))
    #f.write("global_total_read_time: {0}\n".format(sig._total_read_time))
    #f.write("global_total_write_time: {0}\n".format(sig._total_write_time))
| yinyanlong/iosig | src/analysis/single_trace_analysis.py | Python | bsd-3-clause | 8,008 |
//
// Copyright 2015 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// BufferGL.cpp: Implements the class methods for BufferGL.
#include "libANGLE/renderer/gl/BufferGL.h"
#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/angletypes.h"
#include "libANGLE/formatutils.h"
#include "libANGLE/renderer/gl/FunctionsGL.h"
#include "libANGLE/renderer/gl/StateManagerGL.h"
#include "libANGLE/renderer/gl/renderergl_utils.h"
namespace rx
{
// Use the GL_COPY_READ_BUFFER binding when two buffers need to be bound simultaneously.
// GL_ELEMENT_ARRAY_BUFFER is supported on more versions but can modify the state of the currently
// bound VAO. Two simultaneous buffer bindings are only needed for glCopyBufferSubData which also
// adds the GL_COPY_READ_BUFFER binding.
static const GLenum SourceBufferOperationTarget = GL_COPY_READ_BUFFER;
// Use the GL_ELEMENT_ARRAY_BUFFER binding for most operations since it's available on all
// supported GL versions and doesn't affect any current state when it changes.
static const GLenum DestBufferOperationTarget = GL_ARRAY_BUFFER;
// Creates the GL buffer object.  When the driver cannot map buffers for
// reading (CanMapBufferForRead is false), a CPU-side shadow copy of the
// buffer contents is maintained instead.
BufferGL::BufferGL(const FunctionsGL *functions, StateManagerGL *stateManager)
    : BufferImpl(),
      mIsMapped(false),
      mMapOffset(0),
      mMapSize(0),
      mShadowBufferData(!CanMapBufferForRead(functions)),
      mShadowCopy(),
      mBufferSize(0),
      mFunctions(functions),
      mStateManager(stateManager),
      mBufferID(0)
{
    ASSERT(mFunctions);
    ASSERT(mStateManager);

    mFunctions->genBuffers(1, &mBufferID);
}
// Deletes the buffer through the state manager so cached bindings are
// invalidated along with the GL object.
BufferGL::~BufferGL()
{
    mStateManager->deleteBuffer(mBufferID);
    mBufferID = 0;
}
// Replaces the entire buffer storage with |size| bytes from |data| (data may
// be null for uninitialized storage), mirroring the bytes into the shadow
// copy when buffer mapping is unavailable.
gl::Error BufferGL::setData(const void* data, size_t size, GLenum usage)
{
    mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
    mFunctions->bufferData(DestBufferOperationTarget, size, data, usage);

    if (mShadowBufferData)
    {
        if (!mShadowCopy.resize(size))
        {
            return gl::Error(GL_OUT_OF_MEMORY, "Failed to resize buffer data shadow copy.");
        }

        if (size > 0 && data != nullptr)
        {
            memcpy(mShadowCopy.data(), data, size);
        }
    }

    mBufferSize = size;

    return gl::Error(GL_NO_ERROR);
}
// Updates |size| bytes of the buffer starting at byte |offset|, keeping the
// shadow copy in sync when one is maintained.
gl::Error BufferGL::setSubData(const void* data, size_t size, size_t offset)
{
    mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
    mFunctions->bufferSubData(DestBufferOperationTarget, offset, size, data);

    if (mShadowBufferData && size > 0)
    {
        memcpy(mShadowCopy.data() + offset, data, size);
    }

    return gl::Error(GL_NO_ERROR);
}
// Copies |size| bytes from |source| into this buffer using the dedicated
// copy-read/dest targets, mirroring the copy into the shadow data when
// present.  Both buffers are expected to shadow under the same conditions.
gl::Error BufferGL::copySubData(BufferImpl* source, GLintptr sourceOffset, GLintptr destOffset, GLsizeiptr size)
{
    BufferGL *sourceGL = GetAs<BufferGL>(source);

    mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
    mStateManager->bindBuffer(SourceBufferOperationTarget, sourceGL->getBufferID());

    mFunctions->copyBufferSubData(SourceBufferOperationTarget, DestBufferOperationTarget, sourceOffset, destOffset, size);

    if (mShadowBufferData && size > 0)
    {
        ASSERT(sourceGL->mShadowBufferData);
        memcpy(mShadowCopy.data() + destOffset, sourceGL->mShadowCopy.data() + sourceOffset, size);
    }

    return gl::Error(GL_NO_ERROR);
}
// Maps the whole buffer.  With a shadow copy the CPU data pointer is returned
// directly; otherwise the GL buffer is mapped with the requested access bits.
gl::Error BufferGL::map(GLenum access, GLvoid **mapPtr)
{
    if (mShadowBufferData)
    {
        *mapPtr = mShadowCopy.data();
    }
    else
    {
        mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
        *mapPtr = mFunctions->mapBuffer(DestBufferOperationTarget, access);
    }

    // Record the mapped span so unmap() can flush the shadow copy if needed.
    mIsMapped = true;
    mMapOffset = 0;
    mMapSize = mBufferSize;

    return gl::Error(GL_NO_ERROR);
}
// Maps |length| bytes starting at |offset|, either into the shadow copy or
// via glMapBufferRange on the GL buffer.
gl::Error BufferGL::mapRange(size_t offset, size_t length, GLbitfield access, GLvoid **mapPtr)
{
    if (mShadowBufferData)
    {
        *mapPtr = mShadowCopy.data() + offset;
    }
    else
    {
        mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
        *mapPtr = mFunctions->mapBufferRange(DestBufferOperationTarget, offset, length, access);
    }

    // Record the mapped span so unmap() can flush the shadow copy if needed.
    mIsMapped = true;
    mMapOffset = offset;
    mMapSize = length;

    return gl::Error(GL_NO_ERROR);
}
// Unmaps the buffer.  With a shadow copy, the previously mapped range is
// flushed back to the GL buffer via bufferSubData (every map is treated as a
// potential write), and the unmap always "succeeds".
gl::Error BufferGL::unmap(GLboolean *result)
{
    ASSERT(result);
    ASSERT(mIsMapped);

    if (mShadowBufferData)
    {
        mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
        mFunctions->bufferSubData(DestBufferOperationTarget, mMapOffset, mMapSize,
                                  mShadowCopy.data() + mMapOffset);
        *result = GL_TRUE;
    }
    else
    {
        mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
        *result = mFunctions->unmapBuffer(DestBufferOperationTarget);
    }

    mIsMapped = false;

    return gl::Error(GL_NO_ERROR);
}
// Computes the min/max index range of |count| indices of |type| starting at
// byte |offset|, reading either the shadow copy or a temporarily mapped
// read-only range of the GL buffer.
gl::Error BufferGL::getIndexRange(GLenum type,
                                  size_t offset,
                                  size_t count,
                                  bool primitiveRestartEnabled,
                                  gl::IndexRange *outRange)
{
    ASSERT(!mIsMapped);

    if (mShadowBufferData)
    {
        *outRange = gl::ComputeIndexRange(type, mShadowCopy.data() + offset, count,
                                          primitiveRestartEnabled);
    }
    else
    {
        mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);

        const gl::Type &typeInfo = gl::GetTypeInfo(type);
        const uint8_t *bufferData = MapBufferRangeWithFallback(
            mFunctions, DestBufferOperationTarget, offset, count * typeInfo.bytes, GL_MAP_READ_BIT);
        *outRange = gl::ComputeIndexRange(type, bufferData, count, primitiveRestartEnabled);
        mFunctions->unmapBuffer(DestBufferOperationTarget);
    }

    return gl::Error(GL_NO_ERROR);
}
// Returns the native GL buffer object name.
GLuint BufferGL::getBufferID() const
{
    return mBufferID;
}
}
| mikolalysenko/angle | src/libANGLE/renderer/gl/BufferGL.cpp | C++ | bsd-3-clause | 5,956 |
/**
* Copyright (c) 2015 See AUTHORS file
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the mini2Dx nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.mini2Dx.core.graphics;
import java.util.HashMap;
import java.util.Map;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.gdx.graphics.Texture;
/**
 * Implements a cache of textures for shapes.
 *
 * Textures are keyed by the colour bits plus the shape parameters, so
 * repeated requests with identical arguments return the same cached
 * {@link Texture} instance.
 *
 * NOTE(review): cached textures are never disposed by this class; confirm
 * that the owner releases them (or clears the cache) when no longer needed.
 */
public class ShapeTextureCache {
    // Keyed by Color.toIntBits() of the fill colour.
    private Map<Integer, Texture> filledRectangleTextures;
    // Keyed by "<colorBits>,<width>,<height>,<lineHeight>".
    private Map<String, Texture> rectangleTextures;
    // Keyed by "<colorBits>,<radius>,<lineHeight>".
    private Map<String, Texture> circleTextures;
    // Keyed by "<colorBits>,<radius>".
    private Map<String, Texture> filledCircleTextures;

    /**
     * Constructor
     */
    public ShapeTextureCache() {
        rectangleTextures = new HashMap<String, Texture>();
        filledRectangleTextures = new HashMap<Integer, Texture>();
        circleTextures = new HashMap<String, Texture>();
        filledCircleTextures = new HashMap<String, Texture>();
    }

    /**
     * Returns a filled rectangular texture for the provided {@link Color}
     *
     * @param color
     *            The {@link Color} to fetch a texture of
     * @return A new {@link Texture} if this is first time it has been
     *         requested, otherwise it will return a cached instance of the
     *         {@link Texture} for the given {@link Color}
     */
    public Texture getFilledRectangleTexture(Color color) {
        int bits = color.toIntBits();
        if (!filledRectangleTextures.containsKey(bits)) {
            // A 1x1 texture is sufficient; it can be stretched when drawn.
            Pixmap pixmap = new Pixmap(1, 1, Pixmap.Format.RGBA8888);
            pixmap.setColor(color);
            pixmap.fillRectangle(0, 0, 1, 1);
            filledRectangleTextures.put(bits, new Texture(pixmap));
            // The pixmap is safe to free once uploaded into the Texture.
            pixmap.dispose();
        }
        return filledRectangleTextures.get(bits);
    }

    /**
     * Returns a rectangular texture for the provided {@link Color}
     *
     * @param color
     *            The {@link Color} to fetch a texture of
     * @param width
     *            The width of the rectangle
     * @param height
     *            The height of the rectangle
     * @param lineHeight
     *            The line height of the rectangle
     * @return A new {@link Texture} if this is first time it has been
     *         requested, otherwise it will return a cached instance of the
     *         {@link Texture} for the given {@link Color}
     */
    public Texture getRectangleTexture(Color color, int width, int height,
            int lineHeight) {
        int bits = color.toIntBits();
        String key = "" + bits + "," + width + "," + height + "," + lineHeight;
        if (!rectangleTextures.containsKey(key)) {
            Pixmap pixmap = new Pixmap(width + 1, height + 1,
                    Pixmap.Format.RGBA8888);
            pixmap.setColor(color);
            // Draw concentric rectangles inward to build up the line thickness.
            for (int i = 0; i < lineHeight; i++) {
                pixmap.drawRectangle(i, i, width - (i * 2), height - (i * 2));
            }
            rectangleTextures.put(key, new Texture(pixmap));
            pixmap.dispose();
        }
        return rectangleTextures.get(key);
    }

    /**
     * Returns a circle texture for the provided {@link Color}
     *
     * @param color
     *            The {@link Color} to fetch a texture of
     * @param radius
     *            The radius of the circle
     * @param lineHeight
     *            The line height of the circle
     * @return A new {@link Texture} if this is first time it has been
     *         requested, otherwise it will return a cached instance of the
     *         {@link Texture} for the given {@link Color}
     */
    public Texture getCircleTexture(Color color, int radius, int lineHeight) {
        int bits = color.toIntBits();
        String key = "" + bits + "," + radius + "," + lineHeight;
        if (!circleTextures.containsKey(key)) {
            Pixmap pixmap = new Pixmap((radius * 2) + 1, (radius * 2) + 1,
                    Pixmap.Format.RGBA8888);
            pixmap.setColor(color);
            // Draw concentric circles inward to build up the line thickness.
            for (int i = 0; i < lineHeight; i++) {
                pixmap.drawCircle(radius, radius, radius - i);
            }
            circleTextures.put(key, new Texture(pixmap));
            pixmap.dispose();
        }
        return circleTextures.get(key);
    }

    /**
     * Returns a filled circular texture for the provided {@link Color}
     *
     * @param color
     *            The {@link Color} to fetch a texture of
     * @param radius
     *            The radius of the circle
     * @return A new {@link Texture} if this is first time it has been
     *         requested, otherwise it will return a cached instance of the
     *         {@link Texture} for the given {@link Color}
     */
    public Texture getFilledCircleTexture(Color color, int radius) {
        int bits = color.toIntBits();
        String key = "" + bits + "," + radius;
        if (!filledCircleTextures.containsKey(key)) {
            Pixmap pixmap = new Pixmap((radius * 2) + 1, (radius * 2) + 1,
                    Pixmap.Format.RGBA8888);
            pixmap.setColor(color);
            pixmap.fillCircle(radius, radius, radius);
            filledCircleTextures.put(key, new Texture(pixmap));
            pixmap.dispose();
        }
        return filledCircleTextures.get(key);
    }
}
| hyperverse/mini2Dx | core/src/main/java/org/mini2Dx/core/graphics/ShapeTextureCache.java | Java | bsd-3-clause | 6,063 |
/*****************************************************************************/
/**
* @file ParticleBasedRenderer.cpp
* @author Naohisa Sakamoto
*/
/*****************************************************************************/
#include "ParticleBasedRendererGLSL.h"
#include <cmath>
#include <kvs/OpenGL>
#include <kvs/PointObject>
#include <kvs/Camera>
#include <kvs/Light>
#include <kvs/Assert>
#include <kvs/Math>
#include <kvs/MersenneTwister>
#include <kvs/Xorshift128>
namespace
{
/*===========================================================================*/
/**
* @brief Returns shuffled array.
* @param values [in] value array
* @param seed [in] seed value for random number generator
*/
/*===========================================================================*/
template <int Dim, typename T>
kvs::ValueArray<T> ShuffleArray( const kvs::ValueArray<T>& values, kvs::UInt32 seed )
{
    KVS_ASSERT( Dim > 0 );
    KVS_ASSERT( values.size() % Dim == 0 );

    kvs::Xorshift128 rng; rng.setSeed( seed );

    // Work on a private copy unless this array already owns its data.
    kvs::ValueArray<T> ret;
    if ( values.unique() ) { ret = values; }
    else { ret = values.clone(); }

    // Inside-out Fisher-Yates shuffle over Dim-component tuples: element
    // group i is swapped with a random group j in [0, i].
    T* p = ret.data();
    size_t size = ret.size() / Dim;
    for ( size_t i = 0; i < size; ++i )
    {
        size_t j = rng.randInteger() % ( i + 1 );
        for ( int k = 0; k < Dim; ++k )
        {
            std::swap( p[ i * Dim + k ], p[ j * Dim + k ] );
        }
    }

    return ret;
}
}
namespace kvs
{
namespace glsl
{
/*===========================================================================*/
/**
* @brief Constructs a new ParticleBasedRenderer class.
*/
/*===========================================================================*/
ParticleBasedRenderer::ParticleBasedRenderer():
StochasticRendererBase( new Engine() )
{
}
/*===========================================================================*/
/**
* @brief Constructs a new ParticleBasedRenderer class.
* @param m [in] initial modelview matrix
* @param p [in] initial projection matrix
* @param v [in] initial viewport
*/
/*===========================================================================*/
ParticleBasedRenderer::ParticleBasedRenderer( const kvs::Mat4& m, const kvs::Mat4& p, const kvs::Vec4& v ):
StochasticRendererBase( new Engine( m, p, v ) )
{
}
/*===========================================================================*/
/**
* @brief Returns true if the particle shuffling is enabled
* @return true, if the shuffling is enabled
*/
/*===========================================================================*/
bool ParticleBasedRenderer::isShuffleEnabled() const
{
return static_cast<const Engine&>( engine() ).isShuffleEnabled();
}
/*===========================================================================*/
/**
* @brief Returns true if the particle zooming is enabled
* @return true, if the zooming is enabled
*/
/*===========================================================================*/
bool ParticleBasedRenderer::isZoomingEnabled() const
{
return static_cast<const Engine&>( engine() ).isZoomingEnabled();
}
/*===========================================================================*/
/**
* @brief Sets enable-flag for the particle shuffling.
* @param enable [in] enable-flag
*/
/*===========================================================================*/
void ParticleBasedRenderer::setShuffleEnabled( const bool enable )
{
static_cast<Engine&>( engine() ).setShuffleEnabled( enable );
}
/*===========================================================================*/
/**
* @brief Sets enable-flag for the particle zooming.
* @param enable [in] enable-flag
*/
/*===========================================================================*/
void ParticleBasedRenderer::setZoomingEnabled( const bool enable )
{
static_cast<Engine&>( engine() ).setZoomingEnabled( enable );
}
/*===========================================================================*/
/**
* @brief Enable the particle shuffling.
*/
/*===========================================================================*/
void ParticleBasedRenderer::enableShuffle()
{
static_cast<Engine&>( engine() ).enableShuffle();
}
/*===========================================================================*/
/**
* @brief Enable the particle zooming.
*/
/*===========================================================================*/
void ParticleBasedRenderer::enableZooming()
{
static_cast<Engine&>( engine() ).enableZooming();
}
/*===========================================================================*/
/**
* @brief Disable the particle shuffling.
*/
/*===========================================================================*/
void ParticleBasedRenderer::disableShuffle()
{
static_cast<Engine&>( engine() ).disableShuffle();
}
/*===========================================================================*/
/**
* @brief Disable the particle zooming.
*/
/*===========================================================================*/
void ParticleBasedRenderer::disableZooming()
{
static_cast<Engine&>( engine() ).disableZooming();
}
/*===========================================================================*/
/**
* @brief Returns the initial modelview matrix.
* @param initial modelview matrix
*/
/*===========================================================================*/
const kvs::Mat4& ParticleBasedRenderer::initialModelViewMatrix() const
{
return static_cast<const Engine&>( engine() ).initialModelViewMatrix();
}
/*===========================================================================*/
/**
* @brief Returns the initial projection matrix.
* @param initial projection matrix
*/
/*===========================================================================*/
const kvs::Mat4& ParticleBasedRenderer::initialProjectionMatrix() const
{
return static_cast<const Engine&>( engine() ).initialProjectionMatrix();
}
/*===========================================================================*/
/**
* @brief Returns the initial viewport.
* @param initial viewport
*/
/*===========================================================================*/
const kvs::Vec4& ParticleBasedRenderer::initialViewport() const
{
return static_cast<const Engine&>( engine() ).initialViewport();
}
/*===========================================================================*/
/**
 *  @brief  Sets the vertex shader source file used by the rendering engine.
 *  @param  file [in] vertex shader file name
 */
/*===========================================================================*/
void ParticleBasedRenderer::setVertexShaderFile( const std::string& file )
{
    static_cast<Engine&>( engine() ).setVertexShaderFile( file );
}
/*===========================================================================*/
/**
 *  @brief  Sets the fragment shader source file used by the rendering engine.
 *  @param  file [in] fragment shader file name
 */
/*===========================================================================*/
void ParticleBasedRenderer::setFragmentShaderFile( const std::string& file )
{
    static_cast<Engine&>( engine() ).setFragmentShaderFile( file );
}
/*===========================================================================*/
/**
 *  @brief  Sets both vertex and fragment shader source files at once.
 *  @param  vert_file [in] vertex shader file name
 *  @param  frag_file [in] fragment shader file name
 */
/*===========================================================================*/
void ParticleBasedRenderer::setShaderFiles( const std::string& vert_file, const std::string& frag_file )
{
    static_cast<Engine&>( engine() ).setShaderFiles( vert_file, frag_file );
}
/*===========================================================================*/
/**
 *  @brief  Creates the vertex buffer managers for the point object.
 *  @param  object [in] pointer to the point object (must be a PointObject)
 *  @param  nmanagers [in] number of sub-buffers (repetition level)
 *
 *  The vertices are optionally shuffled (each attribute array with the same
 *  seed, so coordinate/color/normal tuples stay aligned) and then divided
 *  evenly across nmanagers vertex-buffer managers.
 */
/*===========================================================================*/
void ParticleBasedRenderer::Engine::BufferObject::create(
    const kvs::ObjectBase* object,
    const size_t nmanagers )
{
    const auto* point = kvs::PointObject::DownCast( object );
    KVS_ASSERT( point->coords().size() == point->colors().size() );

    const bool has_normal = point->normals().size() > 0;
    auto coords = point->coords();
    auto colors = point->colors();
    auto normals = point->normals();
    if ( m_enable_shuffle )
    {
        // Using the same seed for every attribute keeps the per-vertex
        // tuples consistent after shuffling.
        kvs::UInt32 seed = 12345678;
        coords = ::ShuffleArray<3>( point->coords(), seed );
        colors = ::ShuffleArray<3>( point->colors(), seed );
        if ( has_normal ) { normals = ::ShuffleArray<3>( point->normals(), seed ); }
    }

    // Release any previously allocated managers before re-creating them.
    // BUGFIX: the original condition was 'if ( !m_managers )', which only
    // "deleted" a null pointer (a no-op) and leaked the old Manager array
    // whenever create() was called again.
    if ( m_managers ) { delete [] m_managers; }
    m_nmanagers = nmanagers;
    m_managers = new Manager [ m_nmanagers ];

    // Distribute the vertices evenly: the first 'rem' managers get one
    // extra vertex each.
    const size_t nvertices = point->numberOfVertices();
    const size_t rem = nvertices % m_nmanagers;
    const size_t quo = nvertices / m_nmanagers;
    for ( size_t i = 0; i < m_nmanagers; i++ )
    {
        const size_t count = quo + ( i < rem ? 1 : 0 );
        const size_t first = quo * i + kvs::Math::Min( i, rem );

        Manager::VertexBuffer vertex_array;
        vertex_array.type = GL_FLOAT;
        vertex_array.size = count * sizeof( kvs::Real32 ) * 3;
        vertex_array.dim = 3;
        vertex_array.pointer = coords.data() + first * 3;
        m_managers[i].setVertexArray( vertex_array );

        Manager::VertexBuffer color_array;
        color_array.type = GL_UNSIGNED_BYTE;
        color_array.size = count * sizeof( kvs::UInt8 ) * 3;
        color_array.dim = 3;
        color_array.pointer = colors.data() + first * 3;
        m_managers[i].setColorArray( color_array );

        if ( has_normal )
        {
            Manager::VertexBuffer normal_array;
            normal_array.type = GL_FLOAT;
            normal_array.size = count * sizeof( kvs::Real32 ) * 3;
            normal_array.dim = 3;
            normal_array.pointer = normals.data() + first * 3;
            m_managers[i].setNormalArray( normal_array );
        }

        m_managers[i].create();
    }
}
// Draws the index-th sub-buffer (repetition) as GL_POINTS.  The vertex count
// is recomputed with the same partitioning scheme used in create().
void ParticleBasedRenderer::Engine::BufferObject::draw(
    const kvs::ObjectBase* object,
    const size_t index )
{
    KVS_ASSERT( index < m_nmanagers );

    const auto* point = kvs::PointObject::DownCast( object );
    const size_t nvertices = point->numberOfVertices();
    const size_t rem = nvertices % m_nmanagers;
    const size_t quo = nvertices / m_nmanagers;
    const size_t count = quo + ( index < rem ? 1 : 0 );

    auto& manager = m_managers[index];
    kvs::VertexBufferObjectManager::Binder bind( manager );

    // Bind the per-vertex random index attribute (2 unsigned shorts each).
    kvs::OpenGL::EnableVertexAttribArray( m_random_index );
    kvs::OpenGL::VertexAttribPointer( m_random_index, 2, GL_UNSIGNED_SHORT, GL_FALSE, 0, (GLubyte*)NULL + 0 );
    manager.drawArrays( GL_POINTS, 0, count );
    kvs::OpenGL::DisableVertexAttribArray( m_random_index );
}
// Convenience setter forwarding to the individual shader-file setters.
void ParticleBasedRenderer::Engine::RenderPass::setShaderFiles(
    const std::string& vert_file,
    const std::string& frag_file )
{
    this->setVertexShaderFile( vert_file );
    this->setFragmentShaderFile( frag_file );
}
// Builds the zooming shader program, defining preprocessor symbols for the
// selected shading model (only when shading is enabled) and for particle
// zooming.
void ParticleBasedRenderer::Engine::RenderPass::create(
    const kvs::Shader::ShadingModel& model,
    const bool enable )
{
    kvs::ShaderSource vert( "PBR_zooming.vert" );
    kvs::ShaderSource frag( "PBR_zooming.frag" );
    if ( enable )
    {
        switch ( model.type() )
        {
        case kvs::Shader::LambertShading: frag.define("ENABLE_LAMBERT_SHADING"); break;
        case kvs::Shader::PhongShading: frag.define("ENABLE_PHONG_SHADING"); break;
        case kvs::Shader::BlinnPhongShading: frag.define("ENABLE_BLINN_PHONG_SHADING"); break;
        default: break; // NO SHADING
        }

        if ( model.two_side_lighting )
        {
            frag.define("ENABLE_TWO_SIDE_LIGHTING");
        }
    }

    if ( m_enable_zooming )
    {
        vert.define("ENABLE_PARTICLE_ZOOMING");
        frag.define("ENABLE_PARTICLE_ZOOMING");
    }

    m_shader_program.build( vert, frag );
}
// Releases the current shader program and rebuilds it with the (possibly
// changed) shading model and enable flag.
void ParticleBasedRenderer::Engine::RenderPass::update(
    const kvs::Shader::ShadingModel& model,
    const bool enable )
{
    m_shader_program.release();
    this->create( model, enable );
}
// Uploads per-frame uniforms (shading coefficients, current modelview and
// projection matrices, random texture parameters) and resolves the
// "random_index" vertex attribute location for the buffer object.
void ParticleBasedRenderer::Engine::RenderPass::setup(
    const kvs::Shader::ShadingModel& model )
{
    kvs::ProgramObject::Binder bind( m_shader_program );
    m_shader_program.setUniform( "shading.Ka", model.Ka );
    m_shader_program.setUniform( "shading.Kd", model.Kd );
    m_shader_program.setUniform( "shading.Ks", model.Ks );
    m_shader_program.setUniform( "shading.S", model.S );

    const auto M = kvs::OpenGL::ModelViewMatrix();
    const auto P = kvs::OpenGL::ProjectionMatrix();
    m_shader_program.setUniform( "ModelViewMatrix", M );
    m_shader_program.setUniform( "ProjectionMatrix", P );

    // Random texture is expected on texture unit 0.
    const auto size_inv = 1.0f / m_parent->randomTextureSize();
    m_shader_program.setUniform( "random_texture", 0 );
    m_shader_program.setUniform( "random_texture_size_inv", size_inv );

    auto random_index = m_shader_program.attributeLocation("random_index");
    m_buffer_object.setRandomIndex( random_index );
}
// Draws one repetition with the shader program and random texture bound.
void ParticleBasedRenderer::Engine::RenderPass::draw(
    const kvs::ObjectBase* object,
    const size_t index )
{
    kvs::ProgramObject::Binder po( m_shader_program );
    kvs::Texture::Binder tex( m_parent->randomTexture() );
    m_buffer_object.draw( object, index );
}
/*===========================================================================*/
/**
* @brief Create shaders, VBO, and framebuffers.
* @param point [in] pointer to the point object
* @param camera [in] pointer to the camera
* @param light [in] pointer to the light
*/
/*===========================================================================*/
void ParticleBasedRenderer::Engine::create(
    kvs::ObjectBase* object,
    kvs::Camera* camera,
    kvs::Light* light )
{
    auto* point = kvs::PointObject::DownCast( object );
    BaseClass::attachObject( object );
    BaseClass::createRandomTexture();

    m_render_pass.create( BaseClass::shader(), BaseClass::isShadingEnabled() );
    m_buffer_object.create( object, BaseClass::repetitionLevel() );

    // Initial values for calculating the object depth.
    // The IsZero checks appear to act as "not yet captured" sentinels, so
    // the initial matrices/viewport are recorded only on the first call
    // (or when explicitly constructed as zero) -- presumably to preserve
    // user-supplied initial state passed to the constructor.
    if ( kvs::Math::IsZero( m_initial_modelview[3][3] ) )
    {
        m_initial_modelview = kvs::OpenGL::ModelViewMatrix();
    }

    if ( kvs::Math::IsZero( m_initial_projection[3][3] ) )
    {
        m_initial_projection = kvs::OpenGL::ProjectionMatrix();
    }

    if ( kvs::Math::IsZero( m_initial_viewport[2] ) )
    {
        const float dpr = camera->devicePixelRatio();
        const float framebuffer_width = camera->windowWidth() * dpr;
        const float framebuffer_height = camera->windowHeight() * dpr;
        m_initial_viewport[2] = framebuffer_width;
        m_initial_viewport[3] = framebuffer_height;
    }

    // Depth of the object center in the initial view, used by setup() to
    // derive the zooming uniforms.
    const kvs::Vec4 I( point->objectCenter(), 1.0f );
    const kvs::Vec4 O = m_initial_projection * m_initial_modelview * I;
    m_initial_object_depth = O.z();
}
/*===========================================================================*/
/**
* @brief Update.
* @param point [in] pointer to the point object
* @param camera [in] pointer to the camera
* @param light [in] pointer to the light
*/
/*===========================================================================*/
// Rebuilds the shader program and refreshes the cached initial viewport
// width used by the zoom-factor computation in setup().
// NOTE(review): only m_initial_viewport[2] (width) is refreshed here,
// while create() fills both width [2] and height [3] -- confirm whether
// the height should also be updated on window resize.
void ParticleBasedRenderer::Engine::update(
    kvs::ObjectBase* object,
    kvs::Camera* camera,
    kvs::Light* light )
{
    m_render_pass.update( BaseClass::shader(), BaseClass::isShadingEnabled() );

    const float dpr = camera->devicePixelRatio();
    const float framebuffer_width = camera->windowWidth() * dpr;
    m_initial_viewport[2] = framebuffer_width;
}
/*===========================================================================*/
/**
* @brief Setup.
* @param point [in] pointer to the point object
* @param camera [in] pointer to the camera
* @param light [in] pointer to the light
*/
/*===========================================================================*/
// Computes the particle zooming factors by comparing the current camera and
// viewport state with the state captured at creation time, then uploads them
// as uniforms.
void ParticleBasedRenderer::Engine::setup(
    kvs::ObjectBase* object,
    kvs::Camera* camera,
    kvs::Light* light )
{
    // The repetition counter must be reset here.
    BaseClass::resetRepetitions();

    m_render_pass.setup( BaseClass::shader() );

    // Initial state: modelview scale (length of the first column) and
    // framebuffer size recorded in create().
    const kvs::Mat4& m0 = m_initial_modelview;
    const float scale0 = kvs::Vec3( m0[0][0], m0[1][0], m0[2][0] ).length();
    const float width0 = m_initial_viewport[2];
    const float height0 = m_initial_viewport[3];

    // Current state.
    const kvs::Mat4 m = kvs::OpenGL::ModelViewMatrix();
    const float scale = kvs::Vec3( m[0][0], m[1][0], m[2][0] ).length();
    const float dpr = camera->devicePixelRatio();
    const float width = camera->windowWidth() * dpr;
    const float height = camera->windowHeight() * dpr;

    const float Cr = ( width / width0 ) * ( height / height0 ); // framebuffer area ratio
    const float Cs = scale / scale0;                            // modelview scaling ratio
    const float D0 = m_initial_object_depth;
    const float object_scale = Cr * Cs * dpr;
    const float object_depth = object_scale * D0;

    auto& shader_program = m_render_pass.shaderProgram();
    shader_program.bind();
    shader_program.setUniform( "object_scale", object_scale );
    shader_program.setUniform( "object_depth", object_depth );
    shader_program.setUniform( "screen_scale", kvs::Vec2( width * 0.5f, height * 0.5f ) );
    shader_program.unbind();
}
/*===========================================================================*/
/**
 *  @brief  Draw an ensemble of particles.
 *  @param  object [in] pointer to the object to be drawn
 *  @param  camera [in] pointer to the camera (unused here)
 *  @param  light [in] pointer to the light (unused here)
 */
/*===========================================================================*/
void ParticleBasedRenderer::Engine::draw(
    kvs::ObjectBase* object,
    kvs::Camera* camera,
    kvs::Light* light )
{
    kvs::OpenGL::Enable( GL_DEPTH_TEST );
    // Lets the vertex shader control the rendered point size (gl_PointSize).
    kvs::OpenGL::Enable( GL_VERTEX_PROGRAM_POINT_SIZE );
    m_render_pass.draw( object, BaseClass::repetitionCount() );
}
} // end of glsl
} // end of kvs
| naohisas/KVS | Source/Core/Visualization/Renderer/ParticleBasedRendererGLSL.cpp | C++ | bsd-3-clause | 17,008 |
/*-------------------------------------------------------------------------
*
* Copyright (c) 2004-2014, PostgreSQL Global Development Group
*
*
*-------------------------------------------------------------------------
*/
package org.postgresql.jdbc2;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Locale;
import java.util.List;
import java.util.Map;
import org.postgresql.util.GT;
import org.postgresql.util.PSQLException;
import org.postgresql.util.PSQLState;
/**
* this class stores supported escaped function
* @author Xavier Poinsard
*/
public class EscapedFunctions {
// numeric functions names
public final static String ABS="abs";
public final static String ACOS="acos";
public final static String ASIN="asin";
public final static String ATAN="atan";
public final static String ATAN2="atan2";
public final static String CEILING="ceiling";
public final static String COS="cos";
public final static String COT="cot";
public final static String DEGREES="degrees";
public final static String EXP="exp";
public final static String FLOOR="floor";
public final static String LOG="log";
public final static String LOG10="log10";
public final static String MOD="mod";
public final static String PI="pi";
public final static String POWER="power";
public final static String RADIANS="radians";
public final static String ROUND="round";
public final static String SIGN="sign";
public final static String SIN="sin";
public final static String SQRT="sqrt";
public final static String TAN="tan";
public final static String TRUNCATE="truncate";
// string function names
public final static String ASCII="ascii";
public final static String CHAR="char";
public final static String CONCAT="concat";
public final static String INSERT="insert"; // change arguments order
public final static String LCASE="lcase";
public final static String LEFT="left";
public final static String LENGTH="length";
public final static String LOCATE="locate"; // the 3 args version duplicate args
public final static String LTRIM="ltrim";
public final static String REPEAT="repeat";
public final static String REPLACE="replace";
public final static String RIGHT="right"; // duplicate args
public final static String RTRIM="rtrim";
public final static String SPACE="space";
public final static String SUBSTRING="substring";
public final static String UCASE="ucase";
// soundex is implemented on the server side by
// the contrib/fuzzystrmatch module. We provide a translation
// for this in the driver, but since we don't want to bother with run
// time detection of this module's installation we don't report this
// method as supported in DatabaseMetaData.
// difference is currently unsupported entirely.
// date time function names
public final static String CURDATE="curdate";
public final static String CURTIME="curtime";
public final static String DAYNAME="dayname";
public final static String DAYOFMONTH="dayofmonth";
public final static String DAYOFWEEK="dayofweek";
public final static String DAYOFYEAR="dayofyear";
public final static String HOUR="hour";
public final static String MINUTE="minute";
public final static String MONTH="month";
public final static String MONTHNAME="monthname";
public final static String NOW="now";
public final static String QUARTER="quarter";
public final static String SECOND="second";
public final static String WEEK="week";
public final static String YEAR="year";
// for timestampadd and timestampdiff the fractional part of second is not supported
// by the backend
// timestampdiff is very partially supported
public final static String TIMESTAMPADD="timestampadd";
public final static String TIMESTAMPDIFF="timestampdiff";
// constants for timestampadd and timestampdiff
public final static String SQL_TSI_ROOT="SQL_TSI_";
public final static String SQL_TSI_DAY="DAY";
public final static String SQL_TSI_FRAC_SECOND="FRAC_SECOND";
public final static String SQL_TSI_HOUR="HOUR";
public final static String SQL_TSI_MINUTE="MINUTE";
public final static String SQL_TSI_MONTH="MONTH";
public final static String SQL_TSI_QUARTER="QUARTER";
public final static String SQL_TSI_SECOND="SECOND";
public final static String SQL_TSI_WEEK="WEEK";
public final static String SQL_TSI_YEAR="YEAR";
// system functions
public final static String DATABASE="database";
public final static String IFNULL="ifnull";
public final static String USER="user";
    /** Lookup table from lower-cased "sqlxxx" method name to its {@link Method}. */
    private static Map functionMap = createFunctionMap();
    /**
     * Builds the translator lookup table by reflection: every method declared
     * in this class whose name starts with "sql" (e.g. {@code sqlceiling}) is
     * registered under its lower-cased name.
     * @return map of lower-cased method name to {@link Method}
     */
    private static Map createFunctionMap() {
        Method[] arrayMeths = EscapedFunctions.class.getDeclaredMethods();
        Map functionMap = new HashMap(arrayMeths.length*2);
        for (int i=0;i<arrayMeths.length;i++){
            Method meth = arrayMeths[i];
            if (meth.getName().startsWith("sql"))
                functionMap.put(meth.getName().toLowerCase(Locale.US),meth);
        }
        return functionMap;
    }
    /**
     * Gets the {@link Method} object implementing the translation of the given
     * escaped function.
     * @param functionName name of the searched function (matched case-insensitively)
     * @return a Method object or null if not found
     */
    public static Method getFunction(String functionName){
        return (Method) functionMap.get("sql"+functionName.toLowerCase(Locale.US));
    }
// ** numeric functions translations **
/** ceiling to ceil translation */
public static String sqlceiling(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("ceil(");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","ceiling"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append(')').toString();
}
/** log to ln translation */
public static String sqllog(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("ln(");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","log"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append(')').toString();
}
/** log10 to log translation */
public static String sqllog10(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("log(");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","log10"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append(')').toString();
}
/** power to pow translation */
public static String sqlpower(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("pow(");
if (parsedArgs.size()!=2){
throw new PSQLException(GT.tr("{0} function takes two and only two arguments.","power"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0)).append(',').append(parsedArgs.get(1));
return buf.append(')').toString();
}
/** truncate to trunc translation */
public static String sqltruncate(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("trunc(");
if (parsedArgs.size()!=2){
throw new PSQLException(GT.tr("{0} function takes two and only two arguments.","truncate"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0)).append(',').append(parsedArgs.get(1));
return buf.append(')').toString();
}
// ** string functions translations **
/** char to chr translation */
public static String sqlchar(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("chr(");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","char"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append(')').toString();
}
/** concat translation */
public static String sqlconcat(List parsedArgs){
StringBuilder buf = new StringBuilder();
buf.append('(');
for (int iArg = 0;iArg<parsedArgs.size();iArg++){
buf.append(parsedArgs.get(iArg));
if (iArg!=(parsedArgs.size()-1))
buf.append(" || ");
}
return buf.append(')').toString();
}
/** insert to overlay translation */
public static String sqlinsert(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("overlay(");
if (parsedArgs.size()!=4){
throw new PSQLException(GT.tr("{0} function takes four and only four argument.","insert"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0)).append(" placing ").append(parsedArgs.get(3));
buf.append(" from ").append(parsedArgs.get(1)).append(" for ").append(parsedArgs.get(2));
return buf.append(')').toString();
}
/** lcase to lower translation */
public static String sqllcase(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("lower(");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","lcase"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append(')').toString();
}
/** left to substring translation */
public static String sqlleft(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("substring(");
if (parsedArgs.size()!=2){
throw new PSQLException(GT.tr("{0} function takes two and only two arguments.","left"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0)).append(" for ").append(parsedArgs.get(1));
return buf.append(')').toString();
}
/** length translation */
public static String sqllength(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("length(trim(trailing from ");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","length"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append("))").toString();
}
    /**
     * locate translation.
     * The two-argument form maps to {@code position(needle in haystack)}.
     * The three-argument form searches from an offset by duplicating the
     * offset argument; {@code sign(...)} makes the expression evaluate to 0
     * when there is no match.
     * @throws SQLException if not called with two or three arguments
     */
    public static String sqllocate(List parsedArgs) throws SQLException{
        if (parsedArgs.size()==2){
            return "position("+parsedArgs.get(0)+" in "+parsedArgs.get(1)+")";
        }else if (parsedArgs.size()==3){
            String tmp = "position("+parsedArgs.get(0)+" in substring("+parsedArgs.get(1)+" from "+parsedArgs.get(2)+"))";
            // offset*sign(tmp)+tmp: 0 when not found, otherwise rebases the
            // position found in the substring back onto the full string.
            // NOTE(review): since substring(... from n) starts counting at n,
            // a found match appears to yield n + pos-in-substring, i.e. one
            // more than the 1-based position -- verify against the JDBC spec.
            return "("+parsedArgs.get(2)+"*sign("+tmp+")+"+tmp+")";
        }else{
            throw new PSQLException(GT.tr("{0} function takes two or three arguments.","locate"),
                PSQLState.SYNTAX_ERROR);
        }
    }
/** ltrim translation */
public static String sqlltrim(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("trim(leading from ");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","ltrim"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append(')').toString();
}
/** right to substring translation */
public static String sqlright(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("substring(");
if (parsedArgs.size()!=2){
throw new PSQLException(GT.tr("{0} function takes two and only two arguments.","right"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0)).append(" from (length(").append(parsedArgs.get(0)).append(")+1-").append(parsedArgs.get(1));
return buf.append("))").toString();
}
/** rtrim translation */
public static String sqlrtrim(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("trim(trailing from ");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","rtrim"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append(')').toString();
}
/** space translation */
public static String sqlspace(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("repeat(' ',");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","space"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append(')').toString();
}
/** substring to substr translation */
public static String sqlsubstring(List parsedArgs) throws SQLException{
if (parsedArgs.size()==2){
return "substr("+parsedArgs.get(0)+","+parsedArgs.get(1)+")";
}else if (parsedArgs.size()==3){
return "substr("+parsedArgs.get(0)+","+parsedArgs.get(1)+","+parsedArgs.get(2)+")";
}else{
throw new PSQLException(GT.tr("{0} function takes two or three arguments.","substring"),
PSQLState.SYNTAX_ERROR);
}
}
/** ucase to upper translation */
public static String sqlucase(List parsedArgs) throws SQLException{
StringBuilder buf = new StringBuilder();
buf.append("upper(");
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","ucase"),
PSQLState.SYNTAX_ERROR);
}
buf.append(parsedArgs.get(0));
return buf.append(')').toString();
}
/** curdate to current_date translation */
public static String sqlcurdate(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=0){
throw new PSQLException(GT.tr("{0} function doesn''t take any argument.","curdate"),
PSQLState.SYNTAX_ERROR);
}
return "current_date";
}
/** curtime to current_time translation */
public static String sqlcurtime(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=0){
throw new PSQLException(GT.tr("{0} function doesn''t take any argument.","curtime"),
PSQLState.SYNTAX_ERROR);
}
return "current_time";
}
/** dayname translation */
public static String sqldayname(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","dayname"),
PSQLState.SYNTAX_ERROR);
}
return "to_char("+parsedArgs.get(0)+",'Day')";
}
/** dayofmonth translation */
public static String sqldayofmonth(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","dayofmonth"),
PSQLState.SYNTAX_ERROR);
}
return "extract(day from "+parsedArgs.get(0)+")";
}
/** dayofweek translation
* adding 1 to postgresql function since we expect values from 1 to 7 */
public static String sqldayofweek(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","dayofweek"),
PSQLState.SYNTAX_ERROR);
}
return "extract(dow from "+parsedArgs.get(0)+")+1";
}
/** dayofyear translation */
public static String sqldayofyear(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","dayofyear"),
PSQLState.SYNTAX_ERROR);
}
return "extract(doy from "+parsedArgs.get(0)+")";
}
/** hour translation */
public static String sqlhour(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","hour"),
PSQLState.SYNTAX_ERROR);
}
return "extract(hour from "+parsedArgs.get(0)+")";
}
/** minute translation */
public static String sqlminute(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","minute"),
PSQLState.SYNTAX_ERROR);
}
return "extract(minute from "+parsedArgs.get(0)+")";
}
/** month translation */
public static String sqlmonth(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","month"),
PSQLState.SYNTAX_ERROR);
}
return "extract(month from "+parsedArgs.get(0)+")";
}
/** monthname translation */
public static String sqlmonthname(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","monthname"),
PSQLState.SYNTAX_ERROR);
}
return "to_char("+parsedArgs.get(0)+",'Month')";
}
/** quarter translation */
public static String sqlquarter(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","quarter"),
PSQLState.SYNTAX_ERROR);
}
return "extract(quarter from "+parsedArgs.get(0)+")";
}
/** second translation */
public static String sqlsecond(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","second"),
PSQLState.SYNTAX_ERROR);
}
return "extract(second from "+parsedArgs.get(0)+")";
}
/** week translation */
public static String sqlweek(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","week"),
PSQLState.SYNTAX_ERROR);
}
return "extract(week from "+parsedArgs.get(0)+")";
}
/** year translation */
public static String sqlyear(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=1){
throw new PSQLException(GT.tr("{0} function takes one and only one argument.","year"),
PSQLState.SYNTAX_ERROR);
}
return "extract(year from "+parsedArgs.get(0)+")";
}
    /**
     * timestampadd translation: {@code {fn timestampadd(SQL_TSI_xxx, count, ts)}}
     * becomes {@code (interval + ts)}, with the interval literal built by
     * {@link #constantToInterval}. The fractional part of seconds is not
     * supported by the backend.
     * @throws SQLException if not exactly three arguments, or the unit is unsupported
     */
    public static String sqltimestampadd(List parsedArgs) throws SQLException{
        if (parsedArgs.size()!=3){
            throw new PSQLException(GT.tr("{0} function takes three and only three arguments.","timestampadd"),
                PSQLState.SYNTAX_ERROR);
        }
        String interval = EscapedFunctions.constantToInterval(parsedArgs.get(0).toString(),parsedArgs.get(1).toString());
        StringBuilder buf = new StringBuilder();
        buf.append("(").append(interval).append("+");
        buf.append(parsedArgs.get(2)).append(")");
        return buf.toString();
    }
    /**
     * Maps a SQL_TSI_xxx interval-unit constant plus a count expression to a
     * PostgreSQL interval cast, e.g. {@code CAST(n || ' day' as interval)}.
     * A quarter is rewritten as three months. SQL_TSI_FRAC_SECOND is rejected
     * because the backend does not support it.
     * @param type the SQL_TSI_xxx constant name
     * @param value SQL expression giving the number of units
     * @return an interval cast expression
     * @throws SQLException for unknown or unsupported units
     */
    private final static String constantToInterval(String type,String value)throws SQLException{
        if (!type.startsWith(SQL_TSI_ROOT))
            throw new PSQLException(GT.tr("Interval {0} not yet implemented",type),
                PSQLState.SYNTAX_ERROR);
        String shortType = type.substring(SQL_TSI_ROOT.length());
        if (SQL_TSI_DAY.equalsIgnoreCase(shortType))
            return "CAST(" + value + " || ' day' as interval)";
        else if (SQL_TSI_SECOND.equalsIgnoreCase(shortType))
            return "CAST(" + value + " || ' second' as interval)";
        else if (SQL_TSI_HOUR.equalsIgnoreCase(shortType))
            return "CAST(" + value + " || ' hour' as interval)";
        else if (SQL_TSI_MINUTE.equalsIgnoreCase(shortType))
            return "CAST(" + value + " || ' minute' as interval)";
        else if (SQL_TSI_MONTH.equalsIgnoreCase(shortType))
            return "CAST(" + value + " || ' month' as interval)";
        else if (SQL_TSI_QUARTER.equalsIgnoreCase(shortType))
            return "CAST((" + value + "::int * 3) || ' month' as interval)";
        else if (SQL_TSI_WEEK.equalsIgnoreCase(shortType))
            return "CAST(" + value + " || ' week' as interval)";
        else if (SQL_TSI_YEAR.equalsIgnoreCase(shortType))
            return "CAST(" + value + " || ' year' as interval)";
        else if (SQL_TSI_FRAC_SECOND.equalsIgnoreCase(shortType))
            throw new PSQLException(GT.tr("Interval {0} not yet implemented","SQL_TSI_FRAC_SECOND"),
                PSQLState.SYNTAX_ERROR);
        else throw new PSQLException(GT.tr("Interval {0} not yet implemented",type),
                PSQLState.SYNTAX_ERROR);
    }
    /**
     * timestampdiff translation (only partially supported): becomes
     * {@code extract(unit from (ts2 - ts1))}.
     * NOTE(review): extract over an interval yields that field of the
     * interval, not the total difference in that unit -- which is why
     * {@link #constantToDatePart} deliberately rejects month/quarter/week/year.
     * @throws SQLException if not exactly three arguments, or the unit is unsupported
     */
    public static String sqltimestampdiff(List parsedArgs) throws SQLException{
        if (parsedArgs.size()!=3){
            throw new PSQLException(GT.tr("{0} function takes three and only three arguments.","timestampdiff"),
                PSQLState.SYNTAX_ERROR);
        }
        String datePart = EscapedFunctions.constantToDatePart(parsedArgs.get(0).toString());
        StringBuilder buf = new StringBuilder();
        buf.append("extract( ").append(datePart)
        .append(" from (").append(parsedArgs.get(2)).append("-").append(parsedArgs.get(1)).append("))");
        return buf.toString();
    }
    /**
     * Maps a SQL_TSI_xxx constant to the matching extract() field name for
     * timestampdiff. Month/quarter/week/year are deliberately unsupported:
     * extract over an interval cannot express a total difference in those
     * units (see the mailing-list link below).
     * @param type the SQL_TSI_xxx constant name
     * @return an extract() field name
     * @throws SQLException for unknown or unsupported units
     */
    private final static String constantToDatePart(String type)throws SQLException{
        if (!type.startsWith(SQL_TSI_ROOT))
            throw new PSQLException(GT.tr("Interval {0} not yet implemented",type),
                PSQLState.SYNTAX_ERROR);
        String shortType = type.substring(SQL_TSI_ROOT.length());
        if (SQL_TSI_DAY.equalsIgnoreCase(shortType))
            return "day";
        else if (SQL_TSI_SECOND.equalsIgnoreCase(shortType))
            return "second";
        else if (SQL_TSI_HOUR.equalsIgnoreCase(shortType))
            return "hour";
        else if (SQL_TSI_MINUTE.equalsIgnoreCase(shortType))
            return "minute";
        // See http://archives.postgresql.org/pgsql-jdbc/2006-03/msg00096.php
        /*else if (SQL_TSI_MONTH.equalsIgnoreCase(shortType))
            return "month";
        else if (SQL_TSI_QUARTER.equalsIgnoreCase(shortType))
            return "quarter";
        else if (SQL_TSI_WEEK.equalsIgnoreCase(shortType))
            return "week";
        else if (SQL_TSI_YEAR.equalsIgnoreCase(shortType))
            return "year";*/
        else if (SQL_TSI_FRAC_SECOND.equalsIgnoreCase(shortType))
            throw new PSQLException(GT.tr("Interval {0} not yet implemented","SQL_TSI_FRAC_SECOND"),
                PSQLState.SYNTAX_ERROR);
        else throw new PSQLException(GT.tr("Interval {0} not yet implemented",type),
                PSQLState.SYNTAX_ERROR);
    }
/** database translation */
public static String sqldatabase(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=0){
throw new PSQLException(GT.tr("{0} function doesn''t take any argument.","database"),
PSQLState.SYNTAX_ERROR);
}
return "current_database()";
}
/** ifnull translation */
public static String sqlifnull(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=2){
throw new PSQLException(GT.tr("{0} function takes two and only two arguments.","ifnull"),
PSQLState.SYNTAX_ERROR);
}
return "coalesce("+parsedArgs.get(0)+","+parsedArgs.get(1)+")";
}
/** user translation */
public static String sqluser(List parsedArgs) throws SQLException{
if (parsedArgs.size()!=0){
throw new PSQLException(GT.tr("{0} function doesn''t take any argument.","user"),
PSQLState.SYNTAX_ERROR);
}
return "user";
}
}
| ekoontz/pgjdbc | org/postgresql/jdbc2/EscapedFunctions.java | Java | bsd-3-clause | 25,872 |
(function(config, models, views, routers, utils, templates) {
// This is the top-level piece of UI.
views.Application = Backbone.View.extend({
// Events
// ------
events: {
'click .toggle-view': 'toggleView'
},
toggleView: function (e) {
e.preventDefault();
e.stopPropagation();
var link = $(e.currentTarget),
route = link.attr('href').replace(/^\//, '');
$('.toggle-view.active').removeClass('active');
link.addClass('active');
router.navigate(route, true);
},
// Initialize
// ----------
initialize: function () {
_.bindAll(this);
var that = this;
this.header = new views.Header({model: this.model});
// No longer needed
// $(window).on('scroll', function() {
// if ($(window).scrollTop()>60) {
// $('#post').addClass('sticky-menu');
// } else {
// $('#post').removeClass('sticky-menu');
// }
// });
function calculateLayout() {
if (that.mainView && that.mainView.refreshCodeMirror) {
that.mainView.refreshCodeMirror();
}
}
var lazyLayout = _.debounce(calculateLayout, 300);
$(window).resize(lazyLayout);
},
// Should be rendered just once
render: function () {
$(this.header.render().el).prependTo(this.el);
return this;
},
// Helpers
// -------
    // Swaps the current main view for a new one.
    // `name` tags <body> so per-view CSS can apply; `view` is an
    // already-constructed Backbone view whose element gets mounted in #main.
    replaceMainView: function (name, view) {
      $('body').removeClass().addClass('current-view '+name);
      // Make sure the header gets shown
      if (name !== "start") $('#header').show();
      // Backbone's remove() also detaches the old view's element and
      // unbinds its DOM events; otherwise just clear the container.
      if (this.mainView) {
        this.mainView.remove();
      } else {
        $('#main').empty();
      }
      this.mainView = view;
      $(view.el).appendTo(this.$('#main'));
    },
// Main Views
// ----------
static: function() {
this.header.render();
// No-op ;-)
},
posts: function (user, repo, branch, path) {
this.loading('Loading posts ...');
loadPosts(user, repo, branch, path, _.bind(function (err, data) {
this.loaded();
if (err) return this.notify('error', 'The requested resource could not be found.');
this.header.render();
this.replaceMainView("posts", new views.Posts({ model: data, id: 'posts' }).render());
}, this));
},
post: function (user, repo, branch, path, file, mode) {
this.loading('Loading post ...');
loadPosts(user, repo, branch, path, _.bind(function (err, data) {
if (err) return this.notify('error', 'The requested resource could not be found.');
loadPost(user, repo, branch, path, file, _.bind(function (err, data) {
this.loaded();
this.header.render();
if (err) return this.notify('error', 'The requested resource could not be found.');
data.preview = !(mode === "edit") || !window.authenticated;
data.lang = _.mode(file);
this.replaceMainView(window.authenticated ? "post" : "read-post", new views.Post({ model: data, id: 'post' }).render());
var that = this;
}, this));
this.header.render();
}, this));
},
newPost: function (user, repo, branch, path) {
this.loading('Creating file ...');
loadPosts(user, repo, branch, path, _.bind(function (err, data) {
emptyPost(user, repo, branch, path, _.bind(function(err, data) {
this.loaded();
data.jekyll = _.jekyll(path, data.file);
data.preview = false;
data.markdown = _.markdown(data.file);
this.replaceMainView("post", new views.Post({ model: data, id: 'post' }).render());
this.mainView._makeDirty();
app.state.file = data.file;
this.header.render();
}, this));
}, this));
},
profile: function(username) {
var that = this;
app.state.title = username;
this.loading('Loading profile ...');
loadRepos(username, function(err, data) {
that.header.render();
that.loaded();
data.authenticated = !!window.authenticated;
that.replaceMainView("start", new views.Profile({id: "start", model: data}).render());
});
},
start: function(username) {
var that = this;
app.state.title = "";
this.header.render();
this.replaceMainView("start", new views.Start({
id: "start",
model: _.extend(this.model, { authenticated: !!window.authenticated} )
}).render());
},
notify: function(type, message) {
this.header.render();
this.replaceMainView("notification", new views.Notification(type, message).render());
},
loading: function(msg) {
$('#main').html('<div class="loading"><span>'+ msg || 'Loading ...' +'</span></div>');
},
loaded: function() {
$('#main .loading').remove();
}
});
}).apply(this, window.args);
| dilbapat/dilbapat.github.com | _includes/views/application.js | JavaScript | bsd-3-clause | 4,667 |
<?php
namespace app\modules\aux_planejamento\models\planilhas;
use Yii;
/**
 * ActiveRecord model for table "planilhaconsumo_planico".
 *
 * Represents one consumable-material line of a course planning sheet
 * ("planilha de curso"), stored in the auxiliary planning database (db_apl).
 *
 * @property integer $id
 * @property string $planilhadecurso_cod
 * @property integer $planodeacao_cod
 * @property integer $materialconsumo_cod
 * @property integer $planico_codMXM
 * @property string $planico_descricao
 * @property integer $planico_quantidade
 * @property double $planico_valor
 * @property string $planico_tipo
 *
 * @property PlanilhadecursoPlacu $planilhadecursoCod
 */
class PlanilhaConsumo extends \yii\db\ActiveRecord
{
    /**
     * @inheritdoc
     */
    public static function tableName()
    {
        return 'planilhaconsumo_planico';
    }
    /**
     * Uses the auxiliary planning database connection instead of the default.
     * @return \yii\db\Connection the database connection used by this AR class.
     */
    public static function getDb()
    {
        return Yii::$app->get('db_apl');
    }
    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['planilhadecurso_cod', 'planodeacao_cod', 'materialconsumo_cod', 'planico_codMXM', 'planico_quantidade'], 'integer'],
            [['planico_valor'], 'number'],
            [['planico_descricao'], 'string', 'max' => 100],
            [['planico_tipo'], 'string', 'max' => 45],
            [['planilhadecurso_cod'], 'exist', 'skipOnError' => true, 'targetClass' => Planilhadecurso::className(), 'targetAttribute' => ['planilhadecurso_cod' => 'placu_codplanilha']],
        ];
    }
    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            'id' => 'ID',
            'planilhadecurso_cod' => 'Cod. Planilha',
            'planodeacao_cod' => 'Cod. Plano',
            'materialconsumo_cod' => 'Cód. Material Consumo',
            'planico_codMXM' => 'Cód. MXM',
            'planico_descricao' => 'Descrição',
            'planico_quantidade' => 'Qnt',
            'planico_valor' => 'Valor',
            'planico_tipo' => 'Tipo',
        ];
    }
    /**
     * Relation to the parent course planning sheet.
     * NOTE(review): rules() validates planilhadecurso_cod against
     * Planilhadecurso while this relation targets PlanilhadecursoPlacu --
     * confirm both refer to the same underlying table/model.
     * @return \yii\db\ActiveQuery
     */
    public function getPlanilhadecursoCod()
    {
        return $this->hasOne(PlanilhadecursoPlacu::className(), ['placu_codplanilha' => 'planilhadecurso_cod']);
    }
}
| FernandoMauricio/portal-senac | modules/aux_planejamento/models/planilhas/PlanilhaConsumo.php | PHP | bsd-3-clause | 2,231 |
# -*- coding: utf-8 -*-
#
#  __init__.py
#  cjktools
#
"""
This package contains various tools for Japanese NLP tasks, although some
may be applicable to any python project. See documentation of each module for
details.
"""

# Submodules exported by ``from cjktools import *``; the modules themselves
# are not imported here, so star-importers trigger the actual imports.
__all__ = [
    'alternations',
    'common',
    'enum',
    'exceptions',
    'kana_table',
    'maps',
    'scripts',
    'smart_cache',
    'resources',
]
| larsyencken/cjktools | cjktools/__init__.py | Python | bsd-3-clause | 385 |
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Linq;
using Projac.Sql;
namespace Projac.SqlClient.Legacy
{
public static partial class TSql
{
/// <summary>
/// Returns a T-SQL non query stored procedure.
/// </summary>
/// <param name="text">The text with named parameters.</param>
/// <param name="parameters">The named parameters.</param>
/// <returns>A <see cref="SqlNonQueryCommand" />.</returns>
public static SqlNonQueryCommand NonQueryProcedure(string text, object parameters = null)
{
return new SqlNonQueryCommand(text, CollectFromAnonymousType(parameters), CommandType.StoredProcedure);
}
/// <summary>
/// Returns a T-SQL non query stored procedure if the condition is satisfied.
/// </summary>
/// <param name="condition">The condition to satisfy</param>
/// <param name="text">The text with named parameters.</param>
/// <param name="parameters">The named parameters.</param>
/// <returns>A <see cref="SqlNonQueryCommand" />.</returns>
public static IEnumerable<SqlNonQueryCommand> NonQueryProcedureIf(bool condition, string text, object parameters = null)
{
if (condition)
yield return NonQueryProcedure(text, parameters);
}
/// <summary>
/// Returns a T-SQL non query stored procedure unless the condition is satisfied.
/// </summary>
/// <param name="condition">The condition to satisfy</param>
/// <param name="text">The text with named parameters.</param>
/// <param name="parameters">The named parameters.</param>
/// <returns>A <see cref="SqlNonQueryCommand" />.</returns>
public static IEnumerable<SqlNonQueryCommand> NonQueryProcedureUnless(bool condition, string text,
object parameters = null)
{
if (!condition)
yield return NonQueryProcedure(text, parameters);
}
/// <summary>
/// Returns a T-SQL non query stored procedure.
/// </summary>
/// <param name="format">The text with positional parameters to be formatted.</param>
/// <param name="parameters">The positional parameter values.</param>
/// <returns>A <see cref="SqlNonQueryCommand" />.</returns>
public static SqlNonQueryCommand NonQueryProcedureFormat(string format, params IDbParameterValue[] parameters)
{
if (parameters == null || parameters.Length == 0)
{
return new SqlNonQueryCommand(format, new DbParameter[0], CommandType.Text);
}
ThrowIfMaxParameterCountExceeded(parameters);
return new SqlNonQueryCommand(
string.Format(format,
parameters.Select((_, index) => (object)FormatDbParameterName("P" + index)).ToArray()),
parameters.Select((value, index) => value.ToDbParameter(FormatDbParameterName("P" + index))).ToArray(),
CommandType.StoredProcedure);
}
/// <summary>
/// Returns a T-SQL non query stored procedure if the condition is satisfied.
/// </summary>
/// <param name="condition">The condition to satisfy.</param>
/// <param name="format">The text with positional parameters to be formatted.</param>
/// <param name="parameters">The positional parameter values.</param>
/// <returns>A <see cref="SqlNonQueryCommand" />.</returns>
public static IEnumerable<SqlNonQueryCommand> NonQueryProcedureFormatIf(bool condition, string format,
params IDbParameterValue[] parameters)
{
if (condition)
yield return NonQueryProcedureFormat(format, parameters);
}
/// <summary>
/// Returns a T-SQL non query stored procedure unless the condition is satisfied.
/// </summary>
/// <param name="condition">The condition to satisfy.</param>
/// <param name="format">The text with positional parameters to be formatted.</param>
/// <param name="parameters">The positional parameter values.</param>
/// <returns>A <see cref="SqlNonQueryCommand" />.</returns>
public static IEnumerable<SqlNonQueryCommand> NonQueryProcedureFormatUnless(bool condition, string format,
    params IDbParameterValue[] parameters)
{
    // Guard-clause form: a satisfied condition yields an empty sequence.
    if (condition)
        yield break;
    yield return NonQueryProcedureFormat(format, parameters);
}
}
} | yreynhout/Projac | src/Projac.SqlClient/Legacy/TSql.NonQueryProcedure.cs | C# | bsd-3-clause | 4,626 |
import errno
import os
import types
import typing as t
from werkzeug.utils import import_string
class ConfigAttribute:
    """Descriptor that proxies attribute access on an object to an entry
    in that object's ``config`` mapping, optionally converting the value
    on read via ``get_converter``.
    """

    def __init__(self, name: str, get_converter: t.Optional[t.Callable] = None) -> None:
        self.__name__ = name
        self.get_converter = get_converter

    def __get__(self, obj: t.Any, owner: t.Any = None) -> t.Any:
        # Accessed on the class itself: return the descriptor, per convention.
        if obj is None:
            return self
        value = obj.config[self.__name__]
        if self.get_converter is None:
            return value
        return self.get_converter(value)

    def __set__(self, obj: t.Any, value: t.Any) -> None:
        # Writes go straight into the config mapping, unconverted.
        obj.config[self.__name__] = value
class Config(dict):
    """Works exactly like a dict but provides ways to fill it from files
    or special dictionaries. There are two common patterns to populate the
    config.

    Either you can fill the config from a config file::

        app.config.from_pyfile('yourconfig.cfg')

    Or alternatively you can define the configuration options in the
    module that calls :meth:`from_object` or provide an import path to
    a module that should be loaded. It is also possible to tell it to
    use the same module and with that provide the configuration values
    just before the call::

        DEBUG = True
        SECRET_KEY = 'development key'
        app.config.from_object(__name__)

    In both cases (loading from any Python file or loading from modules),
    only uppercase keys are added to the config. This makes it possible to use
    lowercase values in the config file for temporary values that are not added
    to the config or to define the config keys in the same file that implements
    the application.

    Probably the most interesting way to load configurations is from an
    environment variable pointing to a file::

        app.config.from_envvar('YOURAPPLICATION_SETTINGS')

    In this case before launching the application you have to set this
    environment variable to the file you want to use. On Linux and OS X
    use the export statement::

        export YOURAPPLICATION_SETTINGS='/path/to/config/file'

    On windows use `set` instead.

    :param root_path: path to which files are read relative from. When the
                      config object is created by the application, this is
                      the application's :attr:`~flask.Flask.root_path`.
    :param defaults: an optional dictionary of default values
    """

    def __init__(self, root_path: str, defaults: t.Optional[dict] = None) -> None:
        dict.__init__(self, defaults or {})
        self.root_path = root_path

    def from_envvar(self, variable_name: str, silent: bool = False) -> bool:
        """Loads a configuration from an environment variable pointing to
        a configuration file. This is basically just a shortcut with nicer
        error messages for this line of code::

            app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS'])

        :param variable_name: name of the environment variable
        :param silent: set to ``True`` if you want silent failure for missing
                       files.
        :return: bool. ``True`` if able to load config, ``False`` otherwise.
        """
        rv = os.environ.get(variable_name)
        if not rv:
            if silent:
                return False
            raise RuntimeError(
                f"The environment variable {variable_name!r} is not set"
                " and as such configuration could not be loaded. Set"
                " this variable and make it point to a configuration"
                " file"
            )
        return self.from_pyfile(rv, silent=silent)

    def from_pyfile(self, filename: str, silent: bool = False) -> bool:
        """Updates the values in the config from a Python file. This function
        behaves as if the file was imported as module with the
        :meth:`from_object` function.

        :param filename: the filename of the config. This can either be an
                         absolute filename or a filename relative to the
                         root path.
        :param silent: set to ``True`` if you want silent failure for missing
                       files.

        .. versionadded:: 0.7
           `silent` parameter.
        """
        filename = os.path.join(self.root_path, filename)
        d = types.ModuleType("config")
        d.__file__ = filename
        try:
            with open(filename, mode="rb") as config_file:
                exec(compile(config_file.read(), filename, "exec"), d.__dict__)
        except OSError as e:
            if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR):
                return False
            e.strerror = f"Unable to load configuration file ({e.strerror})"
            raise
        self.from_object(d)
        return True

    def from_object(self, obj: t.Union[object, str]) -> None:
        """Updates the values from the given object. An object can be of one
        of the following two types:

        -   a string: in this case the object with that name will be imported
        -   an actual object reference: that object is used directly

        Objects are usually either modules or classes. :meth:`from_object`
        loads only the uppercase attributes of the module/class. A ``dict``
        object will not work with :meth:`from_object` because the keys of a
        ``dict`` are not attributes of the ``dict`` class.

        Example of module-based configuration::

            app.config.from_object('yourapplication.default_config')
            from yourapplication import default_config
            app.config.from_object(default_config)

        Nothing is done to the object before loading. If the object is a
        class and has ``@property`` attributes, it needs to be
        instantiated before being passed to this method.

        You should not use this function to load the actual configuration but
        rather configuration defaults. The actual config should be loaded
        with :meth:`from_pyfile` and ideally from a location not within the
        package because the package might be installed system wide.

        See :ref:`config-dev-prod` for an example of class-based configuration
        using :meth:`from_object`.

        :param obj: an import name or object
        """
        if isinstance(obj, str):
            obj = import_string(obj)
        for key in dir(obj):
            if key.isupper():
                self[key] = getattr(obj, key)

    def from_file(
        self,
        filename: str,
        load: t.Callable[[t.IO[t.Any]], t.Mapping],
        silent: bool = False,
    ) -> bool:
        """Update the values in the config from a file that is loaded
        using the ``load`` parameter. The loaded data is passed to the
        :meth:`from_mapping` method.

        .. code-block:: python

            import toml
            app.config.from_file("config.toml", load=toml.load)

        :param filename: The path to the data file. This can be an
            absolute path or relative to the config root path.
        :param load: A callable that takes a file handle and returns a
            mapping of loaded data from the file.
        :type load: ``Callable[[Reader], Mapping]`` where ``Reader``
            implements a ``read`` method.
        :param silent: Ignore the file if it doesn't exist.

        .. versionadded:: 2.0
        """
        filename = os.path.join(self.root_path, filename)
        try:
            with open(filename) as f:
                obj = load(f)
        except OSError as e:
            # FIX: include ENOTDIR (a path component is a file, not a
            # directory) so the silent-failure set matches from_pyfile.
            if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR):
                return False
            e.strerror = f"Unable to load configuration file ({e.strerror})"
            raise
        return self.from_mapping(obj)

    def from_json(self, filename: str, silent: bool = False) -> bool:
        """Update the values in the config from a JSON file. The loaded
        data is passed to the :meth:`from_mapping` method.

        :param filename: The path to the JSON file. This can be an
            absolute path or relative to the config root path.
        :param silent: Ignore the file if it doesn't exist.

        .. deprecated:: 2.0.0
            Will be removed in Flask 2.1. Use :meth:`from_file` instead.
            This was removed early in 2.0.0, was added back in 2.0.1.

        .. versionadded:: 0.11
        """
        import warnings
        from . import json

        warnings.warn(
            "'from_json' is deprecated and will be removed in Flask"
            " 2.1. Use 'from_file(path, json.load)' instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.from_file(filename, json.load, silent=silent)

    def from_mapping(
        self, mapping: t.Optional[t.Mapping[str, t.Any]] = None, **kwargs: t.Any
    ) -> bool:
        """Updates the config like :meth:`update` ignoring items with non-upper
        keys.

        .. versionadded:: 0.11
        """
        mappings: t.Dict[str, t.Any] = {}
        if mapping is not None:
            mappings.update(mapping)
        mappings.update(kwargs)
        for key, value in mappings.items():
            # Only uppercase keys are configuration; everything else is ignored.
            if key.isupper():
                self[key] = value
        return True

    def get_namespace(
        self, namespace: str, lowercase: bool = True, trim_namespace: bool = True
    ) -> t.Dict[str, t.Any]:
        """Returns a dictionary containing a subset of configuration options
        that match the specified namespace/prefix. Example usage::

            app.config['IMAGE_STORE_TYPE'] = 'fs'
            app.config['IMAGE_STORE_PATH'] = '/var/app/images'
            app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com'
            image_store_config = app.config.get_namespace('IMAGE_STORE_')

        The resulting dictionary `image_store_config` would look like::

            {
                'type': 'fs',
                'path': '/var/app/images',
                'base_url': 'http://img.website.com'
            }

        This is often useful when configuration options map directly to
        keyword arguments in functions or class constructors.

        :param namespace: a configuration namespace
        :param lowercase: a flag indicating if the keys of the resulting
                          dictionary should be lowercase
        :param trim_namespace: a flag indicating if the keys of the resulting
                               dictionary should not include the namespace

        .. versionadded:: 0.11
        """
        rv = {}
        for k, v in self.items():
            if not k.startswith(namespace):
                continue
            if trim_namespace:
                key = k[len(namespace) :]
            else:
                key = k
            if lowercase:
                key = key.lower()
            rv[key] = v
        return rv

    def __repr__(self) -> str:
        return f"<{type(self).__name__} {dict.__repr__(self)}>"
| mitsuhiko/flask | src/flask/config.py | Python | bsd-3-clause | 11,068 |
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// HTTP server. See RFC 2616.
package http
import (
"bufio"
"bytes"
"context"
"crypto/tls"
"errors"
"fmt"
"io"
"io/ioutil"
"log"
"net"
"net/textproto"
"net/url"
"os"
"path"
"runtime"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"
"golang_org/x/net/lex/httplex"
)
// Errors used by the HTTP server.
// These are sentinel error values: callers compare against them by identity.
var (
	// ErrBodyNotAllowed is returned by ResponseWriter.Write calls
	// when the HTTP method or response code does not permit a
	// body.
	ErrBodyNotAllowed = errors.New("http: request method or response status code does not allow body")

	// ErrHijacked is returned by ResponseWriter.Write calls when
	// the underlying connection has been hijacked using the
	// Hijacker interface. A zero-byte write on a hijacked
	// connection will return ErrHijacked without any other side
	// effects.
	ErrHijacked = errors.New("http: connection has been hijacked")

	// ErrContentLength is returned by ResponseWriter.Write calls
	// when a Handler set a Content-Length response header with a
	// declared size and then attempted to write more bytes than
	// declared.
	ErrContentLength = errors.New("http: wrote more than the declared Content-Length")

	// Deprecated: ErrWriteAfterFlush is no longer used.
	// Kept only so existing references keep compiling.
	ErrWriteAfterFlush = errors.New("unused")
)
// A Handler responds to an HTTP request.
//
// ServeHTTP should write reply headers and data to the ResponseWriter
// and then return. Returning signals that the request is finished; it
// is not valid to use the ResponseWriter or read from the
// Request.Body after or concurrently with the completion of the
// ServeHTTP call.
//
// Depending on the HTTP client software, HTTP protocol version, and
// any intermediaries between the client and the Go server, it may not
// be possible to read from the Request.Body after writing to the
// ResponseWriter. Cautious handlers should read the Request.Body
// first, and then reply.
//
// Except for reading the body, handlers should not modify the
// provided Request.
//
// If ServeHTTP panics, the server (the caller of ServeHTTP) assumes
// that the effect of the panic was isolated to the active request.
// It recovers the panic, logs a stack trace to the server error log,
// and hangs up the connection. To abort a handler so the client sees
// an interrupted response but the server doesn't log an error, panic
// with the value ErrAbortHandler.
type Handler interface {
	ServeHTTP(ResponseWriter, *Request)
}

// A ResponseWriter interface is used by an HTTP handler to
// construct an HTTP response.
//
// A ResponseWriter may not be used after the Handler.ServeHTTP method
// has returned.
type ResponseWriter interface {
	// Header returns the header map that will be sent by
	// WriteHeader. The Header map also is the mechanism with which
	// Handlers can set HTTP trailers.
	//
	// Changing the header map after a call to WriteHeader (or
	// Write) has no effect unless the modified headers are
	// trailers.
	//
	// There are two ways to set Trailers. The preferred way is to
	// predeclare in the headers which trailers you will later
	// send by setting the "Trailer" header to the names of the
	// trailer keys which will come later. In this case, those
	// keys of the Header map are treated as if they were
	// trailers. See the example. The second way, for trailer
	// keys not known to the Handler until after the first Write,
	// is to prefix the Header map keys with the TrailerPrefix
	// constant value. See TrailerPrefix.
	//
	// To suppress implicit response headers (such as "Date"), set
	// their value to nil.
	Header() Header

	// Write writes the data to the connection as part of an HTTP reply.
	//
	// If WriteHeader has not yet been called, Write calls
	// WriteHeader(http.StatusOK) before writing the data. If the Header
	// does not contain a Content-Type line, Write adds a Content-Type set
	// to the result of passing the initial 512 bytes of written data to
	// DetectContentType.
	//
	// Depending on the HTTP protocol version and the client, calling
	// Write or WriteHeader may prevent future reads on the
	// Request.Body. For HTTP/1.x requests, handlers should read any
	// needed request body data before writing the response. Once the
	// headers have been flushed (due to either an explicit Flusher.Flush
	// call or writing enough data to trigger a flush), the request body
	// may be unavailable. For HTTP/2 requests, the Go HTTP server permits
	// handlers to continue to read the request body while concurrently
	// writing the response. However, such behavior may not be supported
	// by all HTTP/2 clients. Handlers should read before writing if
	// possible to maximize compatibility.
	Write([]byte) (int, error)

	// WriteHeader sends an HTTP response header with status code.
	// If WriteHeader is not called explicitly, the first call to Write
	// will trigger an implicit WriteHeader(http.StatusOK).
	// Thus explicit calls to WriteHeader are mainly used to
	// send error codes.
	WriteHeader(int)
}

// The Flusher interface is implemented by ResponseWriters that allow
// an HTTP handler to flush buffered data to the client.
//
// The default HTTP/1.x and HTTP/2 ResponseWriter implementations
// support Flusher, but ResponseWriter wrappers may not. Handlers
// should always test for this ability at runtime.
//
// Note that even for ResponseWriters that support Flush,
// if the client is connected through an HTTP proxy,
// the buffered data may not reach the client until the response
// completes.
type Flusher interface {
	// Flush sends any buffered data to the client.
	Flush()
}

// The Hijacker interface is implemented by ResponseWriters that allow
// an HTTP handler to take over the connection.
//
// The default ResponseWriter for HTTP/1.x connections supports
// Hijacker, but HTTP/2 connections intentionally do not.
// ResponseWriter wrappers may also not support Hijacker. Handlers
// should always test for this ability at runtime.
type Hijacker interface {
	// Hijack lets the caller take over the connection.
	// After a call to Hijack the HTTP server library
	// will not do anything else with the connection.
	//
	// It becomes the caller's responsibility to manage
	// and close the connection.
	//
	// The returned net.Conn may have read or write deadlines
	// already set, depending on the configuration of the
	// Server. It is the caller's responsibility to set
	// or clear those deadlines as needed.
	//
	// The returned bufio.Reader may contain unprocessed buffered
	// data from the client.
	Hijack() (net.Conn, *bufio.ReadWriter, error)
}

// The CloseNotifier interface is implemented by ResponseWriters which
// allow detecting when the underlying connection has gone away.
//
// This mechanism can be used to cancel long operations on the server
// if the client has disconnected before the response is ready.
type CloseNotifier interface {
	// CloseNotify returns a channel that receives at most a
	// single value (true) when the client connection has gone
	// away.
	//
	// CloseNotify may wait to notify until Request.Body has been
	// fully read.
	//
	// After the Handler has returned, there is no guarantee
	// that the channel receives a value.
	//
	// If the protocol is HTTP/1.1 and CloseNotify is called while
	// processing an idempotent request (such a GET) while
	// HTTP/1.1 pipelining is in use, the arrival of a subsequent
	// pipelined request may cause a value to be sent on the
	// returned channel. In practice HTTP/1.1 pipelining is not
	// enabled in browsers and not seen often in the wild. If this
	// is a problem, use HTTP/2 or only use CloseNotify on methods
	// such as POST.
	CloseNotify() <-chan bool
}

var (
	// ServerContextKey is a context key. It can be used in HTTP
	// handlers with context.WithValue to access the server that
	// started the handler. The associated value will be of
	// type *Server.
	ServerContextKey = &contextKey{"http-server"}

	// LocalAddrContextKey is a context key. It can be used in
	// HTTP handlers with context.WithValue to access the local
	// address the connection arrived on.
	// The associated value will be of type net.Addr.
	LocalAddrContextKey = &contextKey{"local-addr"}
)
// A conn represents the server side of an HTTP connection.
type conn struct {
	// server is the server on which the connection arrived.
	// Immutable; never nil.
	server *Server

	// cancelCtx cancels the connection-level context.
	cancelCtx context.CancelFunc

	// rwc is the underlying network connection.
	// This is never wrapped by other types and is the value given out
	// to CloseNotifier callers. It is usually of type *net.TCPConn or
	// *tls.Conn.
	rwc net.Conn

	// remoteAddr is rwc.RemoteAddr().String(). It is not populated synchronously
	// inside the Listener's Accept goroutine, as some implementations block.
	// It is populated immediately inside the (*conn).serve goroutine.
	// This is the value of a Handler's (*Request).RemoteAddr.
	remoteAddr string

	// tlsState is the TLS connection state when using TLS.
	// nil means not TLS.
	tlsState *tls.ConnectionState

	// werr is set to the first write error to rwc.
	// It is set via checkConnErrorWriter{w}, where bufw writes.
	werr error

	// r is bufr's read source. It's a wrapper around rwc that provides
	// io.LimitedReader-style limiting (while reading request headers)
	// and functionality to support CloseNotifier. See *connReader docs.
	r *connReader

	// bufr reads from r.
	bufr *bufio.Reader

	// bufw writes to checkConnErrorWriter{c}, which populates werr on error.
	bufw *bufio.Writer

	// lastMethod is the method of the most recent request
	// on this connection, if any.
	lastMethod string

	// curReq and curState are atomic.Values so they can be read by
	// goroutines other than the serve goroutine (e.g. connReader's
	// closeNotify loads curReq) without holding mu.
	curReq atomic.Value // of *response (which has a Request in it)

	curState atomic.Value // of ConnState

	// mu guards hijackedv
	mu sync.Mutex

	// hijackedv is whether this connection has been hijacked
	// by a Handler with the Hijacker interface.
	// It is guarded by mu.
	hijackedv bool
}
// hijacked reports, under c.mu, whether a Handler has taken over the
// connection via the Hijacker interface.
func (c *conn) hijacked() bool {
	c.mu.Lock()
	v := c.hijackedv
	c.mu.Unlock()
	return v
}
// c.mu must be held.
// hijackLocked hands the raw connection and a ReadWriter over to the
// caller, marking the conn as hijacked so the server stops touching it.
func (c *conn) hijackLocked() (rwc net.Conn, buf *bufio.ReadWriter, err error) {
	if c.hijackedv {
		return nil, nil, ErrHijacked
	}
	// Stop any in-flight background read before handing the conn over.
	c.r.abortPendingRead()
	c.hijackedv = true
	rwc = c.rwc
	// Clear any server-set deadlines; managing them is now the caller's job.
	rwc.SetDeadline(time.Time{})
	buf = bufio.NewReadWriter(c.bufr, bufio.NewWriter(rwc))
	if c.r.hasByte {
		// A byte was consumed by the background read; Peek forces bufr to
		// pull it (via connReader.Read) so the caller doesn't lose it.
		if _, err := c.bufr.Peek(c.bufr.Buffered() + 1); err != nil {
			return nil, nil, fmt.Errorf("unexpected Peek failure reading buffered byte: %v", err)
		}
	}
	c.setState(rwc, StateHijacked)
	return
}
// This should be >= 512 bytes for DetectContentType,
// but otherwise it's somewhat arbitrary.
const bufferBeforeChunkingSize = 2048

// chunkWriter writes to a response's conn buffer, and is the writer
// wrapped by the response.bufw buffered writer.
//
// chunkWriter also is responsible for finalizing the Header, including
// conditionally setting the Content-Type and setting a Content-Length
// in cases where the handler's final output is smaller than the buffer
// size. It also conditionally adds chunk headers, when in chunking mode.
//
// See the comment above (*response).Write for the entire write flow.
type chunkWriter struct {
	res *response

	// header is either nil or a deep clone of res.handlerHeader
	// at the time of res.WriteHeader, if res.WriteHeader is
	// called and extra buffering is being done to calculate
	// Content-Type and/or Content-Length.
	header Header

	// wroteHeader tells whether the header's been written to "the
	// wire" (or rather: w.conn.buf). this is unlike
	// (*response).wroteHeader, which tells only whether it was
	// logically written.
	wroteHeader bool

	// set by the writeHeader method:
	chunking bool // using chunked transfer encoding for reply body
}

// Shared wire-format byte slices, allocated once to avoid per-write
// allocations. colonSpace is not referenced in this excerpt; presumably
// used by header serialization elsewhere in the file.
var (
	crlf       = []byte("\r\n")
	colonSpace = []byte(": ")
)
// Write sends p to the connection, lazily emitting the response header
// first and wrapping the payload in a chunk frame when chunking is on.
// Any write error tears down the underlying connection.
func (cw *chunkWriter) Write(p []byte) (n int, err error) {
	if !cw.wroteHeader {
		cw.writeHeader(p)
	}
	if cw.res.req.Method == "HEAD" {
		// HEAD responses have no body: pretend the bytes were written.
		return len(p), nil
	}
	bufw := cw.res.conn.bufw
	if cw.chunking {
		// Chunk-size line, in hex, precedes the payload.
		if _, err = fmt.Fprintf(bufw, "%x\r\n", len(p)); err != nil {
			cw.res.conn.rwc.Close()
			return
		}
	}
	n, err = bufw.Write(p)
	if err == nil && cw.chunking {
		// Each chunk payload is terminated by CRLF.
		_, err = bufw.Write(crlf)
	}
	if err != nil {
		cw.res.conn.rwc.Close()
	}
	return
}
// flush forces the header (if still pending) and any buffered body
// bytes out to the connection's buffered writer.
func (cw *chunkWriter) flush() {
	if !cw.wroteHeader {
		cw.writeHeader(nil)
	}
	conn := cw.res.conn
	conn.bufw.Flush()
}
// close finalizes the response body: it ensures the header was written
// and, in chunked mode, emits the zero-length terminating chunk plus
// any declared trailers.
func (cw *chunkWriter) close() {
	if !cw.wroteHeader {
		cw.writeHeader(nil)
	}
	if cw.chunking {
		bw := cw.res.conn.bufw // conn's bufio writer
		// zero chunk to mark EOF
		bw.WriteString("0\r\n")
		if trailers := cw.res.finalTrailers(); trailers != nil {
			trailers.Write(bw) // the writer handles noting errors
		}
		// final blank line after the trailers (whether
		// present or not)
		bw.WriteString("\r\n")
	}
}
// A response represents the server side of an HTTP response.
type response struct {
	conn             *conn
	req              *Request // request for this response
	reqBody          io.ReadCloser
	cancelCtx        context.CancelFunc // when ServeHTTP exits
	wroteHeader      bool               // reply header has been (logically) written
	wroteContinue    bool               // 100 Continue response was written
	wants10KeepAlive bool               // HTTP/1.0 w/ Connection "keep-alive"
	wantsClose       bool               // HTTP request has Connection "close"

	w  *bufio.Writer // buffers output in chunks to chunkWriter
	cw chunkWriter

	// handlerHeader is the Header that Handlers get access to,
	// which may be retained and mutated even after WriteHeader.
	// handlerHeader is copied into cw.header at WriteHeader
	// time, and privately mutated thereafter.
	handlerHeader Header

	calledHeader bool // handler accessed handlerHeader via Header

	written       int64 // number of bytes written in body
	contentLength int64 // explicitly-declared Content-Length; or -1
	status        int   // status code passed to WriteHeader

	// close connection after this reply. set on request and
	// updated after response from handler if there's a
	// "Connection: keep-alive" response header and a
	// Content-Length.
	closeAfterReply bool

	// requestBodyLimitHit is set by requestTooLarge when
	// maxBytesReader hits its max size. It is checked in
	// WriteHeader, to make sure we don't consume the
	// remaining request body to try to advance to the next HTTP
	// request. Instead, when this is set, we stop reading
	// subsequent requests on this connection and stop reading
	// input from it.
	requestBodyLimitHit bool

	// trailers are the headers to be sent after the handler
	// finishes writing the body. This field is initialized from
	// the Trailer response header when the response header is
	// written.
	trailers []string

	handlerDone atomicBool // set true when the handler exits

	// Buffers for Date and Content-Length.
	// clenBuf is 10 bytes — presumably sized for a decimal int32;
	// verify against the formatting code that uses it.
	dateBuf [len(TimeFormat)]byte
	clenBuf [10]byte

	// closeNotifyCh is the channel returned by CloseNotify.
	// TODO(bradfitz): this is currently (for Go 1.8) always
	// non-nil. Make this lazily-created again as it used to be?
	closeNotifyCh  chan bool
	didCloseNotify int32 // atomic (only 0->1 winner should send)
}

// TrailerPrefix is a magic prefix for ResponseWriter.Header map keys
// that, if present, signals that the map entry is actually for
// the response trailers, and not the response headers. The prefix
// is stripped after the ServeHTTP call finishes and the values are
// sent in the trailers.
//
// This mechanism is intended only for trailers that are not known
// prior to the headers being written. If the set of trailers is fixed
// or known before the header is written, the normal Go trailers mechanism
// is preferred:
//    https://golang.org/pkg/net/http/#ResponseWriter
//    https://golang.org/pkg/net/http/#example_ResponseWriter_trailers
const TrailerPrefix = "Trailer:"
// finalTrailers is called after the Handler exits and returns a non-nil
// value if the Handler set any trailers. Trailers come from two places:
// handlerHeader keys carrying TrailerPrefix, and names predeclared via
// w.trailers.
func (w *response) finalTrailers() Header {
	var trailers Header
	ensure := func() {
		if trailers == nil {
			trailers = make(Header)
		}
	}
	for name, values := range w.handlerHeader {
		if !strings.HasPrefix(name, TrailerPrefix) {
			continue
		}
		ensure()
		trailers[strings.TrimPrefix(name, TrailerPrefix)] = values
	}
	for _, name := range w.trailers {
		ensure()
		for _, v := range w.handlerHeader[name] {
			trailers.Add(name, v)
		}
	}
	return trailers
}
// atomicBool is a boolean flag manipulated only with atomic int32
// operations; its zero value is "unset" (false).
type atomicBool int32

// isSet reports whether the flag has been set.
func (b *atomicBool) isSet() bool { return atomic.LoadInt32((*int32)(b)) != 0 }

// setTrue sets the flag; there is no way to clear it.
func (b *atomicBool) setTrue() { atomic.StoreInt32((*int32)(b), 1) }
// declareTrailer is called for each Trailer header when the
// response header is written. It notes that a header will need to be
// written in the trailers at the end of the response.
func (w *response) declareTrailer(k string) {
	k = CanonicalHeaderKey(k)
	if k == "Transfer-Encoding" || k == "Content-Length" || k == "Trailer" {
		// Forbidden by RFC 2616 14.40.
		return
	}
	w.trailers = append(w.trailers, k)
}
// requestTooLarge is called by maxBytesReader when too much input has
// been read from the client. It arranges for the connection to close
// after this reply and, if the header hasn't gone out yet, tells the
// client so via Connection: close.
func (w *response) requestTooLarge() {
	w.closeAfterReply = true
	w.requestBodyLimitHit = true
	if w.wroteHeader {
		return
	}
	w.Header().Set("Connection", "close")
}
// needsSniff reports whether a Content-Type still needs to be sniffed:
// the header hasn't hit the wire, the handler never set Content-Type,
// and fewer than sniffLen body bytes have been written so far.
func (w *response) needsSniff() bool {
	if w.cw.wroteHeader {
		return false
	}
	if _, haveType := w.handlerHeader["Content-Type"]; haveType {
		return false
	}
	return w.written < sniffLen
}
// writerOnly hides an io.Writer value's optional ReadFrom method
// from io.Copy. Wrapping a writer in this struct forces io.Copy to
// use its generic copy loop instead of delegating to ReadFrom.
type writerOnly struct {
	io.Writer
}
func srcIsRegularFile(src io.Reader) (isRegular bool, err error) {
switch v := src.(type) {
case *os.File:
fi, err := v.Stat()
if err != nil {
return false, err
}
return fi.Mode().IsRegular(), nil
case *io.LimitedReader:
return srcIsRegularFile(v.R)
default:
return
}
}
// ReadFrom is here to optimize copying from an *os.File regular file
// to a *net.TCPConn with sendfile.
func (w *response) ReadFrom(src io.Reader) (n int64, err error) {
	// Our underlying w.conn.rwc is usually a *TCPConn (with its
	// own ReadFrom method). If not, or if our src isn't a regular
	// file, just fall back to the normal copy method.
	rf, ok := w.conn.rwc.(io.ReaderFrom)
	regFile, err := srcIsRegularFile(src)
	if err != nil {
		return 0, err
	}
	if !ok || !regFile {
		// Generic path: pooled 32KB buffer, writerOnly hides our own
		// ReadFrom so io.CopyBuffer doesn't recurse into it.
		bufp := copyBufPool.Get().(*[]byte)
		defer copyBufPool.Put(bufp)
		return io.CopyBuffer(writerOnly{w}, src, *bufp)
	}

	// sendfile path:

	if !w.wroteHeader {
		w.WriteHeader(StatusOK)
	}

	if w.needsSniff() {
		// Copy just enough through the normal path to let
		// Content-Type sniffing see the first sniffLen bytes.
		n0, err := io.Copy(writerOnly{w}, io.LimitReader(src, sniffLen))
		n += n0
		if err != nil {
			return n, err
		}
	}

	w.w.Flush()  // get rid of any previous writes
	w.cw.flush() // make sure Header is written; flush data to rwc

	// Now that cw has been flushed, its chunking field is guaranteed initialized.
	if !w.cw.chunking && w.bodyAllowed() {
		// Hand the remainder straight to the connection's ReadFrom
		// (sendfile on TCP), bypassing our buffering.
		n0, err := rf.ReadFrom(src)
		n += n0
		w.written += n0
		return n, err
	}

	n0, err := io.Copy(writerOnly{w}, src)
	n += n0
	return n, err
}
// debugServerConnections controls whether all server connections are wrapped
// with a verbose logging wrapper.
const debugServerConnections = false

// Create new connection from rwc.
// The returned conn is not yet serving; remoteAddr etc. are filled in
// later by the serve goroutine (see the conn struct docs).
func (srv *Server) newConn(rwc net.Conn) *conn {
	c := &conn{
		server: srv,
		rwc:    rwc,
	}
	if debugServerConnections {
		c.rwc = newLoggingConn("server", c.rwc)
	}
	return c
}

// readResult carries the outcome of a single background read.
type readResult struct {
	n   int
	err error
	b   byte // byte read, if n == 1
}
// connReader is the io.Reader wrapper used by *conn. It combines a
// selectively-activated io.LimitedReader (to bound request header
// read sizes) with support for selectively keeping an io.Reader.Read
// call blocked in a background goroutine to wait for activity and
// trigger a CloseNotifier channel.
type connReader struct {
	conn *conn

	mu      sync.Mutex // guards following
	hasByte bool       // byteBuf holds one byte consumed by the background read
	byteBuf [1]byte
	// bgErr: no writer is visible in this excerpt — presumably set when a
	// background read fails; TODO confirm against the rest of the file.
	bgErr   error // non-nil means error happened on background read
	cond    *sync.Cond
	inRead  bool
	aborted bool  // set true before conn.rwc deadline is set to past
	remain  int64 // bytes remaining
}
// lock acquires cr.mu, lazily creating the condition variable used to
// wait out an in-flight read.
func (cr *connReader) lock() {
	cr.mu.Lock()
	if cr.cond == nil {
		cr.cond = sync.NewCond(&cr.mu)
	}
}

func (cr *connReader) unlock() { cr.mu.Unlock() }

// startBackgroundRead launches a goroutine that blocks in Read on the
// connection, so client disconnects (or pipelined bytes) can be
// detected while the handler runs. Panics on concurrent Body.Read.
func (cr *connReader) startBackgroundRead() {
	cr.lock()
	defer cr.unlock()
	if cr.inRead {
		panic("invalid concurrent Body.Read call")
	}
	if cr.hasByte {
		// A previous background read already banked a byte; nothing to do.
		return
	}
	cr.inRead = true
	// Clear any read deadline so the background read can block indefinitely.
	cr.conn.rwc.SetReadDeadline(time.Time{})
	go cr.backgroundRead()
}

// backgroundRead is the body of the goroutine started by
// startBackgroundRead. It reads at most one byte and records the outcome.
func (cr *connReader) backgroundRead() {
	n, err := cr.conn.rwc.Read(cr.byteBuf[:])
	cr.lock()
	if n == 1 {
		cr.hasByte = true
		// We were at EOF already (since we wouldn't be in a
		// background read otherwise), so this is a pipelined
		// HTTP request.
		cr.closeNotifyFromPipelinedRequest()
	}
	if ne, ok := err.(net.Error); ok && cr.aborted && ne.Timeout() {
		// Ignore this error. It's the expected error from
		// another goroutine calling abortPendingRead.
	} else if err != nil {
		cr.handleReadError(err)
	}
	cr.aborted = false
	cr.inRead = false
	cr.unlock()
	// Wake any abortPendingRead waiting for inRead to clear.
	cr.cond.Broadcast()
}

// abortPendingRead forces a blocked background read to return by
// setting a read deadline in the past, then waits for it to finish
// and restores a clear deadline.
func (cr *connReader) abortPendingRead() {
	cr.lock()
	defer cr.unlock()
	if !cr.inRead {
		return
	}
	cr.aborted = true
	cr.conn.rwc.SetReadDeadline(aLongTimeAgo)
	for cr.inRead {
		cr.cond.Wait()
	}
	cr.conn.rwc.SetReadDeadline(time.Time{})
}

// setReadLimit bounds how many bytes Read will hand out before
// reporting io.EOF; setInfiniteReadLimit effectively removes the bound.
func (cr *connReader) setReadLimit(remain int64) { cr.remain = remain }
func (cr *connReader) setInfiniteReadLimit()     { cr.remain = maxInt64 }
func (cr *connReader) hitReadLimit() bool        { return cr.remain <= 0 }

// may be called from multiple goroutines.
// Note: the err argument is currently unused; any read error cancels
// the connection context and fires close-notify.
func (cr *connReader) handleReadError(err error) {
	cr.conn.cancelCtx()
	cr.closeNotify()
}

// closeNotifyFromPipelinedRequest simply calls closeNotify.
//
// This method wrapper is here for documentation. The callers are the
// cases where we send on the closenotify channel because of a
// pipelined HTTP request, per the previous Go behavior and
// documentation (that this "MAY" happen).
//
// TODO: consider changing this behavior and making context
// cancelation and closenotify work the same.
func (cr *connReader) closeNotifyFromPipelinedRequest() {
	cr.closeNotify()
}

// may be called from multiple goroutines.
// The CompareAndSwap ensures exactly one goroutine ever sends on the
// response's closeNotifyCh.
func (cr *connReader) closeNotify() {
	res, _ := cr.conn.curReq.Load().(*response)
	if res != nil {
		if atomic.CompareAndSwapInt32(&res.didCloseNotify, 0, 1) {
			res.closeNotifyCh <- true
		}
	}
}

// Read implements io.Reader for the connection, honoring the read
// limit, consuming any byte banked by the background read first, and
// releasing cr.mu around the blocking network read.
func (cr *connReader) Read(p []byte) (n int, err error) {
	cr.lock()
	if cr.inRead {
		cr.unlock()
		panic("invalid concurrent Body.Read call")
	}
	if cr.hitReadLimit() {
		cr.unlock()
		return 0, io.EOF
	}
	if cr.bgErr != nil {
		err = cr.bgErr
		cr.unlock()
		return 0, err
	}
	if len(p) == 0 {
		cr.unlock()
		return 0, nil
	}
	if int64(len(p)) > cr.remain {
		p = p[:cr.remain]
	}
	if cr.hasByte {
		// Serve the byte the background read already pulled off the wire.
		p[0] = cr.byteBuf[0]
		cr.hasByte = false
		cr.unlock()
		return 1, nil
	}
	cr.inRead = true
	cr.unlock()
	n, err = cr.conn.rwc.Read(p)

	cr.lock()
	cr.inRead = false
	if err != nil {
		cr.handleReadError(err)
	}
	cr.remain -= int64(n)
	cr.unlock()

	cr.cond.Broadcast()
	return n, err
}
// Pools of bufio Readers/Writers, reused across connections to cut
// allocation churn.
var (
	bufioReaderPool   sync.Pool
	bufioWriter2kPool sync.Pool // writers with 2KB buffers
	bufioWriter4kPool sync.Pool // writers with 4KB buffers
)

// copyBufPool holds 32KB scratch buffers (as *[]byte to avoid an
// allocation per Put) for body-copying code.
var copyBufPool = sync.Pool{
	New: func() interface{} {
		b := make([]byte, 32*1024)
		return &b
	},
}
// bufioWriterPool returns the pool matching the given buffer size,
// or nil when the size is not one we pool (2KB or 4KB).
func bufioWriterPool(size int) *sync.Pool {
	if size == 2<<10 {
		return &bufioWriter2kPool
	}
	if size == 4<<10 {
		return &bufioWriter4kPool
	}
	return nil
}
// newBufioReader returns a pooled *bufio.Reader reset to read from r,
// allocating a fresh one when the pool is empty.
func newBufioReader(r io.Reader) *bufio.Reader {
	v := bufioReaderPool.Get()
	if v == nil {
		// Note: if this reader size is ever changed, update
		// TestHandlerBodyClose's assumptions.
		return bufio.NewReader(r)
	}
	br := v.(*bufio.Reader)
	br.Reset(r)
	return br
}
// putBufioReader resets br (dropping its reference to the underlying
// reader so it can be collected) and returns br to the pool.
func putBufioReader(br *bufio.Reader) {
	br.Reset(nil)
	bufioReaderPool.Put(br)
}
// newBufioWriterSize returns a *bufio.Writer with the requested buffer
// size targeting w, reusing a pooled writer when one is available for
// that size.
func newBufioWriterSize(w io.Writer, size int) *bufio.Writer {
	if pool := bufioWriterPool(size); pool != nil {
		if v := pool.Get(); v != nil {
			bw := v.(*bufio.Writer)
			bw.Reset(w)
			return bw
		}
	}
	return bufio.NewWriterSize(w, size)
}
// putBufioWriter resets bw and, when its buffer has a pooled size
// (2KB or 4KB), returns it to the matching pool; writers of other
// sizes are simply dropped for the GC.
func putBufioWriter(bw *bufio.Writer) {
	bw.Reset(nil)
	if pool := bufioWriterPool(bw.Available()); pool != nil {
		pool.Put(bw)
	}
}
// DefaultMaxHeaderBytes is the maximum permitted size of the headers
// in an HTTP request.
// This can be overridden by setting Server.MaxHeaderBytes.
const DefaultMaxHeaderBytes = 1 << 20 // 1 MB

// maxHeaderBytes returns the configured header limit, falling back
// to DefaultMaxHeaderBytes when Server.MaxHeaderBytes is unset or
// non-positive.
func (srv *Server) maxHeaderBytes() int {
	if srv.MaxHeaderBytes > 0 {
		return srv.MaxHeaderBytes
	}
	return DefaultMaxHeaderBytes
}

// initialReadLimitSize is the byte budget allowed while reading a
// request's header section; the extra 4096 covers bufio buffering.
func (srv *Server) initialReadLimitSize() int64 {
	return int64(srv.maxHeaderBytes()) + 4096 // bufio slop
}
// wrapper around io.ReadCloser which on first read, sends an
// HTTP/1.1 100 Continue header
type expectContinueReader struct {
	resp       *response     // the response whose connection receives the continue line
	readCloser io.ReadCloser // the wrapped request body
	closed     bool          // Close was called; further Reads fail
	sawEOF     bool          // the wrapped body returned io.EOF
}
// Read writes the "100 Continue" line the first time it is called
// (unless the connection was hijacked), then delegates to the wrapped
// body, recording whether EOF has been observed.
func (ecr *expectContinueReader) Read(p []byte) (int, error) {
	if ecr.closed {
		return 0, ErrBodyReadAfterClose
	}
	resp := ecr.resp
	if !resp.wroteContinue && !resp.conn.hijacked() {
		resp.wroteContinue = true
		resp.conn.bufw.WriteString("HTTP/1.1 100 Continue\r\n\r\n")
		resp.conn.bufw.Flush()
	}
	n, err := ecr.readCloser.Read(p)
	if err == io.EOF {
		ecr.sawEOF = true
	}
	return n, err
}
// Close marks the reader closed (subsequent Reads return
// ErrBodyReadAfterClose) and closes the wrapped body.
func (ecr *expectContinueReader) Close() error {
	ecr.closed = true
	return ecr.readCloser.Close()
}
// TimeFormat is the time format to use when generating times in HTTP
// headers. It is like time.RFC1123 but hard-codes GMT as the time
// zone. The time being formatted must be in UTC for Format to
// generate the correct format.
//
// For parsing this time format, see ParseTime.
// appendTime below produces the same layout without allocating.
const TimeFormat = "Mon, 02 Jan 2006 15:04:05 GMT"
// appendTime is a non-allocating version of []byte(t.UTC().Format(TimeFormat))
func appendTime(b []byte, t time.Time) []byte {
const days = "SunMonTueWedThuFriSat"
const months = "JanFebMarAprMayJunJulAugSepOctNovDec"
t = t.UTC()
yy, mm, dd := t.Date()
hh, mn, ss := t.Clock()
day := days[3*t.Weekday():]
mon := months[3*(mm-1):]
return append(b,
day[0], day[1], day[2], ',', ' ',
byte('0'+dd/10), byte('0'+dd%10), ' ',
mon[0], mon[1], mon[2], ' ',
byte('0'+yy/1000), byte('0'+(yy/100)%10), byte('0'+(yy/10)%10), byte('0'+yy%10), ' ',
byte('0'+hh/10), byte('0'+hh%10), ':',
byte('0'+mn/10), byte('0'+mn%10), ':',
byte('0'+ss/10), byte('0'+ss%10), ' ',
'G', 'M', 'T')
}
var errTooLarge = errors.New("http: request too large")
// Read next request from connection.
// readRequest parses one request off c.bufr, validates it, and returns
// a *response ready for the handler. Errors are returned for the caller
// (conn.serve) to translate into 4xx replies or silent closes.
func (c *conn) readRequest(ctx context.Context) (w *response, err error) {
	if c.hijacked() {
		return nil, ErrHijacked
	}

	// Compute the header-read and whole-request deadlines from server
	// config; a deferred func pushes out the write deadline once the
	// header has been read.
	var (
		wholeReqDeadline time.Time // or zero if none
		hdrDeadline      time.Time // or zero if none
	)
	t0 := time.Now()
	if d := c.server.readHeaderTimeout(); d != 0 {
		hdrDeadline = t0.Add(d)
	}
	if d := c.server.ReadTimeout; d != 0 {
		wholeReqDeadline = t0.Add(d)
	}
	c.rwc.SetReadDeadline(hdrDeadline)
	if d := c.server.WriteTimeout; d != 0 {
		defer func() {
			c.rwc.SetWriteDeadline(time.Now().Add(d))
		}()
	}

	// Cap header bytes; the limit is lifted after a successful parse.
	c.r.setReadLimit(c.server.initialReadLimitSize())
	if c.lastMethod == "POST" {
		// RFC 2616 section 4.1 tolerance for old buggy clients.
		peek, _ := c.bufr.Peek(4) // ReadRequest will get err below
		c.bufr.Discard(numLeadingCRorLF(peek))
	}
	req, err := readRequest(c.bufr, keepHostHeader)
	if err != nil {
		if c.r.hitReadLimit() {
			return nil, errTooLarge
		}
		return nil, err
	}

	if !http1ServerSupportsRequest(req) {
		return nil, badRequestError("unsupported protocol version")
	}

	c.lastMethod = req.Method
	c.r.setInfiniteReadLimit()

	// Validate Host header presence/count and all header field
	// names/values before handing the request to user code.
	hosts, haveHost := req.Header["Host"]
	isH2Upgrade := req.isH2Upgrade()
	if req.ProtoAtLeast(1, 1) && (!haveHost || len(hosts) == 0) && !isH2Upgrade {
		return nil, badRequestError("missing required Host header")
	}
	if len(hosts) > 1 {
		return nil, badRequestError("too many Host headers")
	}
	if len(hosts) == 1 && !httplex.ValidHostHeader(hosts[0]) {
		return nil, badRequestError("malformed Host header")
	}
	for k, vv := range req.Header {
		if !httplex.ValidHeaderFieldName(k) {
			return nil, badRequestError("invalid header name")
		}
		for _, v := range vv {
			if !httplex.ValidHeaderFieldValue(v) {
				return nil, badRequestError("invalid header value")
			}
		}
	}
	delete(req.Header, "Host")

	ctx, cancelCtx := context.WithCancel(ctx)
	req.ctx = ctx
	req.RemoteAddr = c.remoteAddr
	req.TLS = c.tlsState
	if body, ok := req.Body.(*body); ok {
		body.doEarlyClose = true
	}

	// Adjust the read deadline if necessary.
	if !hdrDeadline.Equal(wholeReqDeadline) {
		c.rwc.SetReadDeadline(wholeReqDeadline)
	}

	w = &response{
		conn:          c,
		cancelCtx:     cancelCtx,
		req:           req,
		reqBody:       req.Body,
		handlerHeader: make(Header),
		contentLength: -1,
		closeNotifyCh: make(chan bool, 1),

		// We populate these ahead of time so we're not
		// reading from req.Header after their Handler starts
		// and maybe mutates it (Issue 14940)
		wants10KeepAlive: req.wantsHttp10KeepAlive(),
		wantsClose:       req.wantsClose(),
	}
	if isH2Upgrade {
		w.closeAfterReply = true
	}
	w.cw.res = w
	w.w = newBufioWriterSize(&w.cw, bufferBeforeChunkingSize)
	return w, nil
}
// http1ServerSupportsRequest reports whether Go's HTTP/1.x server
// supports the given request.
func http1ServerSupportsRequest(req *Request) bool {
	switch {
	case req.ProtoMajor == 1:
		return true
	case req.ProtoMajor == 2 && req.ProtoMinor == 0 && req.Method == "PRI" && req.RequestURI == "*":
		// Accept "PRI * HTTP/2.0" upgrade requests, so Handlers can
		// wire up their own HTTP/2 upgrades.
		return true
	default:
		// Reject HTTP/0.x, and all other HTTP/2+ requests (which
		// aren't encoded in ASCII anyway).
		return false
	}
}
// Header returns the handler's header map, implementing the
// ResponseWriter interface.
func (w *response) Header() Header {
	if w.cw.header == nil && w.wroteHeader && !w.cw.wroteHeader {
		// Accessing the header between logically writing it
		// and physically writing it means we need to allocate
		// a clone to snapshot the logically written state.
		w.cw.header = w.handlerHeader.clone()
	}
	// Remember that the handler looked at the map, so WriteHeader
	// knows a snapshot may be needed.
	w.calledHeader = true
	return w.handlerHeader
}
// maxPostHandlerReadBytes is the max number of Request.Body bytes not
// consumed by a handler that the server will read from the client
// in order to keep a connection alive. If there are more bytes than
// this then the server to be paranoid instead sends a "Connection:
// close" response.
//
// This number is approximately what a typical machine's TCP buffer
// size is anyway. (if we have the bytes on the machine, we might as
// well read them)
// Used by chunkWriter.writeHeader's leftover-body drain logic.
const maxPostHandlerReadBytes = 256 << 10
// WriteHeader records the response status code and snapshots any
// explicitly-set, valid Content-Length. Duplicate calls and calls on
// hijacked connections are logged and ignored.
func (w *response) WriteHeader(code int) {
	if w.conn.hijacked() {
		w.conn.server.logf("http: response.WriteHeader on hijacked connection")
		return
	}
	if w.wroteHeader {
		w.conn.server.logf("http: multiple response.WriteHeader calls")
		return
	}
	w.wroteHeader = true
	w.status = code

	// If the handler already has a reference to the header map,
	// snapshot it so later mutations don't affect this reply.
	if w.calledHeader && w.cw.header == nil {
		w.cw.header = w.handlerHeader.clone()
	}

	cl := w.handlerHeader.get("Content-Length")
	if cl == "" {
		return
	}
	v, err := strconv.ParseInt(cl, 10, 64)
	if err != nil || v < 0 {
		// Malformed or negative lengths are dropped rather than sent.
		w.conn.server.logf("http: invalid Content-Length of %q", cl)
		w.handlerHeader.Del("Content-Length")
		return
	}
	w.contentLength = v
}
// extraHeader is the set of headers sometimes added by chunkWriter.writeHeader.
// This type is used to avoid extra allocations from cloning and/or populating
// the response Header map and all its 1-element slices.
type extraHeader struct {
	contentType      string
	connection       string
	transferEncoding string
	date             []byte // written if not nil
	contentLength    []byte // written if not nil
}

// Sorted the same as extraHeader.Write's loop.
// Index i pairs with the i-th string field written by extraHeader.Write.
var extraHeaderKeys = [][]byte{
	[]byte("Content-Type"),
	[]byte("Connection"),
	[]byte("Transfer-Encoding"),
}

// Pre-rendered "Key: " prefixes for the two byte-slice fields above.
var (
	headerContentLength = []byte("Content-Length: ")
	headerDate          = []byte("Date: ")
)
// Write writes the headers described in h to w.
//
// This method has a value receiver, despite the somewhat large size
// of h, because it prevents an allocation. The escape analysis isn't
// smart enough to realize this function doesn't mutate h.
func (h extraHeader) Write(w *bufio.Writer) {
	if h.date != nil {
		w.Write(headerDate)
		w.Write(h.date)
		w.Write(crlf)
	}
	if h.contentLength != nil {
		w.Write(headerContentLength)
		w.Write(h.contentLength)
		w.Write(crlf)
	}
	// The string fields pair positionally with extraHeaderKeys.
	values := [...]string{h.contentType, h.connection, h.transferEncoding}
	for i := range values {
		if v := values[i]; v != "" {
			w.Write(extraHeaderKeys[i])
			w.Write(colonSpace)
			w.WriteString(v)
			w.Write(crlf)
		}
	}
}
// writeHeader finalizes the header sent to the client and writes it
// to cw.res.conn.bufw.
//
// p is not written by writeHeader, but is the first chunk of the body
// that will be written. It is sniffed for a Content-Type if none is
// set explicitly. It's also used to set the Content-Length, if the
// total body size was small and the handler has already finished
// running.
func (cw *chunkWriter) writeHeader(p []byte) {
	if cw.wroteHeader {
		return
	}
	cw.wroteHeader = true

	w := cw.res
	keepAlivesEnabled := w.conn.server.doKeepAlives()
	isHEAD := w.req.Method == "HEAD"

	// header is written out to w.conn.buf below. Depending on the
	// state of the handler, we either own the map or not. If we
	// don't own it, the exclude map is created lazily for
	// WriteSubset to remove headers. The setHeader struct holds
	// headers we need to add.
	header := cw.header
	owned := header != nil
	if !owned {
		header = w.handlerHeader
	}
	var excludeHeader map[string]bool
	delHeader := func(key string) {
		if owned {
			header.Del(key)
			return
		}
		if _, ok := header[key]; !ok {
			return
		}
		if excludeHeader == nil {
			excludeHeader = make(map[string]bool)
		}
		excludeHeader[key] = true
	}
	var setHeader extraHeader

	// Don't write out the fake "Trailer:foo" keys. See TrailerPrefix.
	trailers := false
	for k := range cw.header {
		if strings.HasPrefix(k, TrailerPrefix) {
			if excludeHeader == nil {
				excludeHeader = make(map[string]bool)
			}
			excludeHeader[k] = true
			trailers = true
		}
	}
	for _, v := range cw.header["Trailer"] {
		trailers = true
		foreachHeaderElement(v, cw.res.declareTrailer)
	}

	te := header.get("Transfer-Encoding")
	hasTE := te != ""

	// If the handler is done but never sent a Content-Length
	// response header and this is our first (and last) write, set
	// it, even to zero. This helps HTTP/1.0 clients keep their
	// "keep-alive" connections alive.
	// Exceptions: 304/204/1xx responses never get Content-Length, and if
	// it was a HEAD request, we don't know the difference between
	// 0 actual bytes and 0 bytes because the handler noticed it
	// was a HEAD request and chose not to write anything. So for
	// HEAD, the handler should either write the Content-Length or
	// write non-zero bytes. If it's actually 0 bytes and the
	// handler never looked at the Request.Method, we just don't
	// send a Content-Length header.
	// Further, we don't send an automatic Content-Length if they
	// set a Transfer-Encoding, because they're generally incompatible.
	if w.handlerDone.isSet() && !trailers && !hasTE && bodyAllowedForStatus(w.status) && header.get("Content-Length") == "" && (!isHEAD || len(p) > 0) {
		w.contentLength = int64(len(p))
		setHeader.contentLength = strconv.AppendInt(cw.res.clenBuf[:0], int64(len(p)), 10)
	}

	// If this was an HTTP/1.0 request with keep-alive and we sent a
	// Content-Length back, we can make this a keep-alive response ...
	if w.wants10KeepAlive && keepAlivesEnabled {
		sentLength := header.get("Content-Length") != ""
		if sentLength && header.get("Connection") == "keep-alive" {
			w.closeAfterReply = false
		}
	}

	// Check for a explicit (and valid) Content-Length header.
	hasCL := w.contentLength != -1

	if w.wants10KeepAlive && (isHEAD || hasCL || !bodyAllowedForStatus(w.status)) {
		_, connectionHeaderSet := header["Connection"]
		if !connectionHeaderSet {
			setHeader.connection = "keep-alive"
		}
	} else if !w.req.ProtoAtLeast(1, 1) || w.wantsClose {
		w.closeAfterReply = true
	}

	if header.get("Connection") == "close" || !keepAlivesEnabled {
		w.closeAfterReply = true
	}

	// If the client wanted a 100-continue but we never sent it to
	// them (or, more strictly: we never finished reading their
	// request body), don't reuse this connection because it's now
	// in an unknown state: we might be sending this response at
	// the same time the client is now sending its request body
	// after a timeout. (Some HTTP clients send Expect:
	// 100-continue but knowing that some servers don't support
	// it, the clients set a timer and send the body later anyway)
	// If we haven't seen EOF, we can't skip over the unread body
	// because we don't know if the next bytes on the wire will be
	// the body-following-the-timer or the subsequent request.
	// See Issue 11549.
	if ecr, ok := w.req.Body.(*expectContinueReader); ok && !ecr.sawEOF {
		w.closeAfterReply = true
	}

	// Per RFC 2616, we should consume the request body before
	// replying, if the handler hasn't already done so. But we
	// don't want to do an unbounded amount of reading here for
	// DoS reasons, so we only try up to a threshold.
	// TODO(bradfitz): where does RFC 2616 say that? See Issue 15527
	// about HTTP/1.x Handlers concurrently reading and writing, like
	// HTTP/2 handlers can do. Maybe this code should be relaxed?
	if w.req.ContentLength != 0 && !w.closeAfterReply {
		var discard, tooBig bool

		switch bdy := w.req.Body.(type) {
		case *expectContinueReader:
			if bdy.resp.wroteContinue {
				discard = true
			}
		case *body:
			bdy.mu.Lock()
			switch {
			case bdy.closed:
				if !bdy.sawEOF {
					// Body was closed in handler with non-EOF error.
					w.closeAfterReply = true
				}
			case bdy.unreadDataSizeLocked() >= maxPostHandlerReadBytes:
				tooBig = true
			default:
				discard = true
			}
			bdy.mu.Unlock()
		default:
			discard = true
		}

		if discard {
			// Drain up to the threshold plus one byte; reading the
			// extra byte tells us whether the body exceeded the cap.
			_, err := io.CopyN(ioutil.Discard, w.reqBody, maxPostHandlerReadBytes+1)
			switch err {
			case nil:
				// There must be even more data left over.
				tooBig = true
			case ErrBodyReadAfterClose:
				// Body was already consumed and closed.
			case io.EOF:
				// The remaining body was just consumed, close it.
				err = w.reqBody.Close()
				if err != nil {
					w.closeAfterReply = true
				}
			default:
				// Some other kind of error occurred, like a read timeout, or
				// corrupt chunked encoding. In any case, whatever remains
				// on the wire must not be parsed as another HTTP request.
				w.closeAfterReply = true
			}
		}

		if tooBig {
			w.requestTooLarge()
			delHeader("Connection")
			setHeader.connection = "close"
		}
	}

	code := w.status
	if bodyAllowedForStatus(code) {
		// If no content type, apply sniffing algorithm to body.
		_, haveType := header["Content-Type"]
		if !haveType && !hasTE {
			setHeader.contentType = DetectContentType(p)
		}
	} else {
		for _, k := range suppressedHeaders(code) {
			delHeader(k)
		}
	}

	if _, ok := header["Date"]; !ok {
		setHeader.date = appendTime(cw.res.dateBuf[:0], time.Now())
	}

	if hasCL && hasTE && te != "identity" {
		// TODO: return an error if WriteHeader gets a return parameter
		// For now just ignore the Content-Length.
		w.conn.server.logf("http: WriteHeader called with both Transfer-Encoding of %q and a Content-Length of %d",
			te, w.contentLength)
		delHeader("Content-Length")
		hasCL = false
	}

	if w.req.Method == "HEAD" || !bodyAllowedForStatus(code) {
		// do nothing
	} else if code == StatusNoContent {
		delHeader("Transfer-Encoding")
	} else if hasCL {
		delHeader("Transfer-Encoding")
	} else if w.req.ProtoAtLeast(1, 1) {
		// HTTP/1.1 or greater: Transfer-Encoding has been set to identity, and no
		// content-length has been provided. The connection must be closed after the
		// reply is written, and no chunking is to be done. This is the setup
		// recommended in the Server-Sent Events candidate recommendation 11,
		// section 8.
		if hasTE && te == "identity" {
			cw.chunking = false
			w.closeAfterReply = true
		} else {
			// HTTP/1.1 or greater: use chunked transfer encoding
			// to avoid closing the connection at EOF.
			cw.chunking = true
			setHeader.transferEncoding = "chunked"
			if hasTE && te == "chunked" {
				// We will send the chunked Transfer-Encoding header later.
				delHeader("Transfer-Encoding")
			}
		}
	} else {
		// HTTP version < 1.1: cannot do chunked transfer
		// encoding and we don't know the Content-Length so
		// signal EOF by closing connection.
		w.closeAfterReply = true
		delHeader("Transfer-Encoding") // in case already set
	}

	// Cannot use Content-Length with non-identity Transfer-Encoding.
	if cw.chunking {
		delHeader("Content-Length")
	}
	if !w.req.ProtoAtLeast(1, 0) {
		return
	}

	if w.closeAfterReply && (!keepAlivesEnabled || !hasToken(cw.header.get("Connection"), "close")) {
		delHeader("Connection")
		if w.req.ProtoAtLeast(1, 1) {
			setHeader.connection = "close"
		}
	}

	// Finally emit: status line, the handler's headers (minus any
	// excluded keys), our synthesized headers, then the blank line.
	w.conn.bufw.WriteString(statusLine(w.req, code))
	cw.header.WriteSubset(w.conn.bufw, excludeHeader)
	setHeader.Write(w.conn.bufw)
	w.conn.bufw.Write(crlf)
}
// foreachHeaderElement splits v according to the "#rule" construction
// in RFC 2616 section 2.1 and calls fn for each non-empty element.
func foreachHeaderElement(v string, fn func(string)) {
	v = textproto.TrimString(v)
	switch {
	case v == "":
		// Nothing to report.
	case !strings.Contains(v, ","):
		fn(v)
	default:
		for _, part := range strings.Split(v, ",") {
			if trimmed := textproto.TrimString(part); trimmed != "" {
				fn(trimmed)
			}
		}
	}
}
// statusLines is a cache of Status-Line strings, keyed by code (for
// HTTP/1.1) or negative code (for HTTP/1.0). This is faster than a
// map keyed by struct of two fields. This map's max size is bounded
// by 2*len(statusText), two protocol types for each known official
// status code in the statusText map.
// statusMu guards statusLines; see statusLine below.
var (
	statusMu    sync.RWMutex
	statusLines = make(map[int]string)
)
// statusLine returns a response Status-Line (RFC 2616 Section 6.1)
// for the given request and response status code.
func statusLine(req *Request, code int) string {
	// Fast path: cached lines are keyed by code for HTTP/1.1 and by
	// -code for HTTP/1.0.
	proto11 := req.ProtoAtLeast(1, 1)
	key := code
	if !proto11 {
		key = -code
	}
	statusMu.RLock()
	cached, hit := statusLines[key]
	statusMu.RUnlock()
	if hit {
		return cached
	}

	// Slow path: build the line and, for known status codes, cache it.
	proto := "HTTP/1.0"
	if proto11 {
		proto = "HTTP/1.1"
	}
	codestring := fmt.Sprintf("%03d", code)
	text, known := statusText[code]
	if !known {
		text = "status code " + codestring
	}
	line := proto + " " + codestring + " " + text + "\r\n"
	if known {
		statusMu.Lock()
		statusLines[key] = line
		statusMu.Unlock()
	}
	return line
}
// bodyAllowed reports whether a Write is allowed for this response type.
// It's illegal to call this before the header has been flushed.
func (w *response) bodyAllowed() bool {
	if !w.wroteHeader {
		// Fix: the original panicked with an empty string, which gives
		// maintainers no clue in crash logs; name the misuse instead.
		panic("http: bodyAllowed called before WriteHeader")
	}
	return bodyAllowedForStatus(w.status)
}
// The Life Of A Write is like this:
//
// Handler starts. No header has been sent. The handler can either
// write a header, or just start writing. Writing before sending a header
// sends an implicitly empty 200 OK header.
//
// If the handler didn't declare a Content-Length up front, we either
// go into chunking mode or, if the handler finishes running before
// the chunking buffer size, we compute a Content-Length and send that
// in the header instead.
//
// Likewise, if the handler didn't set a Content-Type, we sniff that
// from the initial chunk of output.
//
// The Writers are wired together like:
//
// 1. *response (the ResponseWriter) ->
// 2. (*response).w, a *bufio.Writer of bufferBeforeChunkingSize bytes
// 3. chunkWriter.Writer (whose writeHeader finalizes Content-Length/Type)
// and which writes the chunk headers, if needed.
// 4. conn.buf, a bufio.Writer of default (4kB) bytes, writing to ->
// 5. checkConnErrorWriter{c}, which notes any non-nil error on Write
// and populates c.werr with it if so. but otherwise writes to:
// 6. the rwc, the net.Conn.
//
// TODO(bradfitz): short-circuit some of the buffering when the
// initial header contains both a Content-Type and Content-Length.
// Also short-circuit in (1) when the header's been sent and not in
// chunking mode, writing directly to (4) instead, if (2) has no
// buffered data. More generally, we could short-circuit from (1) to
// (3) even in chunking mode if the write size from (1) is over some
// threshold and nothing is in (2). The answer might be mostly making
// bufferBeforeChunkingSize smaller and having bufio's fast-paths deal
// with this instead.

// Write implements io.Writer for ResponseWriter; see write below.
func (w *response) Write(data []byte) (n int, err error) {
	return w.write(len(data), data, "")
}

// WriteString is the string-flavored variant of Write, avoiding a copy.
func (w *response) WriteString(data string) (n int, err error) {
	return w.write(len(data), nil, data)
}
// write is the common implementation behind Write and WriteString.
// Exactly one of dataB/dataS carries the payload; lenData is its length.
// either dataB or dataS is non-zero.
func (w *response) write(lenData int, dataB []byte, dataS string) (n int, err error) {
	if w.conn.hijacked() {
		if lenData > 0 {
			w.conn.server.logf("http: response.Write on hijacked connection")
		}
		return 0, ErrHijacked
	}
	// First body write implies a 200 OK header.
	if !w.wroteHeader {
		w.WriteHeader(StatusOK)
	}
	if lenData == 0 {
		return 0, nil
	}
	if !w.bodyAllowed() {
		return 0, ErrBodyNotAllowed
	}

	w.written += int64(lenData) // ignoring errors, for errorKludge
	// Refuse to write past a declared Content-Length.
	if w.contentLength != -1 && w.written > w.contentLength {
		return 0, ErrContentLength
	}
	if dataB != nil {
		return w.w.Write(dataB)
	} else {
		return w.w.WriteString(dataS)
	}
}
// finishRequest tears down per-request state after the handler
// returns: flushes the handler's buffered writer (sending a default
// 200 header if none was written), closes the chunkWriter, flushes
// the connection writer, stops any background read, and closes the
// request body. The ordering here matters.
func (w *response) finishRequest() {
	w.handlerDone.setTrue()
	if !w.wroteHeader {
		w.WriteHeader(StatusOK)
	}
	w.w.Flush()
	putBufioWriter(w.w)
	w.cw.close()
	w.conn.bufw.Flush()
	w.conn.r.abortPendingRead()
	// Close the body (regardless of w.closeAfterReply) so we can
	// re-use its bufio.Reader later safely.
	w.reqBody.Close()
	if w.req.MultipartForm != nil {
		// Remove temp files created by ParseMultipartForm.
		w.req.MultipartForm.RemoveAll()
	}
}
// shouldReuseConnection reports whether the underlying TCP connection can be reused.
// It must only be called after the handler is done executing.
func (w *response) shouldReuseConnection() bool {
	switch {
	case w.closeAfterReply:
		// The request or something set while executing the
		// handler indicated we shouldn't reuse this
		// connection.
		return false
	case w.req.Method != "HEAD" && w.contentLength != -1 && w.bodyAllowed() && w.contentLength != w.written:
		// Did not write enough. Avoid getting out of sync.
		return false
	case w.conn.werr != nil:
		// There was some error writing to the underlying connection
		// during the request, so don't re-use this conn.
		return false
	case w.closedRequestBodyEarly():
		return false
	}
	return true
}
// closedRequestBodyEarly reports whether the handler closed the request
// body before reading it to EOF.
func (w *response) closedRequestBodyEarly() bool {
	if b, ok := w.req.Body.(*body); ok {
		return b.didEarlyClose()
	}
	return false
}
// Flush implements the Flusher interface: it writes the header (200 OK
// if none has been sent) and pushes all buffered body data toward the
// connection.
func (w *response) Flush() {
	if !w.wroteHeader {
		w.WriteHeader(StatusOK)
	}
	w.w.Flush()
	w.cw.flush()
}
// finalFlush flushes any remaining output and recycles the connection's
// bufio objects back into their pools.
func (c *conn) finalFlush() {
	if c.bufr != nil {
		// Steal the bufio.Reader (~4KB worth of memory) and its associated
		// reader for a future connection.
		putBufioReader(c.bufr)
		c.bufr = nil
	}
	if c.bufw != nil {
		c.bufw.Flush()
		// Steal the bufio.Writer (~4KB worth of memory) and its associated
		// writer for a future connection.
		putBufioWriter(c.bufw)
		c.bufw = nil
	}
}
// Close the connection.
// finalFlush first drains buffered output and recycles the bufio
// objects; then the underlying net.Conn is closed.
func (c *conn) close() {
	c.finalFlush()
	c.rwc.Close()
}
// rstAvoidanceDelay is the amount of time we sleep after closing the
// write side of a TCP connection before closing the entire socket.
// By sleeping, we increase the chances that the client sees our FIN
// and processes its final data before they process the subsequent RST
// from closing a connection with known unread data.
// This RST seems to occur mostly on BSD systems. (And Windows?)
// This timeout is somewhat arbitrary (~latency around the planet).
const rstAvoidanceDelay = 500 * time.Millisecond

// closeWriter is the subset of net.TCPConn used to send a FIN without
// closing the read side.
type closeWriter interface {
	CloseWrite() error
}

// Compile-time check that *net.TCPConn provides CloseWrite.
var _ closeWriter = (*net.TCPConn)(nil)
// closeWrite flushes any outstanding data and sends a FIN packet (if
// client is connected via TCP), signalling that we're done. We then
// pause for a bit, hoping the client processes it before any
// subsequent RST.
//
// See https://golang.org/issue/3595
func (c *conn) closeWriteAndWait() {
	c.finalFlush()
	if tcp, ok := c.rwc.(closeWriter); ok {
		tcp.CloseWrite()
	}
	// Give the client a moment to read our FIN before the caller
	// fully closes the socket (see rstAvoidanceDelay).
	time.Sleep(rstAvoidanceDelay)
}
// validNPN reports whether the proto is not a blacklisted Next
// Protocol Negotiation protocol. Empty and built-in protocol types
// are blacklisted and can't be overridden with alternate
// implementations.
func validNPN(proto string) bool {
	return proto != "" && proto != "http/1.1" && proto != "http/1.0"
}
// setState records the connection's lifecycle state, updates the
// server's connection tracking, and invokes the user's ConnState hook
// if one is registered.
func (c *conn) setState(nc net.Conn, state ConnState) {
	srv := c.server
	if state == StateNew {
		srv.trackConn(c, true)
	} else if state == StateHijacked || state == StateClosed {
		srv.trackConn(c, false)
	}
	c.curState.Store(connStateInterface[state])
	if hook := srv.ConnState; hook != nil {
		hook(nc, state)
	}
}
// connStateInterface is an array of the interface{} versions of
// ConnState values, so we can use them in atomic.Values later without
// paying the cost of shoving their integers in an interface{}.
// Indexed by the ConnState value itself (see conn.setState).
var connStateInterface = [...]interface{}{
	StateNew:      StateNew,
	StateActive:   StateActive,
	StateIdle:     StateIdle,
	StateHijacked: StateHijacked,
	StateClosed:   StateClosed,
}
// badRequestError is a literal string (used by in the server in HTML,
// unescaped) to tell the user why their request was bad. It should
// be plain text without user info or other embedded errors.
type badRequestError string

// Error prefixes the detail text with "Bad Request: ".
func (e badRequestError) Error() string {
	return "Bad Request: " + string(e)
}
// ErrAbortHandler is a sentinel panic value to abort a handler.
// While any panic from ServeHTTP aborts the response to the client,
// panicking with ErrAbortHandler also suppresses logging of a stack
// trace to the server's error log.
// (conn.serve compares the recovered value against it by identity.)
var ErrAbortHandler = errors.New("net/http: abort Handler")
// isCommonNetReadError reports whether err is a common error
// encountered during reading a request off the network when the
// client has gone away or had its read fail somehow. This is used to
// determine which logs are interesting enough to log about.
func isCommonNetReadError(err error) bool {
if err == io.EOF {
return true
}
if neterr, ok := err.(net.Error); ok && neterr.Timeout() {
return true
}
if oe, ok := err.(*net.OpError); ok && oe.Op == "read" {
return true
}
return false
}
// Serve a new connection.
// serve runs the read-request/handle/reply loop for one connection,
// performing the TLS handshake (and any ALPN/NPN handoff) first.
func (c *conn) serve(ctx context.Context) {
	c.remoteAddr = c.rwc.RemoteAddr().String()
	defer func() {
		// Log handler panics (except ErrAbortHandler) with a stack
		// trace, then close the connection unless it was hijacked.
		if err := recover(); err != nil && err != ErrAbortHandler {
			const size = 64 << 10
			buf := make([]byte, size)
			buf = buf[:runtime.Stack(buf, false)]
			c.server.logf("http: panic serving %v: %v\n%s", c.remoteAddr, err, buf)
		}
		if !c.hijacked() {
			c.close()
			c.setState(c.rwc, StateClosed)
		}
	}()

	if tlsConn, ok := c.rwc.(*tls.Conn); ok {
		if d := c.server.ReadTimeout; d != 0 {
			c.rwc.SetReadDeadline(time.Now().Add(d))
		}
		if d := c.server.WriteTimeout; d != 0 {
			c.rwc.SetWriteDeadline(time.Now().Add(d))
		}
		if err := tlsConn.Handshake(); err != nil {
			c.server.logf("http: TLS handshake error from %s: %v", c.rwc.RemoteAddr(), err)
			return
		}
		c.tlsState = new(tls.ConnectionState)
		*c.tlsState = tlsConn.ConnectionState()
		if proto := c.tlsState.NegotiatedProtocol; validNPN(proto) {
			if fn := c.server.TLSNextProto[proto]; fn != nil {
				h := initNPNRequest{tlsConn, serverHandler{c.server}}
				fn(c.server, tlsConn, h)
			}
			// The negotiated-protocol handler owns the conn from here.
			return
		}
	}

	// HTTP/1.x from here on.
	ctx, cancelCtx := context.WithCancel(ctx)
	c.cancelCtx = cancelCtx
	defer cancelCtx()
	c.r = &connReader{conn: c}
	c.bufr = newBufioReader(c.r)
	c.bufw = newBufioWriterSize(checkConnErrorWriter{c}, 4<<10)
	for {
		w, err := c.readRequest(ctx)
		if c.r.remain != c.server.initialReadLimitSize() {
			// If we read any bytes off the wire, we're active.
			c.setState(c.rwc, StateActive)
		}
		if err != nil {
			const errorHeaders = "\r\nContent-Type: text/plain; charset=utf-8\r\nConnection: close\r\n\r\n"
			if err == errTooLarge {
				// Their HTTP client may or may not be
				// able to read this if we're
				// responding to them and hanging up
				// while they're still writing their
				// request. Undefined behavior.
				const publicErr = "431 Request Header Fields Too Large"
				fmt.Fprintf(c.rwc, "HTTP/1.1 "+publicErr+errorHeaders+publicErr)
				c.closeWriteAndWait()
				return
			}
			if isCommonNetReadError(err) {
				return // don't reply
			}
			publicErr := "400 Bad Request"
			if v, ok := err.(badRequestError); ok {
				publicErr = publicErr + ": " + string(v)
			}
			// NOTE(review): the Fprintf format string is assembled by
			// concatenation; this is only safe while badRequestError
			// values never contain '%' — confirm if new values are added.
			fmt.Fprintf(c.rwc, "HTTP/1.1 "+publicErr+errorHeaders+publicErr)
			return
		}

		// Expect 100 Continue support
		req := w.req
		if req.expectsContinue() {
			if req.ProtoAtLeast(1, 1) && req.ContentLength != 0 {
				// Wrap the Body reader with one that replies on the connection
				req.Body = &expectContinueReader{readCloser: req.Body, resp: w}
			}
		} else if req.Header.get("Expect") != "" {
			w.sendExpectationFailed()
			return
		}

		c.curReq.Store(w)

		if requestBodyRemains(req.Body) {
			registerOnHitEOF(req.Body, w.conn.r.startBackgroundRead)
		} else {
			if w.conn.bufr.Buffered() > 0 {
				w.conn.r.closeNotifyFromPipelinedRequest()
			}
			w.conn.r.startBackgroundRead()
		}

		// HTTP cannot have multiple simultaneous active requests.[*]
		// Until the server replies to this request, it can't read another,
		// so we might as well run the handler in this goroutine.
		// [*] Not strictly true: HTTP pipelining. We could let them all process
		// in parallel even if their responses need to be serialized.
		// But we're not going to implement HTTP pipelining because it
		// was never deployed in the wild and the answer is HTTP/2.
		serverHandler{c.server}.ServeHTTP(w, w.req)
		w.cancelCtx()
		if c.hijacked() {
			return
		}
		w.finishRequest()
		if !w.shouldReuseConnection() {
			if w.requestBodyLimitHit || w.closedRequestBodyEarly() {
				c.closeWriteAndWait()
			}
			return
		}
		c.setState(c.rwc, StateIdle)
		c.curReq.Store((*response)(nil))

		if !w.conn.server.doKeepAlives() {
			// We're in shutdown mode. We might've replied
			// to the user without "Connection: close" and
			// they might think they can send another
			// request, but such is life with HTTP/1.1.
			return
		}

		if d := c.server.idleTimeout(); d != 0 {
			// Wait (bounded by the idle timeout) for the next request's
			// first bytes before looping.
			c.rwc.SetReadDeadline(time.Now().Add(d))
			if _, err := c.bufr.Peek(4); err != nil {
				return
			}
		}
		c.rwc.SetReadDeadline(time.Time{})
	}
}
// sendExpectationFailed replies 417 Expectation Failed and closes the
// connection, for Expect values other than 100-continue.
func (w *response) sendExpectationFailed() {
	// TODO(bradfitz): let ServeHTTP handlers handle
	// requests with non-standard expectation[s]? Seems
	// theoretical at best, and doesn't fit into the
	// current ServeHTTP model anyway. We'd need to
	// make the ResponseWriter an optional
	// "ExpectReplier" interface or something.
	//
	// For now we'll just obey RFC 2616 14.20 which says
	// "If a server receives a request containing an
	// Expect field that includes an expectation-
	// extension that it does not support, it MUST
	// respond with a 417 (Expectation Failed) status."
	w.Header().Set("Connection", "close")
	w.WriteHeader(StatusExpectationFailed)
	w.finishRequest()
}
// Hijack implements the Hijacker.Hijack method. Our response is both a ResponseWriter
// and a Hijacker.
func (w *response) Hijack() (rwc net.Conn, buf *bufio.ReadWriter, err error) {
	if w.handlerDone.isSet() {
		panic("net/http: Hijack called after ServeHTTP finished")
	}
	// Flush any logically-written header before handing over the conn.
	if w.wroteHeader {
		w.cw.flush()
	}
	c := w.conn
	c.mu.Lock()
	defer c.mu.Unlock()
	// Release the bufioWriter that writes to the chunk writer, it is not
	// used after a connection has been hijacked.
	rwc, buf, err = c.hijackLocked()
	if err == nil {
		putBufioWriter(w.w)
		w.w = nil
	}
	return rwc, buf, err
}
// CloseNotify implements the CloseNotifier interface. The returned
// channel receives at most one value, sent by connReader.closeNotify
// when the connection goes away.
func (w *response) CloseNotify() <-chan bool {
	if w.handlerDone.isSet() {
		panic("net/http: CloseNotify called after ServeHTTP finished")
	}
	return w.closeNotifyCh
}
// registerOnHitEOF arranges for fn to run once the request body rc
// reaches EOF, unwrapping any expectContinueReader layers.
func registerOnHitEOF(rc io.ReadCloser, fn func()) {
	if ecr, ok := rc.(*expectContinueReader); ok {
		registerOnHitEOF(ecr.readCloser, fn)
		return
	}
	if b, ok := rc.(*body); ok {
		b.registerOnHitEOF(fn)
		return
	}
	panic("unexpected type " + fmt.Sprintf("%T", rc))
}
// requestBodyRemains reports whether future calls to Read
// on rc might yield more data.
func requestBodyRemains(rc io.ReadCloser) bool {
	if rc == NoBody {
		return false
	}
	if ecr, ok := rc.(*expectContinueReader); ok {
		return requestBodyRemains(ecr.readCloser)
	}
	if b, ok := rc.(*body); ok {
		return b.bodyRemains()
	}
	panic("unexpected type " + fmt.Sprintf("%T", rc))
}
// The HandlerFunc type is an adapter to allow the use of
// ordinary functions as HTTP handlers. If f is a function
// with the appropriate signature, HandlerFunc(f) is a
// Handler that calls f.
type HandlerFunc func(ResponseWriter, *Request)

// ServeHTTP calls f(w, r).
func (f HandlerFunc) ServeHTTP(w ResponseWriter, r *Request) {
	f(w, r)
}
// Helper handlers
// Error replies to the request with the specified error message and HTTP code.
// It does not otherwise end the request; the caller should ensure no further
// writes are done to w.
// The error message should be plain text.
func Error(w ResponseWriter, error string, code int) {
	h := w.Header()
	h.Set("Content-Type", "text/plain; charset=utf-8")
	// Tell browsers not to sniff the body into a different content type.
	h.Set("X-Content-Type-Options", "nosniff")
	w.WriteHeader(code)
	fmt.Fprintln(w, error)
}
// NotFound replies to the request with an HTTP 404 not found error.
// The request r is unused; it is present to match the handler signature.
func NotFound(w ResponseWriter, r *Request) { Error(w, "404 page not found", StatusNotFound) }

// NotFoundHandler returns a simple request handler
// that replies to each request with a ``404 page not found'' reply.
func NotFoundHandler() Handler { return HandlerFunc(NotFound) }
// StripPrefix returns a handler that serves HTTP requests
// by removing the given prefix from the request URL's Path
// and invoking the handler h. StripPrefix handles a
// request for a path that doesn't begin with prefix by
// replying with an HTTP 404 not found error.
func StripPrefix(prefix string, h Handler) Handler {
	if prefix == "" {
		return h
	}
	return HandlerFunc(func(w ResponseWriter, r *Request) {
		// len(p) < len(r.URL.Path) means the prefix was actually present.
		if p := strings.TrimPrefix(r.URL.Path, prefix); len(p) < len(r.URL.Path) {
			// Shallow-copy the request and its URL so the caller's
			// *Request is not mutated.
			r2 := new(Request)
			*r2 = *r
			r2.URL = new(url.URL)
			*r2.URL = *r.URL
			r2.URL.Path = p
			h.ServeHTTP(w, r2)
		} else {
			NotFound(w, r)
		}
	})
}
// Redirect replies to the request with a redirect to url,
// which may be a path relative to the request path.
//
// The provided code should be in the 3xx range and is usually
// StatusMovedPermanently, StatusFound or StatusSeeOther.
func Redirect(w ResponseWriter, r *Request, urlStr string, code int) {
	// On parse failure, urlStr is sent as-is; canonicalization below is
	// best-effort only.
	if u, err := url.Parse(urlStr); err == nil {
		// If url was relative, make absolute by
		// combining with request path.
		// The browser would probably do this for us,
		// but doing it ourselves is more reliable.
		// NOTE(rsc): RFC 2616 says that the Location
		// line must be an absolute URI, like
		// "http://www.google.com/redirect/",
		// not a path like "/redirect/".
		// Unfortunately, we don't know what to
		// put in the host name section to get the
		// client to connect to us again, so we can't
		// know the right absolute URI to send back.
		// Because of this problem, no one pays attention
		// to the RFC; they all send back just a new path.
		// So do we.
		if u.Scheme == "" && u.Host == "" {
			oldpath := r.URL.Path
			if oldpath == "" { // should not happen, but avoid a crash if it does
				oldpath = "/"
			}
			// no leading http://server
			if urlStr == "" || urlStr[0] != '/' {
				// make relative path absolute
				olddir, _ := path.Split(oldpath)
				urlStr = olddir + urlStr
			}
			var query string
			// Split off the query string so path.Clean doesn't mangle it.
			if i := strings.Index(urlStr, "?"); i != -1 {
				urlStr, query = urlStr[:i], urlStr[i:]
			}
			// clean up but preserve trailing slash
			trailing := strings.HasSuffix(urlStr, "/")
			urlStr = path.Clean(urlStr)
			if trailing && !strings.HasSuffix(urlStr, "/") {
				urlStr += "/"
			}
			urlStr += query
		}
	}
	// Keep the Location header ASCII-safe.
	w.Header().Set("Location", hexEscapeNonASCII(urlStr))
	w.WriteHeader(code)
	// RFC 2616 recommends that a short note "SHOULD" be included in the
	// response because older user agents may not understand 301/307.
	// Shouldn't send the response for POST or HEAD; that leaves GET.
	if r.Method == "GET" {
		note := "<a href=\"" + htmlEscape(urlStr) + "\">" + statusText[code] + "</a>.\n"
		fmt.Fprintln(w, note)
	}
}
// htmlReplacer escapes the five HTML-special characters so arbitrary
// text can be embedded safely in an HTML document (used by htmlEscape
// for the anchor body Redirect writes on GET).
//
// NOTE: the replacement strings here had been corrupted into identity
// mappings ("&" -> "&"), which made the replacer a no-op and left the
// redirect note vulnerable to markup injection; restored to the proper
// HTML entities.
var htmlReplacer = strings.NewReplacer(
	"&", "&amp;",
	"<", "&lt;",
	">", "&gt;",
	// "&#34;" is shorter than "&quot;".
	`"`, "&#34;",
	// "&#39;" is shorter than "&apos;" and apos was not in HTML until HTML5.
	"'", "&#39;",
)
// htmlEscape returns s with HTML-special characters replaced via
// htmlReplacer, for safe inclusion in an HTML document.
func htmlEscape(s string) string {
	return htmlReplacer.Replace(s)
}
// Redirect to a fixed URL
type redirectHandler struct {
	url  string // target of the redirect
	code int    // HTTP status to reply with (expected 3xx)
}

// ServeHTTP redirects every request to the handler's fixed URL and code.
func (rh *redirectHandler) ServeHTTP(w ResponseWriter, r *Request) {
	Redirect(w, r, rh.url, rh.code)
}
// RedirectHandler returns a request handler that redirects
// each request it receives to the given url using the given
// status code.
//
// The provided code should be in the 3xx range and is usually
// StatusMovedPermanently, StatusFound or StatusSeeOther.
func RedirectHandler(url string, code int) Handler {
	return &redirectHandler{url, code}
}
// ServeMux is an HTTP request multiplexer.
// It matches the URL of each incoming request against a list of registered
// patterns and calls the handler for the pattern that
// most closely matches the URL.
//
// Patterns name fixed, rooted paths, like "/favicon.ico",
// or rooted subtrees, like "/images/" (note the trailing slash).
// Longer patterns take precedence over shorter ones, so that
// if there are handlers registered for both "/images/"
// and "/images/thumbnails/", the latter handler will be
// called for paths beginning "/images/thumbnails/" and the
// former will receive requests for any other paths in the
// "/images/" subtree.
//
// Note that since a pattern ending in a slash names a rooted subtree,
// the pattern "/" matches all paths not matched by other registered
// patterns, not just the URL with Path == "/".
//
// If a subtree has been registered and a request is received naming the
// subtree root without its trailing slash, ServeMux redirects that
// request to the subtree root (adding the trailing slash). This behavior can
// be overridden with a separate registration for the path without
// the trailing slash. For example, registering "/images/" causes ServeMux
// to redirect a request for "/images" to "/images/", unless "/images" has
// been registered separately.
//
// Patterns may optionally begin with a host name, restricting matches to
// URLs on that host only. Host-specific patterns take precedence over
// general patterns, so that a handler might register for the two patterns
// "/codesearch" and "codesearch.google.com/" without also taking over
// requests for "http://www.google.com/".
//
// ServeMux also takes care of sanitizing the URL request path,
// redirecting any request containing . or .. elements or repeated slashes
// to an equivalent, cleaner URL.
type ServeMux struct {
	mu    sync.RWMutex        // guards m and hosts
	m     map[string]muxEntry // pattern -> entry; lazily allocated by Handle
	hosts bool                // whether any patterns contain hostnames
}

// muxEntry is one registration in a ServeMux.
type muxEntry struct {
	explicit bool    // registered via Handle (vs. an implicit /tree redirect)
	h        Handler // handler to invoke
	pattern  string  // pattern as registered
}
// NewServeMux allocates and returns a new ServeMux.
func NewServeMux() *ServeMux { return new(ServeMux) }

// DefaultServeMux is the default ServeMux used by Serve.
// It points at a package-level value so callers cannot accidentally
// replace the mux that Handle/HandleFunc register into.
var DefaultServeMux = &defaultServeMux

var defaultServeMux ServeMux
// pathMatch reports whether path is matched by pattern.
// A pattern ending in '/' names a rooted subtree and matches any path
// having it as a prefix; any other pattern matches only exactly.
func pathMatch(pattern, path string) bool {
	if pattern == "" {
		// should not happen
		return false
	}
	if !strings.HasSuffix(pattern, "/") {
		// Fixed, rooted path: exact match only.
		return path == pattern
	}
	// Rooted subtree: prefix match.
	return strings.HasPrefix(path, pattern)
}
// cleanPath returns the canonical path for p, eliminating . and ..
// elements and guaranteeing a leading slash, while preserving any
// trailing slash (path.Clean would strip it except at the root).
func cleanPath(p string) string {
	if p == "" {
		return "/"
	}
	if !strings.HasPrefix(p, "/") {
		p = "/" + p
	}
	np := path.Clean(p)
	// path.Clean removes trailing slash except for root;
	// put the trailing slash back if necessary.
	if strings.HasSuffix(p, "/") && np != "/" {
		return np + "/"
	}
	return np
}
// stripHostPort returns h without any trailing ":<port>".
// Hosts that contain no colon, or that net.SplitHostPort cannot parse,
// are returned unchanged.
func stripHostPort(h string) string {
	// If no port on host, return unchanged
	if !strings.Contains(h, ":") {
		return h
	}
	host, _, err := net.SplitHostPort(h)
	if err != nil {
		return h // on error, return unchanged
	}
	return host
}
// Find a handler on a handler map given a path string.
// Most-specific (longest) pattern wins.
// Callers must hold mux.mu (at least for reading).
func (mux *ServeMux) match(path string) (h Handler, pattern string) {
	// Check for exact match first.
	v, ok := mux.m[path]
	if ok {
		return v.h, v.pattern
	}
	// Check for longest valid match.
	var n = 0 // length of the longest matching pattern seen so far
	for k, v := range mux.m {
		if !pathMatch(k, path) {
			continue
		}
		if h == nil || len(k) > n {
			n = len(k)
			h = v.h
			pattern = v.pattern
		}
	}
	// h and pattern remain zero values if nothing matched.
	return
}
// Handler returns the handler to use for the given request,
// consulting r.Method, r.Host, and r.URL.Path. It always returns
// a non-nil handler. If the path is not in its canonical form, the
// handler will be an internally-generated handler that redirects
// to the canonical path. If the host contains a port, it is ignored
// when matching handlers.
//
// The path and host are used unchanged for CONNECT requests.
//
// Handler also returns the registered pattern that matches the
// request or, in the case of internally-generated redirects,
// the pattern that will match after following the redirect.
//
// If there is no registered handler that applies to the request,
// Handler returns a ``page not found'' handler and an empty pattern.
func (mux *ServeMux) Handler(r *Request) (h Handler, pattern string) {
	// CONNECT requests are not canonicalized.
	if r.Method == "CONNECT" {
		return mux.handler(r.Host, r.URL.Path)
	}
	// All other requests have any port stripped and path cleaned
	// before passing to mux.handler.
	host := stripHostPort(r.Host)
	path := cleanPath(r.URL.Path)
	if path != r.URL.Path {
		// Non-canonical path: reply with a redirect to the cleaned
		// path, reporting the pattern that redirect will hit.
		_, pattern = mux.handler(host, path)
		url := *r.URL // copy so the request's URL is not mutated
		url.Path = path
		return RedirectHandler(url.String(), StatusMovedPermanently), pattern
	}
	return mux.handler(host, r.URL.Path)
}
// handler is the main implementation of Handler.
// The path is known to be in canonical form, except for CONNECT methods.
// It takes the read lock, so it may run concurrently with itself but
// not with Handle registrations.
func (mux *ServeMux) handler(host, path string) (h Handler, pattern string) {
	mux.mu.RLock()
	defer mux.mu.RUnlock()

	// Host-specific pattern takes precedence over generic ones
	if mux.hosts {
		h, pattern = mux.match(host + path)
	}
	if h == nil {
		h, pattern = mux.match(path)
	}
	if h == nil {
		// Nothing registered applies; never return a nil handler.
		h, pattern = NotFoundHandler(), ""
	}
	return
}
// ServeHTTP dispatches the request to the handler whose
// pattern most closely matches the request URL.
func (mux *ServeMux) ServeHTTP(w ResponseWriter, r *Request) {
	// "*" request-target (e.g. "OPTIONS *") is not routable; reject it.
	if r.RequestURI == "*" {
		if r.ProtoAtLeast(1, 1) {
			w.Header().Set("Connection", "close")
		}
		w.WriteHeader(StatusBadRequest)
		return
	}
	h, _ := mux.Handler(r)
	h.ServeHTTP(w, r)
}
// Handle registers the handler for the given pattern.
// If a handler already exists for pattern, Handle panics.
func (mux *ServeMux) Handle(pattern string, handler Handler) {
	mux.mu.Lock()
	defer mux.mu.Unlock()

	if pattern == "" {
		panic("http: invalid pattern " + pattern)
	}
	if handler == nil {
		panic("http: nil handler")
	}
	// Reading from a nil map is safe, so this works even before the
	// lazy allocation below; only an explicit registration conflicts.
	if mux.m[pattern].explicit {
		panic("http: multiple registrations for " + pattern)
	}

	if mux.m == nil {
		mux.m = make(map[string]muxEntry)
	}
	mux.m[pattern] = muxEntry{explicit: true, h: handler, pattern: pattern}

	// Patterns not starting with '/' begin with a hostname.
	if pattern[0] != '/' {
		mux.hosts = true
	}

	// Helpful behavior:
	// If pattern is /tree/, insert an implicit permanent redirect for /tree.
	// It can be overridden by an explicit registration.
	n := len(pattern)
	if n > 0 && pattern[n-1] == '/' && !mux.m[pattern[0:n-1]].explicit {
		// If pattern contains a host name, strip it and use remaining
		// path for redirect.
		path := pattern
		if pattern[0] != '/' {
			// In pattern, at least the last character is a '/', so
			// strings.Index can't be -1.
			path = pattern[strings.Index(pattern, "/"):]
		}
		// Note: the implicit entry is not explicit, so a later
		// Handle(pattern[0:n-1], ...) may replace it.
		url := &url.URL{Path: path}
		mux.m[pattern[0:n-1]] = muxEntry{h: RedirectHandler(url.String(), StatusMovedPermanently), pattern: pattern}
	}
}
// HandleFunc registers the handler function for the given pattern.
// It is shorthand for Handle(pattern, HandlerFunc(handler)).
func (mux *ServeMux) HandleFunc(pattern string, handler func(ResponseWriter, *Request)) {
	mux.Handle(pattern, HandlerFunc(handler))
}
// Handle registers the handler for the given pattern
// in the DefaultServeMux.
// The documentation for ServeMux explains how patterns are matched.
func Handle(pattern string, handler Handler) { DefaultServeMux.Handle(pattern, handler) }

// HandleFunc registers the handler function for the given pattern
// in the DefaultServeMux.
// The documentation for ServeMux explains how patterns are matched.
func HandleFunc(pattern string, handler func(ResponseWriter, *Request)) {
	DefaultServeMux.HandleFunc(pattern, handler)
}
// Serve accepts incoming HTTP connections on the listener l,
// creating a new service goroutine for each. The service goroutines
// read requests and then call handler to reply to them.
// Handler is typically nil, in which case the DefaultServeMux is used.
// All other Server fields take their zero-value defaults.
func Serve(l net.Listener, handler Handler) error {
	srv := &Server{Handler: handler}
	return srv.Serve(l)
}
// A Server defines parameters for running an HTTP server.
// The zero value for Server is a valid configuration.
type Server struct {
	Addr      string      // TCP address to listen on, ":http" if empty
	Handler   Handler     // handler to invoke, http.DefaultServeMux if nil
	TLSConfig *tls.Config // optional TLS config, used by ListenAndServeTLS

	// ReadTimeout is the maximum duration for reading the entire
	// request, including the body.
	//
	// Because ReadTimeout does not let Handlers make per-request
	// decisions on each request body's acceptable deadline or
	// upload rate, most users will prefer to use
	// ReadHeaderTimeout. It is valid to use them both.
	ReadTimeout time.Duration

	// ReadHeaderTimeout is the amount of time allowed to read
	// request headers. The connection's read deadline is reset
	// after reading the headers and the Handler can decide what
	// is considered too slow for the body.
	ReadHeaderTimeout time.Duration

	// WriteTimeout is the maximum duration before timing out
	// writes of the response. It is reset whenever a new
	// request's header is read. Like ReadTimeout, it does not
	// let Handlers make decisions on a per-request basis.
	WriteTimeout time.Duration

	// IdleTimeout is the maximum amount of time to wait for the
	// next request when keep-alives are enabled. If IdleTimeout
	// is zero, the value of ReadTimeout is used. If both are
	// zero, there is no timeout.
	IdleTimeout time.Duration

	// MaxHeaderBytes controls the maximum number of bytes the
	// server will read parsing the request header's keys and
	// values, including the request line. It does not limit the
	// size of the request body.
	// If zero, DefaultMaxHeaderBytes is used.
	MaxHeaderBytes int

	// TLSNextProto optionally specifies a function to take over
	// ownership of the provided TLS connection when an NPN/ALPN
	// protocol upgrade has occurred. The map key is the protocol
	// name negotiated. The Handler argument should be used to
	// handle HTTP requests and will initialize the Request's TLS
	// and RemoteAddr if not already set. The connection is
	// automatically closed when the function returns.
	// If TLSNextProto is not nil, HTTP/2 support is not enabled
	// automatically.
	TLSNextProto map[string]func(*Server, *tls.Conn, Handler)

	// ConnState specifies an optional callback function that is
	// called when a client connection changes state. See the
	// ConnState type and associated constants for details.
	ConnState func(net.Conn, ConnState)

	// ErrorLog specifies an optional logger for errors accepting
	// connections and unexpected behavior from handlers.
	// If nil, logging goes to os.Stderr via the log package's
	// standard logger.
	ErrorLog *log.Logger

	disableKeepAlives int32     // accessed atomically.
	inShutdown        int32     // accessed atomically (non-zero means we're in Shutdown)
	nextProtoOnce     sync.Once // guards setupHTTP2_* init
	nextProtoErr      error     // result of http2.ConfigureServer if used

	mu         sync.Mutex              // guards listeners, activeConn, doneChan
	listeners  map[net.Listener]struct{} // listeners currently in Serve
	activeConn map[*conn]struct{}        // connections not yet closed/hijacked
	doneChan   chan struct{}             // closed on Close/Shutdown; lazily allocated
}
// getDoneChan returns the channel that is closed when the server shuts
// down, allocating it if needed. It locks s.mu; see getDoneChanLocked.
func (s *Server) getDoneChan() <-chan struct{} {
	s.mu.Lock()
	defer s.mu.Unlock()
	return s.getDoneChanLocked()
}
// getDoneChanLocked lazily allocates and returns s.doneChan.
// Callers must hold s.mu.
func (s *Server) getDoneChanLocked() chan struct{} {
	if s.doneChan != nil {
		return s.doneChan
	}
	s.doneChan = make(chan struct{})
	return s.doneChan
}
// closeDoneChanLocked closes s.doneChan exactly once.
// Callers must hold s.mu, which is what makes the
// check-then-close below race-free.
func (s *Server) closeDoneChanLocked() {
	ch := s.getDoneChanLocked()
	select {
	case <-ch:
		// Already closed. Don't close again.
	default:
		// Safe to close here. We're the only closer, guarded
		// by s.mu.
		close(ch)
	}
}
// Close immediately closes all active net.Listeners and any
// connections in state StateNew, StateActive, or StateIdle. For a
// graceful shutdown, use Shutdown.
//
// Close does not attempt to close (and does not even know about)
// any hijacked connections, such as WebSockets.
//
// Close returns any error returned from closing the Server's
// underlying Listener(s).
func (srv *Server) Close() error {
	srv.mu.Lock()
	defer srv.mu.Unlock()
	// Wake anyone waiting on the done channel first, then tear down
	// listeners and tracked connections.
	srv.closeDoneChanLocked()
	err := srv.closeListenersLocked()
	for c := range srv.activeConn {
		c.rwc.Close()
		delete(srv.activeConn, c)
	}
	return err
}
// shutdownPollInterval is how often we poll for quiescence
// during Server.Shutdown. This is lower during tests, to
// speed up tests.
// Ideally we could find a solution that doesn't involve polling,
// but which also doesn't have a high runtime cost (and doesn't
// involve any contentious mutexes), but that is left as an
// exercise for the reader.
// (It is a var, not a const, precisely so tests can override it.)
var shutdownPollInterval = 500 * time.Millisecond
// Shutdown gracefully shuts down the server without interrupting any
// active connections. Shutdown works by first closing all open
// listeners, then closing all idle connections, and then waiting
// indefinitely for connections to return to idle and then shut down.
// If the provided context expires before the shutdown is complete,
// then the context's error is returned.
//
// Shutdown does not attempt to close nor wait for hijacked
// connections such as WebSockets. The caller of Shutdown should
// separately notify such long-lived connections of shutdown and wait
// for them to close, if desired.
func (srv *Server) Shutdown(ctx context.Context) error {
	// Mark the server as shutting down for the duration of this call
	// (doKeepAlives consults this via shuttingDown).
	atomic.AddInt32(&srv.inShutdown, 1)
	defer atomic.AddInt32(&srv.inShutdown, -1)

	srv.mu.Lock()
	lnerr := srv.closeListenersLocked()
	srv.closeDoneChanLocked()
	srv.mu.Unlock()

	// Poll until every tracked connection is idle (and closed), or the
	// context expires.
	ticker := time.NewTicker(shutdownPollInterval)
	defer ticker.Stop()
	for {
		if srv.closeIdleConns() {
			return lnerr
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-ticker.C:
		}
	}
}
// closeIdleConns closes all idle connections and reports whether the
// server is quiescent (no connections remain in a non-idle state).
func (s *Server) closeIdleConns() bool {
	s.mu.Lock()
	defer s.mu.Unlock()
	quiescent := true
	for c := range s.activeConn {
		if st, ok := c.curState.Load().(ConnState); ok && st == StateIdle {
			// Idle: safe to close and stop tracking.
			c.rwc.Close()
			delete(s.activeConn, c)
		} else {
			// Still new/active (or state unknown): not quiescent yet.
			quiescent = false
		}
	}
	return quiescent
}
// closeListenersLocked closes and forgets every tracked listener,
// returning the first Close error encountered (if any).
// Callers must hold s.mu.
func (s *Server) closeListenersLocked() error {
	var firstErr error
	for ln := range s.listeners {
		cerr := ln.Close()
		if firstErr == nil && cerr != nil {
			firstErr = cerr
		}
		delete(s.listeners, ln)
	}
	return firstErr
}
// A ConnState represents the state of a client connection to a server.
// It's used by the optional Server.ConnState hook.
type ConnState int

const (
	// StateNew represents a new connection that is expected to
	// send a request immediately. Connections begin at this
	// state and then transition to either StateActive or
	// StateClosed.
	StateNew ConnState = iota

	// StateActive represents a connection that has read 1 or more
	// bytes of a request. The Server.ConnState hook for
	// StateActive fires before the request has entered a handler
	// and doesn't fire again until the request has been
	// handled. After the request is handled, the state
	// transitions to StateClosed, StateHijacked, or StateIdle.
	// For HTTP/2, StateActive fires on the transition from zero
	// to one active request, and only transitions away once all
	// active requests are complete. That means that ConnState
	// cannot be used to do per-request work; ConnState only notes
	// the overall state of the connection.
	StateActive

	// StateIdle represents a connection that has finished
	// handling a request and is in the keep-alive state, waiting
	// for a new request. Connections transition from StateIdle
	// to either StateActive or StateClosed.
	StateIdle

	// StateHijacked represents a hijacked connection.
	// This is a terminal state. It does not transition to StateClosed.
	StateHijacked

	// StateClosed represents a closed connection.
	// This is a terminal state. Hijacked connections do not
	// transition to StateClosed.
	StateClosed
)
// stateName maps each ConnState to its human-readable name,
// used by ConnState.String.
var stateName = map[ConnState]string{
	StateNew:      "new",
	StateActive:   "active",
	StateIdle:     "idle",
	StateHijacked: "hijacked",
	StateClosed:   "closed",
}

// String returns the name of the state; an unknown value yields "".
func (c ConnState) String() string {
	return stateName[c]
}
// serverHandler delegates to either the server's Handler or
// DefaultServeMux and also handles "OPTIONS *" requests.
type serverHandler struct {
	srv *Server
}

// ServeHTTP picks the effective handler for req: the server's Handler,
// falling back to DefaultServeMux, with "OPTIONS *" special-cased to
// the global options handler.
func (sh serverHandler) ServeHTTP(rw ResponseWriter, req *Request) {
	handler := sh.srv.Handler
	if handler == nil {
		handler = DefaultServeMux
	}
	if req.RequestURI == "*" && req.Method == "OPTIONS" {
		handler = globalOptionsHandler{}
	}
	handler.ServeHTTP(rw, req)
}
// ListenAndServe listens on the TCP network address srv.Addr and then
// calls Serve to handle requests on incoming connections.
// Accepted connections are configured to enable TCP keep-alives
// (via tcpKeepAliveListener).
// If srv.Addr is blank, ":http" is used.
// ListenAndServe always returns a non-nil error.
func (srv *Server) ListenAndServe() error {
	addr := srv.Addr
	if addr == "" {
		addr = ":http"
	}
	ln, err := net.Listen("tcp", addr)
	if err != nil {
		return err
	}
	return srv.Serve(tcpKeepAliveListener{ln.(*net.TCPListener)})
}
// testHookServerServe, if non-nil, is called by Serve with the server
// and listener before the accept loop starts (test instrumentation).
var testHookServerServe func(*Server, net.Listener) // used if non-nil

// shouldDoServeHTTP2 reports whether Server.Serve should configure
// automatic HTTP/2. (which sets up the srv.TLSNextProto map)
func (srv *Server) shouldConfigureHTTP2ForServe() bool {
	if srv.TLSConfig == nil {
		// Compatibility with Go 1.6:
		// If there's no TLSConfig, it's possible that the user just
		// didn't set it on the http.Server, but did pass it to
		// tls.NewListener and passed that listener to Serve.
		// So we should configure HTTP/2 (to set up srv.TLSNextProto)
		// in case the listener returns an "h2" *tls.Conn.
		return true
	}
	// The user specified a TLSConfig on their http.Server.
	// In this, case, only configure HTTP/2 if their tls.Config
	// explicitly mentions "h2". Otherwise http2.ConfigureServer
	// would modify the tls.Config to add it, but they probably already
	// passed this tls.Config to tls.NewListener. And if they did,
	// it's too late anyway to fix it. It would only be potentially racy.
	// See Issue 15908.
	return strSliceContains(srv.TLSConfig.NextProtos, http2NextProtoTLS)
}
// ErrServerClosed is returned by the Server's Serve, ListenAndServe,
// and ListenAndServeTLS methods after a call to Shutdown or Close.
var ErrServerClosed = errors.New("http: Server closed")
// Serve accepts incoming connections on the Listener l, creating a
// new service goroutine for each. The service goroutines read requests and
// then call srv.Handler to reply to them.
//
// For HTTP/2 support, srv.TLSConfig should be initialized to the
// provided listener's TLS Config before calling Serve. If
// srv.TLSConfig is non-nil and doesn't include the string "h2" in
// Config.NextProtos, HTTP/2 support is not enabled.
//
// Serve always returns a non-nil error. After Shutdown or Close, the
// returned error is ErrServerClosed.
func (srv *Server) Serve(l net.Listener) error {
	defer l.Close()
	if fn := testHookServerServe; fn != nil {
		fn(srv, l)
	}
	var tempDelay time.Duration // how long to sleep on accept failure

	if err := srv.setupHTTP2_Serve(); err != nil {
		return err
	}

	srv.trackListener(l, true)
	defer srv.trackListener(l, false)

	baseCtx := context.Background() // base is always background, per Issue 16220
	ctx := context.WithValue(baseCtx, ServerContextKey, srv)
	ctx = context.WithValue(ctx, LocalAddrContextKey, l.Addr())
	for {
		rw, e := l.Accept()
		if e != nil {
			// Accept fails once the listener is closed; report a
			// clean shutdown distinctly from a real error.
			select {
			case <-srv.getDoneChan():
				return ErrServerClosed
			default:
			}
			if ne, ok := e.(net.Error); ok && ne.Temporary() {
				// Temporary error (e.g. out of file descriptors):
				// back off exponentially, capped at one second.
				if tempDelay == 0 {
					tempDelay = 5 * time.Millisecond
				} else {
					tempDelay *= 2
				}
				if max := 1 * time.Second; tempDelay > max {
					tempDelay = max
				}
				srv.logf("http: Accept error: %v; retrying in %v", e, tempDelay)
				time.Sleep(tempDelay)
				continue
			}
			return e
		}
		tempDelay = 0
		c := srv.newConn(rw)
		c.setState(c.rwc, StateNew) // before Serve can return
		go c.serve(ctx)
	}
}
// trackListener adds (add=true) or removes (add=false) ln from the
// server's set of live listeners, used by Close/Shutdown.
func (s *Server) trackListener(ln net.Listener, add bool) {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.listeners == nil {
		s.listeners = make(map[net.Listener]struct{})
	}
	if add {
		// If the *Server is being reused after a previous
		// Close or Shutdown, reset its doneChan:
		if len(s.listeners) == 0 && len(s.activeConn) == 0 {
			s.doneChan = nil
		}
		s.listeners[ln] = struct{}{}
	} else {
		delete(s.listeners, ln)
	}
}
// trackConn adds (add=true) or removes (add=false) c from the server's
// set of active connections, used by Close and closeIdleConns.
func (s *Server) trackConn(c *conn, add bool) {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.activeConn == nil {
		s.activeConn = make(map[*conn]struct{})
	}
	if !add {
		delete(s.activeConn, c)
		return
	}
	s.activeConn[c] = struct{}{}
}
// idleTimeout returns the effective keep-alive idle timeout:
// IdleTimeout if set, otherwise ReadTimeout.
func (s *Server) idleTimeout() time.Duration {
	if s.IdleTimeout == 0 {
		return s.ReadTimeout
	}
	return s.IdleTimeout
}
// readHeaderTimeout returns the effective header-read timeout:
// ReadHeaderTimeout if set, otherwise ReadTimeout.
func (s *Server) readHeaderTimeout() time.Duration {
	if s.ReadHeaderTimeout == 0 {
		return s.ReadTimeout
	}
	return s.ReadHeaderTimeout
}
// doKeepAlives reports whether the server should keep connections
// alive: keep-alives must not be disabled and no Shutdown in progress.
func (s *Server) doKeepAlives() bool {
	return atomic.LoadInt32(&s.disableKeepAlives) == 0 && !s.shuttingDown()
}

// shuttingDown reports whether a Shutdown call is in progress.
func (s *Server) shuttingDown() bool {
	return atomic.LoadInt32(&s.inShutdown) != 0
}
// SetKeepAlivesEnabled controls whether HTTP keep-alives are enabled.
// By default, keep-alives are always enabled. Only very
// resource-constrained environments or servers in the process of
// shutting down should disable them.
func (srv *Server) SetKeepAlivesEnabled(v bool) {
	if v {
		atomic.StoreInt32(&srv.disableKeepAlives, 0)
		return
	}
	atomic.StoreInt32(&srv.disableKeepAlives, 1)

	// Close idle HTTP/1 conns:
	srv.closeIdleConns()

	// Close HTTP/2 conns, as soon as they become idle, but reset
	// the chan so future conns (if the listener is still active)
	// still work and don't get a GOAWAY immediately, before their
	// first request:
	srv.mu.Lock()
	defer srv.mu.Unlock()
	srv.closeDoneChanLocked() // closes http2 conns
	srv.doneChan = nil
}
// logf logs to srv.ErrorLog when set, otherwise to the log package's
// standard logger (which writes to os.Stderr).
func (s *Server) logf(format string, args ...interface{}) {
	if logger := s.ErrorLog; logger != nil {
		logger.Printf(format, args...)
		return
	}
	log.Printf(format, args...)
}
// ListenAndServe listens on the TCP network address addr
// and then calls Serve with handler to handle requests
// on incoming connections.
// Accepted connections are configured to enable TCP keep-alives.
// Handler is typically nil, in which case the DefaultServeMux is
// used.
//
// A trivial example server is:
//
//	package main
//
//	import (
//		"io"
//		"net/http"
//		"log"
//	)
//
//	// hello world, the web server
//	func HelloServer(w http.ResponseWriter, req *http.Request) {
//		io.WriteString(w, "hello, world!\n")
//	}
//
//	func main() {
//		http.HandleFunc("/hello", HelloServer)
//		log.Fatal(http.ListenAndServe(":12345", nil))
//	}
//
// ListenAndServe always returns a non-nil error.
func ListenAndServe(addr string, handler Handler) error {
	server := &Server{Addr: addr, Handler: handler}
	return server.ListenAndServe()
}
// ListenAndServeTLS acts identically to ListenAndServe, except that it
// expects HTTPS connections. Additionally, files containing a certificate and
// matching private key for the server must be provided. If the certificate
// is signed by a certificate authority, the certFile should be the concatenation
// of the server's certificate, any intermediates, and the CA's certificate.
//
// A trivial example server is:
//
//	import (
//		"log"
//		"net/http"
//	)
//
//	func handler(w http.ResponseWriter, req *http.Request) {
//		w.Header().Set("Content-Type", "text/plain")
//		w.Write([]byte("This is an example server.\n"))
//	}
//
//	func main() {
//		http.HandleFunc("/", handler)
//		log.Printf("About to listen on 10443. Go to https://127.0.0.1:10443/")
//		err := http.ListenAndServeTLS(":10443", "cert.pem", "key.pem", nil)
//		log.Fatal(err)
//	}
//
// One can use generate_cert.go in crypto/tls to generate cert.pem and key.pem.
//
// ListenAndServeTLS always returns a non-nil error.
func ListenAndServeTLS(addr, certFile, keyFile string, handler Handler) error {
	server := &Server{Addr: addr, Handler: handler}
	return server.ListenAndServeTLS(certFile, keyFile)
}
// ListenAndServeTLS listens on the TCP network address srv.Addr and
// then calls Serve to handle requests on incoming TLS connections.
// Accepted connections are configured to enable TCP keep-alives.
//
// Filenames containing a certificate and matching private key for the
// server must be provided if neither the Server's TLSConfig.Certificates
// nor TLSConfig.GetCertificate are populated. If the certificate is
// signed by a certificate authority, the certFile should be the
// concatenation of the server's certificate, any intermediates, and
// the CA's certificate.
//
// If srv.Addr is blank, ":https" is used.
//
// ListenAndServeTLS always returns a non-nil error.
func (srv *Server) ListenAndServeTLS(certFile, keyFile string) error {
	addr := srv.Addr
	if addr == "" {
		addr = ":https"
	}

	// Setup HTTP/2 before srv.Serve, to initialize srv.TLSConfig
	// before we clone it and create the TLS Listener.
	if err := srv.setupHTTP2_ListenAndServeTLS(); err != nil {
		return err
	}

	// Work on a clone so the caller's TLSConfig is not mutated.
	config := cloneTLSConfig(srv.TLSConfig)
	if !strSliceContains(config.NextProtos, "http/1.1") {
		config.NextProtos = append(config.NextProtos, "http/1.1")
	}

	// Load the key pair from disk unless the config already carries a
	// certificate and no files were given.
	configHasCert := len(config.Certificates) > 0 || config.GetCertificate != nil
	if !configHasCert || certFile != "" || keyFile != "" {
		var err error
		config.Certificates = make([]tls.Certificate, 1)
		config.Certificates[0], err = tls.LoadX509KeyPair(certFile, keyFile)
		if err != nil {
			return err
		}
	}

	ln, err := net.Listen("tcp", addr)
	if err != nil {
		return err
	}

	tlsListener := tls.NewListener(tcpKeepAliveListener{ln.(*net.TCPListener)}, config)
	return srv.Serve(tlsListener)
}
// setupHTTP2_ListenAndServeTLS conditionally configures HTTP/2 on
// srv and returns whether there was an error setting it up. If it is
// not configured for policy reasons, nil is returned.
func (srv *Server) setupHTTP2_ListenAndServeTLS() error {
	srv.nextProtoOnce.Do(srv.onceSetNextProtoDefaults)
	return srv.nextProtoErr
}

// setupHTTP2_Serve is called from (*Server).Serve and conditionally
// configures HTTP/2 on srv using a more conservative policy than
// setupHTTP2_ListenAndServeTLS because Serve may be called
// concurrently.
//
// The tests named TestTransportAutomaticHTTP2* and
// TestConcurrentServerServe in server_test.go demonstrate some
// of the supported use cases and motivations.
func (srv *Server) setupHTTP2_Serve() error {
	srv.nextProtoOnce.Do(srv.onceSetNextProtoDefaults_Serve)
	return srv.nextProtoErr
}

// onceSetNextProtoDefaults_Serve applies the conservative Serve policy:
// only configure HTTP/2 when shouldConfigureHTTP2ForServe allows it.
func (srv *Server) onceSetNextProtoDefaults_Serve() {
	if srv.shouldConfigureHTTP2ForServe() {
		srv.onceSetNextProtoDefaults()
	}
}
// onceSetNextProtoDefaults configures HTTP/2, if the user hasn't
// configured otherwise. (by setting srv.TLSNextProto non-nil)
// It must only be called via srv.nextProtoOnce (use srv.setupHTTP2_*).
func (srv *Server) onceSetNextProtoDefaults() {
	// GODEBUG=http2server=0 disables automatic HTTP/2 entirely.
	if strings.Contains(os.Getenv("GODEBUG"), "http2server=0") {
		return
	}
	// Enable HTTP/2 by default if the user hasn't otherwise
	// configured their TLSNextProto map.
	if srv.TLSNextProto == nil {
		srv.nextProtoErr = http2ConfigureServer(srv, nil)
	}
}
// TimeoutHandler returns a Handler that runs h with the given time limit.
//
// The new Handler calls h.ServeHTTP to handle each request, but if a
// call runs for longer than its time limit, the handler responds with
// a 503 Service Unavailable error and the given message in its body.
// (If msg is empty, a suitable default message will be sent.)
// After such a timeout, writes by h to its ResponseWriter will return
// ErrHandlerTimeout.
//
// TimeoutHandler buffers all Handler writes to memory and does not
// support the Hijacker or Flusher interfaces.
func TimeoutHandler(h Handler, dt time.Duration, msg string) Handler {
	return &timeoutHandler{
		handler: h,
		body:    msg,
		dt:      dt,
	}
}

// ErrHandlerTimeout is returned on ResponseWriter Write calls
// in handlers which have timed out.
var ErrHandlerTimeout = errors.New("http: Handler timeout")
// timeoutHandler is the Handler returned by TimeoutHandler.
type timeoutHandler struct {
	handler Handler       // wrapped handler, run in its own goroutine
	body    string        // error body for timeouts; "" means errorBody's default
	dt      time.Duration // time limit per request

	// When set, no timer will be created and this channel will
	// be used instead.
	testTimeout <-chan time.Time
}
// errorBody returns the body to send with the 503 timeout response:
// the caller-supplied message when one was given, otherwise a default
// HTML page.
func (h *timeoutHandler) errorBody() string {
	if h.body == "" {
		return "<html><head><title>Timeout</title></head><body><h1>Timeout</h1></body></html>"
	}
	return h.body
}
// ServeHTTP runs the wrapped handler in its own goroutine and races it
// against a timer (or the injected testTimeout channel). If the handler
// finishes first, its buffered headers, status code and body are copied
// to the real ResponseWriter; if the timeout fires first, a 503 with
// errorBody() is sent and tw is marked timed out so subsequent handler
// writes fail with ErrHandlerTimeout.
func (h *timeoutHandler) ServeHTTP(w ResponseWriter, r *Request) {
	var t *time.Timer
	timeout := h.testTimeout
	if timeout == nil {
		t = time.NewTimer(h.dt)
		timeout = t.C
	}
	done := make(chan struct{})
	// All handler output goes into tw's in-memory buffer; nothing touches
	// the real ResponseWriter until we know which side of the race won.
	tw := &timeoutWriter{
		w: w,
		h: make(Header),
	}
	go func() {
		h.handler.ServeHTTP(tw, r)
		close(done)
	}()
	select {
	case <-done:
		// Holding tw.mu here excludes any stray late writes while the
		// buffered response is flushed to the real writer.
		tw.mu.Lock()
		defer tw.mu.Unlock()
		dst := w.Header()
		for k, vv := range tw.h {
			dst[k] = vv
		}
		if !tw.wroteHeader {
			tw.code = StatusOK
		}
		w.WriteHeader(tw.code)
		w.Write(tw.wbuf.Bytes())
		if t != nil {
			t.Stop()
		}
	case <-timeout:
		tw.mu.Lock()
		defer tw.mu.Unlock()
		w.WriteHeader(StatusServiceUnavailable)
		io.WriteString(w, h.errorBody())
		// Marking timedOut under the lock makes later tw.Write calls
		// return ErrHandlerTimeout instead of buffering.
		tw.timedOut = true
		return
	}
}
// timeoutWriter is the ResponseWriter handed to the wrapped handler by
// timeoutHandler. It buffers headers, status and body in memory; the
// buffered response is only replayed onto w if the handler beats the
// timeout. mu guards all mutable fields against the race between the
// handler goroutine and the timeout path.
type timeoutWriter struct {
	w    ResponseWriter // the real writer; written only by timeoutHandler.ServeHTTP
	h    Header         // buffered response headers
	wbuf bytes.Buffer   // buffered response body

	mu          sync.Mutex
	timedOut    bool // set once the timeout has fired; later writes fail
	wroteHeader bool
	code        int // buffered status code
}
// Header returns the buffered header map; it is copied to the real
// ResponseWriter only if the handler finishes before the timeout.
func (tw *timeoutWriter) Header() Header { return tw.h }

// Write buffers p in memory. After the timeout has fired it fails with
// ErrHandlerTimeout instead.
func (tw *timeoutWriter) Write(p []byte) (int, error) {
	tw.mu.Lock()
	defer tw.mu.Unlock()
	if tw.timedOut {
		return 0, ErrHandlerTimeout
	}
	if !tw.wroteHeader {
		tw.writeHeader(StatusOK)
	}
	return tw.wbuf.Write(p)
}

// WriteHeader records the status code; duplicate calls and calls after
// a timeout are silently ignored.
func (tw *timeoutWriter) WriteHeader(code int) {
	tw.mu.Lock()
	defer tw.mu.Unlock()
	if tw.timedOut || tw.wroteHeader {
		return
	}
	tw.writeHeader(code)
}

// writeHeader records code. Callers must hold tw.mu.
func (tw *timeoutWriter) writeHeader(code int) {
	tw.wroteHeader = true
	tw.code = code
}
// tcpKeepAliveListener sets TCP keep-alive timeouts on accepted
// connections. It's used by ListenAndServe and ListenAndServeTLS so
// dead TCP connections (e.g. closing laptop mid-download) eventually
// go away.
type tcpKeepAliveListener struct {
	*net.TCPListener
}

// Accept waits for the next TCP connection and enables keep-alive
// probes with a 3 minute period before returning it.
// NOTE(review): errors from SetKeepAlive/SetKeepAlivePeriod are ignored
// here — presumably keep-alive is treated as best-effort; confirm.
func (ln tcpKeepAliveListener) Accept() (c net.Conn, err error) {
	tc, err := ln.AcceptTCP()
	if err != nil {
		return
	}
	tc.SetKeepAlive(true)
	tc.SetKeepAlivePeriod(3 * time.Minute)
	return tc, nil
}
// globalOptionsHandler responds to "OPTIONS *" requests.
type globalOptionsHandler struct{}

// ServeHTTP answers with an empty body, draining at most 4KB of any
// request body so the connection can still be reused afterwards.
func (globalOptionsHandler) ServeHTTP(w ResponseWriter, r *Request) {
	w.Header().Set("Content-Length", "0")
	if r.ContentLength != 0 {
		// Read up to 4KB of OPTIONS body (as mentioned in the
		// spec as being reserved for future use), but anything
		// over that is considered a waste of server resources
		// (or an attack) and we abort and close the connection,
		// courtesy of MaxBytesReader's EOF behavior.
		mb := MaxBytesReader(w, r.Body, 4<<10)
		io.Copy(ioutil.Discard, mb)
	}
}
// initNPNRequest is an HTTP handler that initializes certain
// uninitialized fields in its *Request. Such partially-initialized
// Requests come from NPN protocol handlers.
type initNPNRequest struct {
	c *tls.Conn
	h serverHandler
}

// ServeHTTP backfills the TLS connection state, a non-nil Body and the
// remote address on req (only where missing), then delegates to the
// wrapped serverHandler.
func (h initNPNRequest) ServeHTTP(rw ResponseWriter, req *Request) {
	if req.TLS == nil {
		req.TLS = &tls.ConnectionState{}
		*req.TLS = h.c.ConnectionState()
	}
	if req.Body == nil {
		req.Body = NoBody
	}
	if req.RemoteAddr == "" {
		req.RemoteAddr = h.c.RemoteAddr().String()
	}
	h.h.ServeHTTP(rw, req)
}
// loggingConn is used for debugging. It wraps a net.Conn and logs every
// Read/Write/Close under a unique per-connection name.
type loggingConn struct {
	name string
	net.Conn
}

// uniqNameMu guards uniqNameNext, which hands out a monotonically
// increasing sequence number per baseName.
var (
	uniqNameMu   sync.Mutex
	uniqNameNext = make(map[string]int)
)

// newLoggingConn wraps c so all I/O is logged; the connection is named
// "<baseName>-<n>" with n unique for each baseName.
func newLoggingConn(baseName string, c net.Conn) net.Conn {
	uniqNameMu.Lock()
	defer uniqNameMu.Unlock()
	uniqNameNext[baseName]++
	return &loggingConn{
		name: fmt.Sprintf("%s-%d", baseName, uniqNameNext[baseName]),
		Conn: c,
	}
}
// Write logs both before and after delegating to the wrapped Conn, so a
// call that blocks inside the underlying Write is still visible in the log.
func (c *loggingConn) Write(p []byte) (n int, err error) {
	log.Printf("%s.Write(%d) = ....", c.name, len(p))
	n, err = c.Conn.Write(p)
	log.Printf("%s.Write(%d) = %d, %v", c.name, len(p), n, err)
	return
}

// Read logs before/after the underlying Read; see Write.
func (c *loggingConn) Read(p []byte) (n int, err error) {
	log.Printf("%s.Read(%d) = ....", c.name, len(p))
	n, err = c.Conn.Read(p)
	log.Printf("%s.Read(%d) = %d, %v", c.name, len(p), n, err)
	return
}

// Close logs before/after closing the underlying Conn.
func (c *loggingConn) Close() (err error) {
	log.Printf("%s.Close() = ...", c.name)
	err = c.Conn.Close()
	log.Printf("%s.Close() = %v", c.name, err)
	return
}
// checkConnErrorWriter writes to c.rwc and records any write errors to c.werr.
// It only contains one field (and a pointer field at that), so it
// fits in an interface value without an extra allocation.
type checkConnErrorWriter struct {
	c *conn
}

// Write forwards to the connection's rwc. The first error is latched in
// c.werr (later errors do not overwrite it) and the connection's context
// is cancelled.
func (w checkConnErrorWriter) Write(p []byte) (n int, err error) {
	n, err = w.c.rwc.Write(p)
	if err != nil && w.c.werr == nil {
		w.c.werr = err
		w.c.cancelCtx()
	}
	return
}
// numLeadingCRorLF reports how many consecutive bytes at the start of v
// are carriage returns ('\r') or line feeds ('\n').
func numLeadingCRorLF(v []byte) (n int) {
	for n < len(v) && (v[n] == '\r' || v[n] == '\n') {
		n++
	}
	return
}
// strSliceContains reports whether s appears in ss.
func strSliceContains(ss []string, s string) bool {
	for i := range ss {
		if ss[i] == s {
			return true
		}
	}
	return false
}
| linux-on-ibm-z/go | src/net/http/server.go | GO | bsd-3-clause | 95,139 |
/*
Copyright (c) 2006-2013, Alexis Royer, http://alexis.royer.free.fr/CLI
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the CLI library project nor the names of its contributors may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "cli/pch.h"
#include <string.h> // strcmp
#include "cli/tk.h"
CLI_NS_BEGIN(cli)
CLI_NS_BEGIN(tk)
// Comparison operators for tk::String.
//
// Every operator delegates to strcmp(), relying on String's implicit
// conversion to const char*. Comparisons are therefore byte-wise and
// case-sensitive, with no locale or Unicode awareness.

//! Equality between a C string and a String.
bool operator==(const char* const STR_String1, const String& STR_String2)
{
    return (strcmp(STR_String1, STR_String2) == 0);
}

//! Equality between a String and a C string.
bool operator==(const String& STR_String1, const char* const STR_String2)
{
    return (strcmp(STR_String1, STR_String2) == 0);
}

//! Equality between two Strings.
bool operator==(const String& STR_String1, const String& STR_String2)
{
    return (strcmp(STR_String1, STR_String2) == 0);
}

//! Inequality between a C string and a String.
bool operator!=(const char* const STR_String1, const String& STR_String2)
{
    return (strcmp(STR_String1, STR_String2) != 0);
}

//! Inequality between a String and a C string.
bool operator!=(const String& STR_String1, const char* const STR_String2)
{
    return (strcmp(STR_String1, STR_String2) != 0);
}

//! Inequality between two Strings.
bool operator!=(const String& STR_String1, const String& STR_String2)
{
    return (strcmp(STR_String1, STR_String2) != 0);
}

//! Lexicographic (byte-wise) ordering operators between two Strings.
bool operator<(const String& STR_String1, const String& STR_String2)
{
    return (strcmp(STR_String1, STR_String2) < 0);
}

bool operator>(const String& STR_String1, const String& STR_String2)
{
    return (strcmp(STR_String1, STR_String2) > 0);
}

bool operator<=(const String& STR_String1, const String& STR_String2)
{
    return (strcmp(STR_String1, STR_String2) <= 0);
}

bool operator>=(const String& STR_String1, const String& STR_String2)
{
    return (strcmp(STR_String1, STR_String2) >= 0);
}
CLI_NS_END(tk)
CLI_NS_END(cli)
| kn65op/cli-toolkit | cpp/src/tk.cpp | C++ | bsd-3-clause | 3,472 |
<?php

use yii\helpers\Html;

/* Mobile view: 报名详情 (sign-up details list).
 * Expects $post_list: an array of rows, each carrying the keys
 * 'parent_name', 'baby_age' and 'parent_mobile'.
 */
$this->context->layout = 'mobile_layout';
$this->title = '报名详情';
?>
<?= Html::cssFile('@web/css/mobile/zhaosheng.css') ?>
<div class="header">
    <div class="title">报名详情</div>
    <div class="btn-left">
        <?php /* Back arrow: the raw "<" was invalid HTML; use &lt;. */ ?>
        <a style="color:#ffffff;font-size: 25px;" href="javascript:history.go(-1)">&lt;</a>
    </div>
</div>
<div class="light_bg padding">
    <div style="height:50px;"></div>
    <table class="table table-striped">
        <tr>
            <th>家长姓名</th>
            <th>宝宝年龄</th>
            <th>联系电话</th>
        </tr>
        <?php foreach ($post_list as $post_item): ?>
        <tr>
            <?php /* User-submitted values must be encoded to prevent stored XSS. */ ?>
            <td><?= Html::encode($post_item['parent_name']) ?></td>
            <td><?= Html::encode($post_item['baby_age']) ?></td>
            <td><?= Html::encode($post_item['parent_mobile']) ?></td>
        </tr>
        <?php endforeach; ?>
    </table>
</div>
<div style="height:80px;"></div>
| 329221391/home | views/zhaosheng/post_list.php | PHP | bsd-3-clause | 906 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides a layer of abstraction for the issue tracker API."""
import logging
from apiclient import discovery
from apiclient import errors
import httplib2
_DISCOVERY_URI = ('https://monorail-prod.appspot.com'
'/_ah/api/discovery/v1/apis/{api}/{apiVersion}/rest')
class IssueTrackerService(object):
  """Class for updating bug issues via the Monorail API."""

  def __init__(self, http=None, additional_credentials=None):
    """Initializes an object for adding and updating bugs on the issue tracker.

    This object can be re-used to make multiple requests without calling
    apiclient.discovery.build multiple times.

    This class makes requests to the Monorail API.
    API explorer: https://goo.gl/xWd0dX

    Args:
      http: A Http object to pass to request.execute; this should be an
          Http object that's already authenticated via OAuth2.
      additional_credentials: A credentials object, e.g. an instance of
          oauth2client.client.SignedJwtAssertionCredentials. This includes
          the email and secret key of a service account.
    """
    self._http = http or httplib2.Http()
    if additional_credentials:
      additional_credentials.authorize(self._http)
    # discovery.build performs a network fetch of the API description.
    self._service = discovery.build(
        'monorail', 'v1', discoveryServiceUrl=_DISCOVERY_URI,
        http=self._http)

  def AddBugComment(self, bug_id, comment, status=None, cc_list=None,
                    merge_issue=None, labels=None, owner=None):
    """Adds a comment with the bisect results to the given bug.

    Args:
      bug_id: Bug ID of the issue to update.
      comment: Bisect results information.
      status: A string status for bug, e.g. Assigned, Duplicate, WontFix, etc.
      cc_list: List of email addresses of users to add to the CC list.
      merge_issue: ID of the issue to be merged into; specifying this option
          implies that the status should be "Duplicate".
      labels: List of labels for bug.
      owner: Owner of the bug.

    Returns:
      True if successful, False otherwise.
    """
    if not bug_id or bug_id < 0:
      return False
    body = {'content': comment}
    updates = {}
    # Mark issue as duplicate when relevant bug ID is found in the datastore.
    # Avoid marking an issue as duplicate of itself.
    if merge_issue and int(merge_issue) != bug_id:
      status = 'Duplicate'
      updates['mergedInto'] = merge_issue
      logging.info('Bug %s marked as duplicate of %s', bug_id, merge_issue)
    if status:
      updates['status'] = status
    if cc_list:
      updates['cc'] = cc_list
    if labels:
      updates['labels'] = labels
    if owner:
      updates['owner'] = owner
    body['updates'] = updates
    return self._MakeCommentRequest(bug_id, body)

  def List(self, **kwargs):
    """Make a request to the issue tracker to list bugs."""
    request = self._service.issues().list(projectId='chromium', **kwargs)
    return self._ExecuteRequest(request)

  def _MakeCommentRequest(self, bug_id, body):
    """Make a request to the issue tracker to update a bug.

    Returns:
      True on success, False (after logging) on failure.
    """
    request = self._service.issues().comments().insert(
        projectId='chromium',
        issueId=bug_id,
        body=body)
    response = self._ExecuteRequest(request)
    if not response:
      logging.error('Error updating bug %s with body %s', bug_id, body)
      return False
    return True

  def NewBug(self, title, description, labels=None, components=None,
             owner=None):
    """Creates a new bug.

    Args:
      title: The short title text of the bug.
      description: The body text for the bug.
      labels: Starting labels for the bug.
      components: Starting components for the bug.
      owner: Starting owner account name.

    Returns:
      The new bug ID if successfully created, or None.
    """
    body = {
        'title': title,
        'summary': title,
        'description': description,
        'labels': labels or [],
        'components': components or [],
        'status': 'Assigned',
    }
    if owner:
      body['owner'] = {'name': owner}
    return self._MakeCreateRequest(body)

  def _MakeCreateRequest(self, body):
    """Makes a request to create a new bug.

    Args:
      body: The request body parameter dictionary.

    Returns:
      A bug ID if successful, or None otherwise.
    """
    request = self._service.issues().insert(projectId='chromium', body=body)
    response = self._ExecuteRequest(request)
    if response and 'id' in response:
      return response['id']
    return None

  def GetLastBugCommentsAndTimestamp(self, bug_id):
    """Gets last updated comments and timestamp in the given bug.

    Args:
      bug_id: Bug ID of the issue to update.

    Returns:
      A dictionary with last comment and timestamp, or None on failure.
    """
    if not bug_id or bug_id < 0:
      return None
    response = self._MakeGetCommentsRequest(bug_id)
    if response and all(v in response.keys()
                        for v in ['totalResults', 'items']):
      # NOTE(review): assumes len(items) == totalResults; if the API ever
      # paginates this response the index below would be wrong -- confirm.
      bug_comments = response.get('items')[response.get('totalResults') - 1]
      if bug_comments.get('content') and bug_comments.get('published'):
        return {
            'comment': bug_comments.get('content'),
            'timestamp': bug_comments.get('published')
        }
    return None

  def _MakeGetCommentsRequest(self, bug_id):
    """Make a request to the issue tracker to get comments in the bug."""
    # TODO (prasadv): By default the max number of comments retrieved in
    # one request is 100. Since bisect-fyi jobs may have more then 100
    # comments for now we set this maxResults count as 10000.
    # Remove this max count once we find a way to clear old comments
    # on FYI issues.
    request = self._service.issues().comments().list(
        projectId='chromium',
        issueId=bug_id,
        maxResults=10000)
    return self._ExecuteRequest(request)

  def _ExecuteRequest(self, request):
    """Make a request to the issue tracker.

    Args:
      request: The request object, which has a execute method.

    Returns:
      The response if there was one, or else None.
    """
    try:
      response = request.execute(http=self._http)
      return response
    except errors.HttpError as e:
      # Failures are logged and swallowed; callers treat None as failure.
      logging.error(e)
      return None
| SummerLW/Perf-Insight-Report | dashboard/dashboard/issue_tracker_service.py | Python | bsd-3-clause | 6,428 |
using System.Reflection;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Orchard.Profile")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyProduct("Orchard")]
[assembly: AssemblyCopyright("Copyright © .NET Foundation")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("669564db-5b8e-4378-b40a-03d3b9bec4c9")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// Both versions track the Orchard release number and should be bumped together.
[assembly: AssemblyVersion("1.9.3")]
[assembly: AssemblyFileVersion("1.9.3")]
| RoyalVeterinaryCollege/Orchard | src/Orchard.Profile/Properties/AssemblyInfo.cs | C# | bsd-3-clause | 1,294 |
#ifndef DUNE_AX1_ELECTROLYTE_HH
#define DUNE_AX1_ELECTROLYTE_HH
#include <valarray>
#include <vector>
#include <dune/ax1/common/constants.hh>
// Ion
// Describes one ion species: its valence (signed charge number), a
// display name, and a relative concentration used as the reference
// value in the stationary case.
template<class T>
class Ion
{
  public:
    Ion (T valence_, std::string name_, T relCon_=1.0)
      : valence(valence_), relCon(relCon_), name(name_)
    {}

    //! Signed charge number of the species.
    T getValence () const
    {
      return valence;
    }

    //! Relative concentration for the stationary case.
    T getRelCon () const
    {
      return relCon;
    }

    //! Human-readable species name.
    std::string getName() const
    {
      return name;
    }

  private:
    T valence;
    T relCon; // relative concentration for stationary case
    // NOTE(review): diffConst is never initialized or read anywhere in this
    // header; per-species diffusion coefficients are stored in
    // Electrolyte::con_diffWater instead. Candidate for removal.
    T diffConst;
    std::string name;
};
// Solvent
// Minimal description of a solvent medium: it is characterized solely
// by its dielectric permittivity.
template<class T>
class Solvent
{
  public:
    //! Construct a solvent with the given permittivity.
    Solvent (T permittivity_)
      : permittivity(permittivity_)
    {}

    //! Read access to the permittivity.
    T getPermittivity () const { return permittivity; }

  private:
    T permittivity;
};
// Electrolyte
// An ionic solution: a collection of Ion species plus the physical
// parameters (permittivity, temperature, scaling concentration) needed
// for the Poisson-Boltzmann right-hand side and related quantities.
template<class T>
class Electrolyte
{
  public:
    //! lengthScale converts one grid length unit into SI meters for the
    //! dimensionless Poisson constant.
    Electrolyte (const T permittivity_, const T temperature_, const T stdCon_, const T lengthScale)
      : permittivity(permittivity_), temperature(temperature_), stdCon(stdCon_)
    {
      // Debye screening length, from the constants in constants.hh.
      debyeLength = std::sqrt( 0.5 * con_eps0 * con_k * temperature / ( con_e * con_e * stdCon ) );
      //lengthConstantSqr = con_eps0 * con_k * temperature / ( con_e * con_e * stdCon );
      // Dimensionless prefactor of the Poisson equation on the scaled grid.
      poissonConstant = con_e * con_e * stdCon * lengthScale * lengthScale / ( con_eps0 * con_k * temperature );
    }

    T getDebyeLength () const
    {
      return debyeLength;
    }

    T getPoissonConstant () const
    {
      return poissonConstant;
    }

    T getPermittivity () const
    {
      return permittivity;
    }

    void setPermittivity(T perm)
    {
      permittivity = perm;
    }

    T getTemperature () const
    {
      return temperature;
    }

    //! Scaling (reference) concentration.
    T getStdCon () const
    {
      return stdCon;
    }

    //! Add an ion species to the electrolyte; resizes the matching
    //! diffusion-coefficient table (new entries are value-initialized).
    void addIon (Ion<T> ion)
    {
      ions.push_back(ion);
      con_diffWater.resize(ions.size());
    }

    //! Number of ion species.
    int numOfSpecies () const
    {
      return ions.size();
    }

    //! Right hand side for the Poisson-Boltzmann equation at potential phi.
    T rhsPoissonBoltzmann (const T phi) const
    {
      T sum = 0.0;
      // unsigned index avoids a signed/unsigned comparison with size().
      for (unsigned int i=0; i<ions.size(); ++i)
      {
        sum = sum + ions[i].getValence() * ions[i].getRelCon() * exp(-ions[i].getValence() * phi);
      }
      return - 0.5 * sum / ( debyeLength * debyeLength );
    }

    //! Concentration of ion species i for the stationary (Boltzmann) case.
    T getConcentration (const int& i, const T& phi) const
    {
      return stdCon * ions[i].getRelCon() * exp(-ions[i].getValence() * phi);
    }

    //! Diffusion constant of one ion species.
    T getDiffConst ( const unsigned int ionSpecies ) const
    {
      // BUGFIX: was 'ionSpecies <= con_diffWater.size()', which allowed an
      // out-of-bounds read at ionSpecies == size().
      assert(ionSpecies < con_diffWater.size());
      return con_diffWater[ionSpecies];
    }

    void setDiffConst ( const unsigned int ionSpecies, T diffCoeff )
    {
      // BUGFIX: see getDiffConst; '<=' permitted an out-of-bounds write.
      assert(ionSpecies < con_diffWater.size());
      con_diffWater[ionSpecies] = diffCoeff;
    }

    //! Valence of ion species.
    T getValence ( const unsigned int ionSpecies ) const
    {
      return ions[ionSpecies].getValence();
    }

    //! Name of ion species.
    std::string getIonName ( const unsigned int ionSpecies ) const
    {
      return ions[ionSpecies].getName();
    }

    //! Accumulate the charge contribution of one species into chargeDensity.
    void addToChargeDensity(std::valarray<T>& chargeDensity,
                            const std::valarray<T>& concentrations,
                            const unsigned int ionSpecies)
    {
      chargeDensity += ions[ionSpecies].getValence() * concentrations;
    }

  private:
    T permittivity;
    std::vector<Ion<T> > ions;    // collection of ion species
    std::vector<T> con_diffWater; // corresponding diff coeffs for ions
    T temperature;
    T stdCon;                     // scaling concentration
    T debyeLength;
    T poissonConstant;
};
#endif // DUNE_AX1_ELECTROLYTE_HH
| pederpansen/dune-ax1 | dune/ax1/common/electrolyte.hh | C++ | bsd-3-clause | 3,901 |
package edu.umass.cs.jfoley.coop.conll.classifier;
import ciir.jfoley.chai.collections.Pair;
import gnu.trove.map.hash.TIntFloatHashMap;
import java.util.Iterator;
/**
* @author jfoley.
*/
/**
 * A sparse feature vector backed by a Trove int-&gt;float hash map; absent
 * feature ids implicitly have value zero.
 *
 * @author jfoley.
 */
public class SparseFloatFeatures implements FeatureVector {
  // BUGFIX: was never initialized, so iterator() threw a NullPointerException.
  TIntFloatHashMap features = new TIntFloatHashMap();

  /** Iterates (featureId, value) pairs in the map's key order. */
  @Override
  public Iterator<Pair<Integer, Float>> iterator() {
    final int[] keys = features.keys();
    return new Iterator<Pair<Integer, Float>>() {
      int pos = 0;

      @Override
      public boolean hasNext() {
        return pos < keys.length;
      }

      @Override
      public Pair<Integer, Float> next() {
        // BUGFIX: previously returned Pair.of(pos++, val), pairing the
        // iteration position instead of the feature id with the value.
        int featureId = keys[pos++];
        return Pair.of(featureId, features.get(featureId));
      }
    };
  }
}
| jjfiv/coop | src/main/java/edu/umass/cs/jfoley/coop/conll/classifier/SparseFloatFeatures.java | Java | bsd-3-clause | 712 |
/*
* This file is part of RHexLib,
*
* Copyright (c) 2001 The University of Michigan, its Regents,
* Fellows, Employees and Agents. All rights reserved, and distributed as
* free software under the following license.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1) Redistributions of source code must retain the above copyright
* notice, this list of conditions, the following disclaimer and the
* file called "CREDITS" which accompanies this distribution.
*
* 2) Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions, the following disclaimer and the file
* called "CREDITS" which accompanies this distribution in the
* documentation and/or other materials provided with the distribution.
*
* 3) Neither the name of the University of Michigan, Ann Arbor or the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*********************************************************************
* $Id: MAnalogProbe.cc,v 1.3 2001/08/05 18:13:24 ulucs Exp $
*
* The MAnalogProbe module. Handles the analog inputs in the Michigan Hardware.
*
* Created : Uluc Saranli, 01/19/2001
* Last Modified : Uluc Saranli, 06/27/2001
*
********************************************************************/
#include <stdio.h>
#include <math.h>
#include "io/mpc550.hh"
#include "MichiganHW.hh"
#include "MComponents.hh"
#include "MAnalogProbe.hh"
// MAnalogProbe::MAnalogProbe : Class constructor
//
// numchan: number of analog channels to scan per module-manager step
//          (the value/timestamp tables hold up to 16 entries).
// polling: forwarded to the Module base class.
MAnalogProbe::MAnalogProbe( uint numchan, bool polling )
  : Module( "manalogprobe", 0, false, polling ) {

  channels = numchan;
  // A/D control word used for every conversion: 5V unipolar range.
  // NOTE(review): the 0x40 bit's meaning comes from the MPC550 register
  // map (see mpc550.hh) -- confirm before changing.
  controlWord = 0x40 | ADIO_RANGE5V | ADIO_UNIPOLAR;
  // Scan state: no channel acquired yet, no conversion in flight.
  curChannel = lastChannel = 0;
  stepMark = 0;
  convCount = 0;
  conversion = false;
  // Clear all 16 channel readings and their timestamps.
  memset( value, 0, 16 * sizeof( float ) );
  memset( timestamp, 0, 16 * sizeof( CLOCK ) );
}
// update : one step of the acquisition state machine.
//
// Each module-manager step, the probe walks through all configured
// channels, issuing one A/D conversion at a time (channels 0-7 on ADC1,
// 8-15 on ADC2) and storing the scaled result and a timestamp when the
// conversion completes. Once every channel has been read, it idles until
// the module manager advances to a new step.
void MAnalogProbe::update( void ) {

  if ( curChannel >= channels ) {
    if ( MMGetStepCount() == stepMark ) {
      // Done with all the channels and waiting for a new module manager step.
      // meaningful only when the module is polling
      return;
    } else {
      // New module manager step. We can restart acquisition of the channels
      curChannel = 0;
      stepMark = MMGetStepCount();
    }
  }

  // Still not done with all the channels for the current run. Check pending
  // conversions and issue a new one if necessary
  if ( conversion ) {
    // There is a pending conversion, so try to read the data.
    // 12-bit result (0..0xfff) is scaled to the 0..5V input range.
    if ( MPC550Card->checkADC( 1 ) ) {
      // The conversion was from AD1
      value[ lastChannel ] = 5.0 * MPC550Card->readADC( 1 ) / 0xfff;
      conversion = false;
    } else if ( MPC550Card->checkADC( 2 ) ) {
      // The conversion was from AD2
      value[ lastChannel ] = 5.0 * MPC550Card->readADC( 2 ) / 0xfff;
      conversion = false;
    }

    if ( !conversion ) {
      // Conversion completed: record when this channel was sampled.
      timestamp[ lastChannel ] = MMReadClock();
      curChannel++;
    }
  }

  if ( !conversion && ( curChannel < channels ) ) {
    // If the previous conversion is done and there are still more channels
    // to go, start another conversion
    if ( curChannel < 8 ) {
      MPC550Card->acquireADC( 1, controlWord, curChannel );
    } else {
      MPC550Card->acquireADC( 2, controlWord, curChannel - 8 );
    }
    //    printf( "Starting conversion of channel %i\n", curChannel );
    conversion = true;
    lastChannel = curChannel;
  }
}
| kiik/RHexLib | hardware/UofM/MAnalogProbe.cc | C++ | bsd-3-clause | 4,389 |
import java.util.*;
/**
 * A single table row: a mapping from column name to a cell value of type T.
 */
public class Row<T>
{
    /** Column name -> cell value for this row. */
    private HashMap<String, T> rowValues;
    /** Number of columns the row was sized for (capacity hint only). */
    private int nColumns;

    public Row(int c)
    {
        nColumns = c;
        rowValues = new HashMap<String, T>(nColumns);
    }

    public HashMap<String, T> getRowValues()
    {
        return rowValues;
    }

    public void setRowValues(HashMap<String, T> rowValues)
    {
        this.rowValues = rowValues;
    }

    /** Stores value under columnName, replacing any previous cell. */
    public void insert(String columnName, T value)
    {
        rowValues.put(columnName, value);
    }

    /** Returns the cell for columnName, or null if the column is absent. */
    public T getFromRow(String columnName)
    {
        return rowValues.get(columnName);
    }

    /** Renders "Row Info " followed by each cell value and " | ". */
    public String toString()
    {
        StringBuilder rendered = new StringBuilder("Row Info ");
        for (String column : rowValues.keySet())
        {
            rendered.append(rowValues.get(column)).append(" | ");
        }
        return rendered.toString();
    }
}
| Nelspike/UniversityStuff | Spam ID3/src/Row.java | Java | bsd-3-clause | 760 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "native_client/src/trusted/plugin/pnacl_coordinator.h"
#include <utility>
#include <vector>
#include "native_client/src/include/checked_cast.h"
#include "native_client/src/include/portability_io.h"
#include "native_client/src/shared/platform/nacl_check.h"
#include "native_client/src/trusted/plugin/local_temp_file.h"
#include "native_client/src/trusted/plugin/manifest.h"
#include "native_client/src/trusted/plugin/plugin.h"
#include "native_client/src/trusted/plugin/plugin_error.h"
#include "native_client/src/trusted/plugin/pnacl_translate_thread.h"
#include "native_client/src/trusted/plugin/service_runtime.h"
#include "native_client/src/trusted/plugin/temporary_file.h"
#include "native_client/src/trusted/service_runtime/include/sys/stat.h"
#include "ppapi/c/pp_bool.h"
#include "ppapi/c/pp_errors.h"
#include "ppapi/c/ppb_file_io.h"
#include "ppapi/c/private/ppb_uma_private.h"
#include "ppapi/cpp/file_io.h"
namespace {
const char kPnaclTempDir[] = "/.pnacl";
const uint32_t kCopyBufSize = 512 << 10;
}
namespace plugin {
//////////////////////////////////////////////////////////////////////
// Pnacl-specific manifest support.
//////////////////////////////////////////////////////////////////////
// PnaclManifest resolves resource keys against the pnacl support files
// shipped with the browser/extension. It contains no program nexe; all
// URL resolution is a simple concatenation onto the pnacl base URL.
class PnaclManifest : public Manifest {
 public:
  PnaclManifest(const pp::URLUtil_Dev* url_util, bool use_extension)
      : url_util_(url_util),
        manifest_base_url_(PnaclUrls::GetBaseUrl(use_extension)) {
    // TODO(jvoung): get rid of use_extension when we no longer rely
    // on the chrome webstore extension.  Most of this Manifest stuff
    // can also be simplified then.
  }
  virtual ~PnaclManifest() { }

  // Always fails: this manifest describes tool/resource files only.
  virtual bool GetProgramURL(nacl::string* full_url,
                             nacl::string* cache_identity,
                             ErrorInfo* error_info,
                             bool* pnacl_translate) const {
    // Does not contain program urls.
    UNREFERENCED_PARAMETER(full_url);
    UNREFERENCED_PARAMETER(cache_identity);
    // NOTE(review): the UNREFERENCED_PARAMETER marking below is stale --
    // error_info *is* used for SetReport a few lines later.
    UNREFERENCED_PARAMETER(error_info);
    UNREFERENCED_PARAMETER(pnacl_translate);
    PLUGIN_PRINTF(("PnaclManifest does not contain a program\n"));
    error_info->SetReport(ERROR_MANIFEST_GET_NEXE_URL,
                          "pnacl manifest does not contain a program.");
    return false;
  }

  virtual bool ResolveURL(const nacl::string& relative_url,
                          nacl::string* full_url,
                          ErrorInfo* error_info) const {
    // Does not do general URL resolution, simply appends relative_url to
    // the end of manifest_base_url_.
    UNREFERENCED_PARAMETER(error_info);
    *full_url = manifest_base_url_ + relative_url;
    return true;
  }

  virtual bool GetFileKeys(std::set<nacl::string>* keys) const {
    // Does not support enumeration.
    PLUGIN_PRINTF(("PnaclManifest does not support key enumeration\n"));
    UNREFERENCED_PARAMETER(keys);
    return false;
  }

  // Resolves "files/<name>" keys to <base_url><name>; any other key is an
  // error. Extension files are native, so no translation and no caching.
  virtual bool ResolveKey(const nacl::string& key,
                          nacl::string* full_url,
                          nacl::string* cache_identity,
                          ErrorInfo* error_info,
                          bool* pnacl_translate) const {
    // All of the extension files are native (do not require pnacl translate).
    *pnacl_translate = false;
    // Do not cache these entries.
    *cache_identity = "";
    // We can only resolve keys in the files/ namespace.
    const nacl::string kFilesPrefix = "files/";
    size_t files_prefix_pos = key.find(kFilesPrefix);
    if (files_prefix_pos == nacl::string::npos) {
      error_info->SetReport(ERROR_MANIFEST_RESOLVE_URL,
                            "key did not start with files/");
      return false;
    }
    // Append what follows files to the pnacl URL prefix.
    nacl::string key_basename = key.substr(kFilesPrefix.length());
    return ResolveURL(key_basename, full_url, error_info);
  }

 private:
  NACL_DISALLOW_COPY_AND_ASSIGN(PnaclManifest);

  const pp::URLUtil_Dev* url_util_;
  nacl::string manifest_base_url_;
};
// TEMPORARY: ld needs to look up dynamic libraries in the nexe's manifest
// until metadata is complete in pexes.  This manifest lookup allows looking
// for whether a resource requested by ld is in the nexe manifest first, and
// if not, then consults the extension manifest.
// TODO(sehr,jvoung,pdox): remove this when metadata is correct.
//
// Every virtual below follows the same two-step pattern: try the nexe
// manifest, fall back to the extension manifest on failure.
class PnaclLDManifest : public Manifest {
 public:
  PnaclLDManifest(const Manifest* nexe_manifest,
                  const Manifest* extension_manifest)
      : nexe_manifest_(nexe_manifest),
        extension_manifest_(extension_manifest) {
    CHECK(nexe_manifest != NULL);
    CHECK(extension_manifest != NULL);
  }
  virtual ~PnaclLDManifest() { }

  virtual bool GetProgramURL(nacl::string* full_url,
                             nacl::string* cache_identity,
                             ErrorInfo* error_info,
                             bool* pnacl_translate) const {
    if (nexe_manifest_->GetProgramURL(full_url, cache_identity,
                                      error_info, pnacl_translate)) {
      return true;
    }
    return extension_manifest_->GetProgramURL(full_url, cache_identity,
                                              error_info, pnacl_translate);
  }

  virtual bool ResolveURL(const nacl::string& relative_url,
                          nacl::string* full_url,
                          ErrorInfo* error_info) const {
    if (nexe_manifest_->ResolveURL(relative_url, full_url, error_info)) {
      return true;
    }
    return extension_manifest_->ResolveURL(relative_url, full_url, error_info);
  }

  virtual bool GetFileKeys(std::set<nacl::string>* keys) const {
    if (nexe_manifest_->GetFileKeys(keys)) {
      return true;
    }
    return extension_manifest_->GetFileKeys(keys);
  }

  virtual bool ResolveKey(const nacl::string& key,
                          nacl::string* full_url,
                          nacl::string* cache_identity,
                          ErrorInfo* error_info,
                          bool* pnacl_translate) const {
    if (nexe_manifest_->ResolveKey(key, full_url, cache_identity,
                                   error_info, pnacl_translate)) {
      return true;
    }
    return extension_manifest_->ResolveKey(key, full_url, cache_identity,
                                           error_info, pnacl_translate);
  }

 private:
  NACL_DISALLOW_COPY_AND_ASSIGN(PnaclLDManifest);

  const Manifest* nexe_manifest_;      // consulted first; not owned
  const Manifest* extension_manifest_; // fallback; not owned
};
//////////////////////////////////////////////////////////////////////
// UMA stat helpers.
//////////////////////////////////////////////////////////////////////
namespace {

// Histogram bucket parameters shared by the helpers below. Each helper
// silently drops out-of-domain samples and tolerates an absent UMA
// interface, so callers never need to check preconditions.

// Assume translation time metrics *can be* large.
// Up to 12 minutes.
const int64_t kTimeLargeMin = 10;          // in ms
const int64_t kTimeLargeMax = 720000;      // in ms
const uint32_t kTimeLargeBuckets = 100;

const int32_t kSizeKBMin = 1;
const int32_t kSizeKBMax = 512*1024;       // very large .pexe / .nexe.
const uint32_t kSizeKBBuckets = 100;

const int32_t kRatioMin = 10;
const int32_t kRatioMax = 10*100;          // max of 10x difference.
const uint32_t kRatioBuckets = 100;

const int32_t kKBPSMin = 1;
const int32_t kKBPSMax = 30*1000;          // max of 30 MB / sec.
const uint32_t kKBPSBuckets = 100;

// Looks up the (private) UMA histogram interface; may return NULL.
const PPB_UMA_Private* GetUMAInterface() {
  pp::Module *module = pp::Module::Get();
  DCHECK(module);
  return static_cast<const PPB_UMA_Private*>(
      module->GetBrowserInterface(PPB_UMA_PRIVATE_INTERFACE));
}

// Records a duration sample (ms); negative samples are dropped.
void HistogramTime(const std::string& name, int64_t ms) {
  if (ms < 0) return;

  const PPB_UMA_Private* ptr = GetUMAInterface();
  if (ptr == NULL) return;

  ptr->HistogramCustomTimes(pp::Var(name).pp_var(),
                            ms,
                            kTimeLargeMin, kTimeLargeMax,
                            kTimeLargeBuckets);
}

// Records a size sample in kilobytes; negative samples are dropped.
void HistogramSizeKB(const std::string& name, int32_t kb) {
  if (kb < 0) return;

  const PPB_UMA_Private* ptr = GetUMAInterface();
  if (ptr == NULL) return;

  ptr->HistogramCustomCounts(pp::Var(name).pp_var(),
                             kb,
                             kSizeKBMin, kSizeKBMax,
                             kSizeKBBuckets);
}

// Records 100*a/b (a percentage-style ratio); dropped if a < 0 or b <= 0.
void HistogramRatio(const std::string& name, int64_t a, int64_t b) {
  if (a < 0 || b <= 0) return;

  const PPB_UMA_Private* ptr = GetUMAInterface();
  if (ptr == NULL) return;

  ptr->HistogramCustomCounts(pp::Var(name).pp_var(),
                             100 * a / b,
                             kRatioMin, kRatioMax,
                             kRatioBuckets);
}

// Records a throughput sample kb/s; dropped if kb < 0 or s <= 0.
void HistogramKBPerSec(const std::string& name, double kb, double s) {
  if (kb < 0.0 || s <= 0.0) return;

  const PPB_UMA_Private* ptr = GetUMAInterface();
  if (ptr == NULL) return;

  ptr->HistogramCustomCounts(pp::Var(name).pp_var(),
                             static_cast<int64_t>(kb / s),
                             kKBPSMin, kKBPSMax,
                             kKBPSBuckets);
}

// Records a boolean cache hit/miss sample for the translation cache.
void HistogramEnumerateTranslationCache(bool hit) {
  const PPB_UMA_Private* ptr = GetUMAInterface();
  if (ptr == NULL) return;
  ptr->HistogramEnumeration(pp::Var("NaCl.Perf.PNaClCache.IsHit").pp_var(),
                            hit, 2);
}

}  // namespace
//////////////////////////////////////////////////////////////////////
// The coordinator class.
//////////////////////////////////////////////////////////////////////
// Out-of-line destructor to keep it from getting put in every .o where
// callback_source.h is included
// Explicit specialization for the FileStreamData callback source; the empty
// body lives here (out of line) per the comment above this block.
template<>
CallbackSource<FileStreamData>::~CallbackSource() {}
// Factory entry point: allocates a coordinator for translating the pexe at
// |pexe_url| and starts fetching the llc/ld translator resources.  The
// remaining work continues asynchronously in ResourcesDidLoad, and
// |translate_notify_callback| is eventually run with the final result.
PnaclCoordinator* PnaclCoordinator::BitcodeToNative(
    Plugin* plugin,
    const nacl::string& pexe_url,
    const nacl::string& cache_identity,
    const pp::CompletionCallback& translate_notify_callback) {
  PLUGIN_PRINTF(("PnaclCoordinator::BitcodeToNative (plugin=%p, pexe=%s)\n",
                 static_cast<void*>(plugin), pexe_url.c_str()));
  PnaclCoordinator* coord = new PnaclCoordinator(
      plugin, pexe_url, cache_identity, translate_notify_callback);
  coord->pnacl_init_time_ = NaClGetTimeOfDayMicroseconds();
  coord->off_the_record_ = plugin->nacl_interface()->IsOffTheRecord();
  PLUGIN_PRINTF(("PnaclCoordinator::BitcodeToNative (manifest=%p, "
                 "off_the_record=%d)\n",
                 reinterpret_cast<const void*>(coord->manifest_.get()),
                 coord->off_the_record_));
  // Load llc and ld.
  std::vector<nacl::string> resource_urls;
  resource_urls.push_back(PnaclUrls::GetLlcUrl());
  resource_urls.push_back(PnaclUrls::GetLdUrl());
  pp::CompletionCallback resources_cb =
      coord->callback_factory_.NewCallback(
          &PnaclCoordinator::ResourcesDidLoad);
  coord->resources_.reset(new PnaclResources(plugin,
                                             coord,
                                             coord->manifest_.get(),
                                             resource_urls,
                                             resources_cb));
  CHECK(coord->resources_ != NULL);
  coord->resources_->StartLoad();
  // ResourcesDidLoad will be invoked when all resources have been received.
  return coord;
}
// Converts a completed load of |url| into a POSIX file descriptor.  On any
// failure an error is reported through the plugin (abort vs. resource-fetch
// error) and NACL_NO_FILE_DESC is returned.  |component| names the resource
// in error messages.
int32_t PnaclCoordinator::GetLoadedFileDesc(int32_t pp_error,
                                            const nacl::string& url,
                                            const nacl::string& component) {
  PLUGIN_PRINTF(("PnaclCoordinator::GetLoadedFileDesc (pp_error=%"
                 NACL_PRId32", url=%s, component=%s)\n", pp_error,
                 url.c_str(), component.c_str()));
  ErrorInfo error_info;
  int32_t fd = plugin_->GetPOSIXFileDesc(url);
  if (pp_error == PP_OK && fd != NACL_NO_FILE_DESC)
    return fd;
  // Failure path: distinguish user-visible aborts from fetch errors.
  if (pp_error == PP_ERROR_ABORTED) {
    plugin_->ReportLoadAbort();
  } else {
    ReportPpapiError(ERROR_PNACL_RESOURCE_FETCH,
                     pp_error,
                     component + " load failed.");
  }
  return NACL_NO_FILE_DESC;
}
// Constructs a coordinator that will translate |pexe_url| into a nexe.
// |cache_identity| keys the translation cache (an empty string is tolerated
// and disables finalized caching; see TranslateFinished).  The
// |translate_notify_callback| is run on the main thread when translation
// succeeds or fails.  Heavy-weight setup (resource loads, file system opens)
// happens later in the BitcodeToNative callback chain, not here.
PnaclCoordinator::PnaclCoordinator(
    Plugin* plugin,
    const nacl::string& pexe_url,
    const nacl::string& cache_identity,
    const pp::CompletionCallback& translate_notify_callback)
    : translate_finish_error_(PP_OK),
      plugin_(plugin),
      translate_notify_callback_(translate_notify_callback),
      file_system_(new pp::FileSystem(plugin, PP_FILESYSTEMTYPE_LOCALTEMPORARY)),
      manifest_(new PnaclManifest(
          plugin->url_util(),
          plugin::PnaclUrls::UsePnaclExtension(plugin))),
      pexe_url_(pexe_url),
      cache_identity_(cache_identity),
      error_already_reported_(false),
      off_the_record_(false),
      pnacl_init_time_(0),
      pexe_size_(0),
      // -1 marks "total pexe size not yet known"; see BitcodeGotCompiled.
      expected_pexe_size_(-1) {
  PLUGIN_PRINTF(("PnaclCoordinator::PnaclCoordinator (this=%p, plugin=%p)\n",
                 static_cast<void*>(this), static_cast<void*>(plugin)));
  callback_factory_.Initialize(this);
  // The ld manifest consults the plugin's own manifest first and falls back
  // to the pnacl manifest (see PnaclLDManifest above).
  ld_manifest_.reset(new PnaclLDManifest(plugin_->manifest(), manifest_.get()));
}
// Destructor; always runs on the main thread.
PnaclCoordinator::~PnaclCoordinator() {
  PLUGIN_PRINTF(("PnaclCoordinator::~PnaclCoordinator (this=%p, "
                 "translate_thread=%p\n",
                 static_cast<void*>(this), translate_thread_.get()));
  // Stopping the translate thread will cause the translate thread to try to
  // run translation_complete_callback_ on the main thread.  This destructor is
  // running from the main thread, and by the time it exits, callback_factory_
  // will have been destroyed.  This will result in the cancellation of
  // translation_complete_callback_, so no notification will be delivered.
  if (translate_thread_.get() != NULL) {
    // Kill the llc/ld subprocesses so the thread can wind down.
    translate_thread_->AbortSubprocesses();
  }
}
// Records an error that did not originate from a PPAPI call, then
// terminates translation via ExitWithError.
void PnaclCoordinator::ReportNonPpapiError(enum PluginErrorCode err_code,
                                           const nacl::string& message) {
  nacl::string full_message = nacl::string("PnaclCoordinator: ") + message;
  error_info_.SetReport(err_code, full_message);
  ExitWithError();
}
// Records a PPAPI-originated error (appending the raw pp_error code to the
// message), then terminates translation via ExitWithError.
void PnaclCoordinator::ReportPpapiError(enum PluginErrorCode err_code,
                                        int32_t pp_error,
                                        const nacl::string& message) {
  nacl::stringstream stream;
  stream << "PnaclCoordinator: " << message
         << " (pp_error=" << pp_error << ").";
  error_info_.SetReport(err_code, stream.str());
  ExitWithError();
}
// Reports the previously recorded error_info_ to the plugin and notifies
// the caller (once) that translation failed.
void PnaclCoordinator::ExitWithError() {
  PLUGIN_PRINTF(("PnaclCoordinator::ExitWithError (error_code=%d, "
                 "message='%s')\n",
                 error_info_.error_code(),
                 error_info_.message().c_str()));
  plugin_->ReportLoadError(error_info_);
  // Free every intermediate callback this factory ever created.  This does
  // NOT cancel callbacks owned by helper-class factories (e.g.
  // pnacl_resources); those may still fire asynchronously, but any errors
  // they produce are ignored so they cannot re-run
  // translate_notify_callback_, which is freed below.
  callback_factory_.CancelAll();
  if (error_already_reported_) {
    PLUGIN_PRINTF(("PnaclCoordinator::ExitWithError an earlier error was "
                   "already reported -- Skipping.\n"));
    return;
  }
  error_already_reported_ = true;
  translate_notify_callback_.Run(PP_ERROR_FAILED);
}
// Signal that Pnacl translation completed normally.
// Runs on the main thread when the translation thread finishes.  On success,
// records UMA performance/size stats, then either copies the translated nexe
// into the cache (CachedNexeOpenedForWrite chain) or hands it straight to
// NexeReadDidOpen when caching is unavailable.
void PnaclCoordinator::TranslateFinished(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::TranslateFinished (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  // Bail out if there was an earlier error (e.g., pexe load failure).
  if (translate_finish_error_ != PP_OK) {
    ExitWithError();
    return;
  }
  // Bail out if there is an error from the translation thread.
  if (pp_error != PP_OK) {
    ExitWithError();
    return;
  }
  // If there are no errors, report stats from this thread (the main thread).
  const plugin::PnaclTimeStats& time_stats = translate_thread_->GetTimeStats();
  HistogramTime("NaCl.Perf.PNaClLoadTime.LoadCompiler",
                time_stats.pnacl_llc_load_time / NACL_MICROS_PER_MILLI);
  HistogramTime("NaCl.Perf.PNaClLoadTime.CompileTime",
                time_stats.pnacl_compile_time / NACL_MICROS_PER_MILLI);
  HistogramKBPerSec("NaCl.Perf.PNaClLoadTime.CompileKBPerSec",
                    pexe_size_ / 1024.0,
                    time_stats.pnacl_compile_time / 1000000.0);
  HistogramTime("NaCl.Perf.PNaClLoadTime.LoadLinker",
                time_stats.pnacl_ld_load_time / NACL_MICROS_PER_MILLI);
  HistogramTime("NaCl.Perf.PNaClLoadTime.LinkTime",
                time_stats.pnacl_link_time / NACL_MICROS_PER_MILLI);
  HistogramSizeKB("NaCl.Perf.Size.Pexe",
                  static_cast<int64_t>(pexe_size_ / 1024));
  // Measure the translated nexe's size by fstat'ing the temp file through
  // its raw NaClDesc vtable.
  struct nacl_abi_stat stbuf;
  struct NaClDesc* desc = temp_nexe_file_->read_wrapper()->desc();
  int stat_ret;
  if (0 != (stat_ret = (*((struct NaClDescVtbl const *) desc->base.vtbl)->
                        Fstat)(desc, &stbuf))) {
    // Stat failure only costs us the size histograms; translation proceeds.
    PLUGIN_PRINTF(("PnaclCoordinator::TranslateFinished can't stat nexe.\n"));
  } else {
    size_t nexe_size = stbuf.nacl_abi_st_size;
    HistogramSizeKB("NaCl.Perf.Size.PNaClTranslatedNexe",
                    static_cast<int64_t>(nexe_size / 1024));
    HistogramRatio("NaCl.Perf.Size.PexeNexeSizePct", pexe_size_, nexe_size);
  }
  // The nexe is written to the temp_nexe_file_. We must Reset() the file
  // pointer to be able to read it again from the beginning.
  temp_nexe_file_->Reset();
  if (cache_identity_ != "" && cached_nexe_file_ != NULL) {
    // We are using a cache, but had a cache miss, which is why we did the
    // translation. Reset cached_nexe_file_ to have a random name,
    // for scratch purposes, before renaming to the final cache_identity_.
    cached_nexe_file_.reset(new LocalTempFile(plugin_, file_system_.get(),
                                              nacl::string(kPnaclTempDir)));
    pp::CompletionCallback cb = callback_factory_.NewCallback(
        &PnaclCoordinator::CachedNexeOpenedForWrite);
    cached_nexe_file_->OpenWrite(cb);
  } else {
    // For now, tolerate bitcode that is missing a cache identity, and
    // tolerate the lack of caching in incognito mode.
    PLUGIN_PRINTF(("PnaclCoordinator -- not caching.\n"));
    NexeReadDidOpen(PP_OK);
  }
}
// Continues after the cache scratch file has been opened for writing.
// Starts copying the translated nexe out of temp_nexe_file_ into the cache
// file one kCopyBufSize chunk at a time; the copy loop continues in
// DidCopyNexeToCachePartial.
void PnaclCoordinator::CachedNexeOpenedForWrite(int32_t pp_error) {
  if (pp_error != PP_OK) {
    // Map the open failure onto a specific user-visible error report.
    if (pp_error == PP_ERROR_NOACCESS) {
      ReportPpapiError(
          ERROR_PNACL_CACHE_FILEOPEN_NOACCESS,
          pp_error,
          "PNaCl translation cache failed to open file for write "
          "(no access).");
      return;
    }
    if (pp_error == PP_ERROR_NOQUOTA) {
      ReportPpapiError(
          ERROR_PNACL_CACHE_FILEOPEN_NOQUOTA,
          pp_error,
          "PNaCl translation cache failed to open file for write "
          "(no quota).");
      return;
    }
    if (pp_error == PP_ERROR_NOSPACE) {
      ReportPpapiError(
          ERROR_PNACL_CACHE_FILEOPEN_NOSPACE,
          pp_error,
          "PNaCl translation cache failed to open file for write "
          "(no space).");
      return;
    }
    if (pp_error == PP_ERROR_NOTAFILE) {
      ReportPpapiError(ERROR_PNACL_CACHE_FILEOPEN_NOTAFILE,
                       pp_error,
                       "PNaCl translation cache failed to open file for write."
                       " File already exists as a directory.");
      return;
    }
    ReportPpapiError(ERROR_PNACL_CACHE_FILEOPEN_OTHER,
                     pp_error,
                     "PNaCl translation cache failed to open file for write.");
    return;
  }
  // Copy the contents from temp_nexe_file_ -> cached_nexe_file_,
  // then rename the cached_nexe_file_ file to the cache id.
  int64_t cur_offset = 0;
  nacl::DescWrapper* read_wrapper = temp_nexe_file_->read_wrapper();
  char buf[kCopyBufSize];
  int32_t num_read =
      nacl::assert_cast<int32_t>(read_wrapper->Read(buf, sizeof buf));
  // Hit EOF or something.
  if (num_read == 0) {
    // Nothing to copy; go straight to finalization.
    NexeWasCopiedToCache(PP_OK);
    return;
  }
  if (num_read < 0) {
    PLUGIN_PRINTF(("PnaclCoordinator::CachedNexeOpenedForWrite read failed "
                   "(error=%"NACL_PRId32")\n", num_read));
    NexeWasCopiedToCache(PP_ERROR_FAILED);
    return;
  }
  // Write the first chunk; the completion callback drives subsequent chunks.
  pp::CompletionCallback cb = callback_factory_.NewCallback(
      &PnaclCoordinator::DidCopyNexeToCachePartial, num_read, cur_offset);
  cached_nexe_file_->write_file_io()->Write(cur_offset, buf, num_read, cb);
}
// One iteration of the async copy loop started by CachedNexeOpenedForWrite.
// |pp_error| is the previous Write's result (>0 means bytes written),
// |num_read_prev| is how many bytes that Write was asked to write, and
// |cur_offset| is the file offset it targeted.  Handles short writes by
// seeking the read side back over the unwritten bytes.
void PnaclCoordinator::DidCopyNexeToCachePartial(int32_t pp_error,
                                                 int32_t num_read_prev,
                                                 int64_t cur_offset) {
  PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial "
                 "(pp_error=%"NACL_PRId32", num_read_prev=%"NACL_PRId32""
                 ", cur_offset=%"NACL_PRId64").\n",
                 pp_error, num_read_prev, cur_offset));
  // Assume we are done.
  // NOTE(review): a zero-byte Write result (PP_OK) is treated as completion
  // here -- presumably Write never legitimately returns 0 mid-copy; confirm.
  if (pp_error == PP_OK) {
    NexeWasCopiedToCache(PP_OK);
    return;
  }
  if (pp_error < PP_OK) {
    PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial failed (err=%"
                   NACL_PRId32")\n", pp_error));
    NexeWasCopiedToCache(pp_error);
    return;
  }
  // Check if we wrote as much as we read.
  nacl::DescWrapper* read_wrapper = temp_nexe_file_->read_wrapper();
  if (pp_error != num_read_prev) {
    PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial partial "
                   "write (bytes_written=%"NACL_PRId32" vs "
                   "read=%"NACL_PRId32")\n", pp_error, num_read_prev));
    CHECK(pp_error < num_read_prev);
    // Seek back to re-read the bytes that were not written.
    nacl_off64_t seek_result =
        read_wrapper->Seek(pp_error - num_read_prev, SEEK_CUR);
    if (seek_result < 0) {
      PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial seek failed "
                     "(err=%"NACL_PRId64")\n", seek_result));
      NexeWasCopiedToCache(PP_ERROR_FAILED);
      return;
    }
  }
  // Advance past the bytes actually written and read the next chunk.
  int64_t next_offset = cur_offset + pp_error;
  char buf[kCopyBufSize];
  int32_t num_read =
      nacl::assert_cast<int32_t>(read_wrapper->Read(buf, sizeof buf));
  PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial read (bytes=%"
                 NACL_PRId32")\n", num_read));
  // Hit EOF or something.
  if (num_read == 0) {
    NexeWasCopiedToCache(PP_OK);
    return;
  }
  if (num_read < 0) {
    PLUGIN_PRINTF(("PnaclCoordinator::DidCopyNexeToCachePartial read failed "
                   "(error=%"NACL_PRId32")\n", num_read));
    NexeWasCopiedToCache(PP_ERROR_FAILED);
    return;
  }
  pp::CompletionCallback cb = callback_factory_.NewCallback(
      &PnaclCoordinator::DidCopyNexeToCachePartial, num_read, next_offset);
  PLUGIN_PRINTF(("PnaclCoordinator::CopyNexeToCache Writing ("
                 "bytes=%"NACL_PRId32", buf=%p, file_io=%p)\n", num_read, buf,
                 cached_nexe_file_->write_file_io()));
  cached_nexe_file_->write_file_io()->Write(next_offset, buf, num_read, cb);
}
// End of the cache copy: on success, rename the scratch file to its final
// cache identity; on failure, delete the partially written file first (the
// original error is forwarded so it can still be reported afterwards).
void PnaclCoordinator::NexeWasCopiedToCache(int32_t pp_error) {
  if (pp_error == PP_OK) {
    // Rename the cached_nexe_file_ file to the cache id, to finalize.
    pp::CompletionCallback rename_cb =
        callback_factory_.NewCallback(&PnaclCoordinator::NexeFileWasRenamed);
    cached_nexe_file_->Rename(cache_identity_, rename_cb);
    return;
  }
  // Remove the partially written, not-yet-committed cache file before
  // reporting the error.
  pp::CompletionCallback delete_cb = callback_factory_.NewCallback(
      &PnaclCoordinator::CorruptCacheFileWasDeleted, pp_error);
  cached_nexe_file_->Delete(delete_cb);
}
// Runs after attempting to delete a corrupt/partial cache file.
// |delete_pp_error| is the deletion result (logged only); |orig_pp_error|
// is the failure that made us treat the file as corrupt, and is what gets
// reported to the user.
void PnaclCoordinator::CorruptCacheFileWasDeleted(int32_t delete_pp_error,
                                                  int32_t orig_pp_error) {
  if (delete_pp_error != PP_OK) {
    // The cache file was certainly already opened by the time we tried
    // to write to it, so it should certainly be deletable.
    PLUGIN_PRINTF(("PnaclCoordinator::CorruptCacheFileWasDeleted "
                   "delete failed with pp_error=%"NACL_PRId32"\n",
                   delete_pp_error));
    // Fall through and report the original error regardless.
  }
  // Translate the original failure into a specific error report.
  enum PluginErrorCode err_code;
  nacl::string message;
  switch (orig_pp_error) {
    case PP_ERROR_NOQUOTA:
      err_code = ERROR_PNACL_CACHE_FINALIZE_COPY_NOQUOTA;
      message = "Failed to copy translated nexe to cache (no quota).";
      break;
    case PP_ERROR_NOSPACE:
      err_code = ERROR_PNACL_CACHE_FINALIZE_COPY_NOSPACE;
      message = "Failed to copy translated nexe to cache (no space).";
      break;
    default:
      err_code = ERROR_PNACL_CACHE_FINALIZE_COPY_OTHER;
      message = "Failed to copy translated nexe to cache.";
      break;
  }
  ReportPpapiError(err_code, orig_pp_error, message);
}
// Finalizes the cache entry after the scratch file was renamed to its cache
// identity.  PP_ERROR_FILEEXISTS is deliberately tolerated (assumed to be a
// benign race); other failures are fatal.  On success, records total
// uncached-load UMA timings and reopens the cache file for reading.
void PnaclCoordinator::NexeFileWasRenamed(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::NexeFileWasRenamed (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  if (pp_error != PP_OK) {
    if (pp_error == PP_ERROR_NOACCESS) {
      ReportPpapiError(ERROR_PNACL_CACHE_FINALIZE_RENAME_NOACCESS,
                       pp_error,
                       "Failed to finalize cached translation (no access).");
      return;
    } else if (pp_error != PP_ERROR_FILEEXISTS) {
      ReportPpapiError(ERROR_PNACL_CACHE_FINALIZE_RENAME_OTHER,
                       pp_error,
                       "Failed to finalize cached translation.");
      return;
    } else { // pp_error == PP_ERROR_FILEEXISTS.
      // NOTE: if the file already existed, it looks like the rename will
      // happily succeed. However, we should add a test for this.
      // Could be a hash collision, or it could also be two tabs racing to
      // translate the same pexe. We may want UMA stats to know if this happens.
      // For now, assume that it is a race and try to continue.
      // If there is truly a corrupted file, then sel_ldr should prevent the
      // file from loading due to the file size not matching the ELF header.
      PLUGIN_PRINTF(("PnaclCoordinator::NexeFileWasRenamed file existed\n"));
    }
  }
  cached_nexe_file_->FinishRename();
  int64_t total_time = NaClGetTimeOfDayMicroseconds() - pnacl_init_time_;
  HistogramTime("NaCl.Perf.PNaClLoadTime.TotalUncachedTime",
                total_time / NACL_MICROS_PER_MILLI);
  HistogramKBPerSec("NaCl.Perf.PNaClLoadTime.TotalUncachedKBPerSec",
                    pexe_size_ / 1024.0,
                    total_time / 1000000.0);
  // Open the cache file for reading.
  pp::CompletionCallback cb =
      callback_factory_.NewCallback(&PnaclCoordinator::NexeReadDidOpen);
  cached_nexe_file_->OpenRead(cb);
}
// Final step of both the cached and uncached paths: the nexe is readable,
// so transfer its descriptor wrapper to the coordinator and notify the
// caller that translation finished.
void PnaclCoordinator::NexeReadDidOpen(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::NexeReadDidOpen (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  if (pp_error == PP_ERROR_FILENOTFOUND) {
    ReportPpapiError(ERROR_PNACL_CACHE_FETCH_NOTFOUND,
                     pp_error,
                     "Failed to open translated nexe (not found).");
    return;
  }
  if (pp_error == PP_ERROR_NOACCESS) {
    ReportPpapiError(ERROR_PNACL_CACHE_FETCH_NOACCESS,
                     pp_error,
                     "Failed to open translated nexe (no access).");
    return;
  }
  if (pp_error != PP_OK) {
    ReportPpapiError(ERROR_PNACL_CACHE_FETCH_OTHER,
                     pp_error,
                     "Failed to open translated nexe.");
    return;
  }
  // Transfer ownership of cache/temp file's wrapper to the coordinator.
  if (cached_nexe_file_ != NULL)
    translated_fd_.reset(cached_nexe_file_->release_read_wrapper());
  else
    translated_fd_.reset(temp_nexe_file_->release_read_wrapper());
  translate_notify_callback_.Run(pp_error);
}
// Continues after the llc/ld resources have been fetched.  On success,
// either opens the local temporary file system (to probe the translation
// cache) or, in incognito mode, jumps straight to the uncached translation
// path via CachedFileDidOpen(PP_ERROR_FAILED).
void PnaclCoordinator::ResourcesDidLoad(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::ResourcesDidLoad (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  if (pp_error != PP_OK) {
    // Finer-grained error code should have already been reported by
    // the PnaclResources class.
    return;
  }
  if (off_the_record_) {
    // We don't have a cache, so do the non-cached codepath.
    CachedFileDidOpen(PP_ERROR_FAILED);
    return;
  }
  // Open the local temporary FS to see if we get a hit in the cache.
  pp::CompletionCallback cb =
      callback_factory_.NewCallback(&PnaclCoordinator::FileSystemDidOpen);
  int32_t open_error = file_system_->Open(0, cb);
  if (open_error == PP_OK_COMPLETIONPENDING)
    return;
  // At this point, no async request has kicked off to check for
  // permissions, space, etc., so the only error that can be detected
  // now is that an open() is already in progress (or a really terrible
  // error).
  // BUG FIX: the original tested and reported |pp_error| here, but
  // |pp_error| is necessarily PP_OK on this path; the synchronous failure
  // code is |open_error|, so the INPROGRESS branch could never fire and the
  // reported code was always 0.
  if (open_error == PP_ERROR_INPROGRESS) {
    ReportPpapiError(
        ERROR_PNACL_CACHE_OPEN_INPROGRESS,
        open_error,
        "File system for PNaCl translation cache failed to open "
        "(in progress).");
    return;
  }
  ReportPpapiError(
      ERROR_PNACL_CACHE_OPEN_OTHER,
      open_error,
      "File system for PNaCl translation cache failed to open.");
}
// Continues after the temporary file system open completes.  On success,
// creates (or checks) the pnacl temp directory; DirectoryWasCreated
// continues the chain.
void PnaclCoordinator::FileSystemDidOpen(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::FileSystemDidOpen (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  if (pp_error != PP_OK) {
    if (pp_error == PP_ERROR_NOACCESS) {
      ReportPpapiError(
          ERROR_PNACL_CACHE_OPEN_NOACCESS,
          pp_error,
          "File system for PNaCl translation cache failed to open "
          "(no access).");
      return;
    }
    if (pp_error == PP_ERROR_NOQUOTA) {
      ReportPpapiError(
          ERROR_PNACL_CACHE_OPEN_NOQUOTA,
          pp_error,
          "File system for PNaCl translation cache failed to open "
          "(no quota).");
      return;
    }
    if (pp_error == PP_ERROR_NOSPACE) {
      ReportPpapiError(
          ERROR_PNACL_CACHE_OPEN_NOSPACE,
          pp_error,
          "File system for PNaCl translation cache failed to open "
          "(no space).");
      return;
    }
    ReportPpapiError(ERROR_PNACL_CACHE_OPEN_OTHER,
                     pp_error,
                     "File system for PNaCl translation cache failed to open.");
    // BUG FIX: the original fell through here and still attempted
    // MakeDirectory after reporting a fatal open error (ExitWithError has
    // already run the failure notification at this point).
    return;
  }
  dir_ref_.reset(new pp::FileRef(*file_system_, kPnaclTempDir));
  // Attempt to create the directory.
  pp::CompletionCallback cb =
      callback_factory_.NewCallback(&PnaclCoordinator::DirectoryWasCreated);
  dir_ref_->MakeDirectory(cb);
}
// Continues after creating/checking the pnacl temp directory.
// PP_ERROR_FILEEXISTS counts as success.  With a usable cache identity the
// cached nexe is opened for reading (cache probe); otherwise the uncached
// path is taken.
void PnaclCoordinator::DirectoryWasCreated(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::DirectoryWasCreated (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  const bool dir_ok = (pp_error == PP_OK || pp_error == PP_ERROR_FILEEXISTS);
  if (!dir_ok) {
    // Directory did not exist and could not be created.
    const char* message =
        (pp_error == PP_ERROR_NOACCESS)
            ? "PNaCl translation cache directory creation/check failed "
              "(no access)."
            : "PNaCl translation cache directory creation/check failed.";
    ReportPpapiError(ERROR_PNACL_CACHE_DIRECTORY_CREATE, pp_error, message);
    return;
  }
  if (cache_identity_ == "") {
    // For now, tolerate lack of cache identity...
    CachedFileDidOpen(PP_ERROR_FAILED);
    return;
  }
  cached_nexe_file_.reset(new LocalTempFile(plugin_, file_system_.get(),
                                            nacl::string(kPnaclTempDir),
                                            cache_identity_));
  pp::CompletionCallback cb =
      callback_factory_.NewCallback(&PnaclCoordinator::CachedFileDidOpen);
  cached_nexe_file_->OpenRead(cb);
}
// Result of the translation cache probe.  PP_OK is a cache hit: the cached
// nexe is used directly.  Any error (including the synthetic
// PP_ERROR_FAILED passed by the no-cache/incognito paths) means we must
// translate: set up the translation thread, the scratch object file, and
// the streaming pexe download.
void PnaclCoordinator::CachedFileDidOpen(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::CachedFileDidOpen (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  if (pp_error == PP_OK) {
    HistogramEnumerateTranslationCache(true);
    NexeReadDidOpen(PP_OK);
    return;
  }
  // Otherwise, the cache file is missing, or the cache simply
  // cannot be created (e.g., incognito mode), so we must translate.
  HistogramEnumerateTranslationCache(false);
  // Create the translation thread object immediately. This ensures that any
  // pieces of the file that get downloaded before the compilation thread
  // is accepting SRPCs won't get dropped.
  translate_thread_.reset(new PnaclTranslateThread());
  if (translate_thread_ == NULL) {
    ReportNonPpapiError(ERROR_PNACL_THREAD_CREATE,
                        "could not allocate translation thread.");
    return;
  }
  // We also want to open the object file now so the
  // translator can start writing to it during streaming translation.
  obj_file_.reset(new TempFile(plugin_));
  pp::CompletionCallback obj_cb =
      callback_factory_.NewCallback(&PnaclCoordinator::ObjectFileDidOpen);
  obj_file_->Open(obj_cb);
  // Kick off the streaming download; BitcodeStreamDidFinish runs once the
  // whole pexe has arrived (or the download fails).
  streaming_downloader_.reset(new FileDownloader());
  streaming_downloader_->Initialize(plugin_);
  pp::CompletionCallback cb =
      callback_factory_.NewCallback(
          &PnaclCoordinator::BitcodeStreamDidFinish);
  if (!streaming_downloader_->OpenStream(pexe_url_, cb, this)) {
    ReportNonPpapiError(ERROR_PNACL_PEXE_FETCH_OTHER,
                        nacl::string("failed to open stream ") + pexe_url_);
  }
}
// Runs when the streaming pexe download completes (successfully or not).
// On failure the error is recorded but reporting is deferred until the
// translation thread exits, since it may still be touching the
// coordinator's objects and files.
void PnaclCoordinator::BitcodeStreamDidFinish(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::BitcodeStreamDidFinish (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  if (pp_error != PP_OK) {
    // Defer reporting the error and cleanup until after the translation
    // thread returns, because it may be accessing the coordinator's
    // objects or writing to the files.
    translate_finish_error_ = pp_error;
    // BUG FIX: the ABORTED case originally used a separate `if`, so its
    // report was immediately overwritten by the trailing `else` (OTHER)
    // branch; the three cases must be chained with else-if.
    if (pp_error == PP_ERROR_ABORTED) {
      error_info_.SetReport(ERROR_PNACL_PEXE_FETCH_ABORTED,
                            "PnaclCoordinator: pexe load failed (aborted).");
    } else if (pp_error == PP_ERROR_NOACCESS) {
      error_info_.SetReport(ERROR_PNACL_PEXE_FETCH_NOACCESS,
                            "PnaclCoordinator: pexe load failed (no access).");
    } else {
      nacl::stringstream ss;
      ss << "PnaclCoordinator: pexe load failed (pp_error=" << pp_error << ").";
      error_info_.SetReport(ERROR_PNACL_PEXE_FETCH_OTHER, ss.str());
    }
    translate_thread_->AbortSubprocesses();
  } else {
    // Compare download completion pct (100% now), to compile completion pct.
    HistogramRatio("NaCl.Perf.PNaClLoadTime.PctCompiledWhenFullyDownloaded",
                   pexe_bytes_compiled_, pexe_size_);
  }
}
// Receives one chunk of the streaming pexe download and forwards it (or the
// terminating error code) to the translation thread.
void PnaclCoordinator::BitcodeStreamGotData(int32_t pp_error,
                                            FileStreamData data) {
  PLUGIN_PRINTF(("PnaclCoordinator::BitcodeStreamGotData (pp_error=%"
                 NACL_PRId32", data=%p)\n", pp_error, data ? &(*data)[0] : 0));
  DCHECK(translate_thread_.get());
  translate_thread_->PutBytes(data, pp_error);
  // A positive pp_error is the byte count of the chunk just received;
  // accumulate it into the running pexe size.
  const bool received_bytes = (data && pp_error > 0);
  if (received_bytes)
    pexe_size_ += pp_error;
}
// Produces the per-chunk callback used by the streaming downloader; each
// downloaded chunk is delivered to BitcodeStreamGotData.
StreamCallback PnaclCoordinator::GetCallback() {
  StreamCallback chunk_cb = callback_factory_.NewCallbackWithOutput(
      &PnaclCoordinator::BitcodeStreamGotData);
  return chunk_cb;
}
// Accumulates compile progress and forwards it to the plugin as a progress
// event.  The expected total size is lazily fetched from the downloader the
// first time progress is reported.
void PnaclCoordinator::BitcodeGotCompiled(int32_t pp_error,
                                          int64_t bytes_compiled) {
  pexe_bytes_compiled_ += bytes_compiled;
  // If we don't know the expected total yet, ask the downloader.
  if (expected_pexe_size_ == -1) {
    int64_t ignored_downloaded;  // dummy variable.
    streaming_downloader_->GetDownloadProgress(&ignored_downloaded,
                                               &expected_pexe_size_);
  }
  const bool length_computable = (expected_pexe_size_ != -1);
  plugin_->EnqueueProgressEvent(plugin::Plugin::kProgressEventProgress,
                                pexe_url_,
                                (length_computable ?
                                 plugin::Plugin::LENGTH_IS_COMPUTABLE :
                                 plugin::Plugin::LENGTH_IS_NOT_COMPUTABLE),
                                pexe_bytes_compiled_,
                                expected_pexe_size_);
}
// Wraps a chunk size in a callback so BitcodeGotCompiled can accumulate
// compile progress when the chunk finishes compiling.
pp::CompletionCallback PnaclCoordinator::GetCompileProgressCallback(
    int64_t bytes_compiled) {
  pp::CompletionCallback progress_cb = callback_factory_.NewCallback(
      &PnaclCoordinator::BitcodeGotCompiled, bytes_compiled);
  return progress_cb;
}
// Reports current compile progress: bytes of bitcode compiled so far and
// the expected pexe total (-1 while the total is still unknown).
void PnaclCoordinator::GetCurrentProgress(int64_t* bytes_loaded,
                                          int64_t* bytes_total) {
  *bytes_total = expected_pexe_size_;
  *bytes_loaded = pexe_bytes_compiled_;
}
// Continues once the scratch object file is open; creates and opens the
// temp nexe file that connects ld with sel_ldr, after which RunTranslate
// starts the actual translation.
void PnaclCoordinator::ObjectFileDidOpen(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::ObjectFileDidOpen (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  if (pp_error != PP_OK) {
    ReportPpapiError(ERROR_PNACL_CREATE_TEMP,
                     pp_error,
                     "Failed to open scratch object file.");
    return;
  }
  // Create the nexe file for connecting ld and sel_ldr.
  // Start translation when done with this last step of setup!
  temp_nexe_file_.reset(new TempFile(plugin_));
  pp::CompletionCallback open_cb =
      callback_factory_.NewCallback(&PnaclCoordinator::RunTranslate);
  temp_nexe_file_->Open(open_cb);
}
// Starts llc followed by ld on the dedicated translation thread, so that
// blocking SRPCs never stall the JavaScript main thread.  TranslateFinished
// is run on the main thread when translation completes.
void PnaclCoordinator::RunTranslate(int32_t pp_error) {
  PLUGIN_PRINTF(("PnaclCoordinator::RunTranslate (pp_error=%"
                 NACL_PRId32")\n", pp_error));
  pp::CompletionCallback finished_cb =
      callback_factory_.NewCallback(&PnaclCoordinator::TranslateFinished);
  CHECK(translate_thread_ != NULL);
  translate_thread_->RunTranslate(finished_cb,
                                  manifest_.get(),
                                  ld_manifest_.get(),
                                  obj_file_.get(),
                                  temp_nexe_file_.get(),
                                  &error_info_,
                                  resources_.get(),
                                  this,
                                  plugin_);
}
} // namespace plugin
| zcbenz/cefode-chromium | ppapi/native_client/src/trusted/plugin/pnacl_coordinator.cc | C++ | bsd-3-clause | 38,753 |
# -*- coding: utf-8 -*-
from flask import render_template, redirect, url_for, flash, abort
from purchasing.decorators import requires_roles
from purchasing.data.stages import Stage
from purchasing.data.flows import Flow
from purchasing.conductor.forms import FlowForm, NewFlowForm
from purchasing.conductor.manager import blueprint
@blueprint.route('/flow/new', methods=['GET', 'POST'])
@requires_roles('conductor', 'admin', 'superadmin')
def new_flow():
    '''Render and process the form for creating a new flow

    :status 200: Render the new flow template
    :status 302: Try to create a new flow using the
        :py:class:`~purchasing.conductor.forms.NewFlowForm`, redirect
        to the flows list view if successful
    '''
    stage_choices = Stage.choices_factory()
    form = NewFlowForm(stages=stage_choices)
    if form.validate_on_submit():
        ordered_stage_ids = []
        for field in form.stage_order.entries:
            try:
                # existing stages are submitted as their integer IDs
                ordered_stage_ids.append(int(field.data))
            except ValueError:
                # anything non-numeric names a brand-new stage to create
                created_stage = Stage.create(name=field.data)
                ordered_stage_ids.append(created_stage.id)
        Flow.create(flow_name=form.flow_name.data, stage_order=ordered_stage_ids)
        flash('Flow created successfully!', 'alert-success')
        return redirect(url_for('conductor.flows_list'))
    return render_template('conductor/flows/new.html', stages=stage_choices, form=form)
@blueprint.route('/flows')
@requires_roles('conductor', 'admin', 'superadmin')
def flows_list():
    '''List all flows, partitioned into active and archived groups

    :status 200: Render the all flows list template
    '''
    flows = Flow.query.order_by(Flow.flow_name).all()
    archived = [flow for flow in flows if flow.is_archived]
    active = [flow for flow in flows if not flow.is_archived]
    return render_template('conductor/flows/browse.html', active=active, archived=archived)
@blueprint.route('/flow/<int:flow_id>', methods=['GET', 'POST'])
@requires_roles('conductor', 'admin', 'superadmin')
def flow_detail(flow_id):
    '''View/edit a flow's details

    :status 200: Render the flow edit template
    :status 302: Post changes to the a flow using the submitted
        :py:class:`~purchasing.conductor.forms.FlowForm`, redirect back to
        the current flow's detail page if successful
    :status 404: Flow with the given id does not exist
    '''
    flow = Flow.query.get(flow_id)
    if not flow:
        abort(404)
    form = FlowForm(obj=flow)
    if form.validate_on_submit():
        flow.update(
            flow_name=form.data['flow_name'],
            is_archived=form.data['is_archived'],
        )
        flash('Flow successfully updated', 'alert-success')
        return redirect(url_for('conductor.flow_detail', flow_id=flow.id))
    return render_template('conductor/flows/edit.html', form=form, flow=flow)
| codeforamerica/pittsburgh-purchasing-suite | purchasing/conductor/manager/flow_management.py | Python | bsd-3-clause | 2,911 |
/*
Copyright (c) 2009-2012, Jack Poulson
All rights reserved.
This file is part of Elemental.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the owner nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef ELEMENTAL_MATRIX_HPP
#define ELEMENTAL_MATRIX_HPP 1
#include "elemental/core/environment.hpp"
namespace elem {
// Matrix base for arbitrary rings
// Dense matrix over an arbitrary ring T, indexed by integer type Int.
// Declaration only; member definitions follow below ("Implementation
// begins here").
template<typename T,typename Int=int>
class Matrix
{
public:
    //
    // Constructors
    //

    Matrix();
    Matrix( Int height, Int width );
    Matrix( Int height, Int width, Int ldim );
    // NOTE(review): the buffer overloads appear to attach to an existing
    // buffer (const => locked) rather than copy -- confirm against the
    // implementation before relying on ownership semantics.
    Matrix( Int height, Int width, const T* buffer, Int ldim );
    Matrix( Int height, Int width, T* buffer, Int ldim );
    Matrix( const Matrix<T,Int>& A );

    //
    // Destructor
    //

    ~Matrix();

    //
    // Basic information
    //

    Int Height() const;
    Int Width() const;
    Int DiagonalLength( Int offset=0 ) const;
    Int LDim() const;
    Int MemorySize() const;

    // Raw access to the underlying (column-major leading-dimension) buffer.
    T* Buffer();
    T* Buffer( Int i, Int j );
    const T* LockedBuffer() const;
    const T* LockedBuffer( Int i, Int j ) const;

    //
    // I/O
    //

    void Print( const std::string msg="" ) const;
    void Print( std::ostream& os, const std::string msg="" ) const;

    //
    // Entry manipulation
    //

    T Get( Int i, Int j ) const;
    void Set( Int i, Int j, T alpha );
    void Update( Int i, Int j, T alpha );

    void GetDiagonal( Matrix<T,Int>& d, Int offset=0 ) const;
    void SetDiagonal( const Matrix<T,Int>& d, Int offset=0 );
    void UpdateDiagonal( const Matrix<T,Int>& d, Int offset=0 );

    //
    // Though the following routines are meant for complex data, all but four
    // logically apply to real data.
    //

    typename Base<T>::type GetReal( Int i, Int j ) const;
    typename Base<T>::type GetImag( Int i, Int j ) const;
    void SetReal( Int i, Int j, typename Base<T>::type alpha );
    // Only valid for complex data
    void SetImag( Int i, Int j, typename Base<T>::type alpha );
    void UpdateReal( Int i, Int j, typename Base<T>::type alpha );
    // Only valid for complex data
    void UpdateImag( Int i, Int j, typename Base<T>::type alpha );

    void GetRealDiagonal
    ( Matrix<typename Base<T>::type>& d, Int offset=0 ) const;
    void GetImagDiagonal
    ( Matrix<typename Base<T>::type>& d, Int offset=0 ) const;
    void SetRealDiagonal
    ( const Matrix<typename Base<T>::type>& d, Int offset=0 );
    // Only valid for complex data
    void SetImagDiagonal
    ( const Matrix<typename Base<T>::type>& d, Int offset=0 );
    void UpdateRealDiagonal
    ( const Matrix<typename Base<T>::type>& d, Int offset=0 );
    // Only valid for complex data
    void UpdateImagDiagonal
    ( const Matrix<typename Base<T>::type>& d, Int offset=0 );

    //
    // Viewing other matrix instances (or buffers)
    //
    // NOTE(review): the View*/LockedView* families appear to alias the
    // target's storage (see data_/lockedData_ below) rather than copy;
    // LockedView yields read-only access -- confirm in the implementation.

    bool Viewing() const;
    bool LockedView() const;

    void View( Int height, Int width, T* buffer, Int ldim );
    void View( Matrix<T,Int>& A);
    void View( Matrix<T,Int>& A, Int i, Int j, Int height, Int width );
    void View1x2( Matrix<T,Int>& AL, Matrix<T,Int>& AR );
    void View2x1( Matrix<T,Int>& AT,
                  Matrix<T,Int>& AB );
    void View2x2( Matrix<T,Int>& ATL, Matrix<T,Int>& ATR,
                  Matrix<T,Int>& ABL, Matrix<T,Int>& ABR );

    void LockedView( Int height, Int width, const T* buffer, Int ldim );
    void LockedView( const Matrix<T,Int>& A );
    void LockedView
    ( const Matrix<T,Int>& A, Int i, Int j, Int height, Int width );
    void LockedView1x2
    ( const Matrix<T,Int>& AL, const Matrix<T,Int>& AR );
    void LockedView2x1
    ( const Matrix<T,Int>& AT,
      const Matrix<T,Int>& AB );
    void LockedView2x2
    ( const Matrix<T,Int>& ATL, const Matrix<T,Int>& ATR,
      const Matrix<T,Int>& ABL, const Matrix<T,Int>& ABR );

    //
    // Utilities
    //

    const Matrix<T,Int>& operator=( const Matrix<T,Int>& A );

    void Empty();

    void ResizeTo( Int height, Int width );
    void ResizeTo( Int height, Int width, Int ldim );

private:
    // View bookkeeping: whether this instance aliases external storage, and
    // whether that alias is read-only.
    bool viewing_, lockedView_;
    Int height_, width_, ldim_;
    T* data_;
    const T* lockedData_;
    Memory<T> memory_;

    void AssertValidEntry( Int i, Int j ) const;

    // The helper structs below dispatch the Set/Update Real/Imag operations
    // on whether T is real (primary template) or Complex<Z> (specialization).
    template<typename Z>
    struct SetRealHelper
    {
        static void Func( Matrix<Z>& parent, Int i, Int j, Z alpha );
    };
    template<typename Z>
    struct SetRealHelper<Complex<Z> >
    {
        static void Func( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha );
    };
    template<typename Z> friend struct SetRealHelper;

    template<typename Z>
    struct SetImagHelper
    {
        static void Func( Matrix<Z>& parent, Int i, Int j, Z alpha );
    };
    template<typename Z>
    struct SetImagHelper<Complex<Z> >
    {
        static void Func( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha );
    };
    template<typename Z> friend struct SetImagHelper;

    template<typename Z>
    struct UpdateRealHelper
    {
        static void Func( Matrix<Z>& parent, Int i, Int j, Z alpha );
    };
    template<typename Z>
    struct UpdateRealHelper<Complex<Z> >
    {
        static void Func( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha );
    };
    template<typename Z> friend struct UpdateRealHelper;

    template<typename Z>
    struct UpdateImagHelper
    {
        static void Func( Matrix<Z>& parent, Int i, Int j, Z alpha );
    };
    template<typename Z>
    struct UpdateImagHelper<Complex<Z> >
    {
        static void Func( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha );
    };
    template<typename Z> friend struct UpdateImagHelper;
};
//----------------------------------------------------------------------------//
// Implementation begins here                                                 //
//----------------------------------------------------------------------------//

//
// Constructors
//

// Default constructor: an empty (0 x 0) non-viewing matrix with ldim 1
// (BLAS requires strictly positive leading dimensions).
//
// NOTE: the initializer list is written in member-declaration order
// (viewing_, lockedView_, height_, width_, ldim_, data_, lockedData_,
// memory_) so it matches the actual initialization order and does not
// trigger -Wreorder warnings.
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix()
: viewing_(false), lockedView_(false),
  height_(0), width_(0), ldim_(1), data_(0), lockedData_(0),
  memory_()
{ }
// Allocates an uninitialized height x width matrix with
// ldim = max(height,1).
//
// Initializer list reordered to member-declaration order (fixes -Wreorder),
// and data_ is zero-initialized instead of dangling until the body assigns
// it from memory_.
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix( Int height, Int width )
: viewing_(false), lockedView_(false),
  height_(height), width_(width), ldim_(std::max(height,1)),
  data_(0), lockedData_(0)
{
#ifndef RELEASE
    PushCallStack("Matrix::Matrix");
    if( height < 0 || width < 0 )
        throw std::logic_error("Height and width must be non-negative");
#endif
    memory_.Require( ldim_*width );
    data_ = memory_.Buffer();
#ifndef RELEASE
    PopCallStack();
#endif
}
// Allocates an uninitialized height x width matrix with a caller-chosen
// leading dimension; throws if ldim < height or ldim == 0.
//
// Initializer list reordered to member-declaration order (fixes -Wreorder),
// and data_ is zero-initialized instead of dangling until the body assigns
// it from memory_.
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix
( Int height, Int width, Int ldim )
: viewing_(false), lockedView_(false),
  height_(height), width_(width), ldim_(ldim),
  data_(0), lockedData_(0)
{
#ifndef RELEASE
    PushCallStack("Matrix::Matrix");
    if( height < 0 || width < 0 )
        throw std::logic_error("Height and width must be non-negative");
    if( ldim < height )
    {
        std::ostringstream msg;
        msg << "Initialized with ldim(" << ldim << ") < "
            << "height(" << height << ").";
        throw std::logic_error( msg.str() );
    }
    if( ldim == 0 )
        throw std::logic_error
        ("Leading dimensions cannot be zero (for BLAS compatibility)");
#endif
    memory_.Require( ldim*width );
    data_ = memory_.Buffer();
#ifndef RELEASE
    PopCallStack();
#endif
}
// Constructs a *locked view* of an externally-owned, read-only buffer.
// No memory is allocated and the buffer is never freed by this class.
//
// Initializer list reordered to member-declaration order (fixes -Wreorder).
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix
( Int height, Int width, const T* buffer, Int ldim )
: viewing_(true), lockedView_(true),
  height_(height), width_(width), ldim_(ldim),
  data_(0), lockedData_(buffer)
{
#ifndef RELEASE
    PushCallStack("Matrix::Matrix");
    if( height < 0 || width < 0 )
        throw std::logic_error("Height and width must be non-negative");
    if( ldim < height )
    {
        std::ostringstream msg;
        msg << "Initialized with ldim(" << ldim << ") < "
            << "height(" << height << ").";
        throw std::logic_error( msg.str() );
    }
    if( ldim == 0 )
        throw std::logic_error
        ("Leading dimensions cannot be zero (for BLAS compatibility)");
    PopCallStack();
#endif
}
// Constructs a *mutable view* of an externally-owned buffer.
// No memory is allocated and the buffer is never freed by this class.
//
// Initializer list reordered to member-declaration order (fixes -Wreorder).
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix
( Int height, Int width, T* buffer, Int ldim )
: viewing_(true), lockedView_(false),
  height_(height), width_(width), ldim_(ldim),
  data_(buffer), lockedData_(0)
{
#ifndef RELEASE
    PushCallStack("Matrix::Matrix");
    if( height < 0 || width < 0 )
        throw std::logic_error("Height and width must be non-negative");
    if( ldim < height )
    {
        std::ostringstream msg;
        msg << "Initialized with ldim(" << ldim << ") < "
            << "height(" << height << ").";
        throw std::logic_error( msg.str() );
    }
    if( ldim == 0 )
        throw std::logic_error
        ("Leading dimensions cannot be zero (for BLAS compatibility)");
    PopCallStack();
#endif
}
// Copy constructor: builds an empty matrix and then deep-copies A via
// operator= (which allocates and copies column by column).
//
// Initializer list reordered to member-declaration order (fixes -Wreorder).
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix
( const Matrix<T,Int>& A )
: viewing_(false), lockedView_(false),
  height_(0), width_(0), ldim_(1), data_(0), lockedData_(0)
{
#ifndef RELEASE
    PushCallStack("Matrix::Matrix( const Matrix& )");
#endif
    if( &A != this )
        *this = A;
    else
        throw std::logic_error
        ("You just tried to construct a Matrix with itself!");
#ifndef RELEASE
    PopCallStack();
#endif
}
//
// Destructor
//

// Trivial: memory_ releases any buffer it owns in its own destructor, and
// viewed (externally-owned) buffers are deliberately not freed.
template<typename T,typename Int>
inline
Matrix<T,Int>::~Matrix()
{ }

//
// Basic information
//

template<typename T,typename Int>
inline Int
Matrix<T,Int>::Height() const
{ return height_; }

template<typename T,typename Int>
inline Int
Matrix<T,Int>::Width() const
{ return width_; }

// Length of the diagonal with the given offset (delegated to the
// free function elem::DiagonalLength).
template<typename T,typename Int>
inline Int
Matrix<T,Int>::DiagonalLength( Int offset ) const
{ return elem::DiagonalLength(height_,width_,offset); }

// Leading dimension (column stride) of the underlying column-major buffer.
template<typename T,typename Int>
inline Int
Matrix<T,Int>::LDim() const
{ return ldim_; }

// Number of entries currently held by the owned Memory<T> buffer; zero for
// views, which allocate nothing.
template<typename T,typename Int>
inline Int
Matrix<T,Int>::MemorySize() const
{ return memory_.Size(); }

// Mutable pointer to the start of the buffer; throws (in non-RELEASE
// builds) when this matrix is a locked view.
template<typename T,typename Int>
inline T*
Matrix<T,Int>::Buffer()
{
#ifndef RELEASE
    PushCallStack("Matrix::Buffer");
    if( lockedView_ )
        throw std::logic_error
        ("Cannot return non-const buffer of locked Matrix");
    PopCallStack();
#endif
    return data_;
}

// Read-only pointer to the start of the buffer; works for both locked and
// unlocked matrices.
template<typename T,typename Int>
inline const T*
Matrix<T,Int>::LockedBuffer() const
{
    if( lockedView_ )
        return lockedData_;
    else
        return data_;
}

// Mutable pointer to entry (i,j). Only non-negativity of the indices is
// checked here (not the upper bounds).
template<typename T,typename Int>
inline T*
Matrix<T,Int>::Buffer( Int i, Int j )
{
#ifndef RELEASE
    PushCallStack("Matrix::Buffer");
    if( i < 0 || j < 0 )
        throw std::logic_error("Indices must be non-negative");
    if( lockedView_ )
        throw std::logic_error
        ("Cannot return non-const buffer of locked Matrix");
    PopCallStack();
#endif
    return &data_[i+j*ldim_];
}

// Read-only pointer to entry (i,j); see Buffer(i,j) for the checking policy.
template<typename T,typename Int>
inline const T*
Matrix<T,Int>::LockedBuffer( Int i, Int j ) const
{
#ifndef RELEASE
    PushCallStack("Matrix::LockedBuffer");
    if( i < 0 || j < 0 )
        throw std::logic_error("Indices must be non-negative");
    PopCallStack();
#endif
    if( lockedView_ )
        return &lockedData_[i+j*ldim_];
    else
        return &data_[i+j*ldim_];
}
//
// I/O
//

// Writes the matrix to 'os' row by row (space-separated entries), preceded
// by 'msg' on its own line when non-empty, and followed by a blank line.
template<typename T,typename Int>
inline void
Matrix<T,Int>::Print( std::ostream& os, const std::string msg ) const
{
#ifndef RELEASE
    PushCallStack("Matrix::Print");
#endif
    if( msg != "" )
        os << msg << std::endl;
    const Int height = Height();
    const Int width = Width();
    for( Int i=0; i<height; ++i )
    {
        for( Int j=0; j<width; ++j )
            os << Get(i,j) << " ";
        os << std::endl;
    }
    os << std::endl;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Convenience overload that prints to std::cout.
template<typename T,typename Int>
inline void
Matrix<T,Int>::Print( const std::string msg ) const
{ Print( std::cout, msg ); }

//
// Entry manipulation
//

// Returns entry (i,j). lockedData_ is non-null exactly when this matrix is
// a locked view, so it selects which buffer to read.
template<typename T,typename Int>
inline T
Matrix<T,Int>::Get( Int i, Int j ) const
{
#ifndef RELEASE
    PushCallStack("Matrix::Get");
    AssertValidEntry( i, j );
    PopCallStack();
#endif
    if( lockedData_ )
        return lockedData_[i+j*ldim_];
    else
        return data_[i+j*ldim_];
}

// Overwrites entry (i,j) with alpha; locked matrices reject modification
// in non-RELEASE builds.
template<typename T,typename Int>
inline void
Matrix<T,Int>::Set( Int i, Int j, T alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::Set");
    AssertValidEntry( i, j );
    if( lockedData_ )
        throw std::logic_error("Cannot modify data of locked matrices");
#endif
    data_[i+j*ldim_] = alpha;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Adds alpha into entry (i,j); same locking policy as Set.
template<typename T,typename Int>
inline void
Matrix<T,Int>::Update( Int i, Int j, T alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::Update");
    AssertValidEntry( i, j );
    if( lockedData_ )
        throw std::logic_error("Cannot modify data of locked matrices");
#endif
    data_[i+j*ldim_] += alpha;
#ifndef RELEASE
    PopCallStack();
#endif
}
// Copies the diagonal at 'offset' into column vector d. If d is not a view
// it is resized; if it is a view it must already have the right shape.
// offset >= 0 walks the superdiagonal (j, j+offset); offset < 0 walks the
// subdiagonal (j-offset, j).
template<typename T,typename Int>
inline void
Matrix<T,Int>::GetDiagonal( Matrix<T,Int>& d, Int offset ) const
{
#ifndef RELEASE
    PushCallStack("Matrix::GetDiagonal");
    if( d.LockedView() )
        throw std::logic_error("d must not be a locked view");
    if( d.Viewing() &&
        (d.Height() != DiagonalLength(offset) || d.Width() != 1 ))
        throw std::logic_error("d is not a column-vector of the right length");
#endif
    const Int diagLength = DiagonalLength(offset);
    if( !d.Viewing() )
        d.ResizeTo( diagLength, 1 );
    if( offset >= 0 )
        for( Int j=0; j<diagLength; ++j )
            d.Set( j, 0, Get(j,j+offset) );
    else
        for( Int j=0; j<diagLength; ++j )
            d.Set( j, 0, Get(j-offset,j) );
#ifndef RELEASE
    PopCallStack();
#endif
}

// Overwrites the diagonal at 'offset' with the entries of column vector d.
template<typename T,typename Int>
inline void
Matrix<T,Int>::SetDiagonal( const Matrix<T,Int>& d, Int offset )
{
#ifndef RELEASE
    PushCallStack("Matrix::SetDiagonal");
    if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
        throw std::logic_error("d is not a column-vector of the right length");
#endif
    const Int diagLength = DiagonalLength(offset);
    if( offset >= 0 )
        for( Int j=0; j<diagLength; ++j )
            Set( j, j+offset, d.Get(j,0) );
    else
        for( Int j=0; j<diagLength; ++j )
            Set( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
    PopCallStack();
#endif
}

// Adds the entries of column vector d onto the diagonal at 'offset'.
template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateDiagonal( const Matrix<T,Int>& d, Int offset )
{
#ifndef RELEASE
    PushCallStack("Matrix::UpdateDiagonal");
    if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
        throw std::logic_error("d is not a column-vector of the right length");
#endif
    const Int diagLength = DiagonalLength(offset);
    if( offset >= 0 )
        for( Int j=0; j<diagLength; ++j )
            Update( j, j+offset, d.Get(j,0) );
    else
        for( Int j=0; j<diagLength; ++j )
            Update( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
    PopCallStack();
#endif
}
// Real part of entry (i,j); relies on the free function Real(), declared
// elsewhere, which logically applies to both real and complex T.
template<typename T,typename Int>
inline typename Base<T>::type
Matrix<T,Int>::GetReal( Int i, Int j ) const
{
#ifndef RELEASE
    PushCallStack("Matrix::GetReal");
    AssertValidEntry( i, j );
    PopCallStack();
#endif
    if( lockedData_ )
        return Real(lockedData_[i+j*ldim_]);
    else
        return Real(data_[i+j*ldim_]);
}

// Imaginary part of entry (i,j) via the free function Imag() (presumably
// zero for real T — defined elsewhere).
template<typename T,typename Int>
inline typename Base<T>::type
Matrix<T,Int>::GetImag( Int i, Int j ) const
{
#ifndef RELEASE
    PushCallStack("Matrix::GetImag");
    AssertValidEntry( i, j );
    PopCallStack();
#endif
    if( lockedData_ )
        return Imag(lockedData_[i+j*ldim_]);
    else
        return Imag(data_[i+j*ldim_]);
}

// The Set/Update{Real,Imag} entry points dispatch through helper structs
// that are partially specialized on Complex<Z>, so the real and complex
// cases get separate implementations without runtime branching.
template<typename T,typename Int>
inline void
Matrix<T,Int>::SetReal
( Int i, Int j, typename Base<T>::type alpha )
{ SetRealHelper<T>::Func( *this, i, j, alpha ); }

// Real T: setting the "real part" is just setting the entry.
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::SetRealHelper<Z>::Func
( Matrix<Z>& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::SetRealHelper::Func");
    parent.AssertValidEntry( i, j );
    if( parent.lockedData_ )
        throw std::logic_error("Cannot modify data of locked matrices");
    PopCallStack();
#endif
    parent.data_[i+j*parent.ldim_] = alpha;
}

// Complex T: preserve the imaginary part, replace the real part.
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::SetRealHelper<Complex<Z> >::Func
( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::SetRealHelper::Func");
    parent.AssertValidEntry( i, j );
    if( parent.lockedData_ )
        throw std::logic_error("Cannot modify data of locked matrices");
    PopCallStack();
#endif
    const Z beta = parent.data_[i+j*parent.ldim_].imag;
    parent.data_[i+j*parent.ldim_] = Complex<Z>( alpha, beta );
}

template<typename T,typename Int>
inline void
Matrix<T,Int>::SetImag
( Int i, Int j, typename Base<T>::type alpha )
{ SetImagHelper<T>::Func( *this, i, j, alpha ); }

// Real T: there is no imaginary part to set, so this always throws.
// NOTE(review): PushCallStack is not popped before the throw — presumably
// the error handler unwinds the call stack; confirm against the rest of
// the library.
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::SetImagHelper<Z>::Func
( Matrix<Z>& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::SetImagHelper::Func");
#endif
    throw std::logic_error("Called complex-only routine with real datatype");
}

// Complex T: preserve the real part, replace the imaginary part.
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::SetImagHelper<Complex<Z> >::Func
( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::SetImagHelper::Func");
    parent.AssertValidEntry( i, j );
    if( parent.lockedData_ )
        throw std::logic_error("Cannot modify data of locked matrices");
    PopCallStack();
#endif
    const Z beta = parent.data_[i+j*parent.ldim_].real;
    parent.data_[i+j*parent.ldim_] = Complex<Z>( beta, alpha );
}

template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateReal
( Int i, Int j, typename Base<T>::type alpha )
{ UpdateRealHelper<T>::Func( *this, i, j, alpha ); }

// Real T: updating the "real part" is just updating the entry.
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::UpdateRealHelper<Z>::Func
( Matrix<Z>& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::UpdateRealHelper::Func");
    parent.AssertValidEntry( i, j );
    if( parent.lockedData_ )
        throw std::logic_error("Cannot modify data of locked matrices");
    PopCallStack();
#endif
    parent.data_[i+j*parent.ldim_] += alpha;
}

// Complex T: add alpha onto the real component only.
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::UpdateRealHelper<Complex<Z> >::Func
( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::UpdateRealHelper::Func");
    parent.AssertValidEntry( i, j );
    if( parent.lockedData_ )
        throw std::logic_error("Cannot modify data of locked matrices");
    PopCallStack();
#endif
    const Complex<Z> beta = parent.data_[i+j*parent.ldim_];
    parent.data_[i+j*parent.ldim_] = Complex<Z>( beta.real+alpha, beta.imag );
}

template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateImag
( Int i, Int j, typename Base<T>::type alpha )
{ UpdateImagHelper<T>::Func( *this, i, j, alpha ); }

// Real T: no imaginary part to update — always throws (see the NOTE on
// SetImagHelper regarding the unmatched PushCallStack).
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::UpdateImagHelper<Z>::Func
( Matrix<Z>& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::UpdateImagHelper::Func");
#endif
    throw std::logic_error("Called complex-only routine with real datatype");
}

// Complex T: add alpha onto the imaginary component only.
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::UpdateImagHelper<Complex<Z> >::Func
( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
    PushCallStack("Matrix::UpdateImagHelper::Func");
    parent.AssertValidEntry( i, j );
    if( parent.lockedData_ )
        throw std::logic_error("Cannot modify data of locked matrices");
    PopCallStack();
#endif
    const Complex<Z> beta = parent.data_[i+j*parent.ldim_];
    parent.data_[i+j*parent.ldim_] = Complex<Z>( beta.real, beta.imag+alpha );
}
// Copies the real parts of the diagonal at 'offset' into column vector d.
// Same view/resize policy as GetDiagonal.
template<typename T,typename Int>
inline void
Matrix<T,Int>::GetRealDiagonal
( Matrix<typename Base<T>::type>& d, Int offset ) const
{
#ifndef RELEASE
    PushCallStack("Matrix::GetRealDiagonal");
    if( d.LockedView() )
        throw std::logic_error("d must not be a locked view");
    if( d.Viewing() &&
        (d.Height() != DiagonalLength(offset) || d.Width() != 1))
        throw std::logic_error("d is not a column-vector of the right length");
#endif
    const Int diagLength = DiagonalLength(offset);
    if( !d.Viewing() )
        d.ResizeTo( diagLength, 1 );
    if( offset >= 0 )
        for( Int j=0; j<diagLength; ++j )
            d.Set( j, 0, GetReal(j,j+offset) );
    else
        for( Int j=0; j<diagLength; ++j )
            d.Set( j, 0, GetReal(j-offset,j) );
#ifndef RELEASE
    PopCallStack();
#endif
}

// Copies the imaginary parts of the diagonal at 'offset' into d.
template<typename T,typename Int>
inline void
Matrix<T,Int>::GetImagDiagonal
( Matrix<typename Base<T>::type>& d, Int offset ) const
{
#ifndef RELEASE
    PushCallStack("Matrix::GetImagDiagonal");
    if( d.LockedView() )
        throw std::logic_error("d must not be a locked view");
    if( d.Viewing() &&
        (d.Height() != DiagonalLength(offset) || d.Width() != 1))
        throw std::logic_error("d is not a column-vector of the right length");
#endif
    const Int diagLength = DiagonalLength(offset);
    if( !d.Viewing() )
        d.ResizeTo( diagLength, 1 );
    if( offset >= 0 )
        for( Int j=0; j<diagLength; ++j )
            d.Set( j, 0, GetImag(j,j+offset) );
    else
        for( Int j=0; j<diagLength; ++j )
            d.Set( j, 0, GetImag(j-offset,j) );
#ifndef RELEASE
    PopCallStack();
#endif
}

// Overwrites the real parts of the diagonal at 'offset' with d's entries.
template<typename T,typename Int>
inline void
Matrix<T,Int>::SetRealDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset )
{
#ifndef RELEASE
    PushCallStack("Matrix::SetRealDiagonal");
    if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
        throw std::logic_error("d is not a column-vector of the right length");
#endif
    const Int diagLength = DiagonalLength(offset);
    if( offset >= 0 )
        for( Int j=0; j<diagLength; ++j )
            SetReal( j, j+offset, d.Get(j,0) );
    else
        for( Int j=0; j<diagLength; ++j )
            SetReal( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
    PopCallStack();
#endif
}

// Overwrites the imaginary parts of the diagonal at 'offset'. The
// complex-only check is performed even in RELEASE builds (unlike the
// per-entry checks above), since SetImag would throw per entry anyway.
template<typename T,typename Int>
inline void
Matrix<T,Int>::SetImagDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset )
{
#ifndef RELEASE
    PushCallStack("Matrix::SetImagDiagonal");
    if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
        throw std::logic_error("d is not a column-vector of the right length");
#endif
    if( !IsComplex<T>::val )
        throw std::logic_error("Cannot set imaginary part of real matrix");
    const Int diagLength = DiagonalLength(offset);
    if( offset >= 0 )
        for( Int j=0; j<diagLength; ++j )
            SetImag( j, j+offset, d.Get(j,0) );
    else
        for( Int j=0; j<diagLength; ++j )
            SetImag( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
    PopCallStack();
#endif
}

// Adds d's entries onto the real parts of the diagonal at 'offset'.
template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateRealDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset )
{
#ifndef RELEASE
    PushCallStack("Matrix::UpdateRealDiagonal");
    if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
        throw std::logic_error("d is not a column-vector of the right length");
#endif
    const Int diagLength = DiagonalLength(offset);
    if( offset >= 0 )
        for( Int j=0; j<diagLength; ++j )
            UpdateReal( j, j+offset, d.Get(j,0) );
    else
        for( Int j=0; j<diagLength; ++j )
            UpdateReal( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
    PopCallStack();
#endif
}

// Adds d's entries onto the imaginary parts of the diagonal at 'offset';
// complex-only (checked unconditionally, as in SetImagDiagonal).
template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateImagDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset )
{
#ifndef RELEASE
    PushCallStack("Matrix::UpdateImagDiagonal");
    if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
        throw std::logic_error("d is not a column-vector of the right length");
#endif
    if( !IsComplex<T>::val )
        throw std::logic_error("Cannot update imaginary part of real matrix");
    const Int diagLength = DiagonalLength(offset);
    if( offset >= 0 )
        for( Int j=0; j<diagLength; ++j )
            UpdateImag( j, j+offset, d.Get(j,0) );
    else
        for( Int j=0; j<diagLength; ++j )
            UpdateImag( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
    PopCallStack();
#endif
}
//
// Viewing other Matrix instances
//

// True if this matrix aliases memory it does not own.
template<typename T,typename Int>
inline bool
Matrix<T,Int>::Viewing() const
{ return viewing_; }

// True if this matrix is a read-only view.
template<typename T,typename Int>
inline bool
Matrix<T,Int>::LockedView() const
{ return lockedView_; }

// Turns this matrix into a mutable view of a raw buffer. Empty() first
// releases any owned storage and resets all state.
template<typename T,typename Int>
inline void
Matrix<T,Int>::View
( Int height, Int width, T* buffer, Int ldim )
{
#ifndef RELEASE
    PushCallStack("Matrix::View(buffer)");
#endif
    Empty();
    height_ = height;
    width_ = width;
    ldim_ = ldim;
    data_ = buffer;
    viewing_ = true;
    lockedView_ = false;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Mutable view of all of A.
template<typename T,typename Int>
inline void
Matrix<T,Int>::View( Matrix<T,Int>& A )
{
#ifndef RELEASE
    PushCallStack("Matrix::View(A)");
#endif
    Empty();
    height_ = A.Height();
    width_ = A.Width();
    ldim_ = A.LDim();
    data_ = A.Buffer();
    viewing_ = true;
    lockedView_ = false;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Read-only view of a raw buffer.
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView
( Int height, Int width, const T* buffer, Int ldim )
{
#ifndef RELEASE
    PushCallStack("Matrix::LockedView(buffer)");
#endif
    Empty();
    height_ = height;
    width_ = width;
    ldim_ = ldim;
    lockedData_ = buffer;
    viewing_ = true;
    lockedView_ = true;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Read-only view of all of A.
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView( const Matrix<T,Int>& A )
{
#ifndef RELEASE
    PushCallStack("Matrix::LockedView(A)");
#endif
    Empty();
    height_ = A.Height();
    width_ = A.Width();
    ldim_ = A.LDim();
    lockedData_ = A.LockedBuffer();
    viewing_ = true;
    lockedView_ = true;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Mutable view of the height x width submatrix of A with top-left corner
// (i,j); bounds are validated in non-RELEASE builds.
template<typename T,typename Int>
inline void
Matrix<T,Int>::View
( Matrix<T,Int>& A, Int i, Int j, Int height, Int width )
{
#ifndef RELEASE
    PushCallStack("Matrix::View(A,i,j,height,width)");
    if( i < 0 || j < 0 )
        throw std::logic_error("Indices must be non-negative");
    if( height < 0 || width < 0 )
        throw std::logic_error("Height and width must be non-negative");
    if( (i+height) > A.Height() || (j+width) > A.Width() )
    {
        std::ostringstream msg;
        msg << "Trying to view outside of a Matrix: "
            << "up to (" << i+height-1 << "," << j+width-1 << ") "
            << "of " << A.Height() << " x " << A.Width() << " Matrix.";
        throw std::logic_error( msg.str().c_str() );
    }
#endif
    Empty();
    height_ = height;
    width_ = width;
    ldim_ = A.LDim();
    data_ = A.Buffer(i,j);
    viewing_ = true;
    lockedView_ = false;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Read-only analogue of the submatrix View above.
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView
( const Matrix<T,Int>& A, Int i, Int j, Int height, Int width )
{
#ifndef RELEASE
    PushCallStack("Matrix::LockedView(A,i,j,height,width)");
    if( i < 0 || j < 0 )
        throw std::logic_error("Indices must be non-negative");
    if( height < 0 || width < 0 )
        throw std::logic_error("Height and width must be non-negative");
    if( (i+height) > A.Height() || (j+width) > A.Width() )
    {
        std::ostringstream msg;
        msg << "Trying to view outside of a Matrix: "
            << "up to (" << i+height-1 << "," << j+width-1 << ") "
            << "of " << A.Height() << " x " << A.Width() << " Matrix.";
        throw std::logic_error( msg.str().c_str() );
    }
#endif
    Empty();
    height_ = height;
    width_ = width;
    ldim_ = A.LDim();
    lockedData_ = A.LockedBuffer(i,j);
    viewing_ = true;
    lockedView_ = true;
#ifndef RELEASE
    PopCallStack();
#endif
}
// Mutable view of [AL AR] glued side by side. The two pieces must share
// height and ldim, and AR must begin exactly one panel (ldim*width entries)
// after AL, i.e. they must be adjacent column blocks of one buffer.
template<typename T,typename Int>
inline void
Matrix<T,Int>::View1x2( Matrix<T,Int>& AL, Matrix<T,Int>& AR )
{
#ifndef RELEASE
    PushCallStack("Matrix::View1x2");
    if( AL.Height() != AR.Height() )
        throw std::logic_error("1x2 must have consistent height to combine");
    if( AL.LDim() != AR.LDim() )
        throw std::logic_error("1x2 must have consistent ldims to combine");
    if( AR.Buffer() != (AL.Buffer()+AL.LDim()*AL.Width()) )
        throw std::logic_error("1x2 must have contiguous memory");
#endif
    Empty();
    height_ = AL.Height();
    width_ = AL.Width() + AR.Width();
    ldim_ = AL.LDim();
    data_ = AL.Buffer();
    viewing_ = true;
    lockedView_ = false;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Read-only analogue of View1x2.
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView1x2( const Matrix<T,Int>& AL, const Matrix<T,Int>& AR )
{
#ifndef RELEASE
    PushCallStack("Matrix::LockedView1x2");
    if( AL.Height() != AR.Height() )
        throw std::logic_error("1x2 must have consistent height to combine");
    if( AL.LDim() != AR.LDim() )
        throw std::logic_error("1x2 must have consistent ldims to combine");
    if( AR.LockedBuffer() != (AL.LockedBuffer()+AL.LDim()*AL.Width()) )
        throw std::logic_error("1x2 must have contiguous memory");
#endif
    Empty();
    height_ = AL.Height();
    width_ = AL.Width() + AR.Width();
    ldim_ = AL.LDim();
    lockedData_ = AL.LockedBuffer();
    viewing_ = true;
    lockedView_ = true;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Mutable view of [AT; AB] stacked vertically. Since both pieces share the
// parent's ldim, AB must begin AT.Height() entries after AT within each
// column, hence the buffer-offset check.
template<typename T,typename Int>
inline void
Matrix<T,Int>::View2x1
( Matrix<T,Int>& AT,
  Matrix<T,Int>& AB )
{
#ifndef RELEASE
    PushCallStack("Matrix::View2x1");
    if( AT.Width() != AB.Width() )
        throw std::logic_error("2x1 must have consistent width to combine");
    if( AT.LDim() != AB.LDim() )
        throw std::logic_error("2x1 must have consistent ldim to combine");
    if( AB.Buffer() != (AT.Buffer() + AT.Height()) )
        throw std::logic_error("2x1 must have contiguous memory");
#endif
    Empty();
    height_ = AT.Height() + AB.Height();
    width_ = AT.Width();
    ldim_ = AT.LDim();
    data_ = AT.Buffer();
    viewing_ = true;
    lockedView_ = false;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Read-only analogue of View2x1.
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView2x1
( const Matrix<T,Int>& AT,
  const Matrix<T,Int>& AB )
{
#ifndef RELEASE
    PushCallStack("Matrix::LockedView2x1");
    if( AT.Width() != AB.Width() )
        throw std::logic_error("2x1 must have consistent width to combine");
    if( AT.LDim() != AB.LDim() )
        throw std::logic_error("2x1 must have consistent ldim to combine");
    if( AB.LockedBuffer() != (AT.LockedBuffer()+AT.Height()) )
        throw std::logic_error("2x1 must have contiguous memory");
#endif
    Empty();
    height_ = AT.Height() + AB.Height();
    width_ = AT.Width();
    ldim_ = AT.LDim();
    lockedData_ = AT.LockedBuffer();
    viewing_ = true;
    lockedView_ = true;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Mutable view of the 2x2 quadrant partition [ATL ATR; ABL ABR]; the four
// pieces must conform, share ldims, and tile one contiguous buffer.
template<typename T,typename Int>
inline void
Matrix<T,Int>::View2x2
( Matrix<T,Int>& ATL, Matrix<T,Int>& ATR,
  Matrix<T,Int>& ABL, Matrix<T,Int>& ABR )
{
#ifndef RELEASE
    PushCallStack("Matrix::View2x2");
    if( ATL.Width() != ABL.Width() ||
        ATR.Width() != ABR.Width() ||
        ATL.Height() != ATR.Height() ||
        ABL.Height() != ABR.Height() )
        throw std::logic_error("2x2 must conform to combine");
    if( ATL.LDim() != ATR.LDim() ||
        ATR.LDim() != ABL.LDim() ||
        ABL.LDim() != ABR.LDim() )
        throw std::logic_error("2x2 must have consistent ldims to combine");
    if( ABL.Buffer() != (ATL.Buffer() + ATL.Height()) ||
        ABR.Buffer() != (ATR.Buffer() + ATR.Height()) ||
        ATR.Buffer() != (ATL.Buffer() + ATL.LDim()*ATL.Width()) )
        throw std::logic_error("2x2 must have contiguous memory");
#endif
    Empty();
    height_ = ATL.Height() + ABL.Height();
    width_ = ATL.Width() + ATR.Width();
    ldim_ = ATL.LDim();
    data_ = ATL.Buffer();
    viewing_ = true;
    lockedView_ = false;
#ifndef RELEASE
    PopCallStack();
#endif
}

// Read-only analogue of View2x2.
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView2x2
( const Matrix<T,Int>& ATL, const Matrix<T,Int>& ATR,
  const Matrix<T,Int>& ABL, const Matrix<T,Int>& ABR )
{
#ifndef RELEASE
    PushCallStack("Matrix::LockedView2x2");
    if( ATL.Width() != ABL.Width() ||
        ATR.Width() != ABR.Width() ||
        ATL.Height() != ATR.Height() ||
        ABL.Height() != ABR.Height() )
        throw std::logic_error("2x2 must conform to combine");
    if( ATL.LDim() != ATR.LDim() ||
        ATR.LDim() != ABL.LDim() ||
        ABL.LDim() != ABR.LDim() )
        throw std::logic_error("2x2 must have consistent ldims to combine");
    if( ABL.LockedBuffer() != (ATL.LockedBuffer() + ATL.Height()) ||
        ABR.LockedBuffer() != (ATR.LockedBuffer() + ATR.Height()) ||
        ATR.LockedBuffer() != (ATL.LockedBuffer() + ATL.LDim()*ATL.Width()) )
        throw std::logic_error("2x2 must have contiguous memory");
#endif
    Empty();
    height_ = ATL.Height() + ABL.Height();
    width_ = ATL.Width() + ATR.Width();
    ldim_ = ATL.LDim();
    lockedData_ = ATL.LockedBuffer();
    viewing_ = true;
    lockedView_ = true;
#ifndef RELEASE
    PopCallStack();
#endif
}
//
// Utilities
//

// Deep copy of A, column by column. Non-views are resized to match A;
// views must already have the same dimensions and must not be locked.
template<typename T,typename Int>
inline const Matrix<T,Int>&
Matrix<T,Int>::operator=( const Matrix<T,Int>& A )
{
#ifndef RELEASE
    PushCallStack("Matrix::operator=");
    if( lockedView_ )
        throw std::logic_error("Cannot assign to a locked view");
    if( viewing_ && ( A.Height() != Height() || A.Width() != Width() ) )
        throw std::logic_error
        ("Cannot assign to a view of different dimensions");
#endif
    if( !viewing_ )
        ResizeTo( A.Height(), A.Width() );
    const Int height = Height();
    const Int width = Width();
    const Int ldim = LDim();
    const Int ldimOfA = A.LDim();
    const T* data = A.LockedBuffer();
    // Columns are independent, so the copy parallelizes trivially.
#ifdef _OPENMP
    #pragma omp parallel for
#endif
    for( Int j=0; j<width; ++j )
        MemCopy( &data_[j*ldim], &data[j*ldimOfA], height );
#ifndef RELEASE
    PopCallStack();
#endif
    return *this;
}

// Releases owned storage and resets to the default (0 x 0, non-viewing)
// state.
template<typename T,typename Int>
inline void
Matrix<T,Int>::Empty()
{
    memory_.Empty();
    height_ = 0;
    width_ = 0;
    ldim_ = 1;
    data_ = 0;
    lockedData_ = 0;
    viewing_ = false;
    lockedView_ = false;
}

// Resizes to height x width; views may only shrink. The leading dimension
// is only changed (to max(height,1)) when either dimension grows, so a
// shrink is a cheap logical resize of the existing buffer.
template<typename T,typename Int>
inline void
Matrix<T,Int>::ResizeTo( Int height, Int width )
{
#ifndef RELEASE
    PushCallStack("Matrix::ResizeTo(height,width)");
    if( height < 0 || width < 0 )
        throw std::logic_error("Height and width must be non-negative");
    if( viewing_ && (height>height_ || width>width_) )
        throw std::logic_error("Cannot increase the size of a view");
#endif
    // Only change the ldim when necessary. Simply 'shrink' our view if
    // possible.
    const Int minLDim = 1;
    if( height > height_ || width > width_ )
        ldim_ = std::max( height, minLDim );
    height_ = height;
    width_ = width;
    memory_.Require(ldim_*width);
    data_ = memory_.Buffer();
#ifndef RELEASE
    PopCallStack();
#endif
}

// Resize with an explicit leading dimension; ldim must be >= height, and a
// view may not change its dimensions or ldim upward.
template<typename T,typename Int>
inline void
Matrix<T,Int>::ResizeTo( Int height, Int width, Int ldim )
{
#ifndef RELEASE
    PushCallStack("Matrix::ResizeTo(height,width,ldim)");
    if( height < 0 || width < 0 )
        throw std::logic_error("Height and width must be non-negative");
    if( viewing_ && (height > height_ || width > width_ || ldim != ldim_) )
        throw std::logic_error("Illogical ResizeTo on viewed data");
    if( ldim < height )
    {
        std::ostringstream msg;
        msg << "Tried to set ldim(" << ldim << ") < height (" << height << ")";
        throw std::logic_error( msg.str().c_str() );
    }
#endif
    height_ = height;
    width_ = width;
    ldim_ = ldim;
    memory_.Require(ldim*width);
    data_ = memory_.Buffer();
#ifndef RELEASE
    PopCallStack();
#endif
}
// Throws std::logic_error unless (i,j) addresses a valid entry, i.e.
// 0 <= i < Height() and 0 <= j < Width().
//
// BUG FIX: the bounds check previously used '>' and therefore accepted the
// one-past-the-end indices i == Height() and j == Width(), which index out
// of the column-major buffer in Get/Set/Update. It must be '>='.
template<typename T,typename Int>
inline void
Matrix<T,Int>::AssertValidEntry( Int i, Int j ) const
{
#ifndef RELEASE
    PushCallStack("Matrix::AssertValidEntry");
#endif
    if( i < 0 || j < 0 )
        throw std::logic_error("Indices must be non-negative");
    if( i >= this->Height() || j >= this->Width() )
    {
        std::ostringstream msg;
        msg << "Out of bounds: "
            << "(" << i << "," << j << ") of " << this->Height()
            << " x " << this->Width() << " Matrix.";
        throw std::logic_error( msg.str() );
    }
#ifndef RELEASE
    PopCallStack();
#endif
}
} // namespace elem
#endif /* ELEMENTAL_MATRIX_HPP */
| ahmadia/elemental | include/elemental/core/matrix.hpp | C++ | bsd-3-clause | 38,436 |
import threading
from collections import defaultdict
from funcy import once, decorator
from django.db import DEFAULT_DB_ALIAS, DatabaseError
from django.db.backends.utils import CursorWrapper
from django.db.transaction import Atomic, get_connection, on_commit
from .utils import monkey_mix
# Public API of this module.
__all__ = ('queue_when_in_transaction', 'install_cacheops_transaction_support',
           'transaction_states')
class TransactionState(list):
    """A stack of open transaction/savepoint contexts for one database.

    Every stack entry records the callbacks queued inside that
    (sub)transaction and whether any modifying SQL ran within it.
    """

    def begin(self):
        # Open a new transaction or savepoint context.
        self.append({'cbs': [], 'dirty': False})

    def commit(self):
        context = self.pop()
        if not self:
            # The outermost transaction committed: fire queued callbacks.
            for func, args, kwargs in context['cbs']:
                func(*args, **kwargs)
        else:
            # A savepoint committed: fold its state into the parent.
            parent = self[-1]
            parent['cbs'].extend(context['cbs'])
            parent['dirty'] = parent['dirty'] or context['dirty']

    def rollback(self):
        # Discard the current context together with its queued callbacks.
        self.pop()

    def push(self, item):
        # Queue a (func, args, kwargs) triple to run on outermost commit.
        self[-1]['cbs'].append(item)

    def mark_dirty(self):
        # Remember that the current context executed modifying SQL.
        self[-1]['dirty'] = True

    def is_dirty(self):
        # True if any enclosing context executed modifying SQL.
        return any(context['dirty'] for context in self)
class TransactionStates(threading.local):
    """Per-thread mapping from database alias to its TransactionState."""

    def __init__(self):
        super(TransactionStates, self).__init__()
        self._states = defaultdict(TransactionState)

    def __getitem__(self, key):
        # A falsy key means "the default database".
        alias = key or DEFAULT_DB_ALIAS
        return self._states[alias]

    def is_dirty(self, dbs):
        # Whether any of the given databases ran modifying SQL in an open
        # transaction on this thread.
        return any(self[db].is_dirty() for db in dbs)
# Module-level, thread-local registry of per-database transaction stacks.
transaction_states = TransactionStates()
@decorator
def queue_when_in_transaction(call):
    """Run the wrapped call now, or defer it until its transaction commits.

    ``call`` is funcy's lazy call object; ``call.using`` resolves the wrapped
    function's ``using`` argument (presumably a database alias — funcy exposes
    wrapped arguments by name). Inside a transaction the call is queued on
    that database's state stack (and this returns None); otherwise it is
    executed immediately and its result returned.
    """
    if transaction_states[call.using]:
        transaction_states[call.using].push((call, (), {}))
    else:
        return call()
class AtomicMixIn(object):
    """Monkey-mixed into ``django.db.transaction.Atomic``.

    Mirrors Django's atomic-block lifecycle onto ``transaction_states`` so
    callbacks queued via ``queue_when_in_transaction`` only run once the
    outermost transaction really commits. ``self._no_monkey`` is the
    pre-patch implementation (installed by ``monkey_mix``).
    """

    def __enter__(self):
        # Empty stack means we are entering the outermost atomic block.
        entering = not transaction_states[self.using]
        transaction_states[self.using].begin()
        self._no_monkey.__enter__(self)
        if entering:
            # Drain our queue through Django's on_commit machinery when the
            # real database transaction commits.
            on_commit(transaction_states[self.using].commit, self.using)

    def __exit__(self, exc_type, exc_value, traceback):
        connection = get_connection(self.using)
        try:
            self._no_monkey.__exit__(self, exc_type, exc_value, traceback)
        except DatabaseError:
            # Commit/savepoint release itself failed: drop queued callbacks.
            transaction_states[self.using].rollback()
        else:
            if not connection.closed_in_transaction and exc_type is None and \
                    not connection.needs_rollback:
                if transaction_states[self.using]:
                    transaction_states[self.using].commit()
            else:
                # Exception, forced rollback, or connection closed mid-
                # transaction: discard this context.
                transaction_states[self.using].rollback()
class CursorWrapperMixin(object):
    """Monkey-mixed into Django's CursorWrapper.

    Marks the current transaction state dirty whenever SQL that may modify
    data is executed, so cacheops knows invalidation is needed on commit.
    ``self._no_monkey`` is the pre-patch implementation.
    """

    def callproc(self, procname, params=None):
        result = self._no_monkey.callproc(self, procname, params)
        state = transaction_states[self.db.alias]
        if state:
            # A stored procedure may modify anything; assume the worst.
            state.mark_dirty()
        return result

    def execute(self, sql, params=None):
        result = self._no_monkey.execute(self, sql, params)
        state = transaction_states[self.db.alias]
        if state and is_sql_dirty(sql):
            state.mark_dirty()
        return result

    def executemany(self, sql, param_list):
        result = self._no_monkey.executemany(self, sql, param_list)
        state = transaction_states[self.db.alias]
        if state and is_sql_dirty(sql):
            state.mark_dirty()
        return result
# Characters that may appear inside an SQL identifier; used to detect word
# boundaries around the action keywords below.
CHARS = set('abcdefghijklmnoprqstuvwxyz_')


def is_sql_dirty(sql):
    """Return True if ``sql`` contains a data-modifying statement.

    Looks for 'update', 'insert', or 'delete' appearing as a whole word
    (not embedded in a longer identifier such as ``updates``).

    BUG FIX: the previous implementation checked only the *first* occurrence
    of each keyword, so e.g. ``with updated as (select 1) update t set ...``
    was misclassified as clean because the embedded match in ``updated``
    aborted the search. We now scan every occurrence.
    """
    # This should not happen as using bytes in Python 3 is against db protocol,
    # but some people will pass it anyway
    if isinstance(sql, bytes):
        sql = sql.decode()
    # NOTE: not using regex here for speed
    sql = sql.lower()
    for action in ('update', 'insert', 'delete'):
        p = sql.find(action)
        while p != -1:
            start, end = p - 1, p + len(action)
            if (start < 0 or sql[start] not in CHARS) \
                    and (end >= len(sql) or sql[end] not in CHARS):
                return True
            # Embedded in a longer identifier; keep searching.
            p = sql.find(action, p + 1)
    return False
@once
def install_cacheops_transaction_support():
    # Idempotent (guarded by @once): patch Django's Atomic and CursorWrapper
    # with our mix-ins so cacheops can track transaction state and defer
    # cache invalidation until commit.
    monkey_mix(Atomic, AtomicMixIn)
    monkey_mix(CursorWrapper, CursorWrapperMixin)
| Suor/django-cacheops | cacheops/transaction.py | Python | bsd-3-clause | 4,124 |