text
stringlengths
2
1.04M
meta
dict
layout: post authors: ["Greg Wilson"] title: "What About MOOCs?" date: 2014-12-02 time: "09:00:00" tags: ["Teaching"] --- <p> We frequently get asked whether Software Carpentry would work as a <a href="https://en.wikipedia.org/wiki/Massive_open_online_course">MOOC</a>. The answer is that I think it can work well if it's what Siemens and Downes actually had in mind when they coined the term. They didn't mean people watching videos and then doing robo-graded exercises; instead, their connectivist model of learning assumed that participants would use the internet to collaborate in exploring ideas, rather than as a faster form of television. </p> <p> I'm definitely excited about the Siemens and Downes kind of MOOC. In particular, I believe that instructors who don't have time to teach a full workshop might give us an hour a week to help people via one-to-one or one-to-few sessions via Skype and screen sharing. There was a lot of enthusiasm among the instructors for this when we tried it in the spring of 2012; that experiment wound down because we lacked critical mass, but we're five times larger now, and I think it would be worth trying again. </p> <p> The most interesting question for me is where this fits. Should we start people off this way? Should people do the first day in person (so that we can get them through software setup and configuration issues), then do the rest online? Should this be used as the "day 3" follow-on that everyone keeps asking for? We'd like to try all of this and more; if you'd like to help, please <a href="mailto:{{site.contact}}">let us know</a>. </p> <!--more-->
{ "content_hash": "d6cf0debcdd9c5fc05181aee34a35257", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 113, "avg_line_length": 41.575, "alnum_prop": 0.7402285027059531, "repo_name": "swcarpentry/website", "id": "0a8f9253611ba155b66cde301688b2685b9501b7", "size": "1667", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "_posts/2014/12/2014-12-02-what-about-moocs.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "4508" }, { "name": "HTML", "bytes": "5325986" }, { "name": "JavaScript", "bytes": "286830" }, { "name": "Jupyter Notebook", "bytes": "209372" }, { "name": "Makefile", "bytes": "2887" }, { "name": "Python", "bytes": "19896" }, { "name": "Ruby", "bytes": "146" }, { "name": "SCSS", "bytes": "361319" }, { "name": "Shell", "bytes": "2576" }, { "name": "TeX", "bytes": "41043" }, { "name": "XSLT", "bytes": "5034" } ], "symlink_target": "" }
#pragma once #include <string> #include <iostream> #include <vector> #include <map> #include <memory> #include <functional> #include <stdlib.h> #include <parse.hpp> #include <token.hpp> #include <tree.hpp> #include <util.hpp> #define LEXICAL_ERROR(message) { \ std::cerr << "LEXICAL ERROR(" << std::to_string(m_currentLine) << "): " << message << std::endl; ABORT } #define SYNTAX_ERROR(message) { \ std::cerr << "SYNTAX ERROR(" << PeekToken(false).line << ":" << PeekToken(false).offset << ") " << message \ << std::endl; ABORT } #define UNEXPECTED_TOKEN(place) SYNTAX_ERROR("Unexpected token in " << place << ": " << Util::TokenName(m_currentToken.type)) #ifdef DEBUG #define PEEK std::cout<<"PeekToken: "<<Util::TokenName(PeekToken(false).type)<<std::endl; #define PEEK_NEXT std::cout<<"NextPeek: "<<Util::TokenName(PeekNextToken(false).type)<<std::endl; #define LOG(message) std::cout<<message<<std::endl; #else #define PEEK #define PEEK_NEXT #define LOG(message) #endif struct Parser { Parser(const std::string& source, ParseResult& parse); ~Parser() { } void Parse(); std::string m_source; ParseResult& m_parse; unsigned int m_currentOffset; unsigned int m_currentLine; unsigned int m_lineOffset; Token m_previousToken; Token m_currentToken; Token m_nextToken; // --- Expression parsing --- std::map<TokenType, std::function<TreeNode*()>> m_prefixParselets; std::map<TokenType, std::function<TreeNode*(TreeNode*)>> m_infixParselets; std::map<TokenType, int> m_precedenceTable; /* * This is used for checking if we're parsing a method call, or a call to a constuctor. * This should be set to the type name before entering the Pratt parser. 
*/ std::string m_expectedConstructorName; // --- Methods for parsing bits of source in recursive-decent --- TreeNode* Expression(int precedence); TreeNode* Statement(std::vector<VariableDef*>& locals); TreeNode* Condition(); std::pair<VariableDef*, TreeNode*> Local(); std::pair<std::vector<VariableDef*>, TreeNode*> Block(); void Import(); ClassDef* Class(); MethodDef* Method(); ConstructorDef* Constructor(); std::vector<VariableDef*> ParameterList(); Attribs Attributes(); // --- Methods to make the recursive-decent parser more readable --- void Consume(TokenType tokenType, bool ignoreNewlines = true); void ConsumeNext(TokenType tokenType, bool ignoreNewlines = true); Token NextToken(bool ignoreNewlines = true); Token PeekToken(bool ignoreNewlines = true); Token PeekNextToken(bool ignoreNewlines = true); bool Match(TokenType expected, bool ignoreNewlines = true); bool MatchNext(TokenType expected, bool ignoreNewlines = true); std::string DottedName(); // --- Lexer --- Token LexNext(); char NextChar(); char PeekChar(); char PeekNextChar(); Token LexNumber(); Token LexHexNumber(); Token LexStringLiteral(); Token LexName(TokenType type); inline bool IsName(char c) const { return ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c == '_')); } inline bool IsDigit(char c) const { return (c >= '0' && c <= '9'); } inline bool IsHexDigit(char c) const { return (IsDigit(c) || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F')); } };
{ "content_hash": "b66d52e9908f82f612e7bf913b2cdbb6", "timestamp": "", "source": "github", "line_count": 110, "max_line_length": 125, "avg_line_length": 29.87272727272727, "alnum_prop": 0.6618989653073646, "repo_name": "IsaacWoods/Gecko", "id": "6eaf4899af93ff5030bdd556972042283339224d", "size": "3349", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/parser.hpp", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Assembly", "bytes": "995" }, { "name": "C", "bytes": "152" }, { "name": "C++", "bytes": "75454" }, { "name": "Makefile", "bytes": "1092" } ], "symlink_target": "" }
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>ip::basic_resolver_iterator::operator *</title> <link rel="stylesheet" href="../../../boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.75.2"> <link rel="home" href="../../../index.html" title="Asio"> <link rel="up" href="../ip__basic_resolver_iterator.html" title="ip::basic_resolver_iterator"> <link rel="prev" href="iterator_category.html" title="ip::basic_resolver_iterator::iterator_category"> <link rel="next" href="operator_not__eq_.html" title="ip::basic_resolver_iterator::operator!="> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr><td valign="top"><img alt="asio C++ library" width="250" height="60" src="../../../asio.png"></td></tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="iterator_category.html"><img src="../../../prev.png" alt="Prev"></a><a accesskey="u" href="../ip__basic_resolver_iterator.html"><img src="../../../up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../home.png" alt="Home"></a><a accesskey="n" href="operator_not__eq_.html"><img src="../../../next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h4 class="title"> <a name="asio.reference.ip__basic_resolver_iterator.operator__star_"></a><a class="link" href="operator__star_.html" title="ip::basic_resolver_iterator::operator *">ip::basic_resolver_iterator::operator *</a> </h4></div></div></div> <p> <a class="indexterm" name="idm125320"></a> Dereference an iterator. 
</p> <pre class="programlisting"><span class="keyword">const</span> <span class="identifier">basic_resolver_entry</span><span class="special">&lt;</span> <span class="identifier">InternetProtocol</span> <span class="special">&gt;</span> <span class="special">&amp;</span> <span class="keyword">operator</span> <span class="special">*()</span> <span class="keyword">const</span><span class="special">;</span> </pre> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2003-2016 Christopher M. Kohlhoff<p> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="iterator_category.html"><img src="../../../prev.png" alt="Prev"></a><a accesskey="u" href="../ip__basic_resolver_iterator.html"><img src="../../../up.png" alt="Up"></a><a accesskey="h" href="../../../index.html"><img src="../../../home.png" alt="Home"></a><a accesskey="n" href="operator_not__eq_.html"><img src="../../../next.png" alt="Next"></a> </div> </body> </html>
{ "content_hash": "2d0f0af227e95d36cae31f18d542b5d9", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 402, "avg_line_length": 68.61363636363636, "alnum_prop": 0.6475654190129182, "repo_name": "throughnet/throughnet", "id": "c8213cf89cb83d4aacb32604fe12e00aac3eae85", "size": "3019", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "src/libs/asio/doc/asio/reference/ip__basic_resolver_iterator/operator__star_.html", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "5592993" }, { "name": "CMake", "bytes": "56051" }, { "name": "CSS", "bytes": "16504" }, { "name": "HTML", "bytes": "18244837" }, { "name": "M4", "bytes": "9302" }, { "name": "Makefile", "bytes": "411179" }, { "name": "Perl", "bytes": "6547" }, { "name": "Python", "bytes": "41502" }, { "name": "Shell", "bytes": "53330" } ], "symlink_target": "" }
/* global $, $iq, Strophe */

import { getLogger } from 'jitsi-meet-logger';
const logger = getLogger(__filename);

import ConnectionPlugin from './ConnectionPlugin';

// XMPP namespace for the Rayo call-control protocol (XEP-0327).
const RAYO_XMLNS = 'urn:xmpp:rayo:1';

/**
 * Strophe connection plugin that starts and stops dial-out calls via
 * Rayo 'dial' / 'hangup' IQ stanzas sent to the conference focus.
 */
class RayoConnectionPlugin extends ConnectionPlugin {
    /**
     * Registers a Strophe stanza handler for incoming Rayo 'set' IQs.
     *
     * @param connection the Strophe connection this plugin is attached to
     */
    init(connection) {
        super.init(connection);

        this.connection.addHandler(
            this.onRayo.bind(this), RAYO_XMLNS, 'iq', 'set', null, null);
    }

    /**
     * Handler for incoming Rayo IQs. Currently only logs the stanza.
     *
     * @param iq the received IQ element
     */
    onRayo(iq) {
        logger.info('Rayo IQ', iq);
    }

    /* eslint-disable max-params */
    /**
     * Sends a Rayo 'dial' IQ to the focus asking it to place an outbound
     * call. On success, remembers the call's resource (stripped of its
     * leading 'xmpp:' prefix) in this.callResource so hangup() can target it.
     *
     * @param to destination address of the call
     * @param from caller address
     * @param roomName conference room the call should be bridged into
     * @param roomPass room password, sent only when non-empty
     * @param focusMucJid MUC JID of the focus; required, else the returned
     *     promise rejects immediately with 'Internal error!'
     * @returns {Promise} resolved when the focus acknowledges the dial,
     *     rejected with the IQ error otherwise
     */
    dial(to, from, roomName, roomPass, focusMucJid) {
        return new Promise((resolve, reject) => {
            if (!focusMucJid) {
                reject(new Error('Internal error!'));

                return;
            }
            const req = $iq({ type: 'set',
                to: focusMucJid });

            req.c('dial', { xmlns: RAYO_XMLNS,
                to,
                from });
            req.c('header', { name: 'JvbRoomName',
                value: roomName }).up();
            if (roomPass && roomPass.length) {
                req.c('header', { name: 'JvbRoomPassword',
                    value: roomPass }).up();
            }
            this.connection.sendIQ(
                req,
                result => {
                    logger.info('Dial result ', result);

                    // The focus answers with a <ref uri='xmpp:...'/> that
                    // identifies the newly created call.
                    // eslint-disable-next-line newline-per-chained-call
                    const resource = $(result).find('ref').attr('uri');

                    this.callResource = resource.substr('xmpp:'.length);
                    logger.info(`Received call resource: ${this.callResource}`);
                    resolve();
                },
                error => {
                    logger.info('Dial error ', error);
                    reject(error);
                });
        });
    }
    /* eslint-enable max-params */

    /**
     * Sends a Rayo 'hangup' IQ to the call resource recorded by dial().
     * Clears this.callResource whether the IQ succeeds or fails.
     *
     * @returns {Promise} rejected immediately if no call is in progress
     */
    hangup() {
        return new Promise((resolve, reject) => {
            if (!this.callResource) {
                reject(new Error('No call in progress'));
                logger.warn('No call in progress');

                return;
            }
            const req = $iq({ type: 'set',
                to: this.callResource });

            req.c('hangup', { xmlns: RAYO_XMLNS });
            this.connection.sendIQ(req, result => {
                logger.info('Hangup result ', result);
                this.callResource = null;
                resolve();
            }, error => {
                logger.info('Hangup error ', error);
                this.callResource = null;
                // NOTE(review): the original IQ error is logged but not
                // propagated; callers only see the generic message.
                reject(new Error('Hangup error '));
            });
        });
    }
}

/**
 * Registers the plugin with Strophe under the name 'rayo'.
 */
export default function() {
    Strophe.addConnectionPlugin('rayo', new RayoConnectionPlugin());
}
{ "content_hash": "089021e8d48ff7144d7e0b5546dd624c", "timestamp": "", "source": "github", "line_count": 132, "max_line_length": 80, "avg_line_length": 24.189393939393938, "alnum_prop": 0.43470090823676794, "repo_name": "adambowles/lib-jitsi-meet", "id": "8d241ee621af389e72c669e9654371063efad224", "size": "3193", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/xmpp/strophe.rayo.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "1313072" }, { "name": "Shell", "bytes": "281" } ], "symlink_target": "" }
/**
 * tequila
 * presentation-model
 */

// Model Constructor
// A Presentation is a Model with three fixed attributes: name, modelName and
// contents (an array of Command / Attribute / string items to present).
var Presentation = function (args) {
  if (false === (this instanceof Presentation)) throw new Error('new operator required');
  args = args || {};
  if (!args.attributes) {
    args.attributes = [];
  }
  args.attributes.push(new Attribute({name: 'name', type: 'String'}));
  args.attributes.push(new Attribute({name: 'modelName', type: 'String'}));
  args.attributes.push(new Attribute({name: 'contents', type: 'Object', value: []}));
  Model.call(this, args);
  this.modelType = "Presentation";
};
Presentation.prototype = T.inheritPrototype(Model.prototype);
/*
 * Methods
 */
/**
 * Synchronous state check.  Beyond the base Model checks, verifies that
 * 'contents' is an Array whose elements are each a Command, an Attribute or
 * a string.  Sets this.validationMessage to the first error (or '') and
 * returns the error array.
 *
 * @param modelCheckOnly when truthy, skip the contents check
 */
Presentation.prototype.getObjectStateErrors = function (modelCheckOnly) {
  var i;
  this.validationErrors = Model.prototype.getObjectStateErrors.call(this);
  if (!modelCheckOnly && this.validationErrors.length == 0) {
    // Only check contents if the model itself is valid
    var contents = this.get('contents');
    var gotError = false;
    if (contents instanceof Array) {
      for (i = 0; i < contents.length; i++) {
        if (!(contents[i] instanceof Command || contents[i] instanceof Attribute || typeof contents[i] == 'string'))
          gotError = true;
      }
      if (gotError)
        this.validationErrors.push('contents elements must be Command, Attribute or string');
    } else {
      this.validationErrors.push('contents must be Array');
    }
  }
  this.validationMessage = this.validationErrors.length > 0 ? this.validationErrors[0] : '';
  return this.validationErrors;
};
/**
 * Asynchronous validation: runs getObjectStateErrors, appends any stored
 * error conditions, then validates every Attribute inside 'contents' and
 * invokes callBack once all of them have reported back.
 *
 * @param callBack required function, invoked exactly once when validation
 *                 completes; results are in this.validationErrors /
 *                 this.validationMessage
 */
Presentation.prototype.validate = function (callBack) {
  var presentation = this;
  if (typeof callBack != 'function') throw new Error('callback is required');
  this.getObjectStateErrors();
  var e;
  for (e in this._errorConditions) {
    if (this._errorConditions.hasOwnProperty(e)) {
      this.validationErrors.push(this._errorConditions[e]);
    }
  }
  // validate each attribute in contents
  var i;
  var gotError = false;
  var attributeCount = 0;
  var checkCount = 0;
  var contents = this.get('contents');
  if (contents instanceof Array) {
    // Count first so checkAttrib knows when every validation has reported
    for (i = 0; i < contents.length; i++) {
      if (contents[i] instanceof Attribute) {
        attributeCount++;
      }
    }
    // Launch validations
    for (i = 0; i < contents.length; i++) {
      if (contents[i] instanceof Attribute) {
        contents[i].validate(checkAttrib);
      }
    }
  }
  // BUG FIX: previously, when contents held no attributes, checkAttrib was
  // never invoked and the caller's callback was silently dropped.  Complete
  // immediately in that case.
  if (attributeCount === 0) {
    presentation.validationMessage = presentation.validationErrors.length > 0 ?
      presentation.validationErrors[0] : '';
    callBack();
  }
  function checkAttrib() {
    checkCount++;
    // NOTE(review): `this` is assumed to be the attribute whose validate()
    // just completed (i.e. Attribute#validate invokes its callback bound to
    // the attribute) — confirm against the Attribute implementation.
    if (this.validationMessage) gotError = true;
    // BUG FIX: was `checkCount == checkCount` (always true), which finished
    // validation after the FIRST attribute instead of waiting for all of
    // them.  Compare against the total counted above.
    if (checkCount === attributeCount) {
      if (gotError) presentation.validationErrors.push('contents has validation errors');
      presentation.validationMessage = presentation.validationErrors.length > 0 ?
        presentation.validationErrors[0] : '';
      callBack();
    }
  }
};
{ "content_hash": "fae1c4b5ae8e38a298001d4e18b6b1c0", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 120, "avg_line_length": 33.566265060240966, "alnum_prop": 0.6590093323761665, "repo_name": "seanTsmith/tequila", "id": "736a7d4d13d931cf66f5c10a7bec81079586b25c", "size": "2786", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/models/presentation-model.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "21752" }, { "name": "JavaScript", "bytes": "944691" }, { "name": "Makefile", "bytes": "13" } ], "symlink_target": "" }
<?php
/* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4: */

/**
 * QuickForm renderer for Flexy template engine, static version.
 *
 * PHP versions 4 and 5
 *
 * LICENSE: This source file is subject to version 3.01 of the PHP license
 * that is available through the world-wide-web at the following URI:
 * http://www.php.net/license/3_01.txt If you did not receive a copy of
 * the PHP License and are unable to obtain it through the web, please
 * send a note to license@php.net so we can mail you a copy immediately.
 *
 * @category    HTML
 * @package     HTML_QuickForm
 * @author      Ron McClain <ron@humaniq.com>
 * @copyright   2001-2007 The PHP Group
 * @license     http://www.php.net/license/3_01.txt PHP License 3.01
 * @version     CVS: $Id: ObjectFlexy.php,v 1.9 2007/05/29 18:34:36 avb Exp $
 * @link        http://pear.php.net/package/HTML_QuickForm
 */

/**
 * A concrete renderer for HTML_QuickForm, makes an object from form contents
 */
require_once 'HTML/QuickForm/Renderer/Object.php';

/**
 * QuickForm renderer for Flexy template engine, static version.
 *
 * A static renderer for HTML_Quickform. Makes a QuickFormFlexyObject
 * from the form content suitable for use with a Flexy template
 *
 * Usage:
 * <code>
 * $form =& new HTML_QuickForm('form', 'POST');
 * $template =& new HTML_Template_Flexy();
 * $renderer =& new HTML_QuickForm_Renderer_ObjectFlexy(&$template);
 * $renderer->setHtmlTemplate("html.html");
 * $renderer->setLabelTemplate("label.html");
 * $form->accept($renderer);
 * $view = new StdClass;
 * $view->form = $renderer->toObject();
 * $template->compile("mytemplate.html");
 * </code>
 *
 * Based on the code for HTML_QuickForm_Renderer_ArraySmarty
 *
 * @category    HTML
 * @package     HTML_QuickForm
 * @author      Ron McClain <ron@humaniq.com>
 * @version     Release: 3.2.10
 * @since       3.1.1
 */
class HTML_QuickForm_Renderer_ObjectFlexy extends HTML_QuickForm_Renderer_Object
{
   /**#@+
    * @access private
    */
    /**
     * HTML_Template_Flexy instance
     * @var object $_flexy
     */
    var $_flexy;

    /**
     * Current element index
     * @var integer $_elementIdx
     */
    var $_elementIdx;

    /**
     * The current element index inside a group
     * @var integer $_groupElementIdx
     */
    var $_groupElementIdx = 0;

    /**
     * Name of template file for form html
     * @var string $_html
     * @see setHtmlTemplate()
     */
    var $_html = '';

    /**
     * Name of template file for form labels.
     * BUG FIX: this was declared as $label while every use in the class
     * (setLabelTemplate(), _elementToObject(), _renderLabel()) reads and
     * writes $this->_label, leaving the declared property unused and the
     * real one undeclared.
     * @var string $_label
     * @see setLabelTemplate()
     */
    var $_label = '';

    /**
     * Class of the element objects, so you can add your own
     * element methods
     * @var string $_elementType
     */
    var $_elementType = 'QuickformFlexyElement';
    /**#@-*/

    /**
     * Constructor
     *
     * @param HTML_Template_Flexy   template object to use
     * @public
     */
    function HTML_QuickForm_Renderer_ObjectFlexy(&$flexy)
    {
        $this->HTML_QuickForm_Renderer_Object(true);
        $this->_obj = new QuickformFlexyForm();
        $this->_flexy =& $flexy;
    } // end constructor

    /**
     * Stores rendered header html in the form object, keyed by the
     * header's name (or by section index when it has no name).
     *
     * @param HTML_QuickForm_header  header element being rendered
     */
    function renderHeader(&$header)
    {
        if($name = $header->getName()) {
            $this->_obj->header->$name = $header->toHtml();
        } else {
            $this->_obj->header[$this->_sectionCount] = $header->toHtml();
        }
        $this->_currentSection = $this->_sectionCount++;
    } // end func renderHeader

    function startGroup(&$group, $required, $error)
    {
        parent::startGroup($group, $required, $error);
        $this->_groupElementIdx = 1;
    } //end func startGroup

    /**
     * Creates an object representing an element containing
     * the key for storing this
     *
     * @access private
     * @param  HTML_QuickForm_element    form element being rendered
     * @param  bool        Whether an element is required
     * @param  string      Error associated with the element
     * @return object
     */
    function _elementToObject(&$element, $required, $error)
    {
        $ret = parent::_elementToObject($element, $required, $error);
        if($ret->type == 'group') {
            $ret->html = $element->toHtml();
            unset($ret->elements);
        }
        if(!empty($this->_label)) {
            $this->_renderLabel($ret);
        }

        if(!empty($this->_html)) {
            $this->_renderHtml($ret);
            $ret->error = $error;
        }

        // Create an element key from the name
        if (false !== ($pos = strpos($ret->name, '[')) || is_object($this->_currentGroup)) {
            if (!$pos) {
                $keys = '->{\'' . str_replace(array('\\', '\''), array('\\\\', '\\\''), $ret->name) . '\'}';
            } else {
                $keys = '->{\'' . str_replace(
                            array('\\', '\'', '[', ']'),
                            array('\\\\', '\\\'', '\'}->{\'', ''),
                            $ret->name
                        ) . '\'}';
            }
            // special handling for elements in native groups
            if (is_object($this->_currentGroup)) {
                // skip unnamed group items unless radios: no name -> no static access
                // identification: have the same key string as the parent group
                if ($this->_currentGroup->keys == $keys && 'radio' != $ret->type) {
                    return false;
                }
                // reduce string of keys by remove leading group keys
                if (0 === strpos($keys, $this->_currentGroup->keys)) {
                    $keys = substr_replace($keys, '', 0, strlen($this->_currentGroup->keys));
                }
            }
        } elseif (0 == strlen($ret->name)) {
            $keys = '->{\'element_' . $this->_elementIdx . '\'}';
        } else {
            $keys = '->{\'' . str_replace(array('\\', '\''), array('\\\\', '\\\''), $ret->name) . '\'}';
        }
        // for radios: add extra key from value
        if ('radio' == $ret->type && '[]' != substr($keys, -2)) {
            $keys .= '->{\'' . str_replace(array('\\', '\''), array('\\\\', '\\\''), $ret->value) . '\'}';
        }
        $ret->keys = $keys;
        $this->_elementIdx++;
        return $ret;
    }

    /**
     * Stores an object representation of an element in the
     * QuickformFormObject instance
     *
     * @access private
     * @param  QuickformElement     Object representation of an element
     * @return void
     */
    function _storeObject($elObj)
    {
        if ($elObj) {
            $keys = $elObj->keys;
            unset($elObj->keys);
            if(is_object($this->_currentGroup) && ('group' != $elObj->type)) {
                $code = '$this->_currentGroup' . $keys . ' = $elObj;';
            } else {
                $code = '$this->_obj' . $keys . ' = $elObj;';
            }
            // NOTE(review): eval() assembles the nested accessor path; the
            // key fragments are quote/backslash-escaped in _elementToObject,
            // but this is still worth auditing if element names can come
            // from untrusted input.
            eval($code);
        }
    }

    /**
     * Set the filename of the template to render html elements.
     * In your template, {html} is replaced by the unmodified html.
     * If the element is required, {required} will be true.
     * Eg.
     * <pre>
     * {if:error}
     *   <font color="red" size="1">{error:h}</font><br />
     * {end:}
     * {html:h}
     * </pre>
     *
     * @access public
     * @param  string   Filename of template
     * @return void
     */
    function setHtmlTemplate($template)
    {
        $this->_html = $template;
    }

    /**
     * Set the filename of the template to render form labels
     * In your template, {label} is replaced by the unmodified label.
     * {error} will be set to the error, if any.  {required} will
     * be true if this is a required field
     * Eg.
     * <pre>
     * {if:required}
     * <font color="orange" size="1">*</font>
     * {end:}
     * {label:h}
     * </pre>
     *
     * @access public
     * @param  string   Filename of template
     * @return void
     */
    function setLabelTemplate($template)
    {
        $this->_label = $template;
    }

    function _renderLabel(&$ret)
    {
        $this->_flexy->compile($this->_label);
        $ret->label = $this->_flexy->bufferedOutputObject($ret);
    }

    function _renderHtml(&$ret)
    {
        $this->_flexy->compile($this->_html);
        $ret->html = $this->_flexy->bufferedOutputObject($ret);
    }
} // end class HTML_QuickForm_Renderer_ObjectFlexy

/**
 * Adds nothing to QuickformForm, left for backwards compatibility
 *
 * @category    HTML
 * @package     HTML_QuickForm
 * @ignore
 */
class QuickformFlexyForm extends QuickformForm
{
}

/**
 * Adds nothing to QuickformElement, left for backwards compatibility
 *
 * @category    HTML
 * @package     HTML_QuickForm
 * @ignore
 */
class QuickformFlexyElement extends QuickformElement
{
}
?>
{ "content_hash": "ad39cd799f7ed470c0e39b38db89b82e", "timestamp": "", "source": "github", "line_count": 291, "max_line_length": 108, "avg_line_length": 30.130584192439862, "alnum_prop": 0.5450501824817519, "repo_name": "ivanlanin/kateglo", "id": "3d0668a86cf378458fed6272927f71a3646e8b27", "size": "8768", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "pear/HTML/QuickForm/Renderer/ObjectFlexy.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1700" }, { "name": "JavaScript", "bytes": "9130" }, { "name": "PHP", "bytes": "2112740" }, { "name": "SQL", "bytes": "44202" } ], "symlink_target": "" }
""" Unit tests for the da.cli module. --- type: python_module validation_level: v00_minimum protection: k00_public copyright: "Copyright 2016 High Integrity Artificial Intelligence Systems" license: "Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License." ... """ import click import pytest # ============================================================================= class SpecifyFuzzyAliasGroup: """ Specify the da.cli.fuzzy_alias_group() function. """ def it_is_callable(self): """ The fuzzy_alias_group() function is callable. """ import da.cli assert callable(da.cli.fuzzy_alias_group) # ============================================================================= class SpecifyExitWithCodeShow: """ Specify the da.cli.ExitWithCode.show() method. """ def it_is_callable(self): """ The show() method is callable. """ import da.cli assert callable(da.cli.ExitWithCode.show) # ============================================================================= class SpecifyExplicitInfoNameCommandMakeContext: """ Specify the da.cli.ExplicitInfoNameCommand.make_context() method. """ def it_is_callable(self): """ The make_context() method is callable. """ import da.cli assert callable(da.cli.ExplicitInfoNameCommand.make_context) # ============================================================================= class SpecifyFuzzyCommandAliasGroupGetCommand: """ Specify the da.cli.FuzzyCommandAliasGroup.get_command() method. """ def it_is_callable(self): """ The get_command() method is callable. 
""" import da.cli assert callable(da.cli.FuzzyCommandAliasGroup.get_command) # ============================================================================= class SpecifyExitApplication: """ Specify the da.cli.exit_application() function. """ # ------------------------------------------------------------------------- def it_raises_an_clickexception(self): """ Placeholder test case. """ import da.cli test_exit_code = 42 test_exit_message = 'Test exit message' with pytest.raises(click.ClickException) as exc: da.cli.exit_application(exit_code = test_exit_code, message = test_exit_message) assert exc.exit_code == test_exit_code assert exc.message == test_exit_message # ============================================================================= class SpecifyMain: """ Specify the da.cli.main() function. """ def it_is_callable(self): """ The main() function is callable. """ import da.cli assert callable(da.cli.main) # ============================================================================= class SpecifyBuild: """ Specify the da.cli.build() function. """ def it_is_callable(self): """ The build() function is callable. """ import da.cli assert callable(da.cli.build) # ============================================================================= class SpecifyRun: """ Specify the da.cli.run() function. """ def it_is_callable(self): """ The run() function is callable. """ import da.cli assert callable(da.cli.run) # ============================================================================= class SpecifyRepl: """ Specify the da.cli.repl() function. """ def it_is_callable(self): """ The repl() function is callable. """ import da.cli assert callable(da.cli.repl) # ============================================================================= class SpecifySim: """ Specify the da.cli.sim() function. """ def it_is_callable(self): """ The sim() function is callable. 
""" import da.cli assert callable(da.cli.sim) # ============================================================================= class SpecifyVtx: """ Specify the da.cli.vtx() function. """ def it_is_callable(self): """ The vtx() function is callable. """ import da.cli assert callable(da.cli.vtx) # ============================================================================= class Specify_GenPluginSubgroups: """ Specify the da.cli._gen_plugin_subgroups() function. """ def it_is_callable(self): """ The _gen_plugin_subgroups() function is callable. """ import da.cli assert callable(da.cli._gen_plugin_subgroups) # ============================================================================= class Specify_LoadCliPluginGroup: """ Specify the da.cli._load_cli_plugin_group() function. """ def it_is_callable(self): """ The _load_cli_plugin_group() function is callable. """ import da.cli assert callable(da.cli._load_cli_plugin_group)
{ "content_hash": "0238f2c6cb62334d14948bd43b1ef631", "timestamp": "", "source": "github", "line_count": 252, "max_line_length": 79, "avg_line_length": 22.78174603174603, "alnum_prop": 0.47709458282529177, "repo_name": "wtpayne/hiai", "id": "74d415356671793d8b0771fb9156fd5ebe35c3ae", "size": "5765", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "a3_src/h70_internal/da/spec/spec_cli.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "255" }, { "name": "Python", "bytes": "894704" }, { "name": "Shell", "bytes": "18289" } ], "symlink_target": "" }
#! /bin/sh

# Generate a BWA (pandanew fork) index and map for every .fna genome under
# the panda_trials/protnew tree.  Each genome is expected to have a matching
# .gff annotation alongside it.
for fullFile in /thomas1/mcbs913/anthony/panda_trials/protnew/*/*/*.fna
do
    # BUG FIX: "${fullFile%%.*}" stripped everything after the FIRST dot in
    # the path, which breaks genomes with dotted names (e.g. genome.v1.fna).
    # Strip only the trailing .fna extension instead.
    fullFileBase="${fullFile%.fna}"

    /thomas1/mcbs913/anthony/mcbs913/bwa-fork/pandanew index \
        "$fullFileBase.fna" "$fullFileBase.gff"
done
{ "content_hash": "ca8b93b003ad54ce8e5e37d64b1635f5", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 95, "avg_line_length": 28.4, "alnum_prop": 0.7288732394366197, "repo_name": "macmanes-lab/MCBS913", "id": "b4296b4f56eba8e6d12e382e840bbe318dfdbfaf", "size": "284", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "code/anthonyw/panda_trials/makeIndexProtNew.sh", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "548191" }, { "name": "C++", "bytes": "18737" }, { "name": "CSS", "bytes": "17714" }, { "name": "Groff", "bytes": "26156" }, { "name": "HTML", "bytes": "293511" }, { "name": "JavaScript", "bytes": "214635" }, { "name": "Makefile", "bytes": "3162" }, { "name": "Perl", "bytes": "12751" }, { "name": "Python", "bytes": "159654" }, { "name": "Shell", "bytes": "10964" }, { "name": "TeX", "bytes": "49312" } ], "symlink_target": "" }
"use strict"; var express = require('express'); var bodyParser = require('body-parser'); var morgan = require('morgan'); var fs = require('fs'); var options = { extensions: ['htm', 'html'], maxAge: '1d', setHeaders: function (res, path, stat) { res.set('x-timestamp', Date.now()) } }; exports.initApp = function () { var app = express(); //app.use(morgan('combined')); var fileLog = fs.createWriteStream(__dirname + '/tienda.log', { flags: 'a' }) app.use(morgan('combined', { stream: fileLog })); app.use(express.static(__dirname + '/static', options)); app.set('views', __dirname + '/views'); app.set('view engine', 'jade'); app.use(bodyParser.urlencoded({ extended: true })); app.use(bodyParser.json()); app.use(function (peticion, respuesta, siguiente) { if (peticion.body) { console.log("body: " + JSON.stringify(peticion.body)); } siguiente(); }); app.use(function (error, peticion, respuesta, siguiente) { console.error(error.stack); respuesta.status(500).send('Jiuston, tenemos un povlema!'); }); return app; } exports.initRouter = function (app) { var router = express.Router(); app.use(router); return router; }
{ "content_hash": "b6d12bd3feed9897c94377534c4b5f5c", "timestamp": "", "source": "github", "line_count": 57, "max_line_length": 64, "avg_line_length": 20.649122807017545, "alnum_prop": 0.6474086661002549, "repo_name": "AcademiaBinaria/NodeJS", "id": "98d4da5073c2e92173dddb5221235ab8caec999a", "size": "1177", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "10 streams/practica/res/config.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "89" }, { "name": "HTML", "bytes": "1492338" }, { "name": "JavaScript", "bytes": "188325" } ], "symlink_target": "" }
<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <artifactId>cloud-framework-ipc</artifactId> <name>Apache CloudStack Framework - IPC</name> <parent> <groupId>org.apache.cloudstack</groupId> <artifactId>cloudstack-framework</artifactId> <version>4.10.0.0-SNAPSHOT</version> <relativePath>../pom.xml</relativePath> </parent> <dependencies> <dependency> <groupId>cglib</groupId> <artifactId>cglib-nodep</artifactId> </dependency> <dependency> <groupId>com.google.code.gson</groupId> <artifactId>gson</artifactId> </dependency> <dependency> <groupId>org.apache.cloudstack</groupId> <artifactId>cloud-utils</artifactId> <version>${project.version}</version> </dependency> <dependency> <groupId>org.apache.cloudstack</groupId> <artifactId>cloud-api</artifactId> <version>${project.version}</version> </dependency> </dependencies> <build> <plugins> <plugin> <artifactId>maven-surefire-plugin</artifactId> <configuration> <skipTests>true</skipTests> </configuration> <executions> <execution> <phase>integration-test</phase> <goals> <goal>test</goal> </goals> 
</execution> </executions> </plugin> </plugins> </build> </project>
{ "content_hash": "1aeb9f9b6f0f65fda9221c0812bbf4fc", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 104, "avg_line_length": 39.46666666666667, "alnum_prop": 0.676097972972973, "repo_name": "resmo/cloudstack", "id": "b917a13277e70deda5c880b6e2b6d9de834ea669", "size": "2368", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "framework/ipc/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "1451" }, { "name": "Batchfile", "bytes": "11926" }, { "name": "C#", "bytes": "2356211" }, { "name": "CSS", "bytes": "335738" }, { "name": "FreeMarker", "bytes": "4917" }, { "name": "Groovy", "bytes": "153137" }, { "name": "HTML", "bytes": "151164" }, { "name": "Java", "bytes": "33712712" }, { "name": "JavaScript", "bytes": "7719277" }, { "name": "Python", "bytes": "11019815" }, { "name": "Ruby", "bytes": "896" }, { "name": "Shell", "bytes": "770039" } ], "symlink_target": "" }
<!doctype html> <html> <head> <title>Polar Area Chart Generator</title> <!-- JQuery --> <script src='lib/js/jquery-1.11.0.js'></script> <!-- Bootstrap library --> <link href='lib/css/bootstrap.css' rel='stylesheet'> <script src='lib/js/bootstrap.js'></script> <!-- Color picker --> <link href='lib/css/farbtastic.css' rel='stylesheet'> <script src='lib/js/farbtastic.js'></script> <!-- Custom polar chart library--> <script src="js/polar.js"></script> <!-- App JS & CSS --> <link href='css/polar-iaste.css' rel='stylesheet'> <script src="js/polar-iaste.js"></script> <meta name = "viewport" content = "initial-scale = 1, user-scalable = no"> <!-- Analytics --> <script> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-49577453-1', 'gsprenger.github.io'); ga('send', 'pageview'); </script> </head> <body> <h3>Polar Area Chart Generator</h3> <div class='main-container'> <div class='wrapper'> <div class='canvas-container'> <div class='canvasfix'> <div class='btn-container'> <button class='btn btn-success' id='savetoimg'>Save chart to PNG</button> </div> <canvas id="canvas" height="600" width="600"></canvas> </div> </div> <div class='form-container'> <ul class="nav nav-tabs"> <li class="active"><a href="#custom" data-toggle="tab">Custom</a></li> <li><a href="#4x2" data-toggle="tab">4x2</a></li> <li><a href="#4x3" data-toggle="tab">4x3</a></li> <li><a href="#5x2" data-toggle="tab">5x2</a></li> <li><a href="#2x" data-toggle="tab">2x*</a></li> <li><a href="#text-zone" data-toggle="tab">Text</a></li> </ul> <div class="tab-content"> <div class="tab-pane fade in active" id="custom"> <!-- CUSTOM CHART --> <div class='btn-group' id='custom-btn-group'> <a href='#' class='btn btn-default' id='less'>&#8211;</a> <a 
href='#' class='btn btn-default' id='more'>+</a> </div> <div class='btn-container' id='custom-btn-container'> <button class='btn btn-default' id='random'>Random</button> <button class='btn btn-primary' id='generate-custom'>Generate</button> </div> </div> <div class="tab-pane fade" id="4x2"> <!-- 4x2 CHART --> <div class='btn-container'> <button class='btn btn-primary' id='generate-4x2'>Generate</button> </div> </div> <div class="tab-pane fade" id="4x3"> <!-- 4x3 CHART --> <div class='btn-container'> <button class='btn btn-primary' id='generate-4x3'>Generate</button> </div> </div> <div class="tab-pane fade" id="5x2"> <!-- 5x2 CHART --> <div class='btn-container'> <button class='btn btn-primary' id='generate-5x2'>Generate</button> </div> </div> <div class="tab-pane fade" id="2x"> <!-- 2x* CHART --> <div class='2x-section' id='2x-section-north'> <div class='btn-group'> <a href='#' class='btn btn-default' id='2x-n-less'>&#8211;</a> <a href='#' class='btn btn-default' id='2x-n-more'>+</a> </div> North section: <input type='text' id='2x-section-n' class='section'> </div> <hr> <div class='2x-section' id='2x-section-south'> <div class='btn-group'> <a href='#' class='btn btn-default' id='2x-s-less'>&#8211;</a> <a href='#' class='btn btn-default' id='2x-s-more'>+</a> </div> South section: <input type='text' id='2x-section-s' class='section'> </div> <div class='btn-container'> <button class='btn btn-primary' id='generate-2x'>Generate</button> </div> </div> <div class="tab-pane fade" id="text-zone"> <!-- TEXT CHART --> <select id='text-select' class="form-control"> <option>Custom</option> <option>4x2</option> <option>4x3</option> <option>5x2</option> <option selected>2x*</option> </select> <textarea id='text-area'></textarea> <div class='btn-container'> <button class='btn btn-primary' id='generate-text'>Generate</button> </div> </div> </div> <hr> <div> <a href='https://github.com/gsprenger/polar-iaste/archive/master.zip'><span class='label label-success'>Download 
library</span></a><br> Polar Area Chart Generator developed for the <a href='http://iaste-researchgroup.org/'>IASTE research group</a>.<br> This library is a modified version of the <a href='http://www.chartjs.org'>Chart.js</a> library and was created by <a href='http://gabrielsprenger.com'>Gabriel Sprenger</a>.<br> This library is <a href='https://github.com/gsprenger/polar-iaste'>open source</a> and released under the <a href='LICENSE'>MIT license</a>. </div> </div> </div> <img id='dlimg' src=''> </body> </html>
{ "content_hash": "6a43ad3b647e87f50a9d7e2d78022b4c", "timestamp": "", "source": "github", "line_count": 129, "max_line_length": 183, "avg_line_length": 40.82170542635659, "alnum_prop": 0.5704519559437904, "repo_name": "gsprenger/polar-iaste", "id": "7ab34c233de90aae6dda4244db9b90bc2e30c02a", "size": "5266", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "index.html", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "2528" }, { "name": "JavaScript", "bytes": "52384" } ], "symlink_target": "" }
<!DOCTYPE HTML> <html> <head> <title>Easier web scraping in R</title> <meta charset="utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1" /> <meta name="generator" content="Hugo 0.16" /> <meta name="author" content="Robert McDonnell"> <meta name="description" content="Robert Myles McDonnell&#39;s website"> <meta name="twitter:card" content="summary"/> <meta name="twitter:title" content="Easier web scraping in R"/> <meta name="twitter:description" content="In an earlier post, I described some ways in which you can interact with a web browser using R and RSelenium. This is ideal when you need to access data through drop-down menus and search bars. However, working with RSelenium can be tricky. There are, of course, easier ways to get information from the internet using R. Perhaps the most straightforward way is to use rvest, in tandem with other packages of the Hadleyverse1, such as dplyr and tidyr for data preparation and cleaning after the webscrape."/> <meta name="twitter:site" content="@RobertMylesMc"/> <meta property="og:title" content="Easier web scraping in R" /> <meta property="og:description" content="In an earlier post, I described some ways in which you can interact with a web browser using R and RSelenium. This is ideal when you need to access data through drop-down menus and search bars. However, working with RSelenium can be tricky. There are, of course, easier ways to get information from the internet using R. Perhaps the most straightforward way is to use rvest, in tandem with other packages of the Hadleyverse1, such as dplyr and tidyr for data preparation and cleaning after the webscrape." 
/> <meta property="og:type" content="article" /> <meta property="og:url" content="http://robertmyles.github.io/2016/08/05/easier-web-scraping-in-r/" /> <meta property="og:updated_time" content="2016-08-05T00:00:00&#43;00:00"/> <meta property="fb:admins" content="1213956375281226" /> <meta itemprop="name" content="Easier web scraping in R"> <meta itemprop="description" content="In an earlier post, I described some ways in which you can interact with a web browser using R and RSelenium. This is ideal when you need to access data through drop-down menus and search bars. However, working with RSelenium can be tricky. There are, of course, easier ways to get information from the internet using R. Perhaps the most straightforward way is to use rvest, in tandem with other packages of the Hadleyverse1, such as dplyr and tidyr for data preparation and cleaning after the webscrape."> <meta itemprop="dateModified" content="2016-08-05T00:00:00&#43;00:00" /> <meta itemprop="wordCount" content="798"> <meta itemprop="keywords" content="bayesian-statistics,brazil,carnaval,causality,communism,elections,ggplot2,github,healthcare,hugo,human-capital-index,ideal-points,irt,jags,maps,meta-data,osx,political-economy,political-science,r,r-markdown,r-packages,selenium,sf,stan,tidyrss,uk,us,webscraping,eu,geo-reference,ggplot2,github,r,rotten-tomatoes,rstan,the-economist," /> <link rel="stylesheet" href="../../../../css/google-font.css" /> <link rel="stylesheet" href="../../../../css/font-awesome.min.css" /> <link rel="stylesheet" href="../../../../css/main.css" /> <link rel="stylesheet" href="../../../../css/add-on.css" /> <link rel="stylesheet" href="../../../../css/monokai-sublime.css"> <script> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) 
})(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', '76482767', 'auto'); ga('send', 'pageview'); </script> </head> <body> <div id="wrapper"> <header id="header"> <h2><a href="../../../../">Home</i></a></h2> <nav class="links"> <ul> <li> <a href="../../../../research/"> <i class="fa fa-bar-chart">&nbsp;</i>Research </a> </li> <li> <a href="https://github.com/RobertMyles"> <i class="fa fa-code">&nbsp;</i>GitHub </a> </li> <li> <a href="../../../../cv/"> <i class="fa fa-file-o">&nbsp;</i>CV </a> </li> <li> <a href="../../../../post/"> <i class="fa fa-newspaper-o">&nbsp;</i>Blog </a> </li> <li> <a href="../../../../about/"> About </a> </li> <li> <a href="../../../../categories/"> Categories </a> </li> <li> <a href="../../../../blogroll/"> <i class="fa fa-bookmark-o">&nbsp;</i>Blogroll </a> </li> <li> <a href="../../../../other/"> <i class="fa fa-music">&nbsp;</i>Other </a> </li> </ul> </nav> <nav class="main"> <ul> <li id="share-nav" class="share-menu" style="display:none;"> <a class="fa-share-alt" href="#share-menu">Share</a> </li> <li class="search"> <a class="fa-search" href="#search">Search</a> <form id="search" method="get" action="//google.com/search"> <input type="text" name="q" placeholder="Search" /> <input type="hidden" name="q" value="site:http://robertmyles.github.io/"> </form> </li> <li class="menu"> <a class="fa-bars" href="#menu">Menu</a> </li> </ul> </nav> </header> <section id="menu"> <section> <form class="search" method="get" action="//google.com/search"> <input type="text" name="q" placeholder="Search" /> <input type="hidden" name="q" value="site:http://robertmyles.github.io/"> </form> </section> <section> <ul class="links"> <li> <a href="../../../../research/"> <h3> <i class="fa fa-bar-chart">&nbsp;</i> Research </h3> </a> </li> <li> <a href="https://github.com/RobertMyles"> <h3> <i class="fa fa-code">&nbsp;</i> GitHub </h3> </a> </li> <li> <a href="../../../../cv/"> <h3> <i class="fa fa-file-o">&nbsp;</i> CV 
</h3> </a> </li> <li> <a href="../../../../post/"> <h3> <i class="fa fa-newspaper-o">&nbsp;</i> Blog </h3> </a> </li> <li> <a href="../../../../about/"> <h3> About </h3> </a> </li> <li> <a href="../../../../categories/"> <h3> Categories </h3> </a> </li> <li> <a href="../../../../blogroll/"> <h3> <i class="fa fa-bookmark-o">&nbsp;</i> Blogroll </h3> </a> </li> <li> <a href="../../../../other/"> <h3> <i class="fa fa-music">&nbsp;</i> Other </h3> </a> </li> </ul> </section> <section> <ul class="links"> <header> <h3>Recent Posts</h3> </header> <li> <a href="http://robertmyles.github.io/2017/06/27/rating-r-packages/"><p>Rating R Packages</p></a> </li> <li> <a href="http://robertmyles.github.io/2017/06/26/inhaling-earth/"><p>Inhaling Earth</p></a> </li> <li> <a href="http://robertmyles.github.io/2017/06/11/uk-elections-2017/"><p>UK Elections 2017</p></a> </li> <li> <a href="http://robertmyles.github.io/2017/06/03/human-capital-index-maps/"><p>Human Capital Index Maps</p></a> </li> <li> <a href="http://robertmyles.github.io/2017/06/02/causalimpact-healthcare-costs-in-the-us/"><p>CausalImpact &amp; Healthcare Costs in the US</p></a> </li> <li> <a href="http://robertmyles.github.io/2017/04/22/tfw-you-have-to-copy-and-paste-something-into-r.../"><p>TFW you have to copy and paste something into R...</p></a> </li> <li> <a href="http://robertmyles.github.io/2017/03/16/update-r-from-inside-r/"><p>Update R from inside R</p></a> </li> <li> <a href="http://robertmyles.github.io/2017/02/25/reflections-on-making-my-first-r-package/"><p>Reflections on making my first R package</p></a> </li> <li> <a href="http://robertmyles.github.io/2017/02/24/brazils-murder-rate/"><p>Brazil&#39;s Murder Rate</p></a> </li> <li> <a href="http://robertmyles.github.io/2017/02/19/peace-bread-and-data/"><p>Peace, Bread and Data!</p></a> </li> </ul> </section> </section> <section id="share-menu"> <section id="social-share-nav"> <ul class="links"> <header> <h3>Share this post <i class="fa 
fa-smile-o"></i></h3> </header> <li><a href="//twitter.com/share?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f&text=Easier%20web%20scraping%20in%20R&via=RobertMylesMc" target="_blank" class="share-btn twitter"> <i class="fa fa-twitter"></i> <p>Twitter</p> </a></li> <li><a href="//plus.google.com/share?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f" target="_blank" class="share-btn google-plus"> <i class="fa fa-google-plus"></i> <p>Google+</p> </a></li> <li><a href="//www.facebook.com/sharer/sharer.php?u=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f" target="_blank" class="share-btn facebook"> <i class="fa fa-facebook"></i> <p>Facebook</p> </a></li> <li><a href="//reddit.com/submit?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f&title=Easier%20web%20scraping%20in%20R" target="_blank" class="share-btn reddit"> <i class="fa fa-reddit-alien"></i> <p>Reddit</p> </a></li> <li><a href="//www.linkedin.com/shareArticle?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f&title=Easier%20web%20scraping%20in%20R" target="_blank" class="share-btn linkedin"> <i class="fa fa-linkedin"></i> <p>LinkedIn</p> </a></li> <li><a href="//www.stumbleupon.com/submit?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f&title=Easier%20web%20scraping%20in%20R" target="_blank" class="share-btn stumbleupon"> <i class="fa fa-stumbleupon"></i> <p>StumbleUpon</p> </a></li> <li><a href="mailto:?subject=Check out this post by Robert%20McDonnell&body=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f" target="_blank" class="share-btn email"> <i class="fa fa-envelope"></i> <p>Email</p> </a></li> </ul> </section> </section> <div id="main"> <article class="post"> <header> <div class="title"> <h1><a 
href="http://robertmyles.github.io/2016/08/05/easier-web-scraping-in-r/">Easier web scraping in R</a></h1> </div> <div class="meta"> <time class="published" datetime='2016-08-05'> August 5, 2016</time> <span class="author">Robert McDonnell</span> <p>4 minute read</p> </div> </header> <section id="social-share"> <ul class="icons"> <li><a href="//twitter.com/share?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f&text=Easier%20web%20scraping%20in%20R&via=RobertMylesMc" target="_blank" class="share-btn twitter"> <i class="fa fa-twitter"></i> <p>Twitter</p> </a></li> <li><a href="//plus.google.com/share?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f" target="_blank" class="share-btn google-plus"> <i class="fa fa-google-plus"></i> <p>Google+</p> </a></li> <li><a href="//www.facebook.com/sharer/sharer.php?u=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f" target="_blank" class="share-btn facebook"> <i class="fa fa-facebook"></i> <p>Facebook</p> </a></li> <li><a href="//reddit.com/submit?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f&title=Easier%20web%20scraping%20in%20R" target="_blank" class="share-btn reddit"> <i class="fa fa-reddit-alien"></i> <p>Reddit</p> </a></li> <li><a href="//www.linkedin.com/shareArticle?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f&title=Easier%20web%20scraping%20in%20R" target="_blank" class="share-btn linkedin"> <i class="fa fa-linkedin"></i> <p>LinkedIn</p> </a></li> <li><a href="//www.stumbleupon.com/submit?url=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f&title=Easier%20web%20scraping%20in%20R" target="_blank" class="share-btn stumbleupon"> <i class="fa fa-stumbleupon"></i> <p>StumbleUpon</p> </a></li> <li><a href="mailto:?subject=Check out this post by 
Robert%20McDonnell&body=http%3a%2f%2frobertmyles.github.io%2f2016%2f08%2f05%2feasier-web-scraping-in-r%2f" target="_blank" class="share-btn email"> <i class="fa fa-envelope"></i> <p>Email</p> </a></li> </ul> </section> <div id="content"> <p>In an earlier <a href="http://robertmyles.github.io//web-navigation-and-scraping-with-r.html">post</a>, I described some ways in which you can interact with a web browser using R and <code>RSelenium</code>. This is ideal when you need to access data through drop-down menus and search bars. However, working with <code>RSelenium</code> can be tricky. There are, of course, easier ways to get information from the internet using R.</p> <p>Perhaps the most straightforward way is to use <code>rvest</code>, in tandem with other packages of the <a href="https://barryrowlingson.github.io/hadleyverse/#1">Hadleyverse</a><sup id="a1"><a href="#fn1">1</a></sup>, such as <code>dplyr</code> and <code>tidyr</code> for data preparation and cleaning after the webscrape. I’m going to use a simple example that I came across recently in my work, getting the name of each mayor in Brazil.</p> <p>Finding out who was <em>elected</em> to the mayor’s office in each municipality in Brazil is easy: that data exists and is available on the <a href="http://www.tse.jus.br/">website</a> of the <em>Tribunal Superior Eleitoral</em>. However, just because someone was elected to office (in this case in 2014) does not mean that they are still in office now, two years later. After searching around the web for a bit, I realised that this data is not available as a dataset.</p> <p>After wandering to the website of the <a href="http://www.ibge.gov.br/home/">IBGE</a>, a Brazilian statistics agency, I found a way to get the name of the mayor currently in charge of each municipality. 
Each municipality has its own webpage on the IBGE’s dedicated <a href="http://www.cidades.ibge.gov.br/xtras/home.php">Cidades@</a> site.</p> <p>For example, you will see the webpage for the municipality of Acrelândia, shown in the image below. As you can see, the name of the mayor (“Prefeito”) is on the right-hand side of the page. Since we now know we can get this for each municipality, we have three tasks to do in order to get this info into R:</p> <ul> <li>find out what part of the url changes as we move from city to city on the website;</li> <li>send the corresponding information to the server using R;</li> <li>scrape the page and tidy up the resulting data in R.</li> </ul> <p> </p> <p><img src="http://i.imgur.com/MGqKffr.png" style="width:750px;height:500px;"></p> <p> </p> <p>The url for <a href="http://www.cidades.ibge.gov.br/xtras/perfil.php?lang=&amp;codmun=120001&amp;search=acre%7Cacrelandia">Acrelândia</a> is unique at: “codmun=120001” and “search=acre|acrelandia”.<br /> The number in “codmun” is available as the IBGE municipal code (although missing the final digit, strangely…but that’s not a problem, we just take it off the end for each one) and the rest is just the state and the municipality, all information that is easy to get from various sources. 
For this example, I’ve uploaded this basic dataset to Github so we can use it here.</p> <pre class="r"><code>library(dplyr) library(tidyr) library(readr) library(stringr) library(stringi) library(rvest) ## read in data and create variables for webscraping: Mayors &lt;- read_csv(&quot;https://raw.githubusercontent.com/RobertMyles/RobertMyles.github.io/master/_data/IBGE_codes.csv&quot;) %&gt;% select(-c(UF, Cod.Mun)) %&gt;% dplyr::rename(Code_IBGE = Cod.IBGE) %&gt;% mutate(MUNIC2 = tolower(.$MUNIC)) %&gt;% mutate(MUNIC2 = gsub(&quot; &quot;, &quot;-&quot;, .$MUNIC2)) %&gt;% mutate(Name_UF2 = tolower(.$Name_UF)) %&gt;% mutate(Code2 = unlist(str_extract_all(.$Code_IBGE, &quot;[0-9]{6}&quot;))) %&gt;% unite(col = Link, Name_UF2, MUNIC2, sep = &quot;|&quot;, remove = F) %&gt;% arrange(ACR_UF) </code></pre> <p>  In the code snippet above, we’ve taken out unnecessary columns, renamed one, changed the names of the municipalities to lower case (for the url), taken six numbers of the IBGE code for use in the webscrape and joined the state and municipality names together, with <code>|</code> separating them, as in the url for each municipality webpage. We also need to create some empty data frames to fill, and remove the municipality of Brasília, which does not have a <em>Prefeito</em>, just a <a href="http://www.cidades.ibge.gov.br/xtras/perfil.php?lang=&amp;codmun=530010&amp;search=distrito-federal%7Cbrasilia">governor</a>, which is all done below:</p> <pre class="r"><code>url &lt;- &quot;http://www.cidades.ibge.gov.br/xtras/perfil.php?lang=&amp;codmun=&quot; link &lt;- Mayors$Link grep(&quot;distrito federal|brasilia&quot;, link) link &lt;- link[-804] link2 &lt;- Mayors$Code2 link2 &lt;- link2[-804] Prefeitos &lt;- data.frame() Cidades &lt;- data.frame() Pref &lt;- data.frame()</code></pre>
The only hard part of this entire scrape is getting the words “Prefeito” along with the name of the mayor out of the document. This relies on regex, which can be tricky. But trial and error should lead you to the right answer for whatever you need. Or search <a href="http://www.rexegg.com/regex-quickstart.html">Google</a>, of course.</p> <pre class="r"><code>for(i in 1:length(link)){ URL &lt;- paste(url, link2[i], &quot;&amp;search=&quot;, link[i], sep = &quot;&quot;) pref &lt;- read_html(URL) pref1 &lt;- html_nodes(pref, xpath = &#39;//*[@id=&quot;mod_perfil_infosbasicas&quot;]&#39;) str &lt;- html_text(pref1) str1 &lt;- unlist(str_extract_all(str, &quot;Prefeito[\\w A-Z]*&quot;)) print(str1) Prefeitos &lt;- rbind(Prefeitos, str1, stringsAsFactors = F) City &lt;- link[i] Cidades &lt;- rbind(Cidades, City, stringsAsFactors = F) Pref &lt;- cbind(Prefeitos, Cidades) }</code></pre> <p>With a little tidying, we have a nice little dataset of each current mayors for each municipality in Brazil.</p> <pre class="r"><code>colnames(Pref) &lt;- c(&quot;Prefeito&quot;, &quot;Municipio&quot;) Pref$Prefeito &lt;- gsub(&quot;Prefeito&quot;, &quot;&quot;, Pref$Prefeito) Pref$Prefeito &lt;- stri_trans_general(Pref$Prefeito, &quot;Latin-ASCII&quot;) Pref1 &lt;- Pref Pref1$Municipio &lt;- Pref1$Municipio %&gt;% str_split_fixed(&quot;\\|&quot;, n = 2) %&gt;% toupper() Pref$Name_UF &lt;- Pref1$Municipio[,1] Pref$MUNIC &lt;- Pref1$Municipio[,2] Pref &lt;- select(Pref, -Municipio) Mayors$MUNIC &lt;- gsub(&quot;[-]&quot;, &quot; &quot;, Mayors$MUNIC) Pref$MUNIC &lt;- gsub(&quot;[-]&quot;, &quot; &quot;, Pref$MUNIC) rm(Pref1) Prefeitos &lt;- full_join(Mayors, Pref) Prefeitos &lt;- select(Prefeitos, -c(Link, MUNIC2, Name_UF2, Code2)) Prefeitos &lt;- Prefeitos[,c(1:5, 7, 6)]</code></pre> <p><img src="http://i.imgur.com/TRMOSkV.png" style="width:650px;height:400px;"></p> <p> </p> <p><b id="fn1">1</b> Supposedly, Hadley Wickham doesn’t actually like this term, but I’ll use it anyway, I’m sure 
he wouldn’t mind :smiley:. <a href="#a1">↩</a></p> </div> <footer> <ul class="stats"> <li> Categories </li> <li><a href='../../../../categories/r'>R</a></li> <li><a href='../../../../categories/webscraping'>webscraping</a></li> </ul> </footer> </article> <ul class="actions pagination"> <li><a href="http://robertmyles.github.io/2016/05/21/bayesian-irt-in-r-and-stan/" class="button big previous">Bayesian IRT in R and Stan</a></li> <li><a href="http://robertmyles.github.io/2016/08/13/geo-reference-an-image-in-r/" class="button big next">Geo-reference an image in R</a></li> </ul> <article class="post"> <div id="disqus_thread"></div> <script type="text/javascript"> var disqus_shortname = 'rob-gitblog'; var disqus_identifier = 'http:\/\/robertmyles.github.io\/2016\/08\/05\/easier-web-scraping-in-r\/'; var disqus_title = 'Easier web scraping in R'; var disqus_url = 'http:\/\/robertmyles.github.io\/2016\/08\/05\/easier-web-scraping-in-r\/'; (function() { var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true; dsq.src = '//' + disqus_shortname + '.disqus.com/embed.js'; (document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq); })(); </script> <noscript>Please enable JavaScript to view the <a href="http://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript> <a href="http://disqus.com" class="dsq-brlink">comments powered by <span class="logo-disqus">Disqus</span></a> </article> </div> <section id="sidebar"> <section id="intro"> <a href="../../../../" class="logo"><img src="../../../../images/pic1.jpg" alt="" /></a> <header> <h2>if(topic != junk) {write} else { wine }</h2> <p>by Robert Myles McDonnell</p> </header> <ul class="icons"> <li><a href="//github.com/RobertMyles" target="_blank" title="GitHub" class="fa fa-github"></a></li> <li><a href="//linkedin.com/in/robert-mcdonnell-7475b320" target="_blank" title="LinkedIn" class="fa fa-linkedin"></a></li> <li><a 
href="//stackoverflow.com/users/4296028/robertmc" target="_blank" title="Stack Overflow" class="fa fa-stack-overflow"></a></li> <li><a href="//twitter.com/RobertMylesMc" target="_blank" title="Twitter" class="fa fa-twitter"></a></li> <li><a href="mailto:robertmylesmcdonnell@gmail.com" title="Email" class="fa fa-envelope"></a></li> </ul> </section> <section id="recent-posts"> <ul class="posts"> <header> <h3>Recent Posts</h3> </header> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/06/27/rating-r-packages/">Rating R Packages</a></h3> <time class="published" datetime= '2017-06-27'> June 27, 2017</time> </header> </article> </li> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/06/26/inhaling-earth/">Inhaling Earth</a></h3> <time class="published" datetime= '2017-06-26'> June 26, 2017</time> </header> </article> </li> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/06/11/uk-elections-2017/">UK Elections 2017</a></h3> <time class="published" datetime= '2017-06-11'> June 11, 2017</time> </header> </article> </li> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/06/03/human-capital-index-maps/">Human Capital Index Maps</a></h3> <time class="published" datetime= '2017-06-03'> June 3, 2017</time> </header> </article> </li> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/06/02/causalimpact-healthcare-costs-in-the-us/">CausalImpact &amp; Healthcare Costs in the US</a></h3> <time class="published" datetime= '2017-06-02'> June 2, 2017</time> </header> </article> </li> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/04/22/tfw-you-have-to-copy-and-paste-something-into-r.../">TFW you have to copy and paste something into R...</a></h3> <time class="published" datetime= '2017-04-22'> April 22, 2017</time> </header> </article> </li> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/03/16/update-r-from-inside-r/">Update R from 
inside R</a></h3> <time class="published" datetime= '2017-03-16'> March 16, 2017</time> </header> </article> </li> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/02/25/reflections-on-making-my-first-r-package/">Reflections on making my first R package</a></h3> <time class="published" datetime= '2017-02-25'> February 25, 2017</time> </header> </article> </li> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/02/24/brazils-murder-rate/">Brazil&#39;s Murder Rate</a></h3> <time class="published" datetime= '2017-02-24'> February 24, 2017</time> </header> </article> </li> <li> <article> <header> <h3><a href="http://robertmyles.github.io/2017/02/19/peace-bread-and-data/">Peace, Bread and Data!</a></h3> <time class="published" datetime= '2017-02-19'> February 19, 2017</time> </header> </article> </li> <li> <ul class="actions"> <li><a href= /post/ class="button">View more posts</a></li> </ul> </li> </ul> </section> <section id="categories"> <ul class="posts"> <header> <h3><a href="../../../../categories/">Categories</a></h3> </header> <li> <article> <header> <a href="../../../../categories/r/">r</a> <span style="float:right;">24</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/brazil/">brazil</a> <span style="float:right;">3</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/political-science/">political-science</a> <span style="float:right;">3</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/bayesian-statistics/">bayesian-statistics</a> <span style="float:right;">2</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/r-markdown/">r-markdown</a> <span style="float:right;">2</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/webscraping/">webscraping</a> <span style="float:right;">2</span> </header> </article> </li> <li> <article> 
<header> <a href="../../../../categories/carnaval/">carnaval</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/causality/">causality</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/communism/">communism</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/elections/">elections</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/ggplot2/">ggplot2</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/github/">github</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/healthcare/">healthcare</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/hugo/">hugo</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/human-capital-index/">human-capital-index</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/ideal-points/">ideal-points</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/irt/">irt</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/jags/">jags</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/maps/">maps</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/meta-data/">meta-data</a> <span style="float:right;">1</span> </header> </article> </li> <li> 
<article> <header> <a href="../../../../categories/osx/">osx</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/political-economy/">political-economy</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/r-packages/">r-packages</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/selenium/">selenium</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/sf/">sf</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/stan/">stan</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/tidyrss/">tidyrss</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/uk/">uk</a> <span style="float:right;">1</span> </header> </article> </li> <li> <article> <header> <a href="../../../../categories/us/">us</a> <span style="float:right;">1</span> </header> </article> </li> </ul> </section> <section id="footer"> <ul class="icons"> <li><a href="//github.com/RobertMyles" target="_blank" title="GitHub" class="fa fa-github"></a></li> <li><a href="//linkedin.com/in/robert-mcdonnell-7475b320" target="_blank" title="LinkedIn" class="fa fa-linkedin"></a></li> <li><a href="//stackoverflow.com/users/4296028/robertmc" target="_blank" title="Stack Overflow" class="fa fa-stack-overflow"></a></li> <li><a href="//twitter.com/RobertMylesMc" target="_blank" title="Twitter" class="fa fa-twitter"></a></li> <li><a href="mailto:robertmylesmcdonnell@gmail.com" title="Email" class="fa fa-envelope"></a></li> </ul> <p class="copyright">&copy; Robert Myles McDonnell. Design: <a href="http://html5up.net" target="_blank">HTML5 UP</a>. 
Ported by <a href="//github.com/jpescador" target="_blank">Julio Pescador</a>. Powered by <a href="//gohugo.io" target="_blank">Hugo</a></p> </section> </section> </div> <a id="back-to-top" href="#" class="fa fa-arrow-up fa-border fa-2x"></a> <script type="text/x-mathjax-config"> MathJax.Hub.Config({ tex2jax: { skipTags: ['script', 'noscript', 'style', 'textarea', 'pre'] } }); </script> <script type="text/javascript" src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"> </script> <script src="../../../../js/jquery.min.js"></script> <script src="../../../../js/skel.min.js"></script> <script src="../../../../js/util.js"></script> <script src="../../../../js/main.js"></script> <script src="../../../../js/backToTop.js"></script> <script src="../../../../js/highlight.pack.js"></script> <script>hljs.initHighlightingOnLoad();</script> </body> </html>
{ "content_hash": "0f751b427aca6d8cab4480507c7657c9", "timestamp": "", "source": "github", "line_count": 1291, "max_line_length": 654, "avg_line_length": 36.90549961270333, "alnum_prop": 0.4175464371917305, "repo_name": "RobertMyles/RobertMyles.github.io", "id": "4da94ef3f54d870f7cebf4cad537e821f1225e01", "size": "47696", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "2016/08/05/easier-web-scraping-in-r/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "77106" }, { "name": "HTML", "bytes": "8494635" }, { "name": "JavaScript", "bytes": "16529" } ], "symlink_target": "" }
using namespace vespalib; class Test : public TestApp { public: void testGrowing(); int Main() override; }; void Test::testGrowing() { GrowableByteBuffer buf(10); buf.putInt(3); buf.putInt(7); buf.putLong(1234); buf.putDouble(1234); buf.putString("hei der"); EXPECT_EQUAL(35u, buf.position()); } int Test::Main() { TEST_INIT("guard_test"); testGrowing(); TEST_DONE(); } TEST_APPHOOK(Test)
{ "content_hash": "bc32623f82a84d595e1c67bbe7857200", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 38, "avg_line_length": 13.84375, "alnum_prop": 0.6252821670428894, "repo_name": "vespa-engine/vespa", "id": "0a616745023d4019c9bd4406ac731fbf0a3b1c13", "size": "646", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vespalib/src/tests/growablebytebuffer/growablebytebuffer_test.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "8130" }, { "name": "C", "bytes": "60315" }, { "name": "C++", "bytes": "29580035" }, { "name": "CMake", "bytes": "593981" }, { "name": "Emacs Lisp", "bytes": "91" }, { "name": "GAP", "bytes": "3312" }, { "name": "Go", "bytes": "560664" }, { "name": "HTML", "bytes": "54520" }, { "name": "Java", "bytes": "40814190" }, { "name": "JavaScript", "bytes": "73436" }, { "name": "LLVM", "bytes": "6152" }, { "name": "Lex", "bytes": "11499" }, { "name": "Makefile", "bytes": "5553" }, { "name": "Objective-C", "bytes": "12369" }, { "name": "Perl", "bytes": "23134" }, { "name": "Python", "bytes": "52392" }, { "name": "Roff", "bytes": "17506" }, { "name": "Ruby", "bytes": "10690" }, { "name": "Shell", "bytes": "268737" }, { "name": "Yacc", "bytes": "14735" } ], "symlink_target": "" }
using namespace std; enum AttributeName { ATTRNAME_Null, ATTRNAME_SynVal, ATTRNAME_InhVal, ATTRNAME_LexVal }; typedef int AttributeType; struct Attribute { AttributeName Name; AttributeType Value; bool Evaluated; Attribute(AttributeName p_name) : Name(p_name), Value(AttributeType()), Evaluated(false) {} }; struct AttributeSet { int ProductionRelativeIdx; Attribute SynVal; Attribute InhVal; Attribute LexVal; AttributeSet() : ProductionRelativeIdx(-1), SynVal(ATTRNAME_SynVal), InhVal(ATTRNAME_InhVal), LexVal(ATTRNAME_LexVal) {} Attribute& operator[](AttributeName p_attrName) { switch(p_attrName) { case ATTRNAME_InhVal: return InhVal; break; case ATTRNAME_SynVal: return SynVal; break; case ATTRNAME_LexVal: return LexVal; break; default: assert(false); return LexVal; } } }; #endif // ATTRIBUTE_H
{ "content_hash": "477aac7ae01faf4a3871cf848e9c3f28", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 124, "avg_line_length": 20.53846153846154, "alnum_prop": 0.5870786516853933, "repo_name": "ogail/thesis", "id": "600dc00d58502ab0f1c4320cff7d96c4517321fb", "size": "1161", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "archieve/old-istrategizer/Tools/Serialization/UnrealCompiler/Attribute.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "205164" }, { "name": "C++", "bytes": "768302" } ], "symlink_target": "" }
Genomics Life ------------- https://genomicslife.github.io Contributors: Nitesh Turaga Mike Berger
{ "content_hash": "f336ae4c7f0028ae41410e6d9acf77af", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 30, "avg_line_length": 10.3, "alnum_prop": 0.6893203883495146, "repo_name": "genomicslife/genomicslife.github.io", "id": "ce8a4f7dc6f6d542a5ef442ce2ff17e2c8c1bb8f", "size": "103", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "250377" }, { "name": "HTML", "bytes": "85040" }, { "name": "JavaScript", "bytes": "41925" }, { "name": "Ruby", "bytes": "25196" } ], "symlink_target": "" }
<header> <div class="header-content"> <div class="header-content-inner"> <h1>Great! Thanks for signing up to learn more!</h1> <h2>You will be redirected Home in {{timerValue}} seconds!</h2> <h3>Or you can just get there quicker by clicking <a href="#" ng-click="goHome()" style="color:white">here</a></h3> </div> </div> </header>
{ "content_hash": "f92c4ff453f33dea02544dc8ab1cc1a7", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 127, "avg_line_length": 35.63636363636363, "alnum_prop": 0.5816326530612245, "repo_name": "jonnynabors/CarePRN", "id": "3fe3712e64b4456c1e614837b4a859a08c8cee5e", "size": "392", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/signup/successfulSignUpTemplate.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "137482" }, { "name": "HTML", "bytes": "24927" }, { "name": "JavaScript", "bytes": "17629" } ], "symlink_target": "" }
""" Classes for making VMware VI SOAP calls. """ import httplib from oslo.config import cfg import suds from nova.openstack.common.gettextutils import _ from nova import utils from nova.virt.vmwareapi import error_util RESP_NOT_XML_ERROR = 'Response is "text/html", not "text/xml"' CONN_ABORT_ERROR = 'Software caused connection abort' ADDRESS_IN_USE_ERROR = 'Address already in use' vmwareapi_wsdl_loc_opt = cfg.StrOpt('wsdl_location', help='Optional VIM Service WSDL Location ' 'e.g http://<server>/vimService.wsdl. ' 'Optional over-ride to default location for bug work-arounds') CONF = cfg.CONF CONF.register_opt(vmwareapi_wsdl_loc_opt, 'vmware') def get_moref(value, type): """Get managed object reference.""" moref = suds.sudsobject.Property(value) moref._type = type return moref def object_to_dict(obj, list_depth=1): """Convert Suds object into serializable format. The calling function can limit the amount of list entries that are converted. """ d = {} for k, v in suds.sudsobject.asdict(obj).iteritems(): if hasattr(v, '__keylist__'): d[k] = object_to_dict(v, list_depth=list_depth) elif isinstance(v, list): d[k] = [] used = 0 for item in v: used = used + 1 if used > list_depth: break if hasattr(item, '__keylist__'): d[k].append(object_to_dict(item, list_depth=list_depth)) else: d[k].append(item) else: d[k] = v return d class VIMMessagePlugin(suds.plugin.MessagePlugin): def addAttributeForValue(self, node): # suds does not handle AnyType properly. # VI SDK requires type attribute to be set when AnyType is used if node.name == 'value': node.set('xsi:type', 'xsd:string') def marshalled(self, context): """suds will send the specified soap envelope. Provides the plugin with the opportunity to prune empty nodes and fixup nodes before sending it to the server. """ # suds builds the entire request object based on the wsdl schema. # VI SDK throws server errors if optional SOAP nodes are sent # without values, e.g. 
<test/> as opposed to <test>test</test> context.envelope.prune() context.envelope.walk(self.addAttributeForValue) class Vim: """The VIM Object.""" def __init__(self, protocol="https", host="localhost"): """ Creates the necessary Communication interfaces and gets the ServiceContent for initiating SOAP transactions. protocol: http or https host : ESX IPAddress[:port] or ESX Hostname[:port] """ if not suds: raise Exception(_("Unable to import suds.")) self._protocol = protocol self._host_name = host self.wsdl_url = Vim.get_wsdl_url(protocol, host) self.url = Vim.get_soap_url(protocol, host) self.client = suds.client.Client(self.wsdl_url, location=self.url, plugins=[VIMMessagePlugin()]) self._service_content = self.retrieve_service_content() def retrieve_service_content(self): return self.RetrieveServiceContent("ServiceInstance") @staticmethod def get_wsdl_url(protocol, host_name): """ allows override of the wsdl location, making this static means we can test the logic outside of the constructor without forcing the test environment to have multiple valid wsdl locations to test against. :param protocol: https or http :param host_name: localhost or other server name :return: string to WSDL location for vSphere WS Management API """ # optional WSDL location over-ride for work-arounds if CONF.vmware.wsdl_location: return CONF.vmware.wsdl_location # calculate default WSDL location if no override supplied return Vim.get_soap_url(protocol, host_name) + "/vimService.wsdl" @staticmethod def get_soap_url(protocol, host_name): """ Calculates the location of the SOAP services for a particular server. Created as a static method for testing. 
:param protocol: https or http :param host_name: localhost or other vSphere server name :return: the url to the active vSphere WS Management API """ if utils.is_valid_ipv6(host_name): return '%s://[%s]/sdk' % (protocol, host_name) return '%s://%s/sdk' % (protocol, host_name) def get_service_content(self): """Gets the service content object.""" return self._service_content def __getattr__(self, attr_name): """Makes the API calls and gets the result.""" def vim_request_handler(managed_object, **kwargs): """ Builds the SOAP message and parses the response for fault checking and other errors. managed_object : Managed Object Reference or Managed Object Name **kwargs : Keyword arguments of the call """ # Dynamic handler for VI SDK Calls try: request_mo = self._request_managed_object_builder( managed_object) request = getattr(self.client.service, attr_name) response = request(request_mo, **kwargs) # To check for the faults that are part of the message body # and not returned as Fault object response from the ESX # SOAP server if hasattr(error_util.FaultCheckers, attr_name.lower() + "_fault_checker"): fault_checker = getattr(error_util.FaultCheckers, attr_name.lower() + "_fault_checker") fault_checker(response) return response # Catch the VimFaultException that is raised by the fault # check of the SOAP response except error_util.VimFaultException: raise except suds.MethodNotFound: raise except suds.WebFault as excep: doc = excep.document detail = doc.childAtPath("/Envelope/Body/Fault/detail") fault_list = [] for child in detail.getChildren(): fault_list.append(child.get("type")) raise error_util.VimFaultException(fault_list, excep) except AttributeError as excep: raise error_util.VimAttributeError(_("No such SOAP method " "'%s' provided by VI SDK") % (attr_name), excep) except (httplib.CannotSendRequest, httplib.ResponseNotReady, httplib.CannotSendHeader) as excep: raise error_util.SessionOverLoadException(_("httplib " "error in %s: ") % (attr_name), excep) except 
Exception as excep: # Socket errors which need special handling for they # might be caused by ESX API call overload if (str(excep).find(ADDRESS_IN_USE_ERROR) != -1 or str(excep).find(CONN_ABORT_ERROR)) != -1: raise error_util.SessionOverLoadException(_("Socket " "error in %s: ") % (attr_name), excep) # Type error that needs special handling for it might be # caused by ESX host API call overload elif str(excep).find(RESP_NOT_XML_ERROR) != -1: raise error_util.SessionOverLoadException(_("Type " "error in %s: ") % (attr_name), excep) else: raise error_util.VimException( _("Exception in %s ") % (attr_name), excep) return vim_request_handler def _request_managed_object_builder(self, managed_object): """Builds the request managed object.""" # Request Managed Object Builder if isinstance(managed_object, str): mo = suds.sudsobject.Property(managed_object) mo._type = managed_object else: mo = managed_object return mo def __repr__(self): return "VIM Object" def __str__(self): return "VIM Object"
{ "content_hash": "6ad91b22c7271683b608eaa05f733d8a", "timestamp": "", "source": "github", "line_count": 222, "max_line_length": 76, "avg_line_length": 38.92342342342342, "alnum_prop": 0.5747020020830922, "repo_name": "sacharya/nova", "id": "b860586e912f5f4749c43430b3bf00d1d6c1583b", "size": "9375", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "nova/virt/vmwareapi/vim.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "13505239" }, { "name": "Shell", "bytes": "16239" } ], "symlink_target": "" }
/** * Creates six tasks that operate on three queues as follows: * * The first two tasks send and receive an incrementing number to/from a queue. * One task acts as a producer and the other as the consumer. The consumer is a * higher priority than the producer and is set to block on queue reads. The queue * only has space for one item - as soon as the producer posts a message on the * queue the consumer will unblock, pre-empt the producer, and remove the item. * * The second two tasks work the other way around. Again the queue used only has * enough space for one item. This time the consumer has a lower priority than the * producer. The producer will try to post on the queue blocking when the queue is * full. When the consumer wakes it will remove the item from the queue, causing * the producer to unblock, pre-empt the consumer, and immediately re-fill the * queue. * * The last two tasks use the same queue producer and consumer functions. This time the queue has * enough space for lots of items and the tasks operate at the same priority. The * producer will execute, placing items into the queue. The consumer will start * executing when either the queue becomes full (causing the producer to block) or * a context switch occurs (tasks of the same priority will time slice). * * \page BlockQC blockQ.c * \ingroup DemoFiles * <HR> */ /* Changes from V1.00: + Reversed the priority and block times of the second two demo tasks so they operate as per the description above. Changes from V2.0.0 + Delay periods are now specified using variables and constants of TickType_t rather than unsigned long. Changes from V4.0.2 + The second set of tasks were created the wrong way around. This has been corrected. */ #include <stdlib.h> /* Scheduler include files. */ #include "FreeRTOS.h" #include "task.h" #include "queue.h" /* Demo program include files. 
*/ #include "BlockQ.h" #include "print.h" #define blckqSTACK_SIZE ( ( unsigned short ) configMINIMAL_STACK_SIZE ) #define blckqNUM_TASK_SETS ( 3 ) /* Structure used to pass parameters to the blocking queue tasks. */ typedef struct BLOCKING_QUEUE_PARAMETERS { QueueHandle_t xQueue; /*< The queue to be used by the task. */ TickType_t xBlockTime; /*< The block time to use on queue reads/writes. */ volatile short *psCheckVariable; /*< Incremented on each successful cycle to check the task is still running. */ } xBlockingQueueParameters; /* Task function that creates an incrementing number and posts it on a queue. */ static void vBlockingQueueProducer( void *pvParameters ); /* Task function that removes the incrementing number from a queue and checks that it is the expected number. */ static void vBlockingQueueConsumer( void *pvParameters ); /* Variables which are incremented each time an item is removed from a queue, and found to be the expected value. These are used to check that the tasks are still running. */ static volatile short sBlockingConsumerCount[ blckqNUM_TASK_SETS ] = { ( short ) 0, ( short ) 0, ( short ) 0 }; /* Variable which are incremented each time an item is posted on a queue. These are used to check that the tasks are still running. */ static volatile short sBlockingProducerCount[ blckqNUM_TASK_SETS ] = { ( short ) 0, ( short ) 0, ( short ) 0 }; /*-----------------------------------------------------------*/ void vStartBlockingQueueTasks( unsigned portBASE_TYPE uxPriority ) { xBlockingQueueParameters *pxQueueParameters1, *pxQueueParameters2; xBlockingQueueParameters *pxQueueParameters3, *pxQueueParameters4; xBlockingQueueParameters *pxQueueParameters5, *pxQueueParameters6; const unsigned portBASE_TYPE uxQueueSize1 = 1, uxQueueSize5 = 5; const TickType_t xBlockTime = ( TickType_t ) 1000 / portTICK_PERIOD_MS; const TickType_t xDontBlock = ( TickType_t ) 0; /* Create the first two tasks as described at the top of the file. 
*/ /* First create the structure used to pass parameters to the consumer tasks. */ pxQueueParameters1 = ( xBlockingQueueParameters * ) pvPortMalloc( sizeof( xBlockingQueueParameters ) ); /* Create the queue used by the first two tasks to pass the incrementing number. Pass a pointer to the queue in the parameter structure. */ pxQueueParameters1->xQueue = xQueueCreate( uxQueueSize1, ( unsigned portBASE_TYPE ) sizeof( unsigned short ) ); /* The consumer is created first so gets a block time as described above. */ pxQueueParameters1->xBlockTime = xBlockTime; /* Pass in the variable that this task is going to increment so we can check it is still running. */ pxQueueParameters1->psCheckVariable = &( sBlockingConsumerCount[ 0 ] ); /* Create the structure used to pass parameters to the producer task. */ pxQueueParameters2 = ( xBlockingQueueParameters * ) pvPortMalloc( sizeof( xBlockingQueueParameters ) ); /* Pass the queue to this task also, using the parameter structure. */ pxQueueParameters2->xQueue = pxQueueParameters1->xQueue; /* The producer is not going to block - as soon as it posts the consumer will wake and remove the item so the producer should always have room to post. */ pxQueueParameters2->xBlockTime = xDontBlock; /* Pass in the variable that this task is going to increment so we can check it is still running. */ pxQueueParameters2->psCheckVariable = &( sBlockingProducerCount[ 0 ] ); /* Note the producer has a lower priority than the consumer when the tasks are spawned. */ xTaskCreate( vBlockingQueueConsumer, "QConsB1", blckqSTACK_SIZE, ( void * ) pxQueueParameters1, uxPriority, NULL ); xTaskCreate( vBlockingQueueProducer, "QProdB2", blckqSTACK_SIZE, ( void * ) pxQueueParameters2, tskIDLE_PRIORITY, NULL ); /* Create the second two tasks as described at the top of the file. This uses the same mechanism but reverses the task priorities. 
*/ pxQueueParameters3 = ( xBlockingQueueParameters * ) pvPortMalloc( sizeof( xBlockingQueueParameters ) ); pxQueueParameters3->xQueue = xQueueCreate( uxQueueSize1, ( unsigned portBASE_TYPE ) sizeof( unsigned short ) ); pxQueueParameters3->xBlockTime = xDontBlock; pxQueueParameters3->psCheckVariable = &( sBlockingProducerCount[ 1 ] ); pxQueueParameters4 = ( xBlockingQueueParameters * ) pvPortMalloc( sizeof( xBlockingQueueParameters ) ); pxQueueParameters4->xQueue = pxQueueParameters3->xQueue; pxQueueParameters4->xBlockTime = xBlockTime; pxQueueParameters4->psCheckVariable = &( sBlockingConsumerCount[ 1 ] ); xTaskCreate( vBlockingQueueProducer, "QProdB3", blckqSTACK_SIZE, ( void * ) pxQueueParameters3, tskIDLE_PRIORITY, NULL ); xTaskCreate( vBlockingQueueConsumer, "QConsB4", blckqSTACK_SIZE, ( void * ) pxQueueParameters4, uxPriority, NULL ); /* Create the last two tasks as described above. The mechanism is again just the same. This time both parameter structures are given a block time. */ pxQueueParameters5 = ( xBlockingQueueParameters * ) pvPortMalloc( sizeof( xBlockingQueueParameters ) ); pxQueueParameters5->xQueue = xQueueCreate( uxQueueSize5, ( unsigned portBASE_TYPE ) sizeof( unsigned short ) ); pxQueueParameters5->xBlockTime = xBlockTime; pxQueueParameters5->psCheckVariable = &( sBlockingProducerCount[ 2 ] ); pxQueueParameters6 = ( xBlockingQueueParameters * ) pvPortMalloc( sizeof( xBlockingQueueParameters ) ); pxQueueParameters6->xQueue = pxQueueParameters5->xQueue; pxQueueParameters6->xBlockTime = xBlockTime; pxQueueParameters6->psCheckVariable = &( sBlockingConsumerCount[ 2 ] ); xTaskCreate( vBlockingQueueProducer, "QProdB5", blckqSTACK_SIZE, ( void * ) pxQueueParameters5, tskIDLE_PRIORITY, NULL ); xTaskCreate( vBlockingQueueConsumer, "QConsB6", blckqSTACK_SIZE, ( void * ) pxQueueParameters6, tskIDLE_PRIORITY, NULL ); } /*-----------------------------------------------------------*/ static void vBlockingQueueProducer( void *pvParameters ) { unsigned short 
usValue = 0; xBlockingQueueParameters *pxQueueParameters; const char * const pcTaskStartMsg = "Blocking queue producer started.\r\n"; const char * const pcTaskErrorMsg = "Could not post on blocking queue\r\n"; short sErrorEverOccurred = pdFALSE; pxQueueParameters = ( xBlockingQueueParameters * ) pvParameters; /* Queue a message for printing to say the task has started. */ vPrintDisplayMessage( &pcTaskStartMsg ); for( ;; ) { if( xQueueSendToBack( pxQueueParameters->xQueue, ( void * ) &usValue, pxQueueParameters->xBlockTime ) != pdPASS ) { vPrintDisplayMessage( &pcTaskErrorMsg ); sErrorEverOccurred = pdTRUE; } else { /* We have successfully posted a message, so increment the variable used to check we are still running. */ if( sErrorEverOccurred == pdFALSE ) { ( *pxQueueParameters->psCheckVariable )++; } /* Increment the variable we are going to post next time round. The consumer will expect the numbers to follow in numerical order. */ ++usValue; } } } /*-----------------------------------------------------------*/ static void vBlockingQueueConsumer( void *pvParameters ) { unsigned short usData, usExpectedValue = 0; xBlockingQueueParameters *pxQueueParameters; const char * const pcTaskStartMsg = "Blocking queue consumer started.\r\n"; const char * const pcTaskErrorMsg = "Incorrect value received on blocking queue.\r\n"; short sErrorEverOccurred = pdFALSE; /* Queue a message for printing to say the task has started. */ vPrintDisplayMessage( &pcTaskStartMsg ); pxQueueParameters = ( xBlockingQueueParameters * ) pvParameters; for( ;; ) { if( xQueueReceive( pxQueueParameters->xQueue, &usData, pxQueueParameters->xBlockTime ) == pdPASS ) { if( usData != usExpectedValue ) { vPrintDisplayMessage( &pcTaskErrorMsg ); /* Catch-up. */ usExpectedValue = usData; sErrorEverOccurred = pdTRUE; } else { /* We have successfully received a message, so increment the variable used to check we are still running. 
*/ if( sErrorEverOccurred == pdFALSE ) { ( *pxQueueParameters->psCheckVariable )++; } /* Increment the value we expect to remove from the queue next time round. */ ++usExpectedValue; } } } } /*-----------------------------------------------------------*/ /* This is called to check that all the created tasks are still running. */ portBASE_TYPE xAreBlockingQueuesStillRunning( void ) { static short sLastBlockingConsumerCount[ blckqNUM_TASK_SETS ] = { ( short ) 0, ( short ) 0, ( short ) 0 }; static short sLastBlockingProducerCount[ blckqNUM_TASK_SETS ] = { ( short ) 0, ( short ) 0, ( short ) 0 }; portBASE_TYPE xReturn = pdPASS, xTasks; /* Not too worried about mutual exclusion on these variables as they are 16 bits and we are only reading them. We also only care to see if they have changed or not. Loop through each check variable and return pdFALSE if any are found not to have changed since the last call. */ for( xTasks = 0; xTasks < blckqNUM_TASK_SETS; xTasks++ ) { if( sBlockingConsumerCount[ xTasks ] == sLastBlockingConsumerCount[ xTasks ] ) { xReturn = pdFALSE; } sLastBlockingConsumerCount[ xTasks ] = sBlockingConsumerCount[ xTasks ]; if( sBlockingProducerCount[ xTasks ] == sLastBlockingProducerCount[ xTasks ] ) { xReturn = pdFALSE; } sLastBlockingProducerCount[ xTasks ] = sBlockingProducerCount[ xTasks ]; } return xReturn; }
{ "content_hash": "fc58edb59aa48872c25658c9949836e9", "timestamp": "", "source": "github", "line_count": 283, "max_line_length": 122, "avg_line_length": 40.159010600706715, "alnum_prop": 0.7239771227452706, "repo_name": "kasperdokter/Reo-compiler", "id": "d008659fcc1b961beb0bfebd3a3bd2a621a15d13", "size": "15191", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "FreeRTOSv9.0.0/FreeRTOS/Demo/Common/Full/BlockQ.c", "mode": "33188", "license": "mit", "language": [ { "name": "ANTLR", "bytes": "9338" }, { "name": "Batchfile", "bytes": "6695" }, { "name": "HTML", "bytes": "26509" }, { "name": "Java", "bytes": "187975" }, { "name": "JavaScript", "bytes": "5701" }, { "name": "Makefile", "bytes": "6750" }, { "name": "Python", "bytes": "8451" }, { "name": "R", "bytes": "417" } ], "symlink_target": "" }
package com.redhat.ceylon.compiler.typechecker.model; import java.util.ArrayList; import java.util.List; public abstract class MethodOrValue extends TypedDeclaration { private boolean captured; private boolean shortcutRefinement; private Parameter initializerParameter; private List<Declaration> members = new ArrayList<Declaration>(3); private List<Annotation> annotations = new ArrayList<Annotation>(4); @Override public List<Annotation> getAnnotations() { return annotations; } @Override public List<Declaration> getMembers() { return members; } public void addMember(Declaration declaration) { members.add(declaration); } public boolean isShortcutRefinement() { return shortcutRefinement; } public void setShortcutRefinement(boolean shortcutRefinement) { this.shortcutRefinement = shortcutRefinement; } @Override public DeclarationKind getDeclarationKind() { return DeclarationKind.MEMBER; } public Parameter getInitializerParameter() { return initializerParameter; } public void setInitializerParameter(Parameter d) { initializerParameter = d; } @Override public boolean isParameter() { return initializerParameter!=null; } public boolean isTransient() { return true; } @Override public boolean isCaptured() { return captured; } public void setCaptured(boolean local) { this.captured = local; } }
{ "content_hash": "411c1a1febeed75f0c320ca507afa806", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 72, "avg_line_length": 23.671641791044777, "alnum_prop": 0.667717528373266, "repo_name": "lucaswerkmeister/ceylon-spec", "id": "725617fc6ff0982106edde05136d109858177d1f", "size": "1586", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/redhat/ceylon/compiler/typechecker/model/MethodOrValue.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "12593" }, { "name": "Ceylon", "bytes": "490885" }, { "name": "GAP", "bytes": "132175" }, { "name": "Java", "bytes": "1142924" }, { "name": "XSLT", "bytes": "1975926" } ], "symlink_target": "" }
FROM debian:jessie MAINTAINER snowdream <yanghui1986527@gmail.com> # Install RUN apt-get update -y && \ apt-get install -y --no-install-recommends expect && \ printf "\ set timeout -1\n\ spawn apt-get install -y --no-install-recommends keyboard-configuration\n\ expect {\n\ \"Keyboard layout: \" { send \"1\\\n\" }\n\ }\n\ expect eof\n\ " | expect RUN DEBIAN_FRONTEND=noninteractive && \ apt-key adv --recv-keys --keyserver keys.gnupg.net E1F958385BFE2B6E && \ echo "deb http://packages.x2go.org/debian jessie main" >> /etc/apt/sources.list.d/x2go.list && \ apt-get -qq update && \ apt-get -qqy install --no-install-recommends \ vim \ apt-utils \ pwgen \ task-mate-desktop \ x2goserver x2goserver-xsession x2gomatebindings \ openssh-server && \ mkdir -p /var/run/sshd && \ sed -i "s/UsePrivilegeSeparation.*/UsePrivilegeSeparation no/g" /etc/ssh/sshd_config && \ sed -i "s/UsePAM.*/UsePAM no/g" /etc/ssh/sshd_config && \ sed -i "s/PermitRootLogin.*/PermitRootLogin yes/g" /etc/ssh/sshd_config && \ sed -i "s/#PasswordAuthentication/PasswordAuthentication/g" /etc/ssh/sshd_config && \ sed -i 's/^mesg n$/tty -s \&\& mesg n/g' ~/.profile && \ mkdir -p /tmp/.X11-unix && chmod 1777 /tmp/.X11-unix && \ mkdir /var/run/dbus && \ apt-get clean && \ apt-get autoremove && \ rm -rf /var/lib/apt/lists/* ADD set_root_pw.sh /set_root_pw.sh ADD run.sh /run.sh RUN chmod +x /*.sh EXPOSE 22 CMD ["/run.sh"]
{ "content_hash": "f7538bbfc72d5a709469f44d173e14ec", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 100, "avg_line_length": 34.28260869565217, "alnum_prop": 0.6087507926442612, "repo_name": "snowdream/dockerfiles", "id": "48709a7771640a7b16212bd539d0a0c8efd0f3c9", "size": "1577", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "desktop/base/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "8041" }, { "name": "Shell", "bytes": "1141" } ], "symlink_target": "" }
/* End-of-list marker for the .eh_frame section: a zero word placed in
 * ".eh_frame". The `used` attribute keeps it from being discarded even
 * though nothing references it by name; `aligned(sizeof(int32_t))` keeps
 * the terminator word-aligned with the preceding frame data.
 * NOTE(review): this object must come last on the link line for the marker
 * to land at the end of the merged section — link order, not this file,
 * guarantees that. */
const int32_t __EH_FRAME_LIST_END__[]
    __attribute__((section(".eh_frame"), aligned(sizeof(int32_t)),
                   visibility("hidden"), used)) = {0};

/* Legacy .ctors/.dtors support: null function-pointer entries that terminate
 * the constructor and destructor lists. Skipped when the toolchain uses
 * .init_array/.fini_array instead (CRT_HAS_INITFINI_ARRAY defined), where no
 * such terminators are needed. */
#ifndef CRT_HAS_INITFINI_ARRAY
typedef void (*fp)(void);
fp __CTOR_LIST_END__[]
    __attribute__((section(".ctors"), visibility("hidden"), used)) = {0};
fp __DTOR_LIST_END__[]
    __attribute__((section(".dtors"), visibility("hidden"), used)) = {0};
#endif
{ "content_hash": "df762c343ae9117d1668a80a603fcad9", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 73, "avg_line_length": 38.09090909090909, "alnum_prop": 0.5847255369928401, "repo_name": "endlessm/chromium-browser", "id": "ebcc60b89a102543749274bf192276be88f966cd", "size": "890", "binary": false, "copies": "19", "ref": "refs/heads/master", "path": "third_party/llvm/compiler-rt/lib/crt/crtend.c", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
package protopath

import (
	"strings"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/reflect/protoreflect"
)

// ToJSONPath converts the `path` corresponding to the `proto` in to the JSON
// path, providing a cleaner way to return paths to users for debugging. Therefore,
// errors are ignored and conversion is simply aborted early.
//
// The returned string is the dot-joined JSON field names, e.g.
// "a.b[0].c"; an invalid path yields "".
func ToJSONPath(p proto.Message, path Path) string {
	if !isValidPath(path) {
		return ""
	}
	return strings.Join(toJSON(p.ProtoReflect().Descriptor(), path.parts), ".")
}

// toJSON recursively maps the proto field names in `path` onto their JSON
// names, walking message descriptors as it descends. On any name that cannot
// be resolved it stops and returns what has been converted so far.
func toJSON(md protoreflect.MessageDescriptor, path []protoreflect.Name) []string {
	var jsonParts []string
	if len(path) == 0 {
		return jsonParts
	}
	// Consume the first path element; the remainder is handled recursively.
	name := path[0]
	path = path[1:]

	// Find the valid FieldDescriptor by name.
	fd := md.Fields().ByName(name)
	if fd == nil {
		od := md.Oneofs().ByName(name)
		// If this is a oneOf, we move one down into the oneOf as a oneOf doesn't
		// have a MessageDescriptor to pass on the next call.
		// Note this consumes a second path element (the field inside the oneOf).
		if od != nil && len(path) >= 1 {
			fd = od.Fields().ByName(path[0])
			path = path[1:]
		}
		if fd == nil {
			// Neither a field nor a resolvable oneOf member: abort early.
			return jsonParts
		}
	}

	// Extract the JSON representation of the FieldDescriptor.
	var jsonName string
	if fd.ContainingOneof() != nil {
		// Oneof members are rendered as ofType(MessageName) rather than their
		// plain JSON name.
		jsonName = "ofType(" + string(fd.Message().Name()) + ")"
	} else {
		jsonName = fd.JSONName()
	}
	// Repeated fields consume the next path element as an index: "name[i]".
	if jsonName != "" && fd.Cardinality() == protoreflect.Repeated && len(path) >= 1 {
		jsonName += "[" + string(path[0]) + "]"
		path = path[1:]
	}
	if jsonName != "" {
		jsonParts = append(jsonParts, jsonName)
	}
	// Recurse into the field's message type (nil for scalars, which ends the
	// recursion on the len(path)==0 / nil-descriptor checks).
	return append(jsonParts, toJSON(fd.Message(), path)...)
}
{ "content_hash": "2dd48c6568f0b6d42c20c56bfe6d4ce2", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 83, "avg_line_length": 26.19672131147541, "alnum_prop": 0.6652065081351689, "repo_name": "google/fhir", "id": "9280a953e3f41d1a7bcc542038b2b057da53dfe6", "size": "2187", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "go/jsonformat/internal/protopath/proto_path_to_json.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "5560" }, { "name": "C", "bytes": "37522" }, { "name": "C++", "bytes": "1376466" }, { "name": "Dockerfile", "bytes": "966" }, { "name": "Go", "bytes": "542973" }, { "name": "Java", "bytes": "929152" }, { "name": "Python", "bytes": "645991" }, { "name": "Shell", "bytes": "17013" }, { "name": "Starlark", "bytes": "308438" } ], "symlink_target": "" }
// HTTP entry point: an Express app exposing a Nuts release server backed by a
// GitHub repository, with optional HTTP Basic auth on the API and optional
// Segment analytics on downloads. All configuration comes from environment
// variables.
var express = require('express');
var uuid = require('uuid');
var basicAuth = require('basic-auth');
var Analytics = require('analytics-node');
var nuts = require('../');

var app = express();

// Credentials protecting the "api" routes; auth is skipped entirely when
// API_USERNAME is unset.
var apiAuth = {
    username: process.env.API_USERNAME,
    password: process.env.API_PASSWORD
};

// Segment client, only created when ANALYTICS_TOKEN is provided.
var analytics = undefined;
var downloadEvent = process.env.ANALYTICS_EVENT_DOWNLOAD || 'download';

if (process.env.ANALYTICS_TOKEN) {
    analytics = new Analytics(process.env.ANALYTICS_TOKEN);
}

// Nuts instance serving releases from the configured GitHub repository.
var myNuts = nuts.Nuts({
    repository: process.env.GITHUB_REPO,
    token: process.env.GITHUB_TOKEN,
    endpoint: process.env.GITHUB_ENDPOINT,
    username: process.env.GITHUB_USERNAME,
    password: process.env.GITHUB_PASSWORD,
    timeout: process.env.VERSIONS_TIMEOUT,
    cache: process.env.VERSIONS_CACHE,
    refreshSecret: process.env.GITHUB_SECRET,
    proxyAssets: !Boolean(process.env.DONT_PROXY_ASSETS)
});

// Control access to API
myNuts.before('api', function(access, next) {
    // No username configured => API is open.
    if (!apiAuth.username) return next();

    function unauthorized() {
        next(new Error('Invalid username/password for API'));
    };

    var user = basicAuth(access.req);
    if (!user || !user.name || !user.pass) {
        return unauthorized();
    };

    if (user.name === apiAuth.username
        && user.pass === apiAuth.password) {
        return next();
    } else {
        return unauthorized();
    };
});

// Log download
myNuts.before('download', function(download, next) {
    console.log('download', download.platform.filename, "for version", download.version.tag, "on channel", download.version.channel, "for", download.platform.type);

    next();
});
myNuts.after('download', function(download, next) {
    console.log('downloaded', download.platform.filename, "for version", download.version.tag, "on channel", download.version.channel, "for", download.platform.type);

    // Track on segment if enabled
    if (analytics) {
        var userId = download.req.query.user;

        // Segment requires either a userId or an anonymousId: use the caller's
        // ?user= value when present, otherwise a fresh random anonymous id.
        analytics.track({
            event: downloadEvent,
            anonymousId: userId? null : uuid.v4(),
            userId: userId,
            properties: {
                version: download.version.tag,
                channel: download.version.channel,
                platform: download.platform.type,
                os: nuts.platforms.toType(download.platform.type)
            }
        });
    }

    next();
});

// TRUST_PROXY may be JSON (e.g. a number or array for Express's trust-proxy
// setting) or a plain string; fall back to the raw string when parsing fails.
if (process.env.TRUST_PROXY) {
    try {
        var trustProxyObject = JSON.parse(process.env.TRUST_PROXY);
        app.set('trust proxy', trustProxyObject);
    }
    catch (e) {
        app.set('trust proxy', process.env.TRUST_PROXY);
    }
}

app.use(myNuts.router);

// Error handling
app.use(function(req, res, next) {
    res.status(404).send("Page not found");
});
app.use(function(err, req, res, next) {
    var msg = err.message || err;
    var code = 500;

    console.error(err.stack || err);

    // Return error
    res.format({
        'text/plain': function(){
            res.status(code).send(msg);
        },

        'text/html': function () {
            res.status(code).send(msg);
        },

        'application/json': function (){
            res.status(code).send({
                'error': msg,
                'code': code
            });
        }
    });
});

myNuts.init()

// Start the HTTP server
.then(function() {
    var server = app.listen(process.env.PORT || 5000, function () {
        var host = server.address().address;
        var port = server.address().port;

        console.log('Listening at http://%s:%s', host, port);
    });
}, function(err) {
    console.log(err.stack || err);
    process.exit(1);
});
{ "content_hash": "c4f301e6706402ee8a17b4999d06bc9a", "timestamp": "", "source": "github", "line_count": 133, "max_line_length": 166, "avg_line_length": 27.518796992481203, "alnum_prop": 0.6073770491803279, "repo_name": "GitbookIO/nuts", "id": "9e7c0aab7a9a3276fc33dea1b160db9a0105274e", "size": "3660", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "bin/web.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "46819" } ], "symlink_target": "" }
<?php

namespace Phalcon\DI {

    /**
     * Phalcon\DI\Injectable
     *
     * This class allows to access services in the services container by just only accessing a public property
     * with the same name of a registered service
     */
    abstract class Injectable implements \Phalcon\DI\InjectionAwareInterface, \Phalcon\Events\EventsAwareInterface
    {
        /**
         * Dependency injector for this instance.
         *
         * @var \Phalcon\DiInterface|null
         */
        protected $_dependencyInjector;

        /**
         * Events manager for this instance.
         *
         * @var \Phalcon\Events\ManagerInterface|null
         */
        protected $_eventsManager;

        /**
         * @var \Phalcon\Mvc\ViewInterface
         */
        public $view;

        /**
         * @var \Phalcon\Mvc\RouterInterface
         */
        public $router;

        /**
         * @var \Phalcon\Mvc\DispatcherInterface
         */
        public $dispatcher;

        /**
         * @var \Phalcon\Mvc\UrlInterface
         */
        public $url;

        /**
         * @var \Phalcon\DiInterface
         */
        public $di;

        /**
         * @var \Phalcon\HTTP\RequestInterface
         */
        public $request;

        /**
         * @var \Phalcon\HTTP\ResponseInterface
         */
        public $response;

        /**
         * @var \Phalcon\Flash\Direct
         */
        public $flash;

        /**
         * @var \Phalcon\Flash\Session
         */
        public $flashSession;

        /**
         * @var \Phalcon\Session\AdapterInterface
         */
        public $session;

        /**
         * @var \Phalcon\Session\Bag
         */
        public $persistent;

        /**
         * @var \Phalcon\Mvc\Model\ManagerInterface
         */
        public $modelsManager;

        /**
         * @var \Phalcon\Mvc\Model\MetadataInterface
         */
        public $modelsMetadata;

        /**
         * @var \Phalcon\Mvc\Model\Transaction\Manager
         */
        public $transactionManager;

        /**
         * @var \Phalcon\FilterInterface
         */
        public $filter;

        /**
         * @var \Phalcon\Security
         */
        public $security;

        /**
         * @var \Phalcon\Annotations\Adapter\Memory
         */
        public $annotations;

        /**
         * Sets the dependency injector
         *
         * @param \Phalcon\DiInterface $dependencyInjector
         * @throws \Phalcon\DI\Exception When $dependencyInjector is not an object
         */
        public function setDI($dependencyInjector)
        {
            if (!is_object($dependencyInjector)) {
                throw new \Phalcon\DI\Exception('Dependency Injector is invalid');
            }

            // BUGFIX: this used to assign to "$this->__dependencyInjector"
            // (double underscore) — an undeclared, different property — so
            // getDI() always returned null after setDI() was called.
            $this->_dependencyInjector = $dependencyInjector;
        }

        /**
         * Returns the internal dependency injector
         *
         * @return \Phalcon\DiInterface
         */
        public function getDI()
        {
            return $this->_dependencyInjector;
        }

        /**
         * Sets the event manager
         *
         * @param \Phalcon\Events\ManagerInterface $eventsManager
         * @throws \Phalcon\DI\Exception When $eventsManager is not an object
         */
        public function setEventsManager($eventsManager)
        {
            if (!is_object($eventsManager)) {
                throw new \Phalcon\DI\Exception('Events manager is invalid');
            }

            $this->_eventsManager = $eventsManager;
        }

        /**
         * Returns the internal event manager
         *
         * @return \Phalcon\Events\ManagerInterface
         */
        public function getEventsManager()
        {
            return $this->_eventsManager;
        }

        /**
         * Magic method __get
         *
         * Resolves undefined property reads as services from the dependency
         * injection container, caching the resolved service on the instance so
         * subsequent reads bypass __get entirely.
         *
         * @param string $propertyName
         * @throws \Phalcon\DI\Exception When no dependency injector is available
         */
        public function __get($propertyName)
        {
            $dependencyInjector = $this->_dependencyInjector;
            if (!is_object($dependencyInjector)) {
                // Fall back to the global default container when none was set.
                $dependencyInjector = \Phalcon\DI::getDefault();
                if (!is_object($dependencyInjector)) {
                    throw new \Phalcon\DI\Exception('A dependency injection object is required to access the application services');
                }
            }

            /**
             * This class injects a public property with a resolved service
             */
            if ($dependencyInjector->has($propertyName)) {
                $service = $dependencyInjector->getShared($propertyName);
                $this->$propertyName = $service;
                return $service;
            }

            if ('di' == $propertyName) {
                $this->di = $dependencyInjector;
                return $dependencyInjector;
            }

            /**
             * Accessing the persistent property will create a session bag in any class
             */
            if ('persistent' == $propertyName) {
                $className = get_class($this);
                $arguments = array($className);
                $persistent = $dependencyInjector->get('sessionBag', $arguments);
                $this->persistent = $persistent;
                return $persistent;
            }

            /**
             * A notice is shown if the property is not defined and isn't a valid service
             */
            trigger_error('Access to undefined property "' . $propertyName . '"');
        }
    }
}
{ "content_hash": "3b9912d8ecd58224ef0b87873687de21", "timestamp": "", "source": "github", "line_count": 210, "max_line_length": 132, "avg_line_length": 21.49047619047619, "alnum_prop": 0.5617106137824064, "repo_name": "101010111100/phalconpi", "id": "5afaebff343a7c4159b4999f18514c64251c1a40", "size": "4513", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Phalcon/DI/Injectable.php", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
package org.spongepowered.common.data.processor.data.entity;

import org.spongepowered.api.data.DataTransactionResult;
import org.spongepowered.api.data.key.Keys;
import org.spongepowered.api.data.manipulator.immutable.block.ImmutableDirectionalData;
import org.spongepowered.api.data.manipulator.mutable.block.DirectionalData;
import org.spongepowered.api.data.value.ValueContainer;
import org.spongepowered.api.data.value.immutable.ImmutableValue;
import org.spongepowered.api.data.value.mutable.Value;
import org.spongepowered.api.util.Direction;
import org.spongepowered.common.data.manipulator.mutable.block.SpongeDirectionalData;
import org.spongepowered.common.data.processor.common.AbstractSingleDataSingleTargetProcessor;
import org.spongepowered.common.data.value.immutable.ImmutableSpongeValue;
import org.spongepowered.common.data.value.mutable.SpongeValue;
import org.spongepowered.common.interfaces.entity.IMixinEntityHanging;

import java.util.Optional;

/**
 * Data processor binding {@link Keys#DIRECTION} to hanging entities
 * ({@link IMixinEntityHanging}), translating between the Sponge
 * {@link Direction} data API and the direction stored on the mixin.
 */
public class HangingDataProcessor extends
        AbstractSingleDataSingleTargetProcessor<IMixinEntityHanging, Direction, Value<Direction>, DirectionalData, ImmutableDirectionalData> {

    public HangingDataProcessor() {
        super(Keys.DIRECTION, IMixinEntityHanging.class);
    }

    /** Creates an empty mutable manipulator for this processor's data. */
    @Override
    protected DirectionalData createManipulator() {
        return new SpongeDirectionalData();
    }

    /** Writes the direction onto the hanging entity; always succeeds. */
    @Override
    protected boolean set(IMixinEntityHanging dataHolder, Direction value) {
        dataHolder.setDirection(value);
        return true;
    }

    /** Reads the direction; hanging entities always have one, hence never empty. */
    @Override
    protected Optional<Direction> getVal(IMixinEntityHanging dataHolder) {
        return Optional.of(dataHolder.getDirection());
    }

    /** Wraps a direction in a (cached) immutable value, defaulting to NONE. */
    @Override
    protected ImmutableValue<Direction> constructImmutableValue(Direction value) {
        return ImmutableSpongeValue.cachedOf(this.key, Direction.NONE, value);
    }

    /** Wraps a direction in a mutable value, defaulting to NONE. */
    @Override
    protected Value<Direction> constructValue(Direction actualValue) {
        return new SpongeValue<>(this.key, Direction.NONE, actualValue);
    }

    /** Direction cannot be removed from a hanging entity, so removal always fails. */
    @Override
    public DataTransactionResult removeFrom(ValueContainer<?> container) {
        return DataTransactionResult.failNoData();
    }
}
{ "content_hash": "42e510188e1db3d870dfcd13ff12a3c4", "timestamp": "", "source": "github", "line_count": 57, "max_line_length": 176, "avg_line_length": 38.14035087719298, "alnum_prop": 0.7893284268629255, "repo_name": "JBYoshi/SpongeCommon", "id": "6791b5cb9be0b0a452298c935b31cc79b655d4fe", "size": "3421", "binary": false, "copies": "3", "ref": "refs/heads/bleeding", "path": "src/main/java/org/spongepowered/common/data/processor/data/entity/HangingDataProcessor.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "10666082" }, { "name": "Shell", "bytes": "1072" } ], "symlink_target": "" }
using System;
using System.Windows.Forms;

namespace DwriteCairoDemo_DoubleBuffered
{
    /// <summary>
    /// WinForms bootstrap for the demo application.
    /// </summary>
    static class Program
    {
        /// <summary>
        /// The main entry point for the application.
        /// Visual-style and text-rendering defaults must be set before the
        /// first window is created, hence before Application.Run.
        /// </summary>
        [STAThread]
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
            Application.Run(new Form1());
        }
    }
}
{ "content_hash": "84360151109573fa2d0ae0c4c66c06d4", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 65, "avg_line_length": 23.473684210526315, "alnum_prop": 0.5852017937219731, "repo_name": "zwcloud/ZWCloud.DwriteCairo", "id": "a1c20717ef8c69b84f8f7c61b29be95fdc02091e", "size": "448", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "DwriteCairoDemo_DoubleBuffered/Program.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "120869" }, { "name": "C#", "bytes": "84386" }, { "name": "C++", "bytes": "11393" } ], "symlink_target": "" }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.form.engine.impl.el; import org.flowable.engine.common.impl.javax.el.ELContext; import org.flowable.engine.common.impl.javax.el.ELResolver; import org.flowable.engine.common.impl.javax.el.FunctionMapper; import org.flowable.engine.common.impl.javax.el.VariableMapper; /** * @author Joram Barrez * @author Tijs Rademakers */ public class FlowableFormElContext extends ELContext { protected ELResolver elResolver; public FlowableFormElContext(ELResolver elResolver) { this.elResolver = elResolver; } public ELResolver getELResolver() { return elResolver; } public FunctionMapper getFunctionMapper() { return new FlowableFormFunctionMapper(); } public VariableMapper getVariableMapper() { return null; } }
{ "content_hash": "07efa559345fe790f150140a63460a1b", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 75, "avg_line_length": 31.72093023255814, "alnum_prop": 0.7375366568914956, "repo_name": "robsoncardosoti/flowable-engine", "id": "53eec3b8aac0f3fc1d5cdf6b43da5775498b9d81", "size": "1364", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "modules/flowable-form-engine/src/main/java/org/flowable/form/engine/impl/el/FlowableFormElContext.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "96556" }, { "name": "Batchfile", "bytes": "166" }, { "name": "CSS", "bytes": "704172" }, { "name": "Groovy", "bytes": "476" }, { "name": "HTML", "bytes": "921680" }, { "name": "Java", "bytes": "25074158" }, { "name": "JavaScript", "bytes": "12894604" }, { "name": "PLSQL", "bytes": "1426" }, { "name": "PLpgSQL", "bytes": "5155" }, { "name": "Shell", "bytes": "19565" } ], "symlink_target": "" }
// Google Analytics (classic ga.js) tracking snippet: queue the account id and
// a pageview, then load the analytics script asynchronously.
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-55159746-3']);
_gaq.push(['_trackPageview']);

(function() {
  // Inject the ga.js loader before the first <script> tag on the page;
  // async so it does not block rendering.
  var ga = document.createElement('script');
  ga.type = 'text/javascript';
  ga.async = true;
  ga.src = 'https://ssl.google-analytics.com/ga.js';
  var s = document.getElementsByTagName('script')[0];
  s.parentNode.insertBefore(ga, s);
})();
{ "content_hash": "f8f4968c5b04bc92abc451add4e453fd", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 53, "avg_line_length": 32.27272727272727, "alnum_prop": 0.6422535211267606, "repo_name": "sagargarg/counter", "id": "71f06376a915f8439b2abe5aff3bd89785ca764e", "size": "355", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "popup.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1249" }, { "name": "HTML", "bytes": "526" }, { "name": "JavaScript", "bytes": "2111" } ], "symlink_target": "" }
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "net/socket/transport_client_socket_pool.h" #include <algorithm> #include <utility> #include "base/bind.h" #include "base/check_op.h" #include "base/compiler_specific.h" #include "base/format_macros.h" #include "base/location.h" #include "base/memory/ptr_util.h" #include "base/metrics/histogram_macros.h" #include "base/notreached.h" #include "base/single_thread_task_runner.h" #include "base/strings/string_util.h" #include "base/strings/stringprintf.h" #include "base/threading/thread_task_runner_handle.h" #include "base/time/time.h" #include "base/trace_event/memory_allocator_dump.h" #include "base/trace_event/process_memory_dump.h" #include "base/values.h" #include "net/base/net_errors.h" #include "net/base/proxy_server.h" #include "net/log/net_log.h" #include "net/log/net_log_event_type.h" #include "net/log/net_log_source.h" #include "net/traffic_annotation/network_traffic_annotation.h" using base::TimeDelta; namespace net { namespace { // Indicate whether or not we should establish a new transport layer connection // after a certain timeout has passed without receiving an ACK. 
// Process-wide default; when true, pools may start a backup connect job if
// the first connection attempt is slow (see the comment above).
bool g_connect_backup_jobs_enabled = true;

// Builds the NetLog parameter dictionary attached to
// SOCKET_POOL_CONNECT_JOB_CREATED events.
base::Value NetLogCreateConnectJobParams(
    bool backup_job,
    const ClientSocketPool::GroupId* group_id) {
  base::Value dict(base::Value::Type::DICTIONARY);
  dict.SetBoolKey("backup_job", backup_job);
  dict.SetStringKey("group_id", group_id->ToString());
  return dict;
}

}  // namespace

// Human-readable reasons attached to NetLog events when sockets are closed.
const char TransportClientSocketPool::kCertDatabaseChanged[] =
    "Cert database changed";
const char TransportClientSocketPool::kClosedConnectionReturnedToPool[] =
    "Connection was closed when it was returned to the pool";
const char TransportClientSocketPool::kDataReceivedUnexpectedly[] =
    "Data received unexpectedly";
const char TransportClientSocketPool::kIdleTimeLimitExpired[] =
    "Idle time limit expired";
const char TransportClientSocketPool::kNetworkChanged[] = "Network changed";
const char TransportClientSocketPool::kRemoteSideClosedConnection[] =
    "Remote side closed connection";
const char TransportClientSocketPool::kSocketGenerationOutOfDate[] =
    "Socket generation out of date";
const char TransportClientSocketPool::kSocketPoolDestroyed[] =
    "Socket pool destroyed";
const char TransportClientSocketPool::kSslConfigChanged[] =
    "SSL configuration changed";

// ConnectJobFactory implementation that creates the standard ConnectJob
// classes, using SocketParams.
class TransportClientSocketPool::ConnectJobFactoryImpl
    : public TransportClientSocketPool::ConnectJobFactory {
 public:
  ConnectJobFactoryImpl(const ProxyServer& proxy_server,
                        bool is_for_websockets,
                        const CommonConnectJobParams* common_connect_job_params)
      : proxy_server_(proxy_server),
        is_for_websockets_(is_for_websockets),
        common_connect_job_params_(common_connect_job_params) {
    // This class should not be used with WebSockets. Note that
    // |common_connect_job_params| may be nullptr in tests.
    DCHECK(!common_connect_job_params ||
           !common_connect_job_params->websocket_endpoint_lock_manager);
  }

  ~ConnectJobFactoryImpl() override = default;

  // TransportClientSocketPool::ConnectJobFactory methods.
  std::unique_ptr<ConnectJob> NewConnectJob(
      ClientSocketPool::GroupId group_id,
      scoped_refptr<ClientSocketPool::SocketParams> socket_params,
      const base::Optional<NetworkTrafficAnnotationTag>& proxy_annotation_tag,
      RequestPriority request_priority,
      SocketTag socket_tag,
      ConnectJob::Delegate* delegate) const override {
    return CreateConnectJob(group_id, socket_params, proxy_server_,
                            proxy_annotation_tag, is_for_websockets_,
                            common_connect_job_params_, request_priority,
                            socket_tag, delegate);
  }

 private:
  const ProxyServer proxy_server_;
  const bool is_for_websockets_;
  const CommonConnectJobParams* common_connect_job_params_;

  DISALLOW_COPY_AND_ASSIGN(ConnectJobFactoryImpl);
};

TransportClientSocketPool::Request::Request(
    ClientSocketHandle* handle,
    CompletionOnceCallback callback,
    const ProxyAuthCallback& proxy_auth_callback,
    RequestPriority priority,
    const SocketTag& socket_tag,
    RespectLimits respect_limits,
    Flags flags,
    scoped_refptr<SocketParams> socket_params,
    const base::Optional<NetworkTrafficAnnotationTag>& proxy_annotation_tag,
    const NetLogWithSource& net_log)
    : handle_(handle),
      callback_(std::move(callback)),
      proxy_auth_callback_(proxy_auth_callback),
      priority_(priority),
      respect_limits_(respect_limits),
      flags_(flags),
      socket_params_(std::move(socket_params)),
      proxy_annotation_tag_(proxy_annotation_tag),
      net_log_(net_log),
      socket_tag_(socket_tag),
      job_(nullptr) {
  // Requests that bypass the socket limits must run at maximum priority.
  if (respect_limits_ == ClientSocketPool::RespectLimits::DISABLED)
    DCHECK_EQ(priority_, MAXIMUM_PRIORITY);
}

TransportClientSocketPool::Request::~Request() {}

// Binds |job| to this request, bumping the job's priority to the request's
// if they differ. A request holds at most one job at a time.
void TransportClientSocketPool::Request::AssignJob(ConnectJob* job) {
  DCHECK(job);
  DCHECK(!job_);
  job_ = job;
  if (job_->priority() != priority_)
    job_->ChangePriority(priority_);
}

// Detaches and returns the bound ConnectJob; the caller takes responsibility
// for the returned pointer's lifetime.
ConnectJob* TransportClientSocketPool::Request::ReleaseJob() {
  DCHECK(job_);
  ConnectJob* job = job_;
  job_ = nullptr;
  return job;
}

// Public constructor: delegates to the full constructor with the standard
// ConnectJobFactoryImpl, the default used-idle timeout, and backup connect
// jobs enabled.
TransportClientSocketPool::TransportClientSocketPool(
    int max_sockets,
    int max_sockets_per_group,
    base::TimeDelta unused_idle_socket_timeout,
    const ProxyServer& proxy_server,
    bool is_for_websockets,
    const CommonConnectJobParams* common_connect_job_params)
    : TransportClientSocketPool(
          max_sockets,
          max_sockets_per_group,
          unused_idle_socket_timeout,
          ClientSocketPool::used_idle_socket_timeout(),
          proxy_server,
          std::make_unique<ConnectJobFactoryImpl>(proxy_server,
                                                  is_for_websockets,
                                                  common_connect_job_params),
          common_connect_job_params->ssl_client_context,
          true /* connect_backup_jobs_enabled */) {}

TransportClientSocketPool::~TransportClientSocketPool() {
  // Clean up any idle sockets and pending connect jobs. Assert that we have no
  // remaining active sockets or pending requests. They should have all been
  // cleaned up prior to |this| being destroyed.
  FlushWithError(ERR_ABORTED, kSocketPoolDestroyed);
  DCHECK(group_map_.empty());
  DCHECK(pending_callback_map_.empty());
  DCHECK_EQ(0, connecting_socket_count_);
  DCHECK_EQ(0, handed_out_socket_count_);
  CHECK(higher_pools_.empty());

  if (ssl_client_context_)
    ssl_client_context_->RemoveObserver(this);

  NetworkChangeNotifier::RemoveIPAddressObserver(this);
}

// Test-only factory that exposes the full constructor (custom job factory,
// both idle timeouts, explicit backup-job flag).
std::unique_ptr<TransportClientSocketPool>
TransportClientSocketPool::CreateForTesting(
    int max_sockets,
    int max_sockets_per_group,
    base::TimeDelta unused_idle_socket_timeout,
    base::TimeDelta used_idle_socket_timeout,
    const ProxyServer& proxy_server,
    std::unique_ptr<ConnectJobFactory> connect_job_factory,
    SSLClientContext* ssl_client_context,
    bool connect_backup_jobs_enabled) {
  return base::WrapUnique<TransportClientSocketPool>(
      new TransportClientSocketPool(
          max_sockets, max_sockets_per_group, unused_idle_socket_timeout,
          used_idle_socket_timeout, proxy_server,
          std::move(connect_job_factory), ssl_client_context,
          connect_backup_jobs_enabled));
}

TransportClientSocketPool::CallbackResultPair::CallbackResultPair()
    : result(OK) {}

TransportClientSocketPool::CallbackResultPair::CallbackResultPair(
    CompletionOnceCallback callback_in,
    int result_in)
    : callback(std::move(callback_in)), result(result_in) {}
TransportClientSocketPool::CallbackResultPair::CallbackResultPair(
    TransportClientSocketPool::CallbackResultPair&& other) = default;

TransportClientSocketPool::CallbackResultPair&
TransportClientSocketPool::CallbackResultPair::operator=(
    TransportClientSocketPool::CallbackResultPair&& other) = default;

TransportClientSocketPool::CallbackResultPair::~CallbackResultPair() = default;

bool TransportClientSocketPool::IsStalled() const {
  // If fewer than |max_sockets_| are in use, then clearly |this| is not
  // stalled.
  if ((handed_out_socket_count_ + connecting_socket_count_) < max_sockets_)
    return false;
  // So in order to be stalled, |this| must be using at least |max_sockets_| AND
  // |this| must have a request that is actually stalled on the global socket
  // limit. To find such a request, look for a group that has more requests
  // than jobs AND where the number of sockets is less than
  // |max_sockets_per_group_|. (If the number of sockets is equal to
  // |max_sockets_per_group_|, then the request is stalled on the group limit,
  // which does not count.)
  for (auto it = group_map_.begin(); it != group_map_.end(); ++it) {
    if (it->second->CanUseAdditionalSocketSlot(max_sockets_per_group_))
      return true;
  }
  return false;
}

// Registers a pool layered on top of |this| (e.g. one that hands out sockets
// backed by this pool's connections). Duplicate registration is a bug.
void TransportClientSocketPool::AddHigherLayeredPool(
    HigherLayeredPool* higher_pool) {
  CHECK(higher_pool);
  CHECK(!base::Contains(higher_pools_, higher_pool));
  higher_pools_.insert(higher_pool);
}

void TransportClientSocketPool::RemoveHigherLayeredPool(
    HigherLayeredPool* higher_pool) {
  CHECK(higher_pool);
  CHECK(base::Contains(higher_pools_, higher_pool));
  higher_pools_.erase(higher_pool);
}

// Requests a socket for |group_id|. Returns OK (socket handed out
// synchronously), a net error, or ERR_IO_PENDING, in which case the request
// is queued and |callback| is invoked on completion.
int TransportClientSocketPool::RequestSocket(
    const GroupId& group_id,
    scoped_refptr<SocketParams> params,
    const base::Optional<NetworkTrafficAnnotationTag>& proxy_annotation_tag,
    RequestPriority priority,
    const SocketTag& socket_tag,
    RespectLimits respect_limits,
    ClientSocketHandle* handle,
    CompletionOnceCallback callback,
    const ProxyAuthCallback& proxy_auth_callback,
    const NetLogWithSource& net_log) {
  CHECK(callback);
  CHECK(handle);

  NetLogTcpClientSocketPoolRequestedSocket(net_log, group_id);

  std::unique_ptr<Request> request = std::make_unique<Request>(
      handle, std::move(callback), proxy_auth_callback, priority, socket_tag,
      respect_limits, NORMAL, std::move(params), proxy_annotation_tag, net_log);

  // Cleanup any timed-out idle sockets.
  CleanupIdleSockets(false, nullptr /* net_log_reason_utf8 */);

  request->net_log().BeginEvent(NetLogEventType::SOCKET_POOL);

  int rv = RequestSocketInternal(group_id, *request);
  if (rv != ERR_IO_PENDING) {
    // Completed synchronously (success or failure): apply the socket tag on
    // success, log, and drop the request — it was never queued.
    if (rv == OK) {
      request->handle()->socket()->ApplySocketTag(request->socket_tag());
    }
    request->net_log().EndEventWithNetErrorCode(NetLogEventType::SOCKET_POOL,
                                                rv);
    CHECK(!request->handle()->is_initialized());
    request.reset();
  } else {
    // Pending: queue the request on its group.
    Group* group = GetOrCreateGroup(group_id);
    group->InsertUnboundRequest(std::move(request));
    // Have to do this asynchronously, as closing sockets in higher level pools
    // call back in to |this|, which will cause all sorts of fun and exciting
    // re-entrancy issues if the socket pool is doing something else at the
    // time.
    if (group->CanUseAdditionalSocketSlot(max_sockets_per_group_)) {
      base::ThreadTaskRunnerHandle::Get()->PostTask(
          FROM_HERE,
          base::BindOnce(
              &TransportClientSocketPool::TryToCloseSocketsInLayeredPools,
              weak_factory_.GetWeakPtr()));
    }
  }
  return rv;
}

// Warms up the pool by pre-connecting up to |num_sockets| sockets for
// |group_id| (capped at the per-group limit). Best-effort: errors end the
// loop but are not reported to the caller.
void TransportClientSocketPool::RequestSockets(
    const GroupId& group_id,
    scoped_refptr<SocketParams> params,
    const base::Optional<NetworkTrafficAnnotationTag>& proxy_annotation_tag,
    int num_sockets,
    const NetLogWithSource& net_log) {
  if (net_log.IsCapturing()) {
    // TODO(eroman): Split out the host and port parameters.
    net_log.AddEvent(NetLogEventType::TCP_CLIENT_SOCKET_POOL_REQUESTED_SOCKETS,
                     [&] { return NetLogGroupIdParams(group_id); });
  }

  // A handle-less request marks this as a preconnect in
  // RequestSocketInternal().
  Request request(nullptr /* no handle */, CompletionOnceCallback(),
                  ProxyAuthCallback(), IDLE, SocketTag(),
                  RespectLimits::ENABLED, NO_IDLE_SOCKETS, std::move(params),
                  proxy_annotation_tag, net_log);

  // Cleanup any timed-out idle sockets.
  CleanupIdleSockets(false, nullptr /* net_log_reason_utf8 */);

  if (num_sockets > max_sockets_per_group_) {
    num_sockets = max_sockets_per_group_;
  }

  request.net_log().BeginEventWithIntParams(
      NetLogEventType::SOCKET_POOL_CONNECTING_N_SOCKETS, "num_sockets",
      num_sockets);

  Group* group = GetOrCreateGroup(group_id);

  // RequestSocketsInternal() may delete the group.
  bool deleted_group = false;

  int rv = OK;
  // Keep requesting sockets until the group has |num_sockets| active slots or
  // the iteration budget runs out; bail out on any synchronous error.
  for (int num_iterations_left = num_sockets;
       group->NumActiveSocketSlots() < num_sockets && num_iterations_left > 0;
       num_iterations_left--) {
    rv = RequestSocketInternal(group_id, request);
    if (rv < 0 && rv != ERR_IO_PENDING) {
      // We're encountering a synchronous error. Give up.
      if (!base::Contains(group_map_, group_id))
        deleted_group = true;
      break;
    }
    if (!base::Contains(group_map_, group_id)) {
      // Unexpected. The group should only be getting deleted on synchronous
      // error.
      NOTREACHED();
      deleted_group = true;
      break;
    }
  }

  if (!deleted_group && group->IsEmpty())
    RemoveGroup(group_id);

  // Pending connects are expected for a preconnect; report them as success.
  if (rv == ERR_IO_PENDING)
    rv = OK;

  request.net_log().EndEventWithNetErrorCode(
      NetLogEventType::SOCKET_POOL_CONNECTING_N_SOCKETS, rv);
}

// Core request path shared by RequestSocket() and RequestSockets()
// (preconnect when |request| has no handle). Tries, in order: an idle socket,
// an unassigned in-flight ConnectJob, then a brand-new ConnectJob — subject
// to the per-group and global socket limits.
int TransportClientSocketPool::RequestSocketInternal(const GroupId& group_id,
                                                     const Request& request) {
  ClientSocketHandle* const handle = request.handle();
  const bool preconnecting = !handle;

  Group* group = nullptr;
  auto group_it = group_map_.find(group_id);
  if (group_it != group_map_.end()) {
    group = group_it->second;

    if (!(request.flags() & NO_IDLE_SOCKETS)) {
      // Try to reuse a socket.
      if (AssignIdleSocketToRequest(request, group))
        return OK;
    }

    // If there are more ConnectJobs than pending requests, don't need to do
    // anything. Can just wait for the extra job to connect, and then assign it
    // to the request.
    if (!preconnecting && group->TryToUseNeverAssignedConnectJob())
      return ERR_IO_PENDING;

    // Can we make another active socket now?
    if (!group->HasAvailableSocketSlot(max_sockets_per_group_) &&
        request.respect_limits() == RespectLimits::ENABLED) {
      // TODO(willchan): Consider whether or not we need to close a socket in a
      // higher layered group. I don't think this makes sense since we would
      // just reuse that socket then if we needed one and wouldn't make it down
      // to this layer.
      request.net_log().AddEvent(
          NetLogEventType::SOCKET_POOL_STALLED_MAX_SOCKETS_PER_GROUP);
      return ERR_IO_PENDING;
    }
  }

  if (ReachedMaxSocketsLimit() &&
      request.respect_limits() == RespectLimits::ENABLED) {
    // NOTE(mmenke): Wonder if we really need different code for each case
    // here. Only reason for them now seems to be preconnects.
    if (idle_socket_count_ > 0) {
      // There's an idle socket in this pool. Either that's because there's
      // still one in this group, but we got here due to preconnecting
      // bypassing idle sockets, or because there's an idle socket in another
      // group.
      bool closed = CloseOneIdleSocketExceptInGroup(group);
      if (preconnecting && !closed)
        return ERR_PRECONNECT_MAX_SOCKET_LIMIT;
    } else {
      // We could check if we really have a stalled group here, but it
      // requires a scan of all groups, so just flip a flag here, and do the
      // check later.
      request.net_log().AddEvent(
          NetLogEventType::SOCKET_POOL_STALLED_MAX_SOCKETS);
      return ERR_IO_PENDING;
    }
  }

  // We couldn't find a socket to reuse, and there's space to allocate one,
  // so allocate and connect a new one.
  group = GetOrCreateGroup(group_id);

  connecting_socket_count_++;

  std::unique_ptr<ConnectJob> owned_connect_job(
      connect_job_factory_->NewConnectJob(
          group_id, request.socket_params(), request.proxy_annotation_tag(),
          request.priority(), request.socket_tag(), group));
  owned_connect_job->net_log().AddEvent(
      NetLogEventType::SOCKET_POOL_CONNECT_JOB_CREATED, [&] {
        return NetLogCreateConnectJobParams(false /* backup_job */, &group_id);
      });
  ConnectJob* connect_job = owned_connect_job.get();
  bool was_group_empty = group->IsEmpty();
  // Need to add the ConnectJob to the group before connecting, to ensure
  // |group| is not empty.  Otherwise, if the ConnectJob calls back into the
  // socket pool with a new socket request (Like for DNS over HTTPS), the pool
  // would then notice the group is empty, and delete it. That would result in a
  // UAF when group is referenced later in this function.
  group->AddJob(std::move(owned_connect_job), preconnecting);
  int rv = connect_job->Connect();
  if (rv == ERR_IO_PENDING) {
    // If we didn't have any sockets in this group, set a timer for potentially
    // creating a new one.  If the SYN is lost, this backup socket may complete
    // before the slow socket, improving end user latency.
    if (connect_backup_jobs_enabled_ && was_group_empty)
      group->StartBackupJobTimer(group_id);
    return rv;
  }

  // Synchronous completion: hand the socket out (or park it as idle when
  // preconnecting), then retire the job.
  LogBoundConnectJobToRequest(connect_job->net_log().source(), request);
  if (preconnecting) {
    if (rv == OK)
      AddIdleSocket(connect_job->PassSocket(), group);
  } else {
    DCHECK(handle);
    if (rv != OK)
      handle->SetAdditionalErrorState(connect_job);
    std::unique_ptr<StreamSocket> socket = connect_job->PassSocket();
    if (socket) {
      HandOutSocket(std::move(socket), ClientSocketHandle::UNUSED,
                    connect_job->connect_timing(), handle,
                    base::TimeDelta() /* idle_time */, group,
                    request.net_log());
    }
  }
  RemoveConnectJob(connect_job, group);
  if (group->IsEmpty())
    RemoveGroup(group_id);

  return rv;
}

bool TransportClientSocketPool::AssignIdleSocketToRequest(
    const Request& request,
    Group* group) {
  std::list<IdleSocket>* idle_sockets = group->mutable_idle_sockets();
  auto idle_socket_it = idle_sockets->end();

  // Iterate through the idle sockets forwards (oldest to newest)
  //   * Delete any disconnected ones.
  //   * If we find a used idle socket, assign to |idle_socket|.  At the end,
  //   the |idle_socket_it| will be set to the newest used idle socket.
  for (auto it = idle_sockets->begin(); it != idle_sockets->end();) {
    // Check whether socket is usable. Note that it's unlikely that the socket
    // is not usable because this function is always invoked after a
    // reusability check, but in theory socket can be closed asynchronously.
    const char* net_log_reason_utf8;
    if (!it->IsUsable(&net_log_reason_utf8)) {
      it->socket->NetLog().AddEventWithStringParams(
          NetLogEventType::SOCKET_POOL_CLOSING_SOCKET, "reason",
          net_log_reason_utf8);
      DecrementIdleCount();
      delete it->socket;
      it = idle_sockets->erase(it);
      continue;
    }

    if (it->socket->WasEverUsed()) {
      // We found one we can reuse!
      idle_socket_it = it;
    }

    ++it;
  }

  // If we haven't found an idle socket, that means there are no used idle
  // sockets.  Pick the oldest (first) idle socket (FIFO).
if (idle_socket_it == idle_sockets->end() && !idle_sockets->empty()) idle_socket_it = idle_sockets->begin(); if (idle_socket_it != idle_sockets->end()) { DecrementIdleCount(); base::TimeDelta idle_time = base::TimeTicks::Now() - idle_socket_it->start_time; IdleSocket idle_socket = *idle_socket_it; idle_sockets->erase(idle_socket_it); // TODO(davidben): If |idle_time| is under some low watermark, consider // treating as UNUSED rather than UNUSED_IDLE. This will avoid // HttpNetworkTransaction retrying on some errors. ClientSocketHandle::SocketReuseType reuse_type = idle_socket.socket->WasEverUsed() ? ClientSocketHandle::REUSED_IDLE : ClientSocketHandle::UNUSED_IDLE; // If this socket took multiple attempts to obtain, don't report those // every time it's reused, just to the first user. if (idle_socket.socket->WasEverUsed()) idle_socket.socket->ClearConnectionAttempts(); HandOutSocket(std::unique_ptr<StreamSocket>(idle_socket.socket), reuse_type, LoadTimingInfo::ConnectTiming(), request.handle(), idle_time, group, request.net_log()); return true; } return false; } // static void TransportClientSocketPool::LogBoundConnectJobToRequest( const NetLogSource& connect_job_source, const Request& request) { request.net_log().AddEventReferencingSource( NetLogEventType::SOCKET_POOL_BOUND_TO_CONNECT_JOB, connect_job_source); } void TransportClientSocketPool::SetPriority(const GroupId& group_id, ClientSocketHandle* handle, RequestPriority priority) { auto group_it = group_map_.find(group_id); if (group_it == group_map_.end()) { DCHECK(base::Contains(pending_callback_map_, handle)); // The Request has already completed and been destroyed; nothing to // reprioritize. 
return; } group_it->second->SetPriority(handle, priority); } void TransportClientSocketPool::CancelRequest(const GroupId& group_id, ClientSocketHandle* handle, bool cancel_connect_job) { auto callback_it = pending_callback_map_.find(handle); if (callback_it != pending_callback_map_.end()) { int result = callback_it->second.result; pending_callback_map_.erase(callback_it); std::unique_ptr<StreamSocket> socket = handle->PassSocket(); if (socket) { if (result != OK) { socket->Disconnect(); } else if (cancel_connect_job) { // Close the socket if |cancel_connect_job| is true and there are no // other pending requests. Group* group = GetOrCreateGroup(group_id); if (group->unbound_request_count() == 0) socket->Disconnect(); } ReleaseSocket(handle->group_id(), std::move(socket), handle->group_generation()); } return; } CHECK(base::Contains(group_map_, group_id)); Group* group = GetOrCreateGroup(group_id); std::unique_ptr<Request> request = group->FindAndRemoveBoundRequest(handle); if (request) { --connecting_socket_count_; OnAvailableSocketSlot(group_id, group); CheckForStalledSocketGroups(); return; } // Search |unbound_requests_| for matching handle. request = group->FindAndRemoveUnboundRequest(handle); if (request) { request->net_log().AddEvent(NetLogEventType::CANCELLED); request->net_log().EndEvent(NetLogEventType::SOCKET_POOL); // Let the job run, unless |cancel_connect_job| is true, or we're at the // socket limit and there are no other requests waiting on the job. 
    bool reached_limit = ReachedMaxSocketsLimit();
    // Only kill a job if there are more jobs than requests still waiting on
    // them; otherwise some other request would lose its in-flight connect.
    if (group->jobs().size() > group->unbound_request_count() &&
        (cancel_connect_job || reached_limit)) {
      RemoveConnectJob(group->jobs().begin()->get(), group);
      if (group->IsEmpty())
        RemoveGroup(group->group_id());

      if (reached_limit)
        CheckForStalledSocketGroups();
    }
  }
}

// Closes every idle socket in the pool, logging |net_log_reason_utf8| as the
// reason for each closure.
void TransportClientSocketPool::CloseIdleSockets(
    const char* net_log_reason_utf8) {
  CleanupIdleSockets(true, net_log_reason_utf8);
  DCHECK_EQ(0, idle_socket_count_);
}

// Closes all idle sockets belonging to |group_id| only, removing the group if
// that leaves it empty.
void TransportClientSocketPool::CloseIdleSocketsInGroup(
    const GroupId& group_id,
    const char* net_log_reason_utf8) {
  if (idle_socket_count_ == 0)
    return;
  auto it = group_map_.find(group_id);
  if (it == group_map_.end())
    return;
  CleanupIdleSocketsInGroup(true, it->second, base::TimeTicks::Now(),
                            net_log_reason_utf8);
  if (it->second->IsEmpty())
    RemoveGroup(it);
}

// Total number of idle sockets across all groups.
int TransportClientSocketPool::IdleSocketCount() const {
  return idle_socket_count_;
}

// Number of idle sockets in one group; the group must exist.
size_t TransportClientSocketPool::IdleSocketCountInGroup(
    const GroupId& group_id) const {
  auto i = group_map_.find(group_id);
  CHECK(i != group_map_.end());
  return i->second->idle_sockets().size();
}

// Reports the load state of |handle|'s request: connecting if its callback is
// already pending, the ConnectJob's state if one is assigned, otherwise
// whether it is stalled on the global pool or on an available group slot.
LoadState TransportClientSocketPool::GetLoadState(
    const GroupId& group_id,
    const ClientSocketHandle* handle) const {
  if (base::Contains(pending_callback_map_, handle))
    return LOAD_STATE_CONNECTING;

  auto group_it = group_map_.find(group_id);
  if (group_it == group_map_.end()) {
    // TODO(mmenke):  This is actually reached in the wild, for unknown reasons.
    // Would be great to understand why, and if it's a bug, fix it.  If not,
    // should have a test for that case.
    NOTREACHED();
    return LOAD_STATE_IDLE;
  }

  const Group& group = *group_it->second;

  ConnectJob* job = group.GetConnectJobForHandle(handle);
  if (job)
    return job->GetLoadState();

  if (group.CanUseAdditionalSocketSlot(max_sockets_per_group_))
    return LOAD_STATE_WAITING_FOR_STALLED_SOCKET_POOL;

  return LOAD_STATE_WAITING_FOR_AVAILABLE_SOCKET;
}

// Builds a debugging dictionary describing the pool (counters, limits) and
// each group (pending requests, idle sockets, connect jobs, stalled state).
base::Value TransportClientSocketPool::GetInfoAsValue(
    const std::string& name,
    const std::string& type) const {
  // TODO(mmenke): This currently doesn't return bound Requests or ConnectJobs.
  base::Value dict(base::Value::Type::DICTIONARY);
  dict.SetStringKey("name", name);
  dict.SetStringKey("type", type);
  dict.SetIntKey("handed_out_socket_count", handed_out_socket_count_);
  dict.SetIntKey("connecting_socket_count", connecting_socket_count_);
  dict.SetIntKey("idle_socket_count", idle_socket_count_);
  dict.SetIntKey("max_socket_count", max_sockets_);
  dict.SetIntKey("max_sockets_per_group", max_sockets_per_group_);

  if (group_map_.empty())
    return dict;

  base::Value all_groups_dict(base::Value::Type::DICTIONARY);
  for (const auto& entry : group_map_) {
    const Group* group = entry.second;
    base::Value group_dict(base::Value::Type::DICTIONARY);

    group_dict.SetIntKey("pending_request_count",
                         group->unbound_request_count());
    if (group->has_unbound_requests()) {
      group_dict.SetStringKey(
          "top_pending_priority",
          RequestPriorityToString(group->TopPendingPriority()));
    }

    group_dict.SetIntKey("active_socket_count", group->active_socket_count());

    // Idle sockets and connect jobs are identified by their NetLog source ids.
    std::vector<base::Value> idle_socket_list;
    for (const auto& idle_socket : group->idle_sockets()) {
      int source_id = idle_socket.socket->NetLog().source().id;
      idle_socket_list.push_back(base::Value(source_id));
    }
    group_dict.SetKey("idle_sockets", base::Value(std::move(idle_socket_list)));

    std::vector<base::Value> connect_jobs_list;
    for (const auto& job : group->jobs()) {
      int source_id = job->net_log().source().id;
      connect_jobs_list.push_back(base::Value(source_id));
    }
    group_dict.SetKey("connect_jobs",
                      base::Value(std::move(connect_jobs_list)));

    group_dict.SetBoolKey("is_stalled", group->CanUseAdditionalSocketSlot(
                                            max_sockets_per_group_));
    group_dict.SetBoolKey("backup_job_timer_is_running",
                          group->BackupJobTimerIsRunning());

    all_groups_dict.SetKey(entry.first.ToString(), std::move(group_dict));
  }
  dict.SetKey("groups", std::move(all_groups_dict));
  return dict;
}

// Aggregates memory stats over all idle sockets and, if any exist, emits a
// "socket_pool" allocator dump under |parent_dump_absolute_name|.
void TransportClientSocketPool::DumpMemoryStats(
    base::trace_event::ProcessMemoryDump* pmd,
    const std::string& parent_dump_absolute_name) const {
  size_t socket_count = 0;
  size_t total_size = 0;
  size_t buffer_size = 0;
  size_t cert_count = 0;
  size_t cert_size = 0;
  for (const auto& kv : group_map_) {
    for (const auto& socket : kv.second->idle_sockets()) {
      StreamSocket::SocketMemoryStats stats;
      socket.socket->DumpMemoryStats(&stats);
      total_size += stats.total_size;
      buffer_size += stats.buffer_size;
      cert_count += stats.cert_count;
      cert_size += stats.cert_size;
      ++socket_count;
    }
  }

  // Only create a MemoryAllocatorDump if there is at least one idle socket
  if (socket_count > 0) {
    base::trace_event::MemoryAllocatorDump* socket_pool_dump =
        pmd->CreateAllocatorDump(base::StringPrintf(
            "%s/socket_pool", parent_dump_absolute_name.c_str()));
    socket_pool_dump->AddScalar(
        base::trace_event::MemoryAllocatorDump::kNameSize,
        base::trace_event::MemoryAllocatorDump::kUnitsBytes, total_size);
    socket_pool_dump->AddScalar(
        base::trace_event::MemoryAllocatorDump::kNameObjectCount,
        base::trace_event::MemoryAllocatorDump::kUnitsObjects, socket_count);
    socket_pool_dump->AddScalar(
        "buffer_size", base::trace_event::MemoryAllocatorDump::kUnitsBytes,
        buffer_size);
    socket_pool_dump->AddScalar(
        "cert_count", base::trace_event::MemoryAllocatorDump::kUnitsObjects,
        cert_count);
    socket_pool_dump->AddScalar(
        "cert_size", base::trace_event::MemoryAllocatorDump::kUnitsBytes,
        cert_size);
  }
}

// Returns whether this idle socket may still be reused. On false, sets
// |*net_log_reason_utf8| to the closure reason. A previously-used socket must
// be connected *and* idle (unexpected inbound data disqualifies it); a
// never-used socket only needs to be connected.
bool TransportClientSocketPool::IdleSocket::IsUsable(
    const char** net_log_reason_utf8) const {
  DCHECK(net_log_reason_utf8);
  if (socket->WasEverUsed()) {
    if (!socket->IsConnectedAndIdle()) {
      if (!socket->IsConnected()) {
        *net_log_reason_utf8 = kRemoteSideClosedConnection;
      } else {
        *net_log_reason_utf8 = kDataReceivedUnexpectedly;
      }
      return false;
    }
    return true;
  }
  if (!socket->IsConnected()) {
    *net_log_reason_utf8 = kRemoteSideClosedConnection;
    return false;
  }
  return true;
}

TransportClientSocketPool::TransportClientSocketPool(
    int max_sockets,
    int max_sockets_per_group,
    base::TimeDelta unused_idle_socket_timeout,
    base::TimeDelta used_idle_socket_timeout,
    const ProxyServer& proxy_server,
    std::unique_ptr<ConnectJobFactory> connect_job_factory,
    SSLClientContext* ssl_client_context,
    bool connect_backup_jobs_enabled)
    : idle_socket_count_(0),
      connecting_socket_count_(0),
      handed_out_socket_count_(0),
      max_sockets_(max_sockets),
      max_sockets_per_group_(max_sockets_per_group),
      unused_idle_socket_timeout_(unused_idle_socket_timeout),
      used_idle_socket_timeout_(used_idle_socket_timeout),
      proxy_server_(proxy_server),
      connect_job_factory_(std::move(connect_job_factory)),
      // Backup jobs require both the per-pool flag and the process-wide
      // g_connect_backup_jobs_enabled toggle.
      connect_backup_jobs_enabled_(connect_backup_jobs_enabled &&
                                   g_connect_backup_jobs_enabled),
      ssl_client_context_(ssl_client_context) {
  DCHECK_LE(0, max_sockets_per_group);
  DCHECK_LE(max_sockets_per_group, max_sockets);

  // Observe network/SSL-config changes so stale sockets can be flushed.
  NetworkChangeNotifier::AddIPAddressObserver(this);

  if (ssl_client_context_)
    ssl_client_context_->AddObserver(this);
}

void TransportClientSocketPool::OnSSLConfigChanged(
    bool is_cert_database_change) {
  // When the user changes the SSL config, flush all idle sockets so they won't
  // get re-used.
  if (is_cert_database_change) {
    FlushWithError(ERR_CERT_DATABASE_CHANGED, kCertDatabaseChanged);
  } else {
    FlushWithError(ERR_NETWORK_CHANGED, kNetworkChanged);
  }
}

// Refreshes (invalidates) groups whose SSL configuration for |server| has
// changed: either every group (when |server| is the pool's HTTPS/QUIC proxy)
// or just the SSL groups whose destination is |server|.
void TransportClientSocketPool::OnSSLConfigForServerChanged(
    const HostPortPair& server) {
  // Current time value. Retrieving it once at the function start rather than
  // inside the inner loop, since it shouldn't change by any meaningful amount.
// // TODO(davidben): This value is not actually needed because // CleanupIdleSocketsInGroup() is called with |force| = true. Tidy up // interfaces so the parameter is not necessary. base::TimeTicks now = base::TimeTicks::Now(); // If the proxy is |server| and uses SSL settings (HTTPS or QUIC), refresh // every group. bool proxy_matches = proxy_server_.is_http_like() && !proxy_server_.is_http() && proxy_server_.host_port_pair() == server; bool refreshed_any = false; for (auto it = group_map_.begin(); it != group_map_.end();) { auto to_refresh = it++; if (proxy_matches || (to_refresh->first.socket_type() == SocketType::kSsl && to_refresh->first.destination() == server)) { refreshed_any = true; // Note this call may destroy the group and invalidate |to_refresh|. RefreshGroup(to_refresh, now, kSslConfigChanged); } } if (refreshed_any) { // Check to see if any group can use the freed up socket slots. It would be // more efficient to give the slots to the refreshed groups, if the still // exists and need them, but this should be rare enough that it doesn't // matter. This will also make sure the slots are given to the group with // the highest priority request without an assigned ConnectJob. CheckForStalledSocketGroups(); } } bool TransportClientSocketPool::HasGroup(const GroupId& group_id) const { return base::Contains(group_map_, group_id); } void TransportClientSocketPool::CleanupIdleSockets( bool force, const char* net_log_reason_utf8) { if (idle_socket_count_ == 0) return; // Current time value. Retrieving it once at the function start rather than // inside the inner loop, since it shouldn't change by any meaningful amount. base::TimeTicks now = base::TimeTicks::Now(); for (auto i = group_map_.begin(); i != group_map_.end();) { Group* group = i->second; CleanupIdleSocketsInGroup(force, group, now, net_log_reason_utf8); // Delete group if no longer needed. 
if (group->IsEmpty()) { auto old = i++; RemoveGroup(old); } else { ++i; } } } bool TransportClientSocketPool::CloseOneIdleSocket() { if (idle_socket_count_ == 0) return false; return CloseOneIdleSocketExceptInGroup(nullptr); } bool TransportClientSocketPool::CloseOneIdleConnectionInHigherLayeredPool() { // This pool doesn't have any idle sockets. It's possible that a pool at a // higher layer is holding one of this sockets active, but it's actually idle. // Query the higher layers. for (auto it = higher_pools_.begin(); it != higher_pools_.end(); ++it) { if ((*it)->CloseOneIdleConnection()) return true; } return false; } void TransportClientSocketPool::CleanupIdleSocketsInGroup( bool force, Group* group, const base::TimeTicks& now, const char* net_log_reason_utf8) { // If |force| is true, a reason must be provided. DCHECK(!force || net_log_reason_utf8); auto idle_socket_it = group->mutable_idle_sockets()->begin(); while (idle_socket_it != group->idle_sockets().end()) { bool should_clean_up = force; const char* reason_for_closing_socket = net_log_reason_utf8; base::TimeDelta timeout = idle_socket_it->socket->WasEverUsed() ? used_idle_socket_timeout_ : unused_idle_socket_timeout_; // Timeout errors take precedence over the reason for flushing sockets in // the group, if applicable. if (now - idle_socket_it->start_time >= timeout) { should_clean_up = true; reason_for_closing_socket = kIdleTimeLimitExpired; } // Usability errors take precedence over over other errors. 
if (!idle_socket_it->IsUsable(&reason_for_closing_socket)) should_clean_up = true; if (should_clean_up) { DCHECK(reason_for_closing_socket); idle_socket_it->socket->NetLog().AddEventWithStringParams( NetLogEventType::SOCKET_POOL_CLOSING_SOCKET, "reason", reason_for_closing_socket); delete idle_socket_it->socket; idle_socket_it = group->mutable_idle_sockets()->erase(idle_socket_it); DecrementIdleCount(); } else { DCHECK(!reason_for_closing_socket); ++idle_socket_it; } } } TransportClientSocketPool::Group* TransportClientSocketPool::GetOrCreateGroup( const GroupId& group_id) { auto it = group_map_.find(group_id); if (it != group_map_.end()) return it->second; Group* group = new Group(group_id, this); group_map_[group_id] = group; return group; } void TransportClientSocketPool::RemoveGroup(const GroupId& group_id) { auto it = group_map_.find(group_id); CHECK(it != group_map_.end()); RemoveGroup(it); } void TransportClientSocketPool::RemoveGroup(GroupMap::iterator it) { delete it->second; group_map_.erase(it); } // static bool TransportClientSocketPool::connect_backup_jobs_enabled() { return g_connect_backup_jobs_enabled; } // static bool TransportClientSocketPool::set_connect_backup_jobs_enabled(bool enabled) { bool old_value = g_connect_backup_jobs_enabled; g_connect_backup_jobs_enabled = enabled; return old_value; } void TransportClientSocketPool::IncrementIdleCount() { ++idle_socket_count_; } void TransportClientSocketPool::DecrementIdleCount() { --idle_socket_count_; } void TransportClientSocketPool::ReleaseSocket( const GroupId& group_id, std::unique_ptr<StreamSocket> socket, int64_t group_generation) { auto i = group_map_.find(group_id); CHECK(i != group_map_.end()); Group* group = i->second; CHECK_GT(handed_out_socket_count_, 0); handed_out_socket_count_--; CHECK_GT(group->active_socket_count(), 0); group->DecrementActiveSocketCount(); bool can_resuse_socket = false; base::StringPiece not_reusable_reason; if (!socket->IsConnectedAndIdle()) { if 
(!socket->IsConnected()) { not_reusable_reason = kClosedConnectionReturnedToPool; } else { not_reusable_reason = kDataReceivedUnexpectedly; } } else if (group_generation != group->generation()) { not_reusable_reason = kSocketGenerationOutOfDate; } else { can_resuse_socket = true; } if (can_resuse_socket) { DCHECK(not_reusable_reason.empty()); // Add it to the idle list. AddIdleSocket(std::move(socket), group); OnAvailableSocketSlot(group_id, group); } else { DCHECK(!not_reusable_reason.empty()); socket->NetLog().AddEventWithStringParams( NetLogEventType::SOCKET_POOL_CLOSING_SOCKET, "reason", not_reusable_reason); if (group->IsEmpty()) RemoveGroup(i); socket.reset(); } CheckForStalledSocketGroups(); } void TransportClientSocketPool::CheckForStalledSocketGroups() { // Loop until there's nothing more to do. while (true) { // If we have idle sockets, see if we can give one to the top-stalled group. GroupId top_group_id; Group* top_group = nullptr; if (!FindTopStalledGroup(&top_group, &top_group_id)) return; if (ReachedMaxSocketsLimit()) { if (idle_socket_count_ > 0) { CloseOneIdleSocket(); } else { // We can't activate more sockets since we're already at our global // limit. return; } } // Note that this may delete top_group. OnAvailableSocketSlot(top_group_id, top_group); } } // Search for the highest priority pending request, amongst the groups that // are not at the |max_sockets_per_group_| limit. Note: for requests with // the same priority, the winner is based on group hash ordering (and not // insertion order). 
// Finds the stalled group with the highest-priority pending request. With
// non-null out-params, fills them and returns whether any stalled group
// exists; with both null, returns early on the first stalled group found.
bool TransportClientSocketPool::FindTopStalledGroup(Group** group,
                                                    GroupId* group_id) const {
  // Out params are either both set or both unset.
  CHECK((group && group_id) || (!group && !group_id));
  Group* top_group = nullptr;
  const GroupId* top_group_id = nullptr;
  bool has_stalled_group = false;
  for (auto i = group_map_.begin(); i != group_map_.end(); ++i) {
    Group* curr_group = i->second;
    if (!curr_group->has_unbound_requests())
      continue;
    if (curr_group->CanUseAdditionalSocketSlot(max_sockets_per_group_)) {
      if (!group)
        return true;
      has_stalled_group = true;
      bool has_higher_priority =
          !top_group ||
          curr_group->TopPendingPriority() > top_group->TopPendingPriority();
      if (has_higher_priority) {
        top_group = curr_group;
        top_group_id = &i->first;
      }
    }
  }

  if (top_group) {
    CHECK(group);
    *group = top_group;
    *group_id = *top_group_id;
  } else {
    CHECK(!has_stalled_group);
  }
  return has_stalled_group;
}

void TransportClientSocketPool::OnIPAddressChanged() {
  FlushWithError(ERR_NETWORK_CHANGED, kNetworkChanged);
}

// Fails every in-flight job and queued request with |error|, closes all idle
// sockets (logging |net_log_reason_utf8|), and bumps each group's generation
// so sockets already handed out are not re-idled on release.
void TransportClientSocketPool::FlushWithError(
    int error,
    const char* net_log_reason_utf8) {
  CancelAllConnectJobs();
  CloseIdleSockets(net_log_reason_utf8);
  CancelAllRequestsWithError(error);
  for (const auto& group : group_map_) {
    group.second->IncrementGeneration();
  }
}

// Removes an unbound |job| from |group| and updates the connecting counter.
void TransportClientSocketPool::RemoveConnectJob(ConnectJob* job,
                                                 Group* group) {
  CHECK_GT(connecting_socket_count_, 0);
  connecting_socket_count_--;

  DCHECK(group);
  group->RemoveUnboundJob(job);
}

// Called when a socket slot in |group| frees up: removes the group if empty,
// otherwise tries to serve its next queued request.
void TransportClientSocketPool::OnAvailableSocketSlot(const GroupId& group_id,
                                                      Group* group) {
  DCHECK(base::Contains(group_map_, group_id));
  if (group->IsEmpty()) {
    RemoveGroup(group_id);
  } else if (group->has_unbound_requests()) {
    ProcessPendingRequest(group_id, group);
  }
}

// Attempts to satisfy |group|'s next unbound request. On synchronous
// completion (success or error), pops the request and queues its callback.
void TransportClientSocketPool::ProcessPendingRequest(const GroupId& group_id,
                                                      Group* group) {
  const Request* next_request = group->GetNextUnboundRequest();
  DCHECK(next_request);

  // If the group has no idle sockets, and can't make use of an additional
  // slot, either because it's at the limit or because it's at the socket per
  // group limit, then there's nothing to do.
  if (group->idle_sockets().empty() &&
      !group->CanUseAdditionalSocketSlot(max_sockets_per_group_)) {
    return;
  }

  int rv = RequestSocketInternal(group_id, *next_request);
  if (rv != ERR_IO_PENDING) {
    std::unique_ptr<Request> request = group->PopNextUnboundRequest();
    DCHECK(request);
    if (group->IsEmpty())
      RemoveGroup(group_id);

    request->net_log().EndEventWithNetErrorCode(NetLogEventType::SOCKET_POOL,
                                                rv);
    InvokeUserCallbackLater(request->handle(), request->release_callback(), rv,
                            request->socket_tag());
  }
}

// Attaches |socket| to |handle| (recording reuse type, idle time, group
// generation and connect timing), logs the binding, and updates the pool's
// handed-out accounting.
void TransportClientSocketPool::HandOutSocket(
    std::unique_ptr<StreamSocket> socket,
    ClientSocketHandle::SocketReuseType reuse_type,
    const LoadTimingInfo::ConnectTiming& connect_timing,
    ClientSocketHandle* handle,
    base::TimeDelta idle_time,
    Group* group,
    const NetLogWithSource& net_log) {
  DCHECK(socket);
  handle->SetSocket(std::move(socket));
  handle->set_reuse_type(reuse_type);
  handle->set_idle_time(idle_time);
  handle->set_group_generation(group->generation());
  handle->set_connect_timing(connect_timing);

  if (reuse_type == ClientSocketHandle::REUSED_IDLE) {
    net_log.AddEventWithIntParams(
        NetLogEventType::SOCKET_POOL_REUSED_AN_EXISTING_SOCKET, "idle_ms",
        static_cast<int>(idle_time.InMilliseconds()));
  }

  if (reuse_type != ClientSocketHandle::UNUSED) {
    // The socket being handed out is no longer considered idle, but was
    // considered idle until just before this method was called.
    UMA_HISTOGRAM_CUSTOM_COUNTS("Net.Socket.NumIdleSockets",
                                idle_socket_count_ + 1, 1, 256, 50);
  }

  net_log.AddEventReferencingSource(
      NetLogEventType::SOCKET_POOL_BOUND_TO_SOCKET,
      handle->socket()->NetLog().source());

  handed_out_socket_count_++;
  group->IncrementActiveSocketCount();
}

// Moves |socket| onto |group|'s idle list, taking raw ownership (idle sockets
// are stored as raw owning pointers and deleted when cleaned up).
void TransportClientSocketPool::AddIdleSocket(
    std::unique_ptr<StreamSocket> socket,
    Group* group) {
  DCHECK(socket);
  IdleSocket idle_socket;
  idle_socket.socket = socket.release();
  idle_socket.start_time = base::TimeTicks::Now();

  group->mutable_idle_sockets()->push_back(idle_socket);
  IncrementIdleCount();
}

// Aborts every unbound ConnectJob in every group, removing groups left empty.
void TransportClientSocketPool::CancelAllConnectJobs() {
  for (auto i = group_map_.begin(); i != group_map_.end();) {
    Group* group = i->second;
    connecting_socket_count_ -= group->jobs().size();
    group->RemoveAllUnboundJobs();

    // Delete group if no longer needed.
    if (group->IsEmpty()) {
      auto old = i++;
      RemoveGroup(old);
    } else {
      ++i;
    }
  }
}

// Fails every queued (unbound) request with |error| via a deferred callback,
// and marks bound requests to fail later. Removes groups left empty.
void TransportClientSocketPool::CancelAllRequestsWithError(int error) {
  for (auto i = group_map_.begin(); i != group_map_.end();) {
    Group* group = i->second;

    while (true) {
      std::unique_ptr<Request> request = group->PopNextUnboundRequest();
      if (!request)
        break;
      InvokeUserCallbackLater(request->handle(), request->release_callback(),
                              error, request->socket_tag());
    }

    // Mark bound connect jobs as needing to fail. Can't fail them immediately
    // because they may have access to objects owned by the ConnectJob, and
    // could access them if a user callback invocation is queued. It would also
    // result in the consumer handling two messages at once, which in general
    // isn't safe for a lot of code.
    group->SetPendingErrorForAllBoundRequests(error);

    // Delete group if no longer needed.
    if (group->IsEmpty()) {
      auto old = i++;
      RemoveGroup(old);
    } else {
      ++i;
    }
  }
}

// True when handed-out + connecting + idle sockets meet or exceed the global
// |max_sockets_| limit.
bool TransportClientSocketPool::ReachedMaxSocketsLimit() const {
  // Each connecting socket will eventually connect and be handed out.
  int total =
      handed_out_socket_count_ + connecting_socket_count_ + idle_socket_count_;
  // There can be more sockets than the limit since some requests can ignore
  // the limit
  if (total < max_sockets_)
    return false;
  return true;
}

// Closes the oldest idle socket of the first group (in map order) other than
// |exception_group| that has one; must only be called when an idle socket
// exists. Returns whether a socket was closed.
bool TransportClientSocketPool::CloseOneIdleSocketExceptInGroup(
    const Group* exception_group) {
  CHECK_GT(idle_socket_count_, 0);

  for (auto i = group_map_.begin(); i != group_map_.end(); ++i) {
    Group* group = i->second;
    if (exception_group == group)
      continue;
    std::list<IdleSocket>* idle_sockets = group->mutable_idle_sockets();

    if (!idle_sockets->empty()) {
      delete idle_sockets->front().socket;
      idle_sockets->pop_front();
      DecrementIdleCount();
      if (group->IsEmpty())
        RemoveGroup(i);

      return true;
    }
  }

  return false;
}

// Completion handler for a ConnectJob in |group|. Routes the result either to
// the request the job was bound to, or to the group's next unbound request;
// with no waiting request, a successful socket is re-idled instead.
void TransportClientSocketPool::OnConnectJobComplete(Group* group,
                                                     int result,
                                                     ConnectJob* job) {
  DCHECK_NE(ERR_IO_PENDING, result);
  DCHECK(group_map_.find(group->group_id()) != group_map_.end());
  DCHECK_EQ(group, group_map_[group->group_id()]);
  DCHECK(result != OK || job->socket() != nullptr);

  // Check if the ConnectJob is already bound to a Request. If so, result is
  // returned to that specific request.
  base::Optional<Group::BoundRequest> bound_request =
      group->FindAndRemoveBoundRequestForConnectJob(job);
  Request* request = nullptr;
  std::unique_ptr<Request> owned_request;
  if (bound_request) {
    --connecting_socket_count_;

    // If the socket pools were previously flushed with an error, return that
    // error to the bound request and discard the socket.
    if (bound_request->pending_error != OK) {
      InvokeUserCallbackLater(bound_request->request->handle(),
                              bound_request->request->release_callback(),
                              bound_request->pending_error,
                              bound_request->request->socket_tag());
      bound_request->request->net_log().EndEventWithNetErrorCode(
          NetLogEventType::SOCKET_POOL, bound_request->pending_error);
      OnAvailableSocketSlot(group->group_id(), group);
      CheckForStalledSocketGroups();
      return;
    }

    // If the ConnectJob is from a previous generation, add the request back to
    // the group, and kick off another request. The socket will be discarded.
    if (bound_request->generation != group->generation()) {
      group->InsertUnboundRequest(std::move(bound_request->request));
      OnAvailableSocketSlot(group->group_id(), group);
      CheckForStalledSocketGroups();
      return;
    }

    request = bound_request->request.get();
  } else {
    // In this case, RemoveConnectJob(job, _) must be called before exiting
    // this method. Otherwise, |job| will be leaked.
    owned_request = group->PopNextUnboundRequest();
    request = owned_request.get();

    if (!request) {
      // No one is waiting: keep a good socket as idle, then drop the job.
      if (result == OK)
        AddIdleSocket(job->PassSocket(), group);
      RemoveConnectJob(job, group);
      OnAvailableSocketSlot(group->group_id(), group);
      CheckForStalledSocketGroups();
      return;
    }

    LogBoundConnectJobToRequest(job->net_log().source(), *request);
  }

  // The case where there's no request is handled above.
  DCHECK(request);

  if (result != OK)
    request->handle()->SetAdditionalErrorState(job);
  if (job->socket()) {
    HandOutSocket(job->PassSocket(), ClientSocketHandle::UNUSED,
                  job->connect_timing(), request->handle(), base::TimeDelta(),
                  group, request->net_log());
  }
  request->net_log().EndEventWithNetErrorCode(NetLogEventType::SOCKET_POOL,
                                              result);
  InvokeUserCallbackLater(request->handle(), request->release_callback(),
                          result, request->socket_tag());
  if (!bound_request)
    RemoveConnectJob(job, group);
  // If no socket was handed out, there's a new socket slot available.
if (!request->handle()->socket()) { OnAvailableSocketSlot(group->group_id(), group); CheckForStalledSocketGroups(); } } void TransportClientSocketPool::OnNeedsProxyAuth( Group* group, const HttpResponseInfo& response, HttpAuthController* auth_controller, base::OnceClosure restart_with_auth_callback, ConnectJob* job) { DCHECK(group_map_.find(group->group_id()) != group_map_.end()); DCHECK_EQ(group, group_map_[group->group_id()]); const Request* request = group->BindRequestToConnectJob(job); // If can't bind the ConnectJob to a request, treat this as a ConnectJob // failure. if (!request) { OnConnectJobComplete(group, ERR_PROXY_AUTH_REQUESTED, job); return; } request->proxy_auth_callback().Run(response, auth_controller, std::move(restart_with_auth_callback)); } void TransportClientSocketPool::InvokeUserCallbackLater( ClientSocketHandle* handle, CompletionOnceCallback callback, int rv, const SocketTag& socket_tag) { CHECK(!base::Contains(pending_callback_map_, handle)); pending_callback_map_[handle] = CallbackResultPair(std::move(callback), rv); if (rv == OK) { handle->socket()->ApplySocketTag(socket_tag); } base::ThreadTaskRunnerHandle::Get()->PostTask( FROM_HERE, base::BindOnce(&TransportClientSocketPool::InvokeUserCallback, weak_factory_.GetWeakPtr(), handle)); } void TransportClientSocketPool::InvokeUserCallback(ClientSocketHandle* handle) { auto it = pending_callback_map_.find(handle); // Exit if the request has already been cancelled. if (it == pending_callback_map_.end()) return; CHECK(!handle->is_initialized()); CompletionOnceCallback callback = std::move(it->second.callback); int result = it->second.result; pending_callback_map_.erase(it); std::move(callback).Run(result); } void TransportClientSocketPool::TryToCloseSocketsInLayeredPools() { while (IsStalled()) { // Closing a socket will result in calling back into |this| to use the freed // socket slot, so nothing else is needed. 
if (!CloseOneIdleConnectionInHigherLayeredPool()) return; } } void TransportClientSocketPool::RefreshGroup(GroupMap::iterator it, const base::TimeTicks& now, const char* net_log_reason_utf8) { Group* group = it->second; CleanupIdleSocketsInGroup(true /* force */, group, now, net_log_reason_utf8); connecting_socket_count_ -= group->jobs().size(); group->RemoveAllUnboundJobs(); // Otherwise, prevent reuse of existing sockets. group->IncrementGeneration(); // Delete group if no longer needed. if (group->IsEmpty()) { RemoveGroup(it); } } TransportClientSocketPool::Group::Group( const GroupId& group_id, TransportClientSocketPool* client_socket_pool) : group_id_(group_id), client_socket_pool_(client_socket_pool), never_assigned_job_count_(0), unbound_requests_(NUM_PRIORITIES), active_socket_count_(0), generation_(0) {} TransportClientSocketPool::Group::~Group() { DCHECK_EQ(0u, never_assigned_job_count()); DCHECK_EQ(0u, unassigned_job_count()); DCHECK(unbound_requests_.empty()); DCHECK(jobs_.empty()); DCHECK(bound_requests_.empty()); } void TransportClientSocketPool::Group::OnConnectJobComplete(int result, ConnectJob* job) { DCHECK_NE(ERR_IO_PENDING, result); client_socket_pool_->OnConnectJobComplete(this, result, job); } void TransportClientSocketPool::Group::OnNeedsProxyAuth( const HttpResponseInfo& response, HttpAuthController* auth_controller, base::OnceClosure restart_with_auth_callback, ConnectJob* job) { client_socket_pool_->OnNeedsProxyAuth(this, response, auth_controller, std::move(restart_with_auth_callback), job); } void TransportClientSocketPool::Group::StartBackupJobTimer( const GroupId& group_id) { // Only allow one timer to run at a time. if (BackupJobTimerIsRunning()) return; // Unretained here is okay because |backup_job_timer_| is // automatically cancelled when it's destroyed. 
backup_job_timer_.Start(FROM_HERE, client_socket_pool_->ConnectRetryInterval(), base::BindOnce(&Group::OnBackupJobTimerFired, base::Unretained(this), group_id)); } bool TransportClientSocketPool::Group::BackupJobTimerIsRunning() const { return backup_job_timer_.IsRunning(); } bool TransportClientSocketPool::Group::TryToUseNeverAssignedConnectJob() { SanityCheck(); if (never_assigned_job_count_ == 0) return false; --never_assigned_job_count_; return true; } void TransportClientSocketPool::Group::AddJob(std::unique_ptr<ConnectJob> job, bool is_preconnect) { SanityCheck(); if (is_preconnect) ++never_assigned_job_count_; jobs_.push_back(std::move(job)); TryToAssignUnassignedJob(jobs_.back().get()); SanityCheck(); } std::unique_ptr<ConnectJob> TransportClientSocketPool::Group::RemoveUnboundJob( ConnectJob* job) { SanityCheck(); // Check that |job| is in the list. auto it = std::find_if(jobs_.begin(), jobs_.end(), [job](const std::unique_ptr<ConnectJob>& ptr) { return ptr.get() == job; }); DCHECK(it != jobs_.end()); // Check if |job| is in the unassigned jobs list. If so, remove it. auto it2 = std::find(unassigned_jobs_.begin(), unassigned_jobs_.end(), job); if (it2 != unassigned_jobs_.end()) { unassigned_jobs_.erase(it2); } else { // Otherwise, |job| must be assigned to some Request. Unassign it, then // try to replace it with another job if possible (either by taking an // unassigned job or stealing from another request, if any requests after it // have a job). RequestQueue::Pointer request_with_job = FindUnboundRequestWithJob(job); DCHECK(!request_with_job.is_null()); request_with_job.value()->ReleaseJob(); TryToAssignJobToRequest(request_with_job); } std::unique_ptr<ConnectJob> owned_job = std::move(*it); jobs_.erase(it); size_t job_count = jobs_.size(); if (job_count < never_assigned_job_count_) never_assigned_job_count_ = job_count; // If we've got no more jobs for this group, then we no longer need a // backup job either. 
if (jobs_.empty()) { DCHECK(unassigned_jobs_.empty()); backup_job_timer_.Stop(); } SanityCheck(); return owned_job; } void TransportClientSocketPool::Group::OnBackupJobTimerFired( const GroupId& group_id) { // If there are no more jobs pending, there is no work to do. // If we've done our cleanups correctly, this should not happen. if (jobs_.empty()) { NOTREACHED(); return; } // If the old job has already established a connection, don't start a backup // job. Backup jobs are only for issues establishing the initial TCP // connection - the timeout they used is tuned for that, and tests expect that // behavior. // // TODO(https://crbug.com/929814): Replace both this and the // LOAD_STATE_RESOLVING_HOST check with a callback. Use the // LOAD_STATE_RESOLVING_HOST callback to start the timer (And invoke the // OnHostResolved callback of any pending requests), and the // HasEstablishedConnection() callback to stop the timer. That should result // in a more robust, testable API. if ((*jobs_.begin())->HasEstablishedConnection()) return; // If our old job is waiting on DNS, or if we can't create any sockets // right now due to limits, just reset the timer. 
if (client_socket_pool_->ReachedMaxSocketsLimit() || !HasAvailableSocketSlot(client_socket_pool_->max_sockets_per_group_) || (*jobs_.begin())->GetLoadState() == LOAD_STATE_RESOLVING_HOST) { StartBackupJobTimer(group_id); return; } if (unbound_requests_.empty()) return; Request* request = unbound_requests_.FirstMax().value().get(); std::unique_ptr<ConnectJob> owned_backup_job = client_socket_pool_->connect_job_factory_->NewConnectJob( group_id, request->socket_params(), request->proxy_annotation_tag(), request->priority(), request->socket_tag(), this); owned_backup_job->net_log().AddEvent( NetLogEventType::SOCKET_POOL_CONNECT_JOB_CREATED, [&] { return NetLogCreateConnectJobParams(true /* backup_job */, &group_id_); }); ConnectJob* backup_job = owned_backup_job.get(); AddJob(std::move(owned_backup_job), false); client_socket_pool_->connecting_socket_count_++; int rv = backup_job->Connect(); if (rv != ERR_IO_PENDING) { client_socket_pool_->OnConnectJobComplete(this, rv, backup_job); } } void TransportClientSocketPool::Group::SanityCheck() const { #if DCHECK_IS_ON() DCHECK_LE(never_assigned_job_count(), jobs_.size()); DCHECK_LE(unassigned_job_count(), jobs_.size()); // Check that |unassigned_jobs_| is empty iff there are at least as many // requests as jobs. DCHECK_EQ(unassigned_jobs_.empty(), jobs_.size() <= unbound_requests_.size()); size_t num_assigned_jobs = jobs_.size() - unassigned_jobs_.size(); RequestQueue::Pointer pointer = unbound_requests_.FirstMax(); for (size_t i = 0; i < unbound_requests_.size(); ++i, pointer = unbound_requests_.GetNextTowardsLastMin(pointer)) { DCHECK(!pointer.is_null()); DCHECK(pointer.value()); // Check that the first |num_assigned_jobs| requests have valid job // assignments. if (i < num_assigned_jobs) { // The request has a job. 
ConnectJob* job = pointer.value()->job(); DCHECK(job); // The request's job is not in |unassigned_jobs_| DCHECK(std::find(unassigned_jobs_.begin(), unassigned_jobs_.end(), job) == unassigned_jobs_.end()); // The request's job is in |jobs_| DCHECK(std::find_if(jobs_.begin(), jobs_.end(), [job](const std::unique_ptr<ConnectJob>& ptr) { return ptr.get() == job; }) != jobs_.end()); // The same job is not assigned to any other request with a job. RequestQueue::Pointer pointer2 = unbound_requests_.GetNextTowardsLastMin(pointer); for (size_t j = i + 1; j < num_assigned_jobs; ++j, pointer2 = unbound_requests_.GetNextTowardsLastMin(pointer2)) { DCHECK(!pointer2.is_null()); ConnectJob* job2 = pointer2.value()->job(); DCHECK(job2); DCHECK_NE(job, job2); } DCHECK_EQ(pointer.value()->priority(), job->priority()); } else { // Check that any subsequent requests do not have a job. DCHECK(!pointer.value()->job()); } } for (auto it = unassigned_jobs_.begin(); it != unassigned_jobs_.end(); ++it) { // Check that all unassigned jobs are in |jobs_| ConnectJob* job = *it; DCHECK(std::find_if(jobs_.begin(), jobs_.end(), [job](const std::unique_ptr<ConnectJob>& ptr) { return ptr.get() == job; }) != jobs_.end()); // Check that there are no duplicated entries in |unassigned_jobs_| for (auto it2 = std::next(it); it2 != unassigned_jobs_.end(); ++it2) { DCHECK_NE(job, *it2); } // Check that no |unassigned_jobs_| are in |bound_requests_|. DCHECK(std::find_if(bound_requests_.begin(), bound_requests_.end(), [job](const BoundRequest& bound_request) { return bound_request.connect_job.get() == job; }) == bound_requests_.end()); } #endif } void TransportClientSocketPool::Group::RemoveAllUnboundJobs() { SanityCheck(); // Remove jobs from any requests that have them. 
if (!unbound_requests_.empty()) { for (RequestQueue::Pointer pointer = unbound_requests_.FirstMax(); !pointer.is_null() && pointer.value()->job(); pointer = unbound_requests_.GetNextTowardsLastMin(pointer)) { pointer.value()->ReleaseJob(); } } unassigned_jobs_.clear(); never_assigned_job_count_ = 0; // Delete active jobs. jobs_.clear(); // Stop backup job timer. backup_job_timer_.Stop(); SanityCheck(); } size_t TransportClientSocketPool::Group::ConnectJobCount() const { return bound_requests_.size() + jobs_.size(); } ConnectJob* TransportClientSocketPool::Group::GetConnectJobForHandle( const ClientSocketHandle* handle) const { // Search through bound requests for |handle|. for (const auto& bound_pair : bound_requests_) { if (handle == bound_pair.request->handle()) return bound_pair.connect_job.get(); } // Search through the unbound requests that have corresponding jobs for a // request with |handle|. for (RequestQueue::Pointer pointer = unbound_requests_.FirstMax(); !pointer.is_null() && pointer.value()->job(); pointer = unbound_requests_.GetNextTowardsLastMin(pointer)) { if (pointer.value()->handle() == handle) return pointer.value()->job(); } return nullptr; } void TransportClientSocketPool::Group::InsertUnboundRequest( std::unique_ptr<Request> request) { SanityCheck(); // Should not have a job because it is not already in |unbound_requests_| DCHECK(!request->job()); // This value must be cached before we release |request|. RequestPriority priority = request->priority(); RequestQueue::Pointer new_position; if (request->respect_limits() == RespectLimits::DISABLED) { // Put requests with RespectLimits::DISABLED (which should have // priority == MAXIMUM_PRIORITY) ahead of other requests with // MAXIMUM_PRIORITY. 
DCHECK_EQ(priority, MAXIMUM_PRIORITY); new_position = unbound_requests_.InsertAtFront(std::move(request), priority); } else { new_position = unbound_requests_.Insert(std::move(request), priority); } DCHECK(!unbound_requests_.empty()); TryToAssignJobToRequest(new_position); SanityCheck(); } const TransportClientSocketPool::Request* TransportClientSocketPool::Group::GetNextUnboundRequest() const { return unbound_requests_.empty() ? nullptr : unbound_requests_.FirstMax().value().get(); } std::unique_ptr<TransportClientSocketPool::Request> TransportClientSocketPool::Group::PopNextUnboundRequest() { if (unbound_requests_.empty()) return nullptr; return RemoveUnboundRequest(unbound_requests_.FirstMax()); } std::unique_ptr<TransportClientSocketPool::Request> TransportClientSocketPool::Group::FindAndRemoveUnboundRequest( ClientSocketHandle* handle) { for (RequestQueue::Pointer pointer = unbound_requests_.FirstMax(); !pointer.is_null(); pointer = unbound_requests_.GetNextTowardsLastMin(pointer)) { if (pointer.value()->handle() == handle) { DCHECK_EQ(static_cast<RequestPriority>(pointer.priority()), pointer.value()->priority()); std::unique_ptr<Request> request = RemoveUnboundRequest(pointer); return request; } } return nullptr; } void TransportClientSocketPool::Group::SetPendingErrorForAllBoundRequests( int pending_error) { for (auto bound_pair = bound_requests_.begin(); bound_pair != bound_requests_.end(); ++bound_pair) { // Earlier errors take precedence. if (bound_pair->pending_error == OK) bound_pair->pending_error = pending_error; } } const TransportClientSocketPool::Request* TransportClientSocketPool::Group::BindRequestToConnectJob( ConnectJob* connect_job) { // Check if |job| is already bound to a Request. for (const auto& bound_pair : bound_requests_) { if (bound_pair.connect_job.get() == connect_job) return bound_pair.request.get(); } // If not, try to bind it to a Request. 
const Request* request = GetNextUnboundRequest(); // If there are no pending requests, or the highest priority request has no // callback to handle auth challenges, return nullptr. if (!request || request->proxy_auth_callback().is_null()) return nullptr; // Otherwise, bind the ConnectJob to the Request. std::unique_ptr<Request> owned_request = PopNextUnboundRequest(); DCHECK_EQ(owned_request.get(), request); std::unique_ptr<ConnectJob> owned_connect_job = RemoveUnboundJob(connect_job); LogBoundConnectJobToRequest(owned_connect_job->net_log().source(), *request); bound_requests_.emplace_back(BoundRequest( std::move(owned_connect_job), std::move(owned_request), generation())); return request; } base::Optional<TransportClientSocketPool::Group::BoundRequest> TransportClientSocketPool::Group::FindAndRemoveBoundRequestForConnectJob( ConnectJob* connect_job) { for (auto bound_pair = bound_requests_.begin(); bound_pair != bound_requests_.end(); ++bound_pair) { if (bound_pair->connect_job.get() != connect_job) continue; BoundRequest ret = std::move(*bound_pair); bound_requests_.erase(bound_pair); return std::move(ret); } return base::nullopt; } std::unique_ptr<TransportClientSocketPool::Request> TransportClientSocketPool::Group::FindAndRemoveBoundRequest( ClientSocketHandle* client_socket_handle) { for (auto bound_pair = bound_requests_.begin(); bound_pair != bound_requests_.end(); ++bound_pair) { if (bound_pair->request->handle() != client_socket_handle) continue; std::unique_ptr<Request> request = std::move(bound_pair->request); bound_requests_.erase(bound_pair); return request; } return nullptr; } void TransportClientSocketPool::Group::SetPriority(ClientSocketHandle* handle, RequestPriority priority) { for (RequestQueue::Pointer pointer = unbound_requests_.FirstMax(); !pointer.is_null(); pointer = unbound_requests_.GetNextTowardsLastMin(pointer)) { if (pointer.value()->handle() == handle) { if (pointer.value()->priority() == priority) return; std::unique_ptr<Request> 
request = RemoveUnboundRequest(pointer); // Requests that ignore limits much be created and remain at the highest // priority, and should not be reprioritized. DCHECK_EQ(request->respect_limits(), RespectLimits::ENABLED); request->set_priority(priority); InsertUnboundRequest(std::move(request)); return; } } // This function must be called with a valid ClientSocketHandle. NOTREACHED(); } bool TransportClientSocketPool::Group::RequestWithHandleHasJobForTesting( const ClientSocketHandle* handle) const { SanityCheck(); if (GetConnectJobForHandle(handle)) return true; // There's no corresponding ConnectJob. Verify that the handle is at least // owned by a request. RequestQueue::Pointer pointer = unbound_requests_.FirstMax(); for (size_t i = 0; i < unbound_requests_.size(); ++i) { if (pointer.value()->handle() == handle) return false; pointer = unbound_requests_.GetNextTowardsLastMin(pointer); } NOTREACHED(); return false; } TransportClientSocketPool::Group::BoundRequest::BoundRequest() : pending_error(OK) {} TransportClientSocketPool::Group::BoundRequest::BoundRequest( std::unique_ptr<ConnectJob> connect_job, std::unique_ptr<Request> request, int64_t generation) : connect_job(std::move(connect_job)), request(std::move(request)), generation(generation), pending_error(OK) {} TransportClientSocketPool::Group::BoundRequest::BoundRequest( BoundRequest&& other) = default; TransportClientSocketPool::Group::BoundRequest& TransportClientSocketPool::Group::BoundRequest::operator=( BoundRequest&& other) = default; TransportClientSocketPool::Group::BoundRequest::~BoundRequest() = default; std::unique_ptr<TransportClientSocketPool::Request> TransportClientSocketPool::Group::RemoveUnboundRequest( const RequestQueue::Pointer& pointer) { SanityCheck(); // TODO(eroman): Temporary for debugging http://crbug.com/467797. 
CHECK(!pointer.is_null()); std::unique_ptr<Request> request = unbound_requests_.Erase(pointer); if (request->job()) { TryToAssignUnassignedJob(request->ReleaseJob()); } // If there are no more unbound requests, kill the backup timer. if (unbound_requests_.empty()) backup_job_timer_.Stop(); SanityCheck(); return request; } TransportClientSocketPool::RequestQueue::Pointer TransportClientSocketPool::Group::FindUnboundRequestWithJob( const ConnectJob* job) const { SanityCheck(); for (RequestQueue::Pointer pointer = unbound_requests_.FirstMax(); !pointer.is_null() && pointer.value()->job(); pointer = unbound_requests_.GetNextTowardsLastMin(pointer)) { if (pointer.value()->job() == job) return pointer; } // If a request with the job was not found, it must be in |unassigned_jobs_|. DCHECK(std::find(unassigned_jobs_.begin(), unassigned_jobs_.end(), job) != unassigned_jobs_.end()); return RequestQueue::Pointer(); } TransportClientSocketPool::RequestQueue::Pointer TransportClientSocketPool::Group::GetFirstRequestWithoutJob() const { RequestQueue::Pointer pointer = unbound_requests_.FirstMax(); size_t i = 0; for (; !pointer.is_null() && pointer.value()->job(); pointer = unbound_requests_.GetNextTowardsLastMin(pointer)) { ++i; } DCHECK_EQ(i, jobs_.size() - unassigned_jobs_.size()); DCHECK(pointer.is_null() || !pointer.value()->job()); return pointer; } void TransportClientSocketPool::Group::TryToAssignUnassignedJob( ConnectJob* job) { unassigned_jobs_.push_back(job); RequestQueue::Pointer first_request_without_job = GetFirstRequestWithoutJob(); if (!first_request_without_job.is_null()) { first_request_without_job.value()->AssignJob(unassigned_jobs_.back()); unassigned_jobs_.pop_back(); } } void TransportClientSocketPool::Group::TryToAssignJobToRequest( TransportClientSocketPool::RequestQueue::Pointer request_pointer) { DCHECK(!request_pointer.value()->job()); if (!unassigned_jobs_.empty()) { request_pointer.value()->AssignJob(unassigned_jobs_.front()); 
unassigned_jobs_.pop_front(); return; } // If the next request in the queue does not have a job, then there are no // requests with a job after |request_pointer| from which we can steal. RequestQueue::Pointer next_request = unbound_requests_.GetNextTowardsLastMin(request_pointer); if (next_request.is_null() || !next_request.value()->job()) return; // Walk down the queue to find the last request with a job. RequestQueue::Pointer cur = next_request; RequestQueue::Pointer next = unbound_requests_.GetNextTowardsLastMin(cur); while (!next.is_null() && next.value()->job()) { cur = next; next = unbound_requests_.GetNextTowardsLastMin(next); } // Steal the job from the last request with a job. TransferJobBetweenRequests(cur.value().get(), request_pointer.value().get()); } void TransportClientSocketPool::Group::TransferJobBetweenRequests( TransportClientSocketPool::Request* source, TransportClientSocketPool::Request* dest) { DCHECK(!dest->job()); DCHECK(source->job()); dest->AssignJob(source->ReleaseJob()); } } // namespace net
{ "content_hash": "06b47d319896ea2f99640c2a5b8aa16a", "timestamp": "", "source": "github", "line_count": 2030, "max_line_length": 80, "avg_line_length": 36.135960591133006, "alnum_prop": 0.6695157860297726, "repo_name": "endlessm/chromium-browser", "id": "3d5aff5436823d8865af806f1f214f5c216eed79", "size": "73356", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "net/socket/transport_client_socket_pool.cc", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
package com.asha.md360player4android; import android.app.Activity; import android.util.SparseArray; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Spinner; /** * Created by hzqiujiadi on 16/6/24. * hzqiujiadi ashqalcn@gmail.com */ public class SpinnerHelper { private Activity activity; private SparseArray<String> data; private ClickHandler clickHandler; private int defaultKey; public interface ClickHandler { void onSpinnerClicked(int index, int key, String value); } public SpinnerHelper(Activity activity) { this.activity = activity; } public SpinnerHelper setDefault(int key) { defaultKey = key; return this; } public SpinnerHelper setData(SparseArray<String> data) { this.data = data; return this; } public SpinnerHelper setClickHandler(ClickHandler clickHandler) { this.clickHandler = clickHandler; return this; } public void init(int id) { if (data == null) { return; } Spinner spinner = (Spinner) activity.findViewById(id); ArrayAdapter<String> adapter = new ArrayAdapter<>(activity, R.layout.simple_spinner_item); adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); for (int i = 0; i < data.size(); i++) { String value = data.valueAt(i); adapter.add(value); } spinner.setAdapter(adapter); int index = data.indexOfKey(defaultKey); index = index == -1 ? 0 : index; spinner.setSelection(index); spinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { int key = data.keyAt(position); String value = data.valueAt(position); if (clickHandler != null) { clickHandler.onSpinnerClicked(position, key, value); } } @Override public void onNothingSelected(AdapterView<?> parent) { } }); } public static SpinnerHelper with(Activity activity) { return new SpinnerHelper(activity); } }
{ "content_hash": "c94747ebc3100de0a3f0477311b53d1a", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 98, "avg_line_length": 29.08641975308642, "alnum_prop": 0.6269100169779287, "repo_name": "tony-pan/Parse-MD4A", "id": "4637d2969d6ac9243f6b037102ef05ffc7abfd40", "size": "2356", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/asha/md360player4android/SpinnerHelper.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "GLSL", "bytes": "3783" }, { "name": "Java", "bytes": "313174" } ], "symlink_target": "" }
package org.apache.flink.runtime.state;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.checkpoint.StateObjectCollection;
import org.apache.flink.runtime.operators.testutils.DummyEnvironment;
import org.apache.flink.runtime.state.heap.HeapKeyedStateBackend;
import org.apache.flink.runtime.state.memory.MemCheckpointStreamFactory;
import org.apache.flink.runtime.state.memory.MemoryStateBackend;
import org.apache.flink.testutils.ArtificialCNFExceptionThrowingClassLoader;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.FutureUtil;

import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;

import java.io.Serializable;
import java.util.Collections;
import java.util.concurrent.RunnableFuture;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Tests for the {@link org.apache.flink.runtime.state.memory.MemoryStateBackend}.
 *
 * <p>Most coverage comes from the inherited {@link StateBackendTestBase} suite;
 * this class configures the base suite for the memory backend and adds tests
 * for restore failures caused by unloadable state serializers.
 */
public class MemoryStateBackendTest extends StateBackendTestBase<MemoryStateBackend> {

	@Override
	protected MemoryStateBackend getStateBackend() throws Exception {
		return new MemoryStateBackend(useAsyncMode());
	}

	// Overridden by a subclass to run the same suite in async snapshot mode
	// — TODO confirm against the async variant of this test.
	protected boolean useAsyncMode() {
		return false;
	}

	@Override
	protected boolean isSerializerPresenceRequiredOnRestore() {
		return true;
	}

	// disable these because the verification does not work for this state backend
	@Override
	@Test
	public void testValueStateRestoreWithWrongSerializers() {}

	@Override
	@Test
	public void testListStateRestoreWithWrongSerializers() {}

	@Override
	@Test
	public void testReducingStateRestoreWithWrongSerializers() {}

	@Override
	@Test
	public void testMapStateRestoreWithWrongSerializers() {}

	/**
	 * Verifies that the operator state backend fails with appropriate error and message if
	 * previous serializer can not be restored.
	 */
	@Test
	public void testOperatorStateRestoreFailsIfSerializerDeserializationFails() throws Exception {
		DummyEnvironment env = new DummyEnvironment();
		AbstractStateBackend abstractStateBackend = new MemoryStateBackend(4096);

		OperatorStateBackend operatorStateBackend =
			abstractStateBackend.createOperatorStateBackend(env, "test-op-name");

		// write some state
		ListStateDescriptor<Serializable> stateDescriptor1 =
			new ListStateDescriptor<>("test1", new JavaSerializer<>());
		ListStateDescriptor<Serializable> stateDescriptor2 =
			new ListStateDescriptor<>("test2", new JavaSerializer<>());
		ListStateDescriptor<Serializable> stateDescriptor3 =
			new ListStateDescriptor<>("test3", new JavaSerializer<>());

		ListState<Serializable> listState1 = operatorStateBackend.getListState(stateDescriptor1);
		ListState<Serializable> listState2 = operatorStateBackend.getListState(stateDescriptor2);
		ListState<Serializable> listState3 = operatorStateBackend.getUnionListState(stateDescriptor3);

		listState1.add(42);
		listState1.add(4711);

		listState2.add(7);
		listState2.add(13);
		listState2.add(23);

		listState3.add(17);
		listState3.add(18);
		listState3.add(19);
		listState3.add(20);

		// take a snapshot of the state written above
		CheckpointStreamFactory streamFactory =
			new MemCheckpointStreamFactory(MemoryStateBackend.DEFAULT_MAX_STATE_SIZE);
		RunnableFuture<SnapshotResult<OperatorStateHandle>> runnableFuture =
			operatorStateBackend.snapshot(1, 1, streamFactory, CheckpointOptions.forCheckpointWithDefaultLocation());
		SnapshotResult<OperatorStateHandle> snapshotResult = FutureUtil.runIfNotDoneAndGet(runnableFuture);

		OperatorStateHandle stateHandle = snapshotResult.getJobManagerOwnedSnapshot();

		try {
			operatorStateBackend.close();
			operatorStateBackend.dispose();

			// restore with a classloader that fails to load JavaSerializer,
			// simulating the previous state serializer being unavailable
			env = new DummyEnvironment(
				new ArtificialCNFExceptionThrowingClassLoader(
					getClass().getClassLoader(),
					Collections.singleton(JavaSerializer.class.getName())));

			operatorStateBackend = abstractStateBackend.createOperatorStateBackend(
				env,
				"testOperator");

			operatorStateBackend.restore(StateObjectCollection.singleton(stateHandle));

			fail("The operator state restore should have failed if the previous state serializer could not be loaded.");
		} catch (Exception expected) {
			// the root cause must be the classloading failure, not some other error
			Assert.assertTrue(ExceptionUtils.findThrowable(expected, ClassNotFoundException.class).isPresent());
		} finally {
			stateHandle.discardState();
		}
	}

	/**
	 * Verifies that memory-backed keyed state backend fails with appropriate error and message if
	 * previous serializer can not be restored.
	 */
	@Test
	public void testKeyedStateRestoreFailsIfSerializerDeserializationFails() throws Exception {
		CheckpointStreamFactory streamFactory = createStreamFactory();
		SharedStateRegistry sharedStateRegistry = new SharedStateRegistry();
		KeyedStateBackend<Integer> backend = createKeyedBackend(IntSerializer.INSTANCE);

		ValueStateDescriptor<String> kvId = new ValueStateDescriptor<>("id", String.class, null);
		kvId.initializeSerializerUnlessSet(new ExecutionConfig());

		HeapKeyedStateBackend<Integer> heapBackend = (HeapKeyedStateBackend<Integer>) backend;

		// the freshly created backend must not contain any entries yet
		assertEquals(0, heapBackend.numKeyValueStateEntries());

		ValueState<String> state =
			backend.getPartitionedState(VoidNamespace.INSTANCE, VoidNamespaceSerializer.INSTANCE, kvId);

		// write some state
		backend.setCurrentKey(0);
		state.update("hello");
		state.update("ciao");

		KeyedStateHandle snapshot = runSnapshot(
			((HeapKeyedStateBackend<Integer>) backend).snapshot(
				682375462378L,
				2,
				streamFactory,
				CheckpointOptions.forCheckpointWithDefaultLocation()),
			sharedStateRegistry);

		backend.dispose();

		// ========== restore snapshot ==========

		try {
			// restore with a classloader that cannot load StringSerializer,
			// so the value state's previous serializer cannot be restored
			restoreKeyedBackend(
				IntSerializer.INSTANCE,
				snapshot,
				new DummyEnvironment(
					new ArtificialCNFExceptionThrowingClassLoader(
						getClass().getClassLoader(),
						Collections.singleton(StringSerializer.class.getName()))));

			fail("The keyed state restore should have failed if the previous state serializer could not be loaded.");
		} catch (Exception expected) {
			// the root cause must be the classloading failure, not some other error
			Assert.assertTrue(ExceptionUtils.findThrowable(expected, ClassNotFoundException.class).isPresent());
		}
	}

	// Inherited test disabled for this backend; reason not stated here —
	// NOTE(review): presumably the memory backend's map state is not backed by
	// a concurrent map, confirm against StateBackendTestBase.
	@Ignore
	@Test
	public void testConcurrentMapIfQueryable() throws Exception {
		super.testConcurrentMapIfQueryable();
	}
}
{ "content_hash": "596ad55cc05be8983cb7d1f4b914b4f3", "timestamp": "", "source": "github", "line_count": 191, "max_line_length": 121, "avg_line_length": 34.74869109947644, "alnum_prop": 0.7944854602983276, "repo_name": "mylog00/flink", "id": "d5310412385a3cd7f6c20a53a1e3a41db468b512", "size": "7442", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "flink-runtime/src/test/java/org/apache/flink/runtime/state/MemoryStateBackendTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "5666" }, { "name": "CSS", "bytes": "18100" }, { "name": "Clojure", "bytes": "81015" }, { "name": "CoffeeScript", "bytes": "91220" }, { "name": "Dockerfile", "bytes": "9788" }, { "name": "HTML", "bytes": "86821" }, { "name": "Java", "bytes": "40279802" }, { "name": "JavaScript", "bytes": "8267" }, { "name": "Python", "bytes": "249644" }, { "name": "Scala", "bytes": "7501313" }, { "name": "Shell", "bytes": "391588" } ], "symlink_target": "" }
package org.xsaas.xstat.business;

import java.util.List;

import org.xsaas.xstat.po.ArticleInfo;

/**
 * Service interface for CRUD, counting and paging operations on
 * {@link ArticleInfo} entities.
 *
 * NOTE(review): the original javadoc was mojibake-encoded Chinese (GBK read as
 * Latin-1); it has been translated to English below.
 */
public interface IArticleInfoService {

	/**
	 * Save an article.
	 * @param data the article to save
	 */
	public void saveArticleInfo(ArticleInfo data);

	/**
	 * Update an article.
	 * @param data the article to update
	 */
	public void updateArticleInfo(ArticleInfo data);

	/**
	 * Delete an article.
	 * @param data the article to delete
	 */
	public void deleteArticleInfo(ArticleInfo data);

	/**
	 * Fetch a single article by its ID.
	 * @param articleID the article ID (the original comment literally said
	 *                  "questionnaire number" — presumably a copy-paste; verify)
	 * @return the article
	 */
	public ArticleInfo getArticleInfo(Long articleID);

	/**
	 * Fetch the full list of articles.
	 * @return list of articles
	 */
	public List<ArticleInfo> getArticleInfoList();

	/**
	 * Total number of articles matching the given title.
	 * @param title title to filter by
	 * @return count
	 */
	public int getArticleInfoTotal(String title);

	/**
	 * Total number of articles.
	 * @return count
	 */
	public int getArticleInfoTotal();

	/**
	 * Paged query of articles filtered by title.
	 * @param title       title to filter by
	 * @param firstResult index of the first result
	 * @param maxResult   maximum number of results
	 * @return one page of articles
	 */
	public List<ArticleInfo> findArticleInfoByPage(String title,final int firstResult, final int maxResult);

	/**
	 * Paged query of articles.
	 * @param firstResult index of the first result
	 * @param maxResult   maximum number of results
	 * @return one page of articles
	 */
	public List<ArticleInfo> findArticleInfoByPage(final int firstResult, final int maxResult);

	/**
	 * Total number of articles filtered by deletion status.
	 * @return count
	 */
	public int getTotalByDelStatus();

	/**
	 * Paged query of articles filtered by deletion status.
	 * @param firstResult index of the first result
	 * @param maxResult   maximum number of results
	 * @return one page of articles
	 */
	public List<ArticleInfo> findPageByDelStatus(final int firstResult, final int maxResult);
}
{ "content_hash": "69208d2dd649642b8a287f2ce96809da", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 105, "avg_line_length": 21.5, "alnum_prop": 0.6911337209302325, "repo_name": "wangxin39/xstat", "id": "136c90be6fb7b42b7cfa0357598155cb5881471b", "size": "1376", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "XStatAPI/src/org/xsaas/xstat/business/IArticleInfoService.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ASP", "bytes": "86104" }, { "name": "CSS", "bytes": "671441" }, { "name": "ColdFusion", "bytes": "246890" }, { "name": "Java", "bytes": "3159003" }, { "name": "JavaScript", "bytes": "7490591" }, { "name": "Lasso", "bytes": "35010" }, { "name": "PHP", "bytes": "80294" }, { "name": "Perl", "bytes": "67654" }, { "name": "Python", "bytes": "76098" }, { "name": "Shell", "bytes": "86" } ], "symlink_target": "" }
Template Tags ############# ***************** CMS template tags ***************** .. highlightlang:: html+django To use any of the following template tags you first need to load them at the top of your template:: {% load cms_tags %} .. template tag:: placeholder placeholder =========== .. versionchanged:: 2.1 The placeholder name became case sensitive. The ``placeholder`` template tag defines a placeholder on a page. All placeholders in a template will be auto-detected and can be filled with plugins when editing a page that is using said template. When rendering, the content of these plugins will appear where the ``placeholder`` tag was. Example:: {% placeholder "content" %} If you want additional content to be displayed in case the placeholder is empty, use the ``or`` argument and an additional ``{% endplaceholder %}`` closing tag. Everything between ``{% placeholder "..." or %}`` and ``{% endplaceholder %}`` is rendered in the event that the placeholder has no plugins or the plugins do not generate any output. Example:: {% placeholder "content" or %}There is no content.{% endplaceholder %} If you want to add extra variables to the context of the placeholder, you should use Django's :ttag:`with` tag. For instance, if you want to re-size images from your templates according to a context variable called ``width``, you can pass it as follows:: {% with 320 as width %}{% placeholder "content" %}{% endwith %} If you want the placeholder to inherit the content of a placeholder with the same name on parent pages, simply pass the ``inherit`` argument:: {% placeholder "content" inherit %} This will walk up the page tree up until the root page and will show the first placeholder it can find with content. 
It's also possible to combine this with the ``or`` argument to show an ultimate fallback if the placeholder and none of the placeholders on parent pages have plugins that generate content:: {% placeholder "content" inherit or %}There is no spoon.{% endplaceholder %} See also the :setting:`CMS_PLACEHOLDER_CONF` setting where you can also add extra context variables and change some other placeholder behaviour. .. template tag:: static_placeholder static_placeholder ================== .. versionadded:: 3.0 The static_placeholder template tag can be used anywhere in any template and is not bound to any page or model. It needs a name and it will create a placeholder that you can fill with plugins afterwards. The static_placeholder tag is normally used to display the same content on multiple locations or inside of apphooks or other third party apps. Static_placeholder need to be published to show up on live pages. Example:: {% load cms_tags %} {% static_placeholder "footer" %} .. warning:: Static_placeholders are not included in the undo/redo and page history pages If you want additional content to be displayed in case the static placeholder is empty, use the ``or`` argument and an additional ``{% endstatic_placeholder %}`` closing tag. Everything between ``{% static_placeholder "..." or %}`` and ``{% endstatic_placeholder %}`` is rendered in the event that the placeholder has no plugins or the plugins do not generate any output. Example:: {% static_placeholder "footer" or %}There is no content.{% endstatic_placeholder %} By default, a static placeholder applies to *all* sites in a project. If you want to make your static placeholder site-specific, so that different sites can have their own content in it, you can add the flag ``site`` to the template tag to achieve this. 
Example:: {% static_placeholder "footer" site or %}There is no content.{% endstatic_placeholder %} Note that the `Django "sites" framework <https://docs.djangoproject.com/en/dev/ref/contrib/sites/>`_ *is* required and ``SITE_ID`` :ref:`*must* be set <configure-django-cms>` in ``settings.py`` for this (not to mention other aspects of django CMS) to work correctly. .. templatetag:: render_placeholder render_placeholder ================== `{% render_placeholder %}` is used if you have a PlaceholderField in your own model and want to render it in the template. The :ttag:`render_placeholder` tag takes the following parameters: * :class:`~cms.models.fields.PlaceholderField` instance * ``width`` parameter for context sensitive plugins (optional) * ``language`` keyword plus ``language-code`` string to render content in the specified language (optional) * ``as`` keyword followed by ``varname`` (optional): the template tag output can be saved as a context variable for later use. The following example renders the ``my_placeholder`` field from the ``mymodel_instance`` and will render only the English (``en``) plugins: .. code-block:: html+django {% load cms_tags %} {% render_placeholder mymodel_instance.my_placeholder language 'en' %} .. versionadded:: 3.0.2 This template tag supports the ``as`` argument. With this you can assign the result of the template tag to a new variable that you can use elsewhere in the template. Example:: {% render_placeholder mymodel_instance.my_placeholder as placeholder_content %} <p>{{ placeholder_content }}</p> When used in this manner, the placeholder will not be displayed for editing when the CMS is in edit mode. .. templatetag:: render_uncached_placeholder render_uncached_placeholder =========================== The same as :ttag:`render_placeholder`, but the placeholder contents will not be cached or taken from the cache. 
Arguments: * :class:`~cms.models.fields.PlaceholderField` instance * ``width`` parameter for context sensitive plugins (optional) * ``language`` keyword plus ``language-code`` string to render content in the specified language (optional) * ``as`` keyword followed by ``varname`` (optional): the template tag output can be saved as a context variable for later use. Example:: {% render_uncached_placeholder mymodel_instance.my_placeholder language 'en' %} .. templatetag:: show_placeholder show_placeholder ================ Displays a specific placeholder from a given page. This is useful if you want to have some more or less static content that is shared among many pages, such as a footer. Arguments: * ``placeholder_name`` * ``page_lookup`` (see `page_lookup`_ for more information) * ``language`` (optional) * ``site`` (optional) Examples:: {% show_placeholder "footer" "footer_container_page" %} {% show_placeholder "content" request.current_page.parent_id %} {% show_placeholder "teaser" request.current_page.get_root %} .. templatetag:: show_uncached_placeholder show_uncached_placeholder ========================= The same as :ttag:`show_placeholder`, but the placeholder contents will not be cached or taken from the cache. Arguments: - ``placeholder_name`` - ``page_lookup`` (see `page_lookup`_ for more information) - ``language`` (optional) - ``site`` (optional) Example:: {% show_uncached_placeholder "footer" "footer_container_page" %} .. templatetag:: page_lookup page_lookup =========== The ``page_lookup`` argument, passed to several template tags to retrieve a page, can be of any of the following types: * :class:`str <basestring>`: interpreted as the ``reverse_id`` field of the desired page, which can be set in the "Advanced" section when editing a page. 
* :class:`int`: interpreted as the primary key (``pk`` field) of the desired page * :class:`dict`: a dictionary containing keyword arguments to find the desired page (for instance: ``{'pk': 1}``) * :class:`~cms.models.Page`: you can also pass a page object directly, in which case there will be no database lookup. If you know the exact page you are referring to, it is a good idea to use a ``reverse_id`` (a string used to uniquely name a page) rather than a hard-coded numeric ID in your template. For example, you might have a help page that you want to link to or display parts of on all pages. To do this, you would first open the help page in the admin interface and enter an ID (such as ``help``) under the 'Advanced' tab of the form. Then you could use that ``reverse_id`` with the appropriate template tags:: {% show_placeholder "right-column" "help" %} <a href="{% page_url "help" %}">Help page</a> If you are referring to a page `relative` to the current page, you'll probably have to use a numeric page ID or a page object. For instance, if you want the content of the parent page to display on the current page, you can use:: {% show_placeholder "content" request.current_page.parent_id %} Or, suppose you have a placeholder called ``teaser`` on a page that, unless a content editor has filled it with content specific to the current page, should inherit the content of its root-level ancestor:: {% placeholder "teaser" or %} {% show_placeholder "teaser" request.current_page.get_root %} {% endplaceholder %} .. templatetag:: page_url page_url ======== Displays the URL of a page in the current language. 
Arguments: - ``page_lookup`` (see `page_lookup`_ for more information) - ``language`` (optional) - ``site`` (optional) - ``as var_name`` (version 3.0 or later, optional; page_url can now be used to assign the resulting URL to a context variable ``var_name``) Example:: <a href="{% page_url "help" %}">Help page</a> <a href="{% page_url request.current_page.parent %}">Parent page</a> If a matching page isn't found and :setting:`django:DEBUG` is ``True``, an exception will be raised. However, if :setting:`django:DEBUG` is ``False``, an exception will not be raised. Additionally, if :setting:`django:SEND_BROKEN_LINK_EMAILS` is ``True`` and you have specified some addresses in :setting:`django:MANAGERS`, an email will be sent to those addresses to inform them of the broken link. .. versionadded:: 3.0 page_url now supports the ``as`` argument. When used this way, the tag emits nothing, but sets a variable in the context with the specified name to the resulting value. When using the ``as`` argument PageNotFound exceptions are always suppressed, regardless of the setting of :setting:`django:DEBUG` and the tag will simply emit an empty string in these cases. Example:: {# Emit a 'canonical' tag when the page is displayed on an alternate url #} {% page_url request.current_page as current_url %}{% if current_url and current_url != request.get_full_path %}<link rel="canonical" href="{% page_url request.current_page %}">{% endif %} .. templatetag:: page_attribute page_attribute ============== This template tag is used to display an attribute of the current page in the current language. 
Arguments: - ``attribute_name`` - ``page_lookup`` (optional; see `page_lookup`_ for more information) Possible values for ``attribute_name`` are: ``"title"``, ``"menu_title"``, ``"page_title"``, ``"slug"``, ``"meta_description"``, ``"changed_date"``, ``"changed_by"`` (note that you can also supply that argument without quotes, but this is deprecated because the argument might also be a template variable). Example:: {% page_attribute "page_title" %} If you supply the optional ``page_lookup`` argument, you will get the page attribute from the page found by that argument. Example:: {% page_attribute "page_title" "my_page_reverse_id" %} {% page_attribute "page_title" request.current_page.parent_id %} {% page_attribute "slug" request.current_page.get_root %} .. versionadded:: 2.3.2 This template tag supports the ``as`` argument. With this you can assign the result of the template tag to a new variable that you can use elsewhere in the template. Example:: {% page_attribute "page_title" as title %} <title>{{ title }}</title> It even can be used in combination with the ``page_lookup`` argument. Example:: {% page_attribute "page_title" "my_page_reverse_id" as title %} <a href="/mypage/">{{ title }}</a> .. templatetag:: render_plugin .. versionadded:: 2.4 render_plugin ============= This template tag is used to render child plugins of the current plugin and should be used inside plugin templates. Arguments: - ``plugin`` Plugin needs to be an instance of a plugin model. Example:: {% load cms_tags %} <div class="multicolumn"> {% for plugin in instance.child_plugin_instances %} <div style="width: {{ plugin.width }}00px;"> {% render_plugin plugin %} </div> {% endfor %} </div> Normally the children of plugins can be accessed via the ``child_plugins`` attribute of plugins. Plugins need the ``allow_children`` attribute to set to `True` for this to be enabled. .. versionadded:: 3.0 .. 
templatetag:: render_plugin_block render_plugin_block =================== This template tag acts like the template tag ``render_model_block`` but with a plugin instead of a model as its target. This is used to link from a block of markup to a plugin's change form in edit/preview mode. This is useful for user interfaces that have some plugins hidden from display in edit/preview mode, but the CMS author needs to expose a way to edit them. It is also useful for just making duplicate or alternate means of triggering the change form for a plugin. This would typically be used inside a parent-plugin’s render template. In this example code below, there is a parent container plugin which renders a list of child plugins inside a navigation block, then the actual plugin contents inside a ``DIV.contentgroup-items`` block. In this example, the navigation block is always shown, but the items are only shown once the corresponding navigation element is clicked. Adding this ``render_plugin_block`` makes it significantly more intuitive to edit a child plugin's content, by double-clicking its navigation item in edit mode. Arguments: - ``plugin`` Example:: {% load cms_tags l10n %} {% block section_content %} <div class="contentgroup-container"> <nav class="contentgroup"> <div class="inner"> <ul class="contentgroup-items">{% for child in children %} {% if child.enabled %} <li class="item{{ forloop.counter0|unlocalize }}"> {% render_plugin_block child %} <a href="#item{{ child.id|unlocalize }}">{{ child.title|safe }}</a> {% endrender_plugin_block %} </li>{% endif %} {% endfor %} </ul> </div> </nav> <div class="contentgroup-items">{% for child in children %} <div class="contentgroup-item item{{ child.id|unlocalize }}{% if not forloop.counter0 %} active{% endif %}"> {% render_plugin child %} </div>{% endfor %} </div> </div> {% endblock %} .. templatetag:: render_model .. versionadded:: 3.0 render_model ============ .. 
warning:: ``render_model`` marks as safe the content of the rendered model attribute. This may be a security risk if used on fields which may contains non-trusted content. Be aware, and use the template tag accordingly. ``render_model`` is the way to add frontend editing to any Django model. It both renders the content of the given attribute of the model instance and makes it clickable to edit the related model. If the toolbar is not enabled, the value of the attribute is rendered in the template without further action. If the toolbar is enabled, click to call frontend editing code is added. By using this template tag you can show and edit page titles as well as fields in standard django models, see :ref:`frontend-editable-fields` for examples and further documentation. Example: .. code-block:: html+django <h1>{% render_model my_model "title" "title,abstract" %}</h1> This will render to: .. code-block:: html+django <!-- The content of the H1 is the active area that triggers the frontend editor --> <h1><div class="cms-plugin cms-plugin-myapp-mymodel-title-1">{{ my_model.title }}</div></h1> **Arguments:** * ``instance``: instance of your model in the template * ``attribute``: the name of the attribute you want to show in the template; it can be a context variable name; it's possible to target field, property or callable for the specified model; when used on a page object this argument accepts the special ``titles`` value which will show the page **title** field, while allowing editing **title**, **menu title** and **page title** fields in the same form; * ``edit_fields`` (optional): a comma separated list of fields editable in the popup editor; when template tag is used on a page object this argument accepts the special ``changelist`` value which allows editing the pages **changelist** (items list); * ``language`` (optional): the admin language tab to be linked. Useful only for `django-hvad`_ enabled models. 
* ``filters`` (optional): a string containing chained filters to apply to the
  output content; works the same way as :ttag:`django:filter` template tag;
* ``view_url`` (optional): the name of a URL that will be reversed using the
  instance ``pk`` and the ``language`` as arguments;
* ``view_method`` (optional): a method name that will return a URL to a view;
  the method must accept ``request`` as first parameter.
* ``varname`` (optional): the template tag output can be saved as a context
  variable for later use.

.. warning::

    ``render_model`` is only partially compatible with django-hvad: using it
    with hvad-translated fields (say ``{% render_model object
    'translated_field' %}``) returns an error if the hvad-enabled object does
    not exist in the current language. As a workaround ``render_model_icon``
    can be used instead.

.. templatetag:: render_model_block

.. versionadded:: 3.0

render_model_block
==================

``render_model_block`` is the block-level equivalent of ``render_model``:

.. code-block:: html+django

    {% render_model_block my_model %}
        <h1>{{ instance.title }}</h1>
        <div class="body">
            {{ instance.date|date:"d F Y" }}
            {{ instance.text }}
        </div>
    {% endrender_model_block %}

This will render to:

.. code-block:: html+django

    <!-- This whole block is the active area that triggers the frontend editor -->
    <div class="cms-plugin cms-plugin-myapp-mymodel-1">
        <h1>{{ my_model.title }}</h1>
        <div class="body">
            {{ my_model.date|date:"d F Y" }}
            {{ my_model.text }}
        </div>
    </div>

In the block the ``my_model`` is aliased as ``instance`` and every attribute
and method is available; also template tags and filters are available in the
block.

.. warning::

    If the ``{% render_model_block %}`` contains template tags or template
    code that rely on or manipulate context data that the
    ``{% render_model_block %}`` also makes use of, you may experience some
    unexpected effects.
Unless you are sure that such conflicts will not occur it is advised to keep the code within a ``{% render_model_block %}`` as simple and short as possible. **Arguments:** * ``instance``: instance of your model in the template * ``edit_fields`` (optional): a comma separated list of fields editable in the popup editor; when template tag is used on a page object this argument accepts the special ``changelist`` value which allows editing the pages **changelist** (items list); * ``language`` (optional): the admin language tab to be linked. Useful only for `django-hvad`_ enabled models. * ``view_url`` (optional): the name of a URL that will be reversed using the instance ``pk`` and the ``language`` as arguments; * ``view_method`` (optional): a method name that will return a URL to a view; the method must accept ``request`` as first parameter. * ``varname`` (optional): the template tag output can be saved as a context variable for later use. .. templatetag:: render_model_icon .. versionadded:: 3.0 render_model_icon ================= ``render_model_icon`` is intended for use where the relevant object attribute is not available for user interaction (for example, already has a link on it, think of a title in a list of items and the titles are linked to the object detail view); when in edit mode, it renders an **edit** icon, which will trigger the editing change form for the provided fields. .. code-block:: html+django <h3><a href="{{ my_model.get_absolute_url }}">{{ my_model.title }}</a> {% render_model_icon my_model %}</h3> It will render to something like: .. code-block:: html+django <h3> <a href="{{ my_model.get_absolute_url }}">{{ my_model.title }}</a> <div class="cms-plugin cms-plugin-myapp-mymodel-1 cms-render-model-icon"> <!-- The image below is the active area that triggers the frontend editor --> <img src="/static/cms/img/toolbar/render_model_placeholder.png"> </div> </h3> .. 
note:: Icon and position can be customised via CSS by setting a background to the ``.cms-render-model-icon img`` selector. **Arguments:** * ``instance``: instance of your model in the template * ``edit_fields`` (optional): a comma separated list of fields editable in the popup editor; when template tag is used on a page object this argument accepts the special ``changelist`` value which allows editing the pages **changelist** (items list); * ``language`` (optional): the admin language tab to be linked. Useful only for `django-hvad`_ enabled models. * ``view_url`` (optional): the name of a URL that will be reversed using the instance ``pk`` and the ``language`` as arguments; * ``view_method`` (optional): a method name that will return a URL to a view; the method must accept ``request`` as first parameter. * ``varname`` (optional): the template tag output can be saved as a context variable for later use. .. templatetag:: render_model_add .. versionadded:: 3.0 render_model_add ================ ``render_model_add`` is similar to ``render_model_icon`` but it will enable to create instances of the given instance class; when in edit mode, it renders an **add** icon, which will trigger the editing add form for the provided model. .. code-block:: html+django <h3><a href="{{ my_model.get_absolute_url }}">{{ my_model.title }}</a> {% render_model_add my_model %}</h3> It will render to something like: .. code-block:: html+django <h3> <a href="{{ my_model.get_absolute_url }}">{{ my_model.title }}</a> <div class="cms-plugin cms-plugin-myapp-mymodel-1 cms-render-model-add"> <!-- The image below is the active area that triggers the frontend editor --> <img src="/static/cms/img/toolbar/render_model_placeholder.png"> </div> </h3> .. note:: Icon and position can be customised via CSS by setting a background to the ``.cms-render-model-add img`` selector. 
**Arguments:**

* ``instance``: instance of your model, or model class to be added
* ``edit_fields`` (optional): a comma separated list of fields editable in the
  popup editor;
* ``language`` (optional): the admin language tab to be linked. Useful only for
  `django-hvad`_ enabled models.
* ``view_url`` (optional): the name of a URL that will be reversed using the
  instance ``pk`` and the ``language`` as arguments;
* ``view_method`` (optional): a method name that will return a URL to a view;
  the method must accept ``request`` as first parameter.
* ``varname`` (optional): the template tag output can be saved as a context
  variable for later use.

.. warning::

    If passing a class, instead of an instance, and using ``view_method``,
    please bear in mind that the method will be called over an **empty
    instance** of the class, so attributes are all empty, and the instance
    does not exist in the database.

.. _django-hvad: https://github.com/kristianoellegaard/django-hvad

.. templatetag:: render_model_add_block

.. versionadded:: 3.1

render_model_add_block
======================

``render_model_add_block`` is similar to ``render_model_add`` but instead of
emitting an icon that is linked to the add model form in a modal dialog, it
wraps arbitrary markup with the same "link". This allows the developer to
create front-end editing experiences better suited to the project.

All arguments are identical to ``render_model_add``, but the template tag is
used in two parts to wrap the markup that should be wrapped.

.. code-block:: html+django

    {% render_model_add_block my_model_instance %}<div>New Object</div>{% endrender_model_add_block %}

It will render to something like:

.. code-block:: html+django

    <div class="cms-plugin cms-plugin-myapp-mymodel-1 cms-render-model-add">
        <div>New Object</div>
    </div>

.. warning::

    You **must** pass an *instance* of your model as instance parameter. The
    instance passed could be an existing models instance, or one newly created
    in your view/plugin.
It does not even have to be saved, it is introspected by the template tag to
determine the desired model class.

**Arguments:**

* ``instance``: instance of your model in the template
* ``edit_fields`` (optional): a comma separated list of fields editable in the
  popup editor;
* ``language`` (optional): the admin language tab to be linked. Useful only for
  `django-hvad`_ enabled models.
* ``view_url`` (optional): the name of a URL that will be reversed using the
  instance ``pk`` and the ``language`` as arguments;
* ``view_method`` (optional): a method name that will return a URL to a view;
  the method must accept ``request`` as first parameter.
* ``varname`` (optional): the template tag output can be saved as a context
  variable for later use.

.. _django-hvad: https://github.com/kristianoellegaard/django-hvad

.. templatetag:: page_language_url

page_language_url
=================

Returns the URL of the current page in another language::

    {% page_language_url de %}
    {% page_language_url fr %}
    {% page_language_url en %}

If the current URL has no CMS Page and is handled by a navigation extender and
the URL changes based on the language, you will need to set a
``language_changer`` function with the ``set_language_changer`` function in
``menus.utils``.

For more information, see :doc:`/topics/i18n`.

.. templatetag:: language_chooser

language_chooser
================

The ``language_chooser`` template tag will display a language chooser for the
current page. You can modify the template in ``menu/language_chooser.html`` or
provide your own template if necessary.

Example::

    {% language_chooser %}

or with custom template::

    {% language_chooser "myapp/language_chooser.html" %}

The language_chooser has four different modes in which it will display the
languages you can choose from: "raw" (default), "native", "current" and
"short". It can be passed as the last argument to the ``language_chooser``
tag as a string.
In "raw" mode, the language will be displayed like its verbose name in the settings. In "native" mode the languages are displayed in their actual language (eg. German will be displayed "Deutsch", Japanese as "日本語" etc). In "current" mode the languages are translated into the current language the user is seeing the site in (eg. if the site is displayed in German, Japanese will be displayed as "Japanisch"). "Short" mode takes the language code (eg. "en") to display. If the current URL has no CMS Page and is handled by a navigation extender and the URL changes based on the language, you will need to set a ``language_changer`` function with the ``set_language_changer`` function in ``menus.utils``. For more information, see :doc:`/topics/i18n`. ********************* Toolbar template tags ********************* .. highlightlang:: html+django The ``cms_toolbar`` template tag is included in the ``cms_tags`` library and will add the required CSS and javascript to the sekizai blocks in the base template. The template tag has to be placed after the ``<body>`` tag and before any ``{% cms_placeholder %}`` occurrences within your HTML. Example:: <body> {% cms_toolbar %} {% placeholder "home" %} ... .. note:: Be aware that you can not surround the cms_toolbar tag with block tags. The toolbar tag will render everything below it to collect all plugins and placeholders, before it renders itself. Block tags interfere with this.
{ "content_hash": "ffed80ca77b1baad0509b74069b1c4ba", "timestamp": "", "source": "github", "line_count": 799, "max_line_length": 191, "avg_line_length": 35.545682102628284, "alnum_prop": 0.6982148515897327, "repo_name": "iddqd1/django-cms", "id": "7a83caabff8525ded5bc33cc605a7ff4914eceea", "size": "28423", "binary": false, "copies": "3", "ref": "refs/heads/develop", "path": "docs/reference/templatetags.rst", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "128012" }, { "name": "HTML", "bytes": "104983" }, { "name": "JavaScript", "bytes": "665955" }, { "name": "Python", "bytes": "1943557" }, { "name": "XSLT", "bytes": "5917" } ], "symlink_target": "" }
// Thin HTTP wrapper around the Colu SDK: loads (or bootstraps) local server
// settings, exposes wallet / Colored Coins operations as REST endpoints, and
// starts listening once the Colu client has connected.
var path = require('path-extra')
var express = require('express')
var mkpath = require('mkpath')
var Colu = require(__dirname + '/../colu.js')
var bodyParser = require('body-parser')
var jf = require('jsonfile')
var hash = require('crypto-hashing')
var morgan = require('morgan')('dev')

// Settings live in the per-user data dir; write a default config on first
// run so the server always starts with something usable.
var serverSettings = path.join(path.datadir('colu'), 'settings.json')
var settings
try {
  settings = jf.readFileSync(serverSettings)
} catch (e) {
  settings = {
    colu: {
      network: 'testnet'
    },
    server: {
      port: 8081,
      host: '127.0.0.1'
    }
  }
  var dirname = path.dirname(serverSettings)
  // BUG FIX: mkpath.sync(path[, mode]) takes an octal mode as its second
  // argument; the settings object was previously passed by mistake.
  mkpath.sync(dirname)
  jf.writeFileSync(serverSettings, settings)
}

var colu = new Colu(settings.colu)
var app = express()

app.use(morgan)

// Allow cross-origin browser clients.
app.use(function (req, res, next) {
  res.header('Access-Control-Allow-Origin', '*')
  res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept')
  next()
})
app.use(bodyParser.json())
app.use(bodyParser.urlencoded({
  extended: true
}))

// Optional API-key auth: when settings.selfApiKey is set, the SHA-256 of the
// x-access-token header must match it.
app.use(function (req, res, next) {
  if (!settings.selfApiKey) return next()
  if (hash.sha256(req.headers['x-access-token']) !== settings.selfApiKey) return res.send(401)
  // BUG FIX: the original middleware never called next() on a successful
  // match, so every authorized request hung forever.
  next()
})

// ////////// Colu wallet wrappers ////////////

app.post('/sendasset', function (req, res, next) {
  colu.sendAsset(req.body, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

app.post('/issueasset', function (req, res, next) {
  colu.issueAsset(req.body, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

// ////////// Colored Coins endpoints ////////////
// BUG FIX: the handlers for /coloredcoins/issue and /coloredcoins/sendasset
// were swapped ("issue" built a send-asset tx, "sendasset" issued an asset).
// Each route now calls the matching coloredCoins transaction builder.

app.post('/coloredcoins/issue', function (req, res, next) {
  colu.coloredCoins.getIssueAssetTx(req.body, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

app.post('/coloredcoins/sendasset', function (req, res, next) {
  colu.coloredCoins.getSendAssetTx(req.body, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

app.get('/coloredcoins/addressinfo/:address', function (req, res, next) {
  colu.coloredCoins.getAddressInfo(req.params.address, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

app.get('/coloredcoins/stakeholders/:assetId/:numConfirmations', function (req, res, next) {
  colu.coloredCoins.getStakeHolders(req.params.assetId, req.params.numConfirmations || 0, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

app.get('/coloredcoins/assetmetadata/:assetId/:utxo', function (req, res, next) {
  colu.coloredCoins.getAssetMetadata(req.params.assetId, req.params.utxo, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

app.post('/coloredcoins/assetdata/:assetid/:numconfirmations', function (req, res, next) {
  // Renamed the local from "settings" to avoid shadowing the server settings.
  var assetDataQuery = {
    assetId: req.params.assetid,
    numConfirmations: req.params.numconfirmations,
    addresses: req.body.addresses
  }
  colu.coloredCoins.getAssetData(assetDataQuery, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

// Signing and address extraction are synchronous SDK helpers.
app.post('/coloredcoins/signtx/:unsignedtx/:privatekey', function (req, res, next) {
  return res.send(colu.coloredCoins.signTx(req.params.unsignedtx, req.params.privatekey))
})

app.get('/coloredcoins/inputaddresses/:txhex/:network', function (req, res, next) {
  return res.send(colu.coloredCoins.getInputAddresses(req.params.txhex, req.params.network))
})

// ////////// Utility wrappers ////////////

app.post('/broadcast', function (req, res, next) {
  colu.coloredCoins.broadcastTx(req.body, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

app.post('/signandbroadcast/:txHex/:last_txid/:host', function (req, res, next) {
  colu.signAndTransmit(req.params.txHex, req.params.last_txid, req.params.host, function (err, result) {
    if (err) return next(err)
    res.send(result)
  })
})

// ////////// HD wallet ////////////

app.get('/hdwallet/address', function (req, res, next) {
  return res.send(colu.hdwallet.getAddress())
})

app.get('/hdwallet/address/:account/:addressindex', function (req, res, next) {
  return res.send(colu.hdwallet.getAddress(req.params.account, req.params.addressindex))
})

// Fallback 404 handler for unknown routes.
app.use(function (req, res, next) {
  res.status(404)
  if (req.accepts('json')) return res.send({ error: 'Not found' })
  res.type('txt').send('Not found')
})

// Only start accepting requests once the Colu SDK has connected.
colu.on('connect', function () {
  app.listen(settings.server.port, settings.server.host, function () {
    console.log('server started on port', settings.server.port)
  })
})

colu.init()
{ "content_hash": "5f46725d91891902527df3753fc5bb32", "timestamp": "", "source": "github", "line_count": 158, "max_line_length": 114, "avg_line_length": 29.99367088607595, "alnum_prop": 0.654357459379616, "repo_name": "citizencode/colu-nodejs", "id": "8a174eb3923051563d38ffb607c032da843354b1", "size": "4759", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "bin/run.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "2180917" } ], "symlink_target": "" }
/* More information about these options at jshint.com/docs/options */ /* globals TestCase, assertEquals, xhrs:true, MockWebSocket, FAKE_WSS_POST_URL, Constants, webSockets:true, FAKE_WSS_URL, WebSocket:true, MockXMLHttpRequest, XMLHttpRequest:true, FAKE_SEND_EXCEPTION */ 'use strict'; var FAKE_MESSAGE = JSON.stringify({ cmd: 'send', msg: JSON.stringify({type: 'bye'}) }); var MockEvent = function(addListener) { this.addListener_ = addListener; }; MockEvent.prototype.addListener = function(callback) { this.addListener_(callback); }; var MockPort = function() { this.onDisconnectCallback_ = null; this.onMessageCallback_ = null; this.onPostMessage = null; this.onDisconnect = new MockEvent(function(callback) { this.onDisconnectCallback_ = callback; }.bind(this)); this.onMessage = new MockEvent(function(callback) { this.onMessageCallback_ = callback; }.bind(this)); }; MockPort.prototype.disconnect = function() { if (this.onDisconnectCallback_) { this.onDisconnectCallback_(); } }; MockPort.prototype.message = function(message) { if (this.onMessageCallback_) { this.onMessageCallback_(message); } }; MockPort.prototype.postMessage = function(message) { if (this.onPostMessage) { this.onPostMessage(message); } }; MockPort.prototype.createWebSocket = function() { assertEquals(0, webSockets.length); this.message({ action: Constants.WS_ACTION, wsAction: Constants.WS_CREATE_ACTION, wssUrl: FAKE_WSS_URL, wssPostUrl: FAKE_WSS_POST_URL }); assertEquals(1, webSockets.length); assertEquals(WebSocket.CONNECTING, this.webSocket_.readyState); }; var BackgroundTest = new TestCase('BackgroundTest'); BackgroundTest.prototype.setUp = function() { webSockets = []; xhrs = []; this.realWebSocket = WebSocket; WebSocket = MockWebSocket; this.mockPort_ = new MockPort(); window.chrome.callOnConnect(this.mockPort_); }; BackgroundTest.prototype.tearDown = function() { WebSocket = this.realWebSocket; }; BackgroundTest.prototype.testCreateWebSocket = function() { this.mockPort_.createWebSocket(); }; 
BackgroundTest.prototype.testCloseWebSocket = function() { this.mockPort_.createWebSocket(); this.mockPort_.message({ action: Constants.WS_ACTION, wsAction: Constants.WS_CLOSE_ACTION }); assertEquals(WebSocket.CLOSED, this.mockPort_.webSocket_.readyState); }; BackgroundTest.prototype.testSendWebSocket = function() { this.mockPort_.createWebSocket(); this.mockPort_.webSocket_.simulateOpenResult(true); assertEquals(0, this.mockPort_.webSocket_.messages.length); this.mockPort_.message({ action: Constants.WS_ACTION, wsAction: Constants.WS_SEND_ACTION, data: FAKE_MESSAGE }); assertEquals(1, this.mockPort_.webSocket_.messages.length); assertEquals(FAKE_MESSAGE, this.mockPort_.webSocket_.messages[0]); }; BackgroundTest.prototype.testSendWebSocketNotReady = function() { this.mockPort_.createWebSocket(); // Send without socket being in open state. assertEquals(0, this.mockPort_.webSocket_.messages.length); var realXMLHttpRequest = XMLHttpRequest; XMLHttpRequest = MockXMLHttpRequest; this.mockPort_.message({ action: Constants.WS_ACTION, wsAction: Constants.WS_SEND_ACTION, data: FAKE_MESSAGE }); XMLHttpRequest = realXMLHttpRequest; // No messages posted to web socket. assertEquals(0, this.mockPort_.webSocket_.messages.length); // Message sent via xhr instead. assertEquals(1, xhrs.length); assertEquals(2, xhrs[0].readyState); assertEquals(FAKE_WSS_POST_URL, xhrs[0].url); assertEquals('POST', xhrs[0].method); assertEquals(JSON.stringify({type: 'bye'}), xhrs[0].body); }; BackgroundTest.prototype.testSendWebSocketThrows = function() { this.mockPort_.createWebSocket(); this.mockPort_.webSocket_.simulateOpenResult(true); // Set mock web socket to throw exception on send(). 
this.mockPort_.webSocket_.throwOnSend = true; var message = null; this.mockPort_.onPostMessage = function(msg) { message = msg; }; assertEquals(0, this.mockPort_.webSocket_.messages.length); this.mockPort_.message({ action: Constants.WS_ACTION, wsAction: Constants.WS_SEND_ACTION, data: FAKE_MESSAGE }); assertEquals(0, this.mockPort_.webSocket_.messages.length); this.checkMessage_(message, Constants.WS_EVENT_SENDERROR, FAKE_SEND_EXCEPTION); }; BackgroundTest.prototype.checkMessage_ = function(m, wsEvent, data) { assertEquals(Constants.WS_ACTION, m.action); assertEquals(Constants.EVENT_ACTION, m.wsAction); assertEquals(wsEvent, m.wsEvent); if (data) { assertEquals(data, m.data); } }; BackgroundTest.prototype.testWebSocketEvents = function() { this.mockPort_.createWebSocket(); var message = null; this.mockPort_.onPostMessage = function(msg) { message = msg; }; var ws = this.mockPort_.webSocket_; ws.onopen(); this.checkMessage_(message, Constants.WS_EVENT_ONOPEN); ws.onerror(); this.checkMessage_(message, Constants.WS_EVENT_ONERROR); ws.onclose(FAKE_MESSAGE); this.checkMessage_(message, Constants.WS_EVENT_ONCLOSE, FAKE_MESSAGE); ws.onmessage(FAKE_MESSAGE); this.checkMessage_(message, Constants.WS_EVENT_ONMESSAGE, FAKE_MESSAGE); }; BackgroundTest.prototype.testDisconnectClosesWebSocket = function() { // Disconnect should cause web socket to be closed. 
var socketClosed = false; this.mockPort_.webSocket_ = { close: function() { socketClosed = true; } }; this.mockPort_.disconnect(); assertEquals(true, socketClosed); }; BackgroundTest.prototype.testQueueMessages = function() { assertEquals(null, this.mockPort_.queue_); this.mockPort_.message({ action: Constants.QUEUEADD_ACTION, queueMessage: { action: Constants.XHR_ACTION, method: 'POST', url: '/go/home', body: null } }); assertEquals(1, this.mockPort_.queue_.length); this.mockPort_.message({ action: Constants.QUEUEADD_ACTION, queueMessage: { action: Constants.WS_ACTION, wsAction: Constants.WS_SEND_ACTION, data: JSON.stringify({ cmd: 'send', msg: JSON.stringify({type: 'bye'}) }) } }); assertEquals(2, this.mockPort_.queue_.length); this.mockPort_.message({action: Constants.QUEUECLEAR_ACTION}); assertEquals([], this.mockPort_.queue_); };
{ "content_hash": "960384915582f8edd81c013731a6ab74", "timestamp": "", "source": "github", "line_count": 242, "max_line_length": 80, "avg_line_length": 26.41322314049587, "alnum_prop": 0.7076032540675845, "repo_name": "jiayliu/apprtc", "id": "737443200e6ba0d4f30fa4b948e1eef829ad91a2", "size": "6614", "binary": false, "copies": "21", "ref": "refs/heads/master", "path": "src/web_app/js/background_test.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "6265" }, { "name": "Go", "bytes": "38374" }, { "name": "HTML", "bytes": "16182" }, { "name": "JavaScript", "bytes": "201169" }, { "name": "Python", "bytes": "97931" }, { "name": "Shell", "bytes": "1340" } ], "symlink_target": "" }
var searchData= [ ['vignetteandchromaticaberration',['VignetteAndChromaticAberration',['../class_unity_standard_assets_1_1_image_effects_1_1_vignette_and_chromatic_aberration.html',1,'UnityStandardAssets::ImageEffects']]], ['vortex',['Vortex',['../class_unity_standard_assets_1_1_image_effects_1_1_vortex.html',1,'UnityStandardAssets::ImageEffects']]] ];
{ "content_hash": "4a2673927dd2b151133eb6ff9d0ce952", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 206, "avg_line_length": 71.8, "alnum_prop": 0.7604456824512534, "repo_name": "kesumu/dokidoki", "id": "78489a96a3bd9b7727d18017f4371ac69a969a5c", "size": "359", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/api/html/search/classes_e.js", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "374881" }, { "name": "GLSL", "bytes": "33375" }, { "name": "Smalltalk", "bytes": "20286" } ], "symlink_target": "" }
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('acelibraryapp', '0008_auto_20161120_2040'), ] operations = [ migrations.AlterField( model_name='resources', name='URL', field=models.CharField(max_length=150), ), ]
{ "content_hash": "dc56dcd9cde89ba55ff28198e46b5133", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 53, "avg_line_length": 21.38888888888889, "alnum_prop": 0.5974025974025974, "repo_name": "ashishpahwa7/Library-Portal", "id": "cfc66d055cf0e10bbca8b146433da2ca26fd69c4", "size": "458", "binary": false, "copies": "2", "ref": "refs/heads/development", "path": "acelibraryapp/migrations/0009_auto_20161120_2110.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "16952" }, { "name": "HTML", "bytes": "32528" }, { "name": "JavaScript", "bytes": "978" }, { "name": "Python", "bytes": "36225" } ], "symlink_target": "" }
function (elementProxy, win) { 'use strict'; var w = win || window; // window injected for testing var ElementProxy = elementProxy.ElementProxy; var proxyCache = {}; function releaseOrphanProxies () { for(var proxy in proxyCache) { if(!proxyCache[proxy].el.checkNodeIsInDom()) { proxyCache[proxy].el.release(); } } } function bakeOff() { for(var proxy in proxyCache) { if(proxyCache[proxy].el.isDirty()) { proxyCache[proxy].el.bake(); } } } function getReferenceCount(proxy) { var id = proxy.id; if (!id || !proxyCache[id]) { return 0; } else { return proxyCache[id].refCount; } } function release(prx) { var proxy = proxyCache[prx.id]; if(proxy) { --proxy.refCount; } if(proxy && proxy.refCount <= 0 ) { delete proxyCache[prx.id]; } // if there is no proxy in the cache // this is a no-op } function elementProxyFactory(el) { var key, newProxy; if(el !== w) { $A.assert(el && el.nodeType && (el.nodeType !== 1 || el.nodeType !== 11), "Element Proxy requires an element"); } //validate node if(el !== w && !el.id) { el.id = w.$A.getComponent(el).getGlobalId(); } if(el === w) { key = 'window'; } else { key = el.id; } if(proxyCache[key]) { proxyCache[key].refCount++; return proxyCache[key].el; } else { newProxy = new ElementProxy(el, key); newProxy.setReleaseCallback(release, newProxy); proxyCache[key] = { el: newProxy, refCount : 1 }; } // run GC w.setTimeout(releaseOrphanProxies, 0); return proxyCache[key].el; } function reset(){ proxyCache = {}; } return { _proxyCache: proxyCache, getReferenceCount: getReferenceCount, getElement : elementProxyFactory, bakeOff : bakeOff, resetFactory: reset, release: release }; }
{ "content_hash": "241ea7c4832298fdc9de67b061776958", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 123, "avg_line_length": 23.632653061224488, "alnum_prop": 0.4853195164075993, "repo_name": "lcnbala/aura", "id": "cfc1139f67300ec1fa48e51d5686e494f248a5b0", "size": "2927", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "aura-components/src/main/components/ui/panelPositioningLib/elementProxyFactory.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "884345" }, { "name": "GAP", "bytes": "10087" }, { "name": "HTML", "bytes": "2899877" }, { "name": "Java", "bytes": "7844855" }, { "name": "JavaScript", "bytes": "16276400" }, { "name": "PHP", "bytes": "3345441" }, { "name": "Python", "bytes": "9744" }, { "name": "Shell", "bytes": "19650" }, { "name": "XSLT", "bytes": "2725" } ], "symlink_target": "" }
from __future__ import absolute_import from distutils.version import LooseVersion # pylint: disable=import-error,no-name-in-module from copy import deepcopy import logging import random import string # Import Salt Testing libs from salttesting.unit import skipIf, TestCase from salttesting.mock import ( MagicMock, NO_MOCK, NO_MOCK_REASON, patch ) from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import Salt libs import salt.config import salt.loader from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin # pylint: disable=import-error,no-name-in-module,unused-import from unit.modules.boto_s3_bucket_test import BotoS3BucketTestCaseMixin # Import 3rd-party libs try: import boto import boto3 from botocore.exceptions import ClientError HAS_BOTO = True except ImportError: HAS_BOTO = False # pylint: enable=import-error,no-name-in-module,unused-import # the boto_s3_bucket module relies on the connect_to_region() method # which was added in boto 2.8.0 # https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12 required_boto3_version = '1.2.1' log = logging.getLogger(__name__) opts = salt.config.DEFAULT_MINION_OPTS context = {} utils = salt.loader.utils(opts, whitelist=['boto3'], context=context) serializers = salt.loader.serializers(opts) funcs = salt.loader.minion_mods(opts, context=context, utils=utils, whitelist=['boto_s3_bucket']) salt_states = salt.loader.states(opts=opts, functions=funcs, utils=utils, whitelist=['boto_s3_bucket'], serializers=serializers) def _has_required_boto(): ''' Returns True/False boolean depending on if Boto is installed and correct version. 
''' if not HAS_BOTO: return False elif LooseVersion(boto3.__version__) < LooseVersion(required_boto3_version): return False else: return True if _has_required_boto(): region = 'us-east-1' access_key = 'GKTADJGHEIQSXMKKRBJ08H' secret_key = 'askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs' conn_parameters = {'region': region, 'key': access_key, 'keyid': secret_key, 'profile': {}} error_message = 'An error occurred (101) when calling the {0} operation: Test-defined error' not_found_error = ClientError({ 'Error': { 'Code': '404', 'Message': "Test-defined error" } }, 'msg') error_content = { 'Error': { 'Code': 101, 'Message': "Test-defined error" } } list_ret = { 'Buckets': [{ 'Name': 'mybucket', 'CreationDate': None }], 'Owner': { 'Type': 'CanonicalUser', 'DisplayName': 'testuser', 'ID': '111111222222' }, 'ResponseMetadata': {'Key': 'Value'} } config_in = { 'LocationConstraint': 'EU', 'ACL': { 'ACL': 'public-read' }, 'CORSRules': [{ 'AllowedMethods': ["GET"], 'AllowedOrigins': ["*"], }], 'LifecycleConfiguration': [{ 'Expiration': { 'Days': 1 }, 'Prefix': 'prefix', 'Status': 'Enabled', 'ID': 'asdfghjklpoiuytrewq' }], 'Logging': { 'TargetBucket': 'my-bucket', 'TargetPrefix': 'prefix' }, 'NotificationConfiguration': { 'LambdaFunctionConfigurations': [{ 'LambdaFunctionArn': 'arn:aws:lambda:us-east-1:111111222222:function:my-function', 'Id': 'zxcvbnmlkjhgfdsa', 'Events': ["s3:ObjectCreated:*"], 'Filter': { 'Key': { 'FilterRules': [{ 'Name': 'prefix', 'Value': 'string' }] } } }] }, 'Policy': { 'Version': "2012-10-17", 'Statement': [{ 'Sid': "", 'Effect': "Allow", 'Principal': { 'AWS': "arn:aws:iam::111111222222:root" }, 'Action': "s3:PutObject", 'Resource': "arn:aws:s3:::my-bucket/*" }] }, 'Replication': { 'Role': 'arn:aws:iam::11111222222:my-role', 'Rules': [{ 'ID': "r1", 'Prefix': "prefix", 'Status': "Enabled", 'Destination': { 'Bucket': "arn:aws:s3:::my-bucket" } }] }, 'RequestPayment': { 'Payer': 'Requester' }, 'Tagging': { 'a': 'b', 'c': 'd' }, 'Versioning': { 'Status': 
'Enabled' }, 'Website': { 'ErrorDocument': { 'Key': 'error.html' }, 'IndexDocument': { 'Suffix': 'index.html' } } } config_ret = { 'get_bucket_acl': { 'Grants': [{ 'Grantee': { 'DisplayName': 'testuser', 'ID': '111111222222' }, 'Permission': 'FULL_CONTROL' }, { 'Grantee': { 'URI': 'http://acs.amazonaws.com/groups/global/AllUsers' }, 'Permission': 'READ' }], 'Owner': { 'DisplayName': 'testuser', 'ID': '111111222222' } }, 'get_bucket_cors': { 'CORSRules': [{ 'AllowedMethods': ["GET"], 'AllowedOrigins': ["*"], }] }, 'get_bucket_lifecycle_configuration': { 'Rules': [{ 'Expiration': { 'Days': 1 }, 'Prefix': 'prefix', 'Status': 'Enabled', 'ID': 'asdfghjklpoiuytrewq' }] }, 'get_bucket_location': { 'LocationConstraint': 'EU' }, 'get_bucket_logging': { 'LoggingEnabled': { 'TargetBucket': 'my-bucket', 'TargetPrefix': 'prefix' } }, 'get_bucket_notification_configuration': { 'LambdaFunctionConfigurations': [{ 'LambdaFunctionArn': 'arn:aws:lambda:us-east-1:111111222222:function:my-function', 'Id': 'zxcvbnmlkjhgfdsa', 'Events': ["s3:ObjectCreated:*"], 'Filter': { 'Key': { 'FilterRules': [{ 'Name': 'prefix', 'Value': 'string' }] } } }] }, 'get_bucket_policy': { 'Policy': '{"Version":"2012-10-17","Statement":[{"Sid":"","Effect":"Allow","Principal":{"AWS":"arn:aws:iam::111111222222:root"},"Action":"s3:PutObject","Resource":"arn:aws:s3:::my-bucket/*"}]}' }, 'get_bucket_replication': { 'ReplicationConfiguration': { 'Role': 'arn:aws:iam::11111222222:my-role', 'Rules': [{ 'ID': "r1", 'Prefix': "prefix", 'Status': "Enabled", 'Destination': { 'Bucket': "arn:aws:s3:::my-bucket" } }] } }, 'get_bucket_request_payment': {'Payer': 'Requester'}, 'get_bucket_tagging': { 'TagSet': [{ 'Key': 'c', 'Value': 'd' }, { 'Key': 'a', 'Value': 'b', }] }, 'get_bucket_versioning': { 'Status': 'Enabled' }, 'get_bucket_website': { 'ErrorDocument': { 'Key': 'error.html' }, 'IndexDocument': { 'Suffix': 'index.html' } } } bucket_ret = { 'Location': 'EU' } class BotoS3BucketStateTestCaseBase(TestCase): conn = 
None # Set up MagicMock to replace the boto3 session def setUp(self): context.clear() # connections keep getting cached from prior tests, can't find the # correct context object to clear it. So randomize the cache key, to prevent any # cache hits conn_parameters['key'] = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(50)) self.patcher = patch('boto3.session.Session') self.addCleanup(self.patcher.stop) mock_session = self.patcher.start() session_instance = mock_session.return_value self.conn = MagicMock() session_instance.client.return_value = self.conn @skipIf(HAS_BOTO is False, 'The boto module must be installed.') @skipIf(_has_required_boto() is False, 'The boto3 module must be greater than' ' or equal to version {0}' .format(required_boto3_version)) @skipIf(NO_MOCK, NO_MOCK_REASON) class BotoS3BucketTestCase(BotoS3BucketStateTestCaseBase, BotoS3BucketTestCaseMixin): ''' TestCase for salt.modules.boto_s3_bucket state.module ''' def test_present_when_bucket_does_not_exist(self): ''' Tests present on a bucket that does not exist. 
''' self.conn.head_bucket.side_effect = [not_found_error, None] self.conn.list_buckets.return_value = deepcopy(list_ret) self.conn.create_bucket.return_value = bucket_ret for key, value in config_ret.iteritems(): getattr(self.conn, key).return_value = deepcopy(value) with patch.dict(funcs, {'boto_iam.get_account_id': MagicMock(return_value='111111222222')}): result = salt_states['boto_s3_bucket.present']( 'bucket present', Bucket='testbucket', **config_in ) self.assertTrue(result['result']) self.assertEqual(result['changes']['new']['bucket']['Location'], config_ret['get_bucket_location']) def test_present_when_bucket_exists_no_mods(self): self.conn.list_buckets.return_value = deepcopy(list_ret) for key, value in config_ret.iteritems(): getattr(self.conn, key).return_value = deepcopy(value) with patch.dict(funcs, {'boto_iam.get_account_id': MagicMock(return_value='111111222222')}): result = salt_states['boto_s3_bucket.present']( 'bucket present', Bucket='testbucket', **config_in ) self.assertTrue(result['result']) self.assertEqual(result['changes'], {}) def test_present_when_bucket_exists_all_mods(self): self.conn.list_buckets.return_value = deepcopy(list_ret) for key, value in config_ret.iteritems(): getattr(self.conn, key).return_value = deepcopy(value) with patch.dict(funcs, {'boto_iam.get_account_id': MagicMock(return_value='111111222222')}): result = salt_states['boto_s3_bucket.present']( 'bucket present', Bucket='testbucket', LocationConstraint=config_in['LocationConstraint'] ) self.assertTrue(result['result']) self.assertNotEqual(result['changes'], {}) def test_present_with_failure(self): self.conn.head_bucket.side_effect = [not_found_error, None] self.conn.list_buckets.return_value = deepcopy(list_ret) self.conn.create_bucket.side_effect = ClientError(error_content, 'create_bucket') with patch.dict(funcs, {'boto_iam.get_account_id': MagicMock(return_value='111111222222')}): result = salt_states['boto_s3_bucket.present']( 'bucket present', 
Bucket='testbucket', **config_in ) self.assertFalse(result['result']) self.assertTrue('An error occurred' in result['comment']) def test_absent_when_bucket_does_not_exist(self): ''' Tests absent on a bucket that does not exist. ''' self.conn.head_bucket.side_effect = [not_found_error, None] result = salt_states['boto_s3_bucket.absent']('test', 'mybucket') self.assertTrue(result['result']) self.assertEqual(result['changes'], {}) def test_absent_when_bucket_exists(self): result = salt_states['boto_s3_bucket.absent']('test', 'testbucket') self.assertTrue(result['result']) self.assertEqual(result['changes']['new']['bucket'], None) def test_absent_with_failure(self): self.conn.delete_bucket.side_effect = ClientError(error_content, 'delete_bucket') result = salt_states['boto_s3_bucket.absent']('test', 'testbucket') self.assertFalse(result['result']) self.assertTrue('An error occurred' in result['comment'])
{ "content_hash": "75f50bf0211fa755afd4a1ae6ff4d19c", "timestamp": "", "source": "github", "line_count": 385, "max_line_length": 199, "avg_line_length": 34.52987012987013, "alnum_prop": 0.5130886114036407, "repo_name": "stephane-martin/salt-debian-packaging", "id": "4049e9ae20c79dc954af4e0b1b9eee5a02c04ade", "size": "13340", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "salt-2016.3.2/tests/unit/states/boto_s3_bucket_test.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "13798" }, { "name": "C", "bytes": "986" }, { "name": "Groff", "bytes": "13634346" }, { "name": "HTML", "bytes": "39558" }, { "name": "Makefile", "bytes": "20902" }, { "name": "NSIS", "bytes": "22316" }, { "name": "PowerShell", "bytes": "38719" }, { "name": "Python", "bytes": "40857506" }, { "name": "SaltStack", "bytes": "58278" }, { "name": "Scheme", "bytes": "1790" }, { "name": "Shell", "bytes": "829927" }, { "name": "Tcl", "bytes": "6532" }, { "name": "TeX", "bytes": "11632" } ], "symlink_target": "" }
""" Runs the SPARQL 1.1 test suite from. """ from test.data import TEST_DATA_DIR from test.utils import ensure_suffix from test.utils.dawg_manifest import MarksDictType, params_from_sources from test.utils.iri import URIMapper from test.utils.sparql_checker import ( SKIP_TYPES, SPARQLEntry, check_entry, ctx_configure_rdflib, ) from typing import Generator import pytest from pytest import MonkeyPatch REMOTE_BASE_IRI = "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/" LOCAL_BASE_DIR = TEST_DATA_DIR / "suites/w3c/sparql11/" MAPPER = URIMapper.from_mappings( (REMOTE_BASE_IRI, ensure_suffix(LOCAL_BASE_DIR.as_uri(), "/")), ) MARK_DICT: MarksDictType = { f"{REMOTE_BASE_IRI}aggregates/manifest#agg-err-01": pytest.mark.xfail( reason="Error in AVG should return no binding but it does." ), f"{REMOTE_BASE_IRI}aggregates/manifest#agg08": pytest.mark.xfail( reason="Accepts invalid query." ), f"{REMOTE_BASE_IRI}aggregates/manifest#agg09": pytest.mark.xfail( reason="Accepts invalid query." ), f"{REMOTE_BASE_IRI}aggregates/manifest#agg10": pytest.mark.xfail( reason="Accepts invalid query." ), f"{REMOTE_BASE_IRI}aggregates/manifest#agg11": pytest.mark.xfail( reason="Accepts invalid query." ), f"{REMOTE_BASE_IRI}aggregates/manifest#agg12": pytest.mark.xfail( reason="Accepts invalid query." 
), f"{REMOTE_BASE_IRI}delete/manifest#dawg-delete-using-02a": pytest.mark.xfail( reason="known issue" ), f"{REMOTE_BASE_IRI}delete/manifest#dawg-delete-using-06a": pytest.mark.xfail( reason="known issue" ), f"{REMOTE_BASE_IRI}entailment/manifest#paper-sparqldl-Q1-rdfs": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#paper-sparqldl-Q1": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#paper-sparqldl-Q2": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#paper-sparqldl-Q3": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#paper-sparqldl-Q4": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#parent10": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#parent3": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#parent4": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#parent5": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#parent6": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#parent7": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#parent8": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#parent9": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdf01": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdfs01": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdfs02": pytest.mark.xfail( reason="entailment not implemented" ), 
f"{REMOTE_BASE_IRI}entailment/manifest#rdfs03": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdfs04": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdfs05": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdfs06": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdfs07": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdfs09": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdfs10": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#rdfs11": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#simple1": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#simple2": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#simple3": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#simple4": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#simple5": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#simple6": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#simple7": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#simple8": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#sparqldl-02": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#sparqldl-03": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#sparqldl-10": pytest.mark.xfail( 
reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#sparqldl-11": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#sparqldl-12": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}entailment/manifest#sparqldl-13": pytest.mark.xfail( reason="entailment not implemented" ), f"{REMOTE_BASE_IRI}functions/manifest#strdt01": pytest.mark.xfail( reason="Reason for test failure is not clear." ), f"{REMOTE_BASE_IRI}functions/manifest#strdt03": pytest.mark.xfail( reason="Reason for test failure is not clear." ), f"{REMOTE_BASE_IRI}grouping/manifest#group06": pytest.mark.xfail( reason="Accepts invalid query." ), f"{REMOTE_BASE_IRI}grouping/manifest#group07": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}property-path/manifest#pp37": pytest.mark.xfail( reason="RDFLib produces one extra row" ), f"{REMOTE_BASE_IRI}service/manifest#service1": pytest.mark.skip( reason="need custom handling" ), f"{REMOTE_BASE_IRI}service/manifest#service2": pytest.mark.skip( reason="need custom handling" ), f"{REMOTE_BASE_IRI}service/manifest#service3": pytest.mark.skip( reason="need custom handling" ), f"{REMOTE_BASE_IRI}service/manifest#service4a": pytest.mark.skip( reason="need custom handling" ), f"{REMOTE_BASE_IRI}service/manifest#service5": pytest.mark.skip( reason="test not supported" ), f"{REMOTE_BASE_IRI}service/manifest#service6": pytest.mark.skip( reason="need custom handling" ), f"{REMOTE_BASE_IRI}service/manifest#service7": pytest.mark.skip( reason="test not supported" ), f"{REMOTE_BASE_IRI}syntax-query/manifest#test_43": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-query/manifest#test_44": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-query/manifest#test_45": pytest.mark.xfail( reason="Parses sucessfully instead of failing." 
), f"{REMOTE_BASE_IRI}syntax-query/manifest#test_60": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-query/manifest#test_61a": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-query/manifest#test_62a": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-query/manifest#test_65": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-update-1/manifest#test_43": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-update-1/manifest#test_44": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-update-1/manifest#test_50": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-update-1/manifest#test_51": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-update-1/manifest#test_52": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), f"{REMOTE_BASE_IRI}syntax-update-1/manifest#test_54": pytest.mark.xfail( reason="Parses sucessfully instead of failing." ), } @pytest.fixture(scope="module", autouse=True) def configure_rdflib() -> Generator[None, None, None]: with ctx_configure_rdflib(): yield None @pytest.mark.parametrize( ["manifest_entry"], params_from_sources( MAPPER, SPARQLEntry, LOCAL_BASE_DIR / "manifest-all.ttl", mark_dict=MARK_DICT, markers=( lambda entry: pytest.mark.skip(reason="tester not implemented") if entry.type in SKIP_TYPES else None, ), report_prefix="rdflib_w3c_sparql11", ), ) def test_entry_sparql11(monkeypatch: MonkeyPatch, manifest_entry: SPARQLEntry) -> None: check_entry(monkeypatch, manifest_entry)
{ "content_hash": "3bce9e6b5bd530fbd8aa7a809d8709d4", "timestamp": "", "source": "github", "line_count": 263, "max_line_length": 87, "avg_line_length": 39.3041825095057, "alnum_prop": 0.6775660249588855, "repo_name": "RDFLib/rdflib", "id": "6bfcb31f1ba6a4c9eb4b8734d3dbe2588d846760", "size": "10337", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "test/test_w3c_spec/test_sparql11_w3c.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Dockerfile", "bytes": "635" }, { "name": "HTML", "bytes": "41303" }, { "name": "Python", "bytes": "2828721" }, { "name": "Ruby", "bytes": "31777" }, { "name": "Shell", "bytes": "6030" }, { "name": "XSLT", "bytes": "1588" } ], "symlink_target": "" }
package org.gradle.api.internal.artifacts.ivyservice.resolveengine.result; import org.gradle.api.artifacts.component.ComponentSelector; import org.gradle.api.artifacts.result.ResolutionResult; import org.gradle.api.artifacts.result.ResolvedComponentResult; import org.gradle.api.internal.artifacts.ImmutableModuleIdentifierFactory; import org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.ComponentResult; import org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.DependencyGraphComponent; import org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.DependencyGraphEdge; import org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.DependencyGraphNode; import org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.DependencyGraphSelector; import org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.DependencyGraphVisitor; import org.gradle.api.internal.artifacts.ivyservice.resolveengine.graph.DependencyResult; import org.gradle.api.internal.artifacts.result.DefaultResolutionResult; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; import org.gradle.cache.internal.BinaryStore; import org.gradle.cache.internal.Store; import org.gradle.internal.Factory; import org.gradle.internal.resolve.ModuleVersionResolveException; import org.gradle.internal.serialize.Decoder; import org.gradle.internal.serialize.Encoder; import org.gradle.internal.time.Time; import org.gradle.internal.time.Timer; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import static org.gradle.internal.UncheckedException.throwAsUncheckedException; public class StreamingResolutionResultBuilder implements DependencyGraphVisitor { private final static byte ROOT = 1; private final static byte COMPONENT = 2; private final static byte SELECTOR = 4; private final static byte DEPENDENCY = 5; private 
final Map<ComponentSelector, ModuleVersionResolveException> failures = new HashMap<ComponentSelector, ModuleVersionResolveException>(); private final BinaryStore store; private final ComponentResultSerializer componentResultSerializer; private final Store<ResolvedComponentResult> cache; private final ComponentSelectorSerializer componentSelectorSerializer = new ComponentSelectorSerializer(); private final DependencyResultSerializer dependencyResultSerializer = new DependencyResultSerializer(); private final Set<Long> visitedComponents = new HashSet<Long>(); public StreamingResolutionResultBuilder(BinaryStore store, Store<ResolvedComponentResult> cache, ImmutableModuleIdentifierFactory moduleIdentifierFactory) { this.componentResultSerializer = new ComponentResultSerializer(moduleIdentifierFactory); this.store = store; this.cache = cache; } public ResolutionResult complete() { BinaryStore.BinaryData data = store.done(); RootFactory rootSource = new RootFactory(data, failures, cache, componentSelectorSerializer, dependencyResultSerializer, componentResultSerializer); return new DefaultResolutionResult(rootSource); } @Override public void start(final DependencyGraphNode root) { } @Override public void finish(final DependencyGraphNode root) { store.write(new BinaryStore.WriteAction() { public void write(Encoder encoder) throws IOException { encoder.writeByte(ROOT); encoder.writeSmallLong(root.getOwner().getResultId()); } }); } @Override public void visitNode(DependencyGraphNode node) { final DependencyGraphComponent component = node.getOwner(); if (visitedComponents.add(component.getResultId())) { store.write(new BinaryStore.WriteAction() { public void write(Encoder encoder) throws IOException { encoder.writeByte(COMPONENT); componentResultSerializer.write(encoder, component); } }); } } @Override public void visitSelector(final DependencyGraphSelector selector) { store.write(new BinaryStore.WriteAction() { @Override public void write(Encoder encoder) throws IOException 
{ encoder.writeByte(SELECTOR); encoder.writeSmallLong(selector.getResultId()); componentSelectorSerializer.write(encoder, selector.getRequested()); } }); } @Override public void visitEdges(DependencyGraphNode node) { final Long fromComponent = node.getOwner().getResultId(); final Set<? extends DependencyGraphEdge> dependencies = node.getOutgoingEdges(); if (!dependencies.isEmpty()) { store.write(new BinaryStore.WriteAction() { public void write(Encoder encoder) throws IOException { encoder.writeByte(DEPENDENCY); encoder.writeSmallLong(fromComponent); encoder.writeSmallInt(dependencies.size()); for (DependencyGraphEdge dependency : dependencies) { dependencyResultSerializer.write(encoder, dependency); if (dependency.getFailure() != null) { //by keying the failures only by 'requested' we lose some precision //at edge case we'll lose info about a different exception if we have different failure for the same requested version failures.put(dependency.getRequested(), dependency.getFailure()); } } } }); } } private static class RootFactory implements Factory<ResolvedComponentResult> { private final static Logger LOG = Logging.getLogger(RootFactory.class); private final ComponentResultSerializer componentResultSerializer; private final BinaryStore.BinaryData data; private final Map<ComponentSelector, ModuleVersionResolveException> failures; private final Store<ResolvedComponentResult> cache; private final Object lock = new Object(); private final ComponentSelectorSerializer componentSelectorSerializer; private final DependencyResultSerializer dependencyResultSerializer; RootFactory(BinaryStore.BinaryData data, Map<ComponentSelector, ModuleVersionResolveException> failures, Store<ResolvedComponentResult> cache, ComponentSelectorSerializer componentSelectorSerializer, DependencyResultSerializer dependencyResultSerializer, ComponentResultSerializer componentResultSerializer) { this.data = data; this.failures = failures; this.cache = cache; this.componentResultSerializer = 
componentResultSerializer; this.componentSelectorSerializer = componentSelectorSerializer; this.dependencyResultSerializer = dependencyResultSerializer; } public ResolvedComponentResult create() { synchronized (lock) { return cache.load(new Factory<ResolvedComponentResult>() { public ResolvedComponentResult create() { try { return data.read(new BinaryStore.ReadAction<ResolvedComponentResult>() { public ResolvedComponentResult read(Decoder decoder) throws IOException { return deserialize(decoder); } }); } finally { try { data.close(); } catch (IOException e) { throw throwAsUncheckedException(e); } } } }); } } private ResolvedComponentResult deserialize(Decoder decoder) { int valuesRead = 0; byte type = -1; Timer clock = Time.startTimer(); try { DefaultResolutionResultBuilder builder = new DefaultResolutionResultBuilder(); Map<Long, ComponentSelector> selectors = new HashMap<Long, ComponentSelector>(); while (true) { type = decoder.readByte(); valuesRead++; switch (type) { case ROOT: // Last entry, complete the result Long rootId = decoder.readSmallLong(); ResolvedComponentResult root = builder.complete(rootId).getRoot(); LOG.debug("Loaded resolution results ({}) from {}", clock.getElapsed(), data); return root; case COMPONENT: ComponentResult component = componentResultSerializer.read(decoder); builder.visitComponent(component); break; case SELECTOR: Long id = decoder.readSmallLong(); ComponentSelector selector = componentSelectorSerializer.read(decoder); selectors.put(id, selector); break; case DEPENDENCY: Long fromId = decoder.readSmallLong(); int size = decoder.readSmallInt(); List<DependencyResult> deps = new ArrayList<DependencyResult>(size); for (int i = 0; i < size; i++) { deps.add(dependencyResultSerializer.read(decoder, selectors, failures)); } builder.visitOutgoingEdges(fromId, deps); break; default: throw new IOException("Unknown value type read from stream: " + type); } } } catch (IOException e) { throw new RuntimeException("Problems loading the 
resolution results (" + clock.getElapsed() + "). " + "Read " + valuesRead + " values, last was: " + type, e); } } } }
{ "content_hash": "33b49d084ecc7f7a9d84fb1c8dde1eab", "timestamp": "", "source": "github", "line_count": 214, "max_line_length": 317, "avg_line_length": 49.345794392523366, "alnum_prop": 0.6321022727272727, "repo_name": "gstevey/gradle", "id": "48a48f5618a6e166dda3d11218e465fffca74bb0", "size": "11175", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "subprojects/dependency-management/src/main/java/org/gradle/api/internal/artifacts/ivyservice/resolveengine/result/StreamingResolutionResultBuilder.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "277" }, { "name": "Brainfuck", "bytes": "54" }, { "name": "C", "bytes": "98528" }, { "name": "C++", "bytes": "1806123" }, { "name": "CSS", "bytes": "47413" }, { "name": "CoffeeScript", "bytes": "620" }, { "name": "GAP", "bytes": "212" }, { "name": "Gherkin", "bytes": "191" }, { "name": "Groovy", "bytes": "18963577" }, { "name": "HTML", "bytes": "28452" }, { "name": "Java", "bytes": "19713820" }, { "name": "JavaScript", "bytes": "204280" }, { "name": "Kotlin", "bytes": "4763" }, { "name": "Objective-C", "bytes": "652" }, { "name": "Objective-C++", "bytes": "441" }, { "name": "Python", "bytes": "57" }, { "name": "Ruby", "bytes": "1087" }, { "name": "Scala", "bytes": "23713" }, { "name": "Shell", "bytes": "6858" }, { "name": "XSLT", "bytes": "35797" } ], "symlink_target": "" }
namespace Google.Cloud.Video.Stitcher.V1.Snippets { // [START videostitcher_v1_generated_VideoStitcherService_CreateVodSession_async_flattened_resourceNames] using Google.Api.Gax.ResourceNames; using Google.Cloud.Video.Stitcher.V1; using System.Threading.Tasks; public sealed partial class GeneratedVideoStitcherServiceClientSnippets { /// <summary>Snippet for CreateVodSessionAsync</summary> /// <remarks> /// This snippet has been automatically generated for illustrative purposes only. /// It may require modifications to work in your environment. /// </remarks> public async Task CreateVodSessionResourceNamesAsync() { // Create client VideoStitcherServiceClient videoStitcherServiceClient = await VideoStitcherServiceClient.CreateAsync(); // Initialize request argument(s) LocationName parent = LocationName.FromProjectLocation("[PROJECT]", "[LOCATION]"); VodSession vodSession = new VodSession(); // Make the request VodSession response = await videoStitcherServiceClient.CreateVodSessionAsync(parent, vodSession); } } // [END videostitcher_v1_generated_VideoStitcherService_CreateVodSession_async_flattened_resourceNames] }
{ "content_hash": "7d5f1cd521affb2b90fa1fc13e5d9613", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 115, "avg_line_length": 48.592592592592595, "alnum_prop": 0.7088414634146342, "repo_name": "jskeet/gcloud-dotnet", "id": "b487042fb808a081534b16272e8463c2c2e18eb6", "size": "1934", "binary": false, "copies": "2", "ref": "refs/heads/bq-migration", "path": "apis/Google.Cloud.Video.Stitcher.V1/Google.Cloud.Video.Stitcher.V1.GeneratedSnippets/VideoStitcherServiceClient.CreateVodSessionResourceNamesAsyncSnippet.g.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1725" }, { "name": "C#", "bytes": "1829733" } ], "symlink_target": "" }
namespace NBug.Core.UI.WinForms { using NBug.Core.UI.WinForms.Panels; partial class Full { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Windows Form Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.components = new System.ComponentModel.Container(); System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(Full)); this.mainTabs = new System.Windows.Forms.TabControl(); this.generalTabPage = new System.Windows.Forms.TabPage(); this.warningLabel = new System.Windows.Forms.Label(); this.exceptionTypeLabel = new System.Windows.Forms.Label(); this.exceptionTextBox = new System.Windows.Forms.TextBox(); this.descriptionTextBox = new System.Windows.Forms.TextBox(); this.errorDescriptionLabel = new System.Windows.Forms.Label(); this.clrTextBox = new System.Windows.Forms.TextBox(); this.clrLabel = new System.Windows.Forms.Label(); this.dateTimeTextBox = new System.Windows.Forms.TextBox(); this.dateTimeLabel = new System.Windows.Forms.Label(); this.nbugTextBox = new System.Windows.Forms.TextBox(); this.nbugLabel = new System.Windows.Forms.Label(); this.applicationTextBox = new System.Windows.Forms.TextBox(); this.applicationLabel = new System.Windows.Forms.Label(); this.targetSiteTextBox = new System.Windows.Forms.TextBox(); this.targetSiteLabel = new System.Windows.Forms.Label(); this.exceptionMessageTextBox = new System.Windows.Forms.TextBox(); this.warningPictureBox = new 
System.Windows.Forms.PictureBox(); this.exceptionTabPage = new System.Windows.Forms.TabPage(); this.exceptionDetails = new NBug.Core.UI.WinForms.Panels.ExceptionDetails(); this.reportContentsTabPage = new System.Windows.Forms.TabPage(); this.reportPreviewTextBox = new System.Windows.Forms.TextBox(); this.previewLabel = new System.Windows.Forms.Label(); this.contentsLabel = new System.Windows.Forms.Label(); this.reportContentsListView = new System.Windows.Forms.ListView(); this.nameColumnHeader = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader())); this.descriptionColumnHeader = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader())); this.sizeColumnHeader = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader())); this.sendAndQuitButton = new System.Windows.Forms.Button(); this.quitButton = new System.Windows.Forms.Button(); this.toolTip = new System.Windows.Forms.ToolTip(this.components); this.mainTabs.SuspendLayout(); this.generalTabPage.SuspendLayout(); ((System.ComponentModel.ISupportInitialize)(this.warningPictureBox)).BeginInit(); this.exceptionTabPage.SuspendLayout(); this.reportContentsTabPage.SuspendLayout(); this.SuspendLayout(); // // mainTabs // this.mainTabs.Controls.Add(this.generalTabPage); this.mainTabs.Controls.Add(this.exceptionTabPage); this.mainTabs.Controls.Add(this.reportContentsTabPage); this.mainTabs.Location = new System.Drawing.Point(9, 6); this.mainTabs.Margin = new System.Windows.Forms.Padding(0); this.mainTabs.Name = "mainTabs"; this.mainTabs.SelectedIndex = 0; this.mainTabs.Size = new System.Drawing.Size(475, 361); this.mainTabs.TabIndex = 0; // // generalTabPage // this.generalTabPage.Controls.Add(this.warningLabel); this.generalTabPage.Controls.Add(this.exceptionTypeLabel); this.generalTabPage.Controls.Add(this.exceptionTextBox); this.generalTabPage.Controls.Add(this.descriptionTextBox); this.generalTabPage.Controls.Add(this.errorDescriptionLabel); 
this.generalTabPage.Controls.Add(this.clrTextBox); this.generalTabPage.Controls.Add(this.clrLabel); this.generalTabPage.Controls.Add(this.dateTimeTextBox); this.generalTabPage.Controls.Add(this.dateTimeLabel); this.generalTabPage.Controls.Add(this.nbugTextBox); this.generalTabPage.Controls.Add(this.nbugLabel); this.generalTabPage.Controls.Add(this.applicationTextBox); this.generalTabPage.Controls.Add(this.applicationLabel); this.generalTabPage.Controls.Add(this.targetSiteTextBox); this.generalTabPage.Controls.Add(this.targetSiteLabel); this.generalTabPage.Controls.Add(this.exceptionMessageTextBox); this.generalTabPage.Controls.Add(this.warningPictureBox); this.generalTabPage.Location = new System.Drawing.Point(4, 22); this.generalTabPage.Name = "generalTabPage"; this.generalTabPage.Padding = new System.Windows.Forms.Padding(3); this.generalTabPage.Size = new System.Drawing.Size(467, 335); this.generalTabPage.TabIndex = 0; this.generalTabPage.Text = "General"; this.generalTabPage.UseVisualStyleBackColor = true; // // warningLabel // this.warningLabel.Location = new System.Drawing.Point(64, 12); this.warningLabel.Name = "warningLabel"; this.warningLabel.Size = new System.Drawing.Size(388, 43); this.warningLabel.TabIndex = 18; this.warningLabel.Text = resources.GetString("warningLabel.Text"); // // exceptionTypeLabel // this.exceptionTypeLabel.Image = global::NBug.Properties.Resources.NBug_Icon_PNG_16; this.exceptionTypeLabel.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft; this.exceptionTypeLabel.Location = new System.Drawing.Point(21, 69); this.exceptionTypeLabel.Name = "exceptionTypeLabel"; this.exceptionTypeLabel.Size = new System.Drawing.Size(106, 16); this.exceptionTypeLabel.TabIndex = 17; this.exceptionTypeLabel.Text = "Exception Type:"; this.exceptionTypeLabel.TextAlign = System.Drawing.ContentAlignment.MiddleRight; // // exceptionTextBox // this.exceptionTextBox.Location = new System.Drawing.Point(135, 68); this.exceptionTextBox.Name = 
"exceptionTextBox"; this.exceptionTextBox.Size = new System.Drawing.Size(317, 20); this.exceptionTextBox.TabIndex = 2; // // descriptionTextBox // this.descriptionTextBox.Location = new System.Drawing.Point(13, 267); this.descriptionTextBox.Multiline = true; this.descriptionTextBox.Name = "descriptionTextBox"; this.descriptionTextBox.Size = new System.Drawing.Size(439, 60); this.descriptionTextBox.TabIndex = 15; // // errorDescriptionLabel // this.errorDescriptionLabel.AutoSize = true; this.errorDescriptionLabel.Location = new System.Drawing.Point(10, 251); this.errorDescriptionLabel.Name = "errorDescriptionLabel"; this.errorDescriptionLabel.Size = new System.Drawing.Size(315, 13); this.errorDescriptionLabel.TabIndex = 14; this.errorDescriptionLabel.Text = "Please add a brief description of how we can reproduce the error:"; // // clrTextBox // this.clrTextBox.Location = new System.Drawing.Point(307, 216); this.clrTextBox.Name = "clrTextBox"; this.clrTextBox.Size = new System.Drawing.Size(145, 20); this.clrTextBox.TabIndex = 13; // // clrLabel // this.clrLabel.AutoSize = true; this.clrLabel.Location = new System.Drawing.Point(259, 219); this.clrLabel.Name = "clrLabel"; this.clrLabel.Size = new System.Drawing.Size(31, 13); this.clrLabel.TabIndex = 12; this.clrLabel.Text = "CLR:"; // // dateTimeTextBox // this.dateTimeTextBox.Location = new System.Drawing.Point(78, 216); this.dateTimeTextBox.Name = "dateTimeTextBox"; this.dateTimeTextBox.Size = new System.Drawing.Size(145, 20); this.dateTimeTextBox.TabIndex = 11; // // dateTimeLabel // this.dateTimeLabel.AutoSize = true; this.dateTimeLabel.Location = new System.Drawing.Point(10, 219); this.dateTimeLabel.Name = "dateTimeLabel"; this.dateTimeLabel.Size = new System.Drawing.Size(61, 13); this.dateTimeLabel.TabIndex = 10; this.dateTimeLabel.Text = "Date/Time:"; // // nbugTextBox // this.nbugTextBox.Location = new System.Drawing.Point(307, 182); this.nbugTextBox.Name = "nbugTextBox"; this.nbugTextBox.Size = new 
System.Drawing.Size(145, 20); this.nbugTextBox.TabIndex = 9; // // nbugLabel // this.nbugLabel.AutoSize = true; this.nbugLabel.Location = new System.Drawing.Point(259, 185); this.nbugLabel.Name = "nbugLabel"; this.nbugLabel.Size = new System.Drawing.Size(37, 13); this.nbugLabel.TabIndex = 8; this.nbugLabel.Text = "NBug:"; // // applicationTextBox // this.applicationTextBox.Location = new System.Drawing.Point(78, 182); this.applicationTextBox.Name = "applicationTextBox"; this.applicationTextBox.Size = new System.Drawing.Size(145, 20); this.applicationTextBox.TabIndex = 7; // // applicationLabel // this.applicationLabel.AutoSize = true; this.applicationLabel.Location = new System.Drawing.Point(10, 185); this.applicationLabel.Name = "applicationLabel"; this.applicationLabel.Size = new System.Drawing.Size(62, 13); this.applicationLabel.TabIndex = 6; this.applicationLabel.Text = "Application:"; // // targetSiteTextBox // this.targetSiteTextBox.Location = new System.Drawing.Point(78, 148); this.targetSiteTextBox.Name = "targetSiteTextBox"; this.targetSiteTextBox.Size = new System.Drawing.Size(374, 20); this.targetSiteTextBox.TabIndex = 5; // // targetSiteLabel // this.targetSiteLabel.AutoSize = true; this.targetSiteLabel.Location = new System.Drawing.Point(10, 151); this.targetSiteLabel.Name = "targetSiteLabel"; this.targetSiteLabel.Size = new System.Drawing.Size(62, 13); this.targetSiteLabel.TabIndex = 4; this.targetSiteLabel.Text = "Target Site:"; // // exceptionMessageTextBox // this.exceptionMessageTextBox.Location = new System.Drawing.Point(13, 98); this.exceptionMessageTextBox.Multiline = true; this.exceptionMessageTextBox.Name = "exceptionMessageTextBox"; this.exceptionMessageTextBox.ScrollBars = System.Windows.Forms.ScrollBars.Vertical; this.exceptionMessageTextBox.Size = new System.Drawing.Size(439, 35); this.exceptionMessageTextBox.TabIndex = 3; // // warningPictureBox // this.warningPictureBox.Location = new System.Drawing.Point(16, 15); 
this.warningPictureBox.Name = "warningPictureBox"; this.warningPictureBox.Size = new System.Drawing.Size(32, 32); this.warningPictureBox.TabIndex = 1; this.warningPictureBox.TabStop = false; // // exceptionTabPage // this.exceptionTabPage.Controls.Add(this.exceptionDetails); this.exceptionTabPage.Location = new System.Drawing.Point(4, 22); this.exceptionTabPage.Name = "exceptionTabPage"; this.exceptionTabPage.Padding = new System.Windows.Forms.Padding(3); this.exceptionTabPage.Size = new System.Drawing.Size(467, 335); this.exceptionTabPage.TabIndex = 2; this.exceptionTabPage.Text = "Exception"; this.exceptionTabPage.UseVisualStyleBackColor = true; // // exceptionDetails // this.exceptionDetails.InformationColumnWidth = 350; this.exceptionDetails.Location = new System.Drawing.Point(3, 3); this.exceptionDetails.Name = "exceptionDetails"; this.exceptionDetails.PropertyColumnWidth = 101; this.exceptionDetails.Size = new System.Drawing.Size(461, 330); this.exceptionDetails.TabIndex = 0; // // reportContentsTabPage // this.reportContentsTabPage.Controls.Add(this.reportPreviewTextBox); this.reportContentsTabPage.Controls.Add(this.previewLabel); this.reportContentsTabPage.Controls.Add(this.contentsLabel); this.reportContentsTabPage.Controls.Add(this.reportContentsListView); this.reportContentsTabPage.Location = new System.Drawing.Point(4, 22); this.reportContentsTabPage.Name = "reportContentsTabPage"; this.reportContentsTabPage.Padding = new System.Windows.Forms.Padding(3); this.reportContentsTabPage.Size = new System.Drawing.Size(467, 335); this.reportContentsTabPage.TabIndex = 3; this.reportContentsTabPage.Text = "Report Contents"; this.reportContentsTabPage.UseVisualStyleBackColor = true; this.reportContentsTabPage.Enter += new System.EventHandler(this.ReportContentsTabPage_Enter); // // reportPreviewTextBox // this.reportPreviewTextBox.Location = new System.Drawing.Point(6, 148); this.reportPreviewTextBox.Multiline = true; this.reportPreviewTextBox.Name = 
"reportPreviewTextBox"; this.reportPreviewTextBox.Size = new System.Drawing.Size(455, 172); this.reportPreviewTextBox.TabIndex = 5; // // previewLabel // this.previewLabel.AutoSize = true; this.previewLabel.Location = new System.Drawing.Point(6, 131); this.previewLabel.Name = "previewLabel"; this.previewLabel.Size = new System.Drawing.Size(48, 13); this.previewLabel.TabIndex = 4; this.previewLabel.Text = "Preview:"; // // contentsLabel // this.contentsLabel.AutoSize = true; this.contentsLabel.Location = new System.Drawing.Point(6, 7); this.contentsLabel.Name = "contentsLabel"; this.contentsLabel.Size = new System.Drawing.Size(288, 13); this.contentsLabel.TabIndex = 3; this.contentsLabel.Text = "Double-click an item to open it with the associated program."; // // reportContentsListView // this.reportContentsListView.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] { this.nameColumnHeader, this.descriptionColumnHeader, this.sizeColumnHeader}); this.reportContentsListView.Location = new System.Drawing.Point(6, 24); this.reportContentsListView.Name = "reportContentsListView"; this.reportContentsListView.Size = new System.Drawing.Size(455, 97); this.reportContentsListView.TabIndex = 0; this.reportContentsListView.UseCompatibleStateImageBehavior = false; this.reportContentsListView.View = System.Windows.Forms.View.Details; // // nameColumnHeader // this.nameColumnHeader.Text = "Name"; this.nameColumnHeader.Width = 120; // // descriptionColumnHeader // this.descriptionColumnHeader.Text = "Description"; this.descriptionColumnHeader.Width = 240; // // sizeColumnHeader // this.sizeColumnHeader.Text = "Size"; this.sizeColumnHeader.Width = 80; // // sendAndQuitButton // this.sendAndQuitButton.Image = global::NBug.Properties.Resources.Send; this.sendAndQuitButton.ImageAlign = System.Drawing.ContentAlignment.MiddleLeft; this.sendAndQuitButton.Location = new System.Drawing.Point(382, 374); this.sendAndQuitButton.Name = "sendAndQuitButton"; this.sendAndQuitButton.Size = 
new System.Drawing.Size(102, 23); this.sendAndQuitButton.TabIndex = 1; this.sendAndQuitButton.Text = "&Send and Quit"; this.sendAndQuitButton.TextAlign = System.Drawing.ContentAlignment.MiddleRight; this.sendAndQuitButton.UseVisualStyleBackColor = true; this.sendAndQuitButton.Click += new System.EventHandler(this.SendAndQuitButton_Click); // // quitButton // this.quitButton.DialogResult = System.Windows.Forms.DialogResult.Cancel; this.quitButton.Location = new System.Drawing.Point(296, 374); this.quitButton.Name = "quitButton"; this.quitButton.Size = new System.Drawing.Size(75, 23); this.quitButton.TabIndex = 2; this.quitButton.Text = "&Quit"; this.quitButton.UseVisualStyleBackColor = true; this.quitButton.Click += new System.EventHandler(this.QuitButton_Click); // // toolTip // this.toolTip.AutomaticDelay = 100; this.toolTip.UseAnimation = false; this.toolTip.UseFading = false; // // Full // this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; this.CancelButton = this.quitButton; this.ClientSize = new System.Drawing.Size(494, 403); this.Controls.Add(this.quitButton); this.Controls.Add(this.sendAndQuitButton); this.Controls.Add(this.mainTabs); this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle; this.MaximizeBox = false; this.MinimizeBox = false; this.Name = "Full"; this.SizeGripStyle = System.Windows.Forms.SizeGripStyle.Hide; this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen; this.Text = "{HostApplication} Error"; this.TopMost = true; this.mainTabs.ResumeLayout(false); this.generalTabPage.ResumeLayout(false); this.generalTabPage.PerformLayout(); ((System.ComponentModel.ISupportInitialize)(this.warningPictureBox)).EndInit(); this.exceptionTabPage.ResumeLayout(false); this.reportContentsTabPage.ResumeLayout(false); this.reportContentsTabPage.PerformLayout(); this.ResumeLayout(false); } #endregion private System.Windows.Forms.TabControl mainTabs; private 
System.Windows.Forms.TabPage generalTabPage; private System.Windows.Forms.Button sendAndQuitButton; private System.Windows.Forms.Button quitButton; private System.Windows.Forms.TabPage exceptionTabPage; private System.Windows.Forms.PictureBox warningPictureBox; private System.Windows.Forms.TextBox exceptionMessageTextBox; private System.Windows.Forms.TextBox exceptionTextBox; private System.Windows.Forms.TextBox targetSiteTextBox; private System.Windows.Forms.Label targetSiteLabel; private System.Windows.Forms.TextBox nbugTextBox; private System.Windows.Forms.Label nbugLabel; private System.Windows.Forms.TextBox applicationTextBox; private System.Windows.Forms.Label applicationLabel; private System.Windows.Forms.TextBox descriptionTextBox; private System.Windows.Forms.Label errorDescriptionLabel; private System.Windows.Forms.TextBox clrTextBox; private System.Windows.Forms.Label clrLabel; private System.Windows.Forms.TextBox dateTimeTextBox; private System.Windows.Forms.Label dateTimeLabel; private System.Windows.Forms.TabPage reportContentsTabPage; private System.Windows.Forms.TextBox reportPreviewTextBox; private System.Windows.Forms.Label previewLabel; private System.Windows.Forms.Label contentsLabel; private System.Windows.Forms.ListView reportContentsListView; private System.Windows.Forms.ColumnHeader nameColumnHeader; private System.Windows.Forms.ColumnHeader descriptionColumnHeader; private System.Windows.Forms.ColumnHeader sizeColumnHeader; private System.Windows.Forms.Label exceptionTypeLabel; private System.Windows.Forms.ToolTip toolTip; private System.Windows.Forms.Label warningLabel; private ExceptionDetails exceptionDetails; } }
{ "content_hash": "58e6771cb2e09c539e02ee0ad7e7ddaa", "timestamp": "", "source": "github", "line_count": 435, "max_line_length": 127, "avg_line_length": 44.44137931034483, "alnum_prop": 0.7300331057314298, "repo_name": "JuliusSweetland/NBug", "id": "ea8048ed34d3f33ea08a23d87d930d9b89329737", "size": "19334", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "NBug/Core/UI/WinForms/Full.Designer.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "425615" } ], "symlink_target": "" }
package com.valarhao.valarnews.module.zhihu.common; import android.content.Context; import android.support.v7.widget.CardView; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import com.valarhao.valarnews.R; import com.valarhao.valarnews.common.app.App; import com.valarhao.valarnews.common.util.GlideUtil; import com.valarhao.valarnews.common.util.Utils; import java.util.ArrayList; import java.util.List; public class RecyclerTabAdapter extends RecyclerView.Adapter<RecyclerTabAdapter.ViewHolder> { private List<RecyclerItem> mRecyclerItems; private LayoutInflater mInflater; private Context mContext; private OnItemClickListener mOnItemClickListener; public RecyclerTabAdapter(Context context) { mContext = context; mInflater = LayoutInflater.from(mContext); mRecyclerItems = new ArrayList<>(); } public void addRecyclerItem(RecyclerItem recyclerItem) { if (!mRecyclerItems.contains(recyclerItem)) { mRecyclerItems.add(recyclerItem); } } public RecyclerItem getRecyclerItem(int position) { return mRecyclerItems.get(position); } public void clear() { mRecyclerItems.clear(); } @Override public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view = mInflater.inflate(R.layout.zhihu_item_tab, parent, false); return new ViewHolder(view); } @Override public void onBindViewHolder(final ViewHolder holder, int position) { RecyclerItem recyclerItem = mRecyclerItems.get(position); holder.itemTxtTitle.setText(recyclerItem.getTitle()); //加载图片 if (recyclerItem.getImgLink() != null) { //由于ImageView和Bitmap实际大小不符,进行优化 ViewGroup.LayoutParams lp = holder.itemImg.getLayoutParams(); lp.width = (App.SCREEN_WIDTH - Utils.dp2px(mContext, 12)) / 2; lp.height = Utils.dp2px(mContext, 120); GlideUtil.load(mContext, recyclerItem.getImgLink(), holder.itemImg); } //点击item监听 if (mOnItemClickListener != null) { 
//holder.itemCard.setBackgroundResource(R.drawable.ripple); //点击水波纹效果 holder.itemCard.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { int position = holder.getLayoutPosition(); mOnItemClickListener.onItemClick(holder.itemCard, position); } }); } } @Override public int getItemCount() { return mRecyclerItems.size(); } public class ViewHolder extends RecyclerView.ViewHolder { public CardView itemCard; public ImageView itemImg; public TextView itemTxtTitle; public ViewHolder(View view) { super(view); itemCard = (CardView) view.findViewById(R.id.cardItemTab); itemImg = (ImageView) view.findViewById(R.id.imgItemTab); itemTxtTitle = (TextView) view.findViewById(R.id.txtItemTab); } } public interface OnItemClickListener { void onItemClick(View view, int position); } public void setOnItemClickListener(OnItemClickListener onItemClickListener) { mOnItemClickListener = onItemClickListener; } }
{ "content_hash": "1975e22d9145196c42dcddcab4ff771f", "timestamp": "", "source": "github", "line_count": 105, "max_line_length": 93, "avg_line_length": 33.17142857142857, "alnum_prop": 0.6770025839793282, "repo_name": "ValarHao/ValarNews", "id": "eea51647598824dc2dc686ddc62fdbbe1a84b60d", "size": "3541", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/valarhao/valarnews/module/zhihu/common/RecyclerTabAdapter.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "135177" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>qarith-stern-brocot: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.13.1 / qarith-stern-brocot - 8.14.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> qarith-stern-brocot <small> 8.14.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-05-31 19:42:23 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-05-31 19:42:23 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils conf-gmp 4 Virtual package relying on a GMP lib system installation coq 8.13.1 
Formal proof management system num 1.4 The legacy Num library for arbitrary-precision integer and rational arithmetic ocaml 4.12.1 The OCaml compiler (virtual package) ocaml-base-compiler 4.12.1 Official release 4.12.1 ocaml-config 2 OCaml Switch Configuration ocaml-options-vanilla 1 Ensure that OCaml is compiled with no special options enabled ocamlfind 1.9.3 A library manager for OCaml zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;palmskog@gmail.com&quot; homepage: &quot;https://github.com/coq-community/qarith-stern-brocot&quot; dev-repo: &quot;git+https://github.com/coq-community/qarith-stern-brocot.git&quot; bug-reports: &quot;https://github.com/coq-community/qarith-stern-brocot/issues&quot; license: &quot;LGPL-2.1-or-later&quot; synopsis: &quot;Binary rational numbers in Coq&quot; description: &quot;&quot;&quot; Development of rational numbers in Coq as finite binary lists and defining field operations on them in two different ways: strict and lazy. 
&quot;&quot;&quot; build: [make &quot;-j%{jobs}%&quot;] install: [make &quot;install&quot;] depends: [ &quot;coq&quot; {&gt;= &quot;8.14&quot; &amp; &lt; &quot;8.15~&quot;} ] tags: [ &quot;category:Mathematics/Arithmetic and Number Theory/Rational numbers&quot; &quot;category:Miscellaneous/Extracted Programs/Arithmetic&quot; &quot;keyword:rational numbers&quot; &quot;keyword:arithmetic&quot; &quot;keyword:field tactic&quot; &quot;keyword:binary lists&quot; &quot;keyword:Stern-Brocot&quot; &quot;logpath:QArithSternBrocot&quot; &quot;date:2021-10-30&quot; ] authors: [ &quot;Milad Niqui&quot; &quot;Yves Bertot&quot; ] url { src: &quot;https://github.com/coq-community/qarith-stern-brocot/archive/v8.14.0.tar.gz&quot; checksum: &quot;sha512=bc553ba930da80608c87c968016b7b8733a806aa65c510cc9834aca4933710ac21173d3691837bf09c3aba34849f320442086bf05081e22121fef97b084611b4&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-qarith-stern-brocot.8.14.0 coq.8.13.1</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.13.1). 
The following dependencies couldn&#39;t be met: - coq-qarith-stern-brocot -&gt; coq &gt;= 8.14 Your request can&#39;t be satisfied: - No available version of coq satisfies the constraints No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-qarith-stern-brocot.8.14.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "b03f46e1c2bdc92c0343d82d1787e7b2", "timestamp": "", "source": "github", "line_count": 177, "max_line_length": 159, "avg_line_length": 41.93785310734463, "alnum_prop": 0.5621716287215411, "repo_name": "coq-bench/coq-bench.github.io", "id": "b9a37aa9cad9e17ed72eba6942b51a55cf918579", "size": "7448", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.12.1-2.0.8/released/8.13.1/qarith-stern-brocot/8.14.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
/** @file precompiled.h @brief The precompiled source for sdl. @author HRYKY @version $Id: precompiled.h 373 2014-07-27 04:20:16Z hryky.private@gmail.com $ */ #ifndef PRECOMPILED_H_20140224111343134PRECOMPILED_H_ #define PRECOMPILED_H_20140224111343134PRECOMPILED_H_ #include "hryky/config.h" #include "hryky/pragma.h" #include "hryky/gl.h" #include "hryky/log.h" #include "hryky/mempool.h" #include "hryky/sdl/sdl_common.h" #include "hryky/windows.h" //------------------------------------------------------------------------------ // macro definition //------------------------------------------------------------------------------ //------------------------------------------------------------------------------ // namespace //------------------------------------------------------------------------------ namespace hryky { namespace sdl { } // namespace sdl } // namespace hryky //------------------------------------------------------------------------------ // struct declaration //------------------------------------------------------------------------------ //------------------------------------------------------------------------------ // class declaration //------------------------------------------------------------------------------ //------------------------------------------------------------------------------ // global function prototypes //------------------------------------------------------------------------------ namespace hryky { namespace sdl { } // namespace sdl } // namespace hryky //------------------------------------------------------------------------------ // global function definitions //------------------------------------------------------------------------------ #endif // PRECOMPILED_H_20140224111343134PRECOMPILED_H_ // end of file
{ "content_hash": "fd40a1056f80647b83f112ac05ea2f01", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 81, "avg_line_length": 38.276595744680854, "alnum_prop": 0.3390772651473041, "repo_name": "hiroyuki-seki/hryky-codebase", "id": "4ffd2fb1843bebc342593fc1f8d4c782c82b4ef3", "size": "1799", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/sdl/src/precompiled.h", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "80878" }, { "name": "C", "bytes": "408" }, { "name": "C++", "bytes": "5645481" }, { "name": "CMake", "bytes": "165309" }, { "name": "Common Lisp", "bytes": "96981" }, { "name": "JavaScript", "bytes": "26" }, { "name": "M4", "bytes": "1801" }, { "name": "Makefile", "bytes": "2674" }, { "name": "Ruby", "bytes": "8244" }, { "name": "Shell", "bytes": "5675" }, { "name": "Vim script", "bytes": "40988" }, { "name": "Yacc", "bytes": "119704" } ], "symlink_target": "" }
from django.conf import settings STAR_RATINGS_RANGE = getattr(settings, "STAR_RATINGS_RANGE", 5)
{ "content_hash": "15d9aaf2aa8277944490e0b7c850fa60", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 63, "avg_line_length": 24.75, "alnum_prop": 0.7676767676767676, "repo_name": "citizenline/citizenline", "id": "1df914d19f4e44417be872b6fbe3012660d769fb", "size": "99", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "star_ratings/app_settings.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3470" }, { "name": "HTML", "bytes": "22853" }, { "name": "JavaScript", "bytes": "8389" }, { "name": "Python", "bytes": "86277" }, { "name": "Ruby", "bytes": "198" } ], "symlink_target": "" }
require "spec_helper" describe "Rails::Mongoid" do before(:all) do require "rails/mongoid" end describe ".create_indexes" do let(:pattern) do "spec/app/models/**/*.rb" end let(:logger) do stub end let(:klass) do User end let(:model_paths) do [ "spec/app/models/user.rb" ] end let(:indexes) do Rails::Mongoid.create_indexes(pattern) end before do Dir.expects(:glob).with(pattern).returns(model_paths).once Logger.expects(:new).returns(logger).twice end context "with ordinary Rails models" do it "creates the indexes for the models" do klass.expects(:create_indexes).once logger.expects(:info).twice indexes end end context "with a model without indexes" do let(:model_paths) do [ "spec/app/models/account.rb" ] end let(:klass) do Account end it "does nothing" do klass.expects(:create_indexes).never indexes end end context "when an exception is raised" do it "is not swallowed" do Rails::Mongoid.expects(:determine_model).returns(klass) klass.expects(:create_indexes).raises(ArgumentError) expect { indexes }.to raise_error(ArgumentError) end end context "when index is defined on embedded model" do let(:klass) do Address end let(:model_paths) do [ "spec/app/models/address.rb" ] end before do klass.index_options = { city: {} } end it "does nothing, but logging" do klass.expects(:create_indexes).never logger.expects(:info).once indexes end end end describe ".remove_indexes" do let(:pattern) do "spec/app/models/**/*.rb" end let(:logger) do stub end let(:klass) do User end let(:model_paths) do [ "spec/app/models/user.rb" ] end before do Dir.expects(:glob).with(pattern).returns(model_paths).times(2) Logger.expects(:new).returns(logger).times(4) logger.expects(:info).times(3) end let(:indexes) do klass.collection.indexes end before :each do Rails::Mongoid.create_indexes(pattern) Rails::Mongoid.remove_indexes(pattern) end it "removes indexes from klass" do indexes.reject{ |doc| doc["name"] == "_id_" }.should be_empty end it "leaves _id index untouched" do indexes.select{ |doc| 
doc["name"] == "_id_" }.should_not be_empty end end describe ".models" do let(:pattern) do "spec/app/models/**/*.rb" end let(:logger) do stub end let(:klass) do User end let(:model_paths) do [ "spec/app/models/user.rb" ] end let(:models) do Rails::Mongoid.models(pattern) end before do Dir.expects(:glob).with(pattern).returns(model_paths).once end it "returns models which files matching the pattern" do models.should eq([klass]) end end describe ".determine_model" do let(:logger) do stub end let(:klass) do User end let(:file) do "app/models/user.rb" end let(:model) do Rails::Mongoid.send(:determine_model, file, logger) end module Twitter class Follow include Mongoid::Document end module List class Tweet include Mongoid::Document end end end context "when file is nil" do let(:file) do nil end it "returns nil" do model.should be_nil end end context "when logger is nil" do let(:logger) do nil end it "returns nil" do model.should be_nil end end context "when path is invalid" do let(:file) do "fu/bar.rb" end it "returns nil" do model.should be_nil end end context "when file is not in a subdir" do context "when file is from normal model" do it "returns klass" do model.should eq(klass) end end context "when file is in a module" do let(:klass) do Twitter::Follow end let(:file) do "app/models/follow.rb" end it "raises NameError" do logger.expects(:info) expect { model.should eq(klass) }.to raise_error(NameError) end end end context "when file is in a subdir" do context "with file from normal model" do let(:file) do "app/models/fu/user.rb" end it "returns klass" do logger.expects(:info) model.should eq(klass) end end context "when file is in a module" do let(:klass) do Twitter::Follow end let(:file) do "app/models/twitter/follow.rb" end it "returns klass in module" do model.should eq(klass) end end context "when file is in two modules" do let(:klass) do Twitter::List::Tweet end let(:file) do "app/models/twitter/list/tweet.rb" end it "returns klass in module" do model.should 
eq(klass) end end end context "with models present in Rails engines" do let(:file) do "/gem_path/engines/some_engine_gem/app/models/user.rb" end let(:klass) do User end it "requires the models by base name from the engine's app/models dir" do model.should eq(klass) end end end describe ".preload_models" do let(:app) do stub(config: config) end let(:config) do stub(paths: paths) end let(:paths) do { "app/models" => [ "/rails/root/app/models" ] } end context "when preload models config is false" do let(:files) do [ "/rails/root/app/models/user.rb", "/rails/root/app/models/address.rb" ] end before(:all) do Mongoid.preload_models = false Dir.stubs(:glob).with("/rails/root/app/models/**/*.rb").returns(files) end it "does not load any models" do Rails::Mongoid.expects(:load_model).never Rails::Mongoid.preload_models(app) end end context "when preload models config is true" do before(:all) do Mongoid.preload_models = true end context "when all models are in the models directory" do let(:files) do [ "/rails/root/app/models/user.rb", "/rails/root/app/models/address.rb" ] end before do Dir.expects(:glob).with("/rails/root/app/models/**/*.rb").returns(files) end it "requires the models by basename" do Rails::Mongoid.expects(:load_model).with("address") Rails::Mongoid.expects(:load_model).with("user") Rails::Mongoid.preload_models(app) end end context "when models exist in subdirectories" do let(:files) do [ "/rails/root/app/models/mongoid/behaviour.rb" ] end before do Dir.expects(:glob).with("/rails/root/app/models/**/*.rb").returns(files) end it "requires the models by subdirectory and basename" do Rails::Mongoid.expects(:load_model).with("mongoid/behaviour") Rails::Mongoid.preload_models(app) end end end end describe ".load_models" do let(:app) do stub(config: config) end let(:config) do stub(paths: paths) end let(:paths) do { "app/models" => [ "/rails/root/app/models" ] } end context "even when preload models config is false" do let(:files) do [ 
"/rails/root/app/models/user.rb", "/rails/root/app/models/address.rb" ] end before(:all) do Mongoid.preload_models = false Dir.stubs(:glob).with("/rails/root/app/models/**/*.rb").returns(files) end it "loads all models" do Rails::Mongoid.expects(:load_model).with("address") Rails::Mongoid.expects(:load_model).with("user") Rails::Mongoid.load_models(app) end end context "when list of models to load was configured" do let(:files) do [ "/rails/root/app/models/user.rb", "/rails/root/app/models/address.rb" ] end before(:all) do Mongoid.preload_models = ["user"] Dir.stubs(:glob).with("/rails/root/app/models/**/*.rb").returns(files) end it "loads selected models only" do Rails::Mongoid.expects(:load_model).with("user") Rails::Mongoid.expects(:load_model).with("address").never Rails::Mongoid.load_models(app) end end end end
{ "content_hash": "d60dddb7cc5bd72cded5bfa3946f7268", "timestamp": "", "source": "github", "line_count": 450, "max_line_length": 82, "avg_line_length": 19.953333333333333, "alnum_prop": 0.5595277870586925, "repo_name": "peterwillcn/mongoid", "id": "525e3c8c9a17da88808fedbcab65ab0c05680265", "size": "8979", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "spec/rails/mongoid_spec.rb", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
using System; namespace Furball.Common.Attributes { public class DefaultControllerAttribute : Attribute { } }
{ "content_hash": "cb7db96312d05c15ebce50c76611337b", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 55, "avg_line_length": 15.5, "alnum_prop": 0.717741935483871, "repo_name": "michaeldotknox/Furball", "id": "9cecd4a20ece038f5df48ba78125581eab3abe08", "size": "126", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Common/Attributes/DefaultControllerAttribute.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "113" }, { "name": "C#", "bytes": "36377" } ], "symlink_target": "" }
{% extends "base_nav_fluid_sidebar.html" %} {% load pagination %} {% block content %} {% if perms.workshops.add_organization %} <p><a href="{% url 'organization_add' %}" class="btn btn-success">New organization</a></p> {% else %} <p><a href="{% url 'organization_add' %}" class="btn btn-success disabled">New organization</a></p> {% endif %} {% if all_organizations %} <table class="table table-striped"> <tr> <th>full name</th> <th>domain</th> <th>current membership</th> <th>notes</th> <th class="additional-links"></th> </tr> {% for organization in all_organizations %} <tr> <td><a href="{% url 'organization_details' organization.domain %}">{{ organization.fullname }}</a></td> <td><a href="http://{{ organization.domain }}" target="_blank">{{ organization.domain }}</a></td> <td> {% for membership in organization.current_memberships %} <a href="{% url 'membership_details' membership.pk %}"> {{ membership.get_variant_display }} </a><br /> {% empty %} — {% endfor %} </td> <td>{{ organization.notes|truncatechars:40 }}</td> <td> <a href="{% url 'organization_details' organization.domain %}" title="View {{ organization.fullname }}"><span class="glyphicon glyphicon-info-sign"></span></a> &nbsp; {% if perms.workshops.change_organization %} <a href="{% url 'organization_edit' organization.domain %}" title="Edit {{ organization.fullname }}"><span class="glyphicon glyphicon-pencil"></span></a> {% endif %} </td> </tr> {% endfor %} </table> {% pagination all_organizations %} {% if perms.workshops.add_organization %} <p><a href="{% url 'organization_add' %}" class="btn btn-success">New organization</a></p> {% else %} <p><a href="{% url 'organization_add' %}" class="btn btn-success disabled">New organization</a></p> {% endif %} {% else %} <p>No organizations.</p> {% endif %} {% endblock %}
{ "content_hash": "a143b0d24013aebf8cb15dd7f4b8fb2f", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 173, "avg_line_length": 41.9811320754717, "alnum_prop": 0.5325842696629214, "repo_name": "vahtras/amy", "id": "531ced6c63350cacf7e108b8c23fbf62099fee06", "size": "2227", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "workshops/templates/workshops/all_organizations.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "4505" }, { "name": "HTML", "bytes": "216300" }, { "name": "JavaScript", "bytes": "16883" }, { "name": "Makefile", "bytes": "2167" }, { "name": "Python", "bytes": "1090706" } ], "symlink_target": "" }
import mock from neutron.openstack.common import jsonutils from neutron.plugins.vmware.api_client import exception from neutron.plugins.vmware.common import utils as nsx_utils from neutron.plugins.vmware import nsxlib from neutron.plugins.vmware.nsxlib import l2gateway as l2gwlib from neutron.plugins.vmware.nsxlib import switch as switchlib from neutron.tests.unit import test_api_v2 from neutron.tests.unit.vmware.nsxlib import base _uuid = test_api_v2._uuid class L2GatewayNegativeTestCase(base.NsxlibNegativeBaseTestCase): def test_create_l2_gw_service_on_failure(self): self.assertRaises(exception.NsxApiException, l2gwlib.create_l2_gw_service, self.fake_cluster, 'fake-tenant', 'fake-gateway', [{'id': _uuid(), 'interface_name': 'xxx'}]) def test_delete_l2_gw_service_on_failure(self): self.assertRaises(exception.NsxApiException, l2gwlib.delete_l2_gw_service, self.fake_cluster, 'fake-gateway') def test_get_l2_gw_service_on_failure(self): self.assertRaises(exception.NsxApiException, l2gwlib.get_l2_gw_service, self.fake_cluster, 'fake-gateway') def test_update_l2_gw_service_on_failure(self): self.assertRaises(exception.NsxApiException, l2gwlib.update_l2_gw_service, self.fake_cluster, 'fake-gateway', 'pluto') class L2GatewayTestCase(base.NsxlibTestCase): def _create_gw_service(self, node_uuid, display_name, tenant_id='fake_tenant'): return l2gwlib.create_l2_gw_service(self.fake_cluster, tenant_id, display_name, [{'id': node_uuid, 'interface_name': 'xxx'}]) def test_create_l2_gw_service(self): display_name = 'fake-gateway' node_uuid = _uuid() response = self._create_gw_service(node_uuid, display_name) self.assertEqual(response.get('type'), 'L2GatewayServiceConfig') self.assertEqual(response.get('display_name'), display_name) gateways = response.get('gateways', []) self.assertEqual(len(gateways), 1) self.assertEqual(gateways[0]['type'], 'L2Gateway') self.assertEqual(gateways[0]['device_id'], 'xxx') self.assertEqual(gateways[0]['transport_node_uuid'], node_uuid) def 
test_update_l2_gw_service(self): display_name = 'fake-gateway' new_display_name = 'still-fake-gateway' node_uuid = _uuid() res1 = self._create_gw_service(node_uuid, display_name) gw_id = res1['uuid'] res2 = l2gwlib.update_l2_gw_service( self.fake_cluster, gw_id, new_display_name) self.assertEqual(res2['display_name'], new_display_name) def test_get_l2_gw_service(self): display_name = 'fake-gateway' node_uuid = _uuid() gw_id = self._create_gw_service(node_uuid, display_name)['uuid'] response = l2gwlib.get_l2_gw_service(self.fake_cluster, gw_id) self.assertEqual(response.get('type'), 'L2GatewayServiceConfig') self.assertEqual(response.get('display_name'), display_name) self.assertEqual(response.get('uuid'), gw_id) def test_list_l2_gw_service(self): gw_ids = [] for name in ('fake-1', 'fake-2'): gw_ids.append(self._create_gw_service(_uuid(), name)['uuid']) results = l2gwlib.get_l2_gw_services(self.fake_cluster) self.assertEqual(len(results), 2) self.assertEqual(sorted(gw_ids), sorted([r['uuid'] for r in results])) def test_list_l2_gw_service_by_tenant(self): gw_ids = [self._create_gw_service( _uuid(), name, tenant_id=name)['uuid'] for name in ('fake-1', 'fake-2')] results = l2gwlib.get_l2_gw_services(self.fake_cluster, tenant_id='fake-1') self.assertEqual(len(results), 1) self.assertEqual(results[0]['uuid'], gw_ids[0]) def test_delete_l2_gw_service(self): display_name = 'fake-gateway' node_uuid = _uuid() gw_id = self._create_gw_service(node_uuid, display_name)['uuid'] l2gwlib.delete_l2_gw_service(self.fake_cluster, gw_id) results = l2gwlib.get_l2_gw_services(self.fake_cluster) self.assertEqual(len(results), 0) def test_plug_l2_gw_port_attachment(self): tenant_id = 'pippo' node_uuid = _uuid() transport_zones_config = [{'zone_uuid': _uuid(), 'transport_type': 'stt'}] lswitch = switchlib.create_lswitch( self.fake_cluster, _uuid(), tenant_id, 'fake-switch', transport_zones_config) gw_id = self._create_gw_service(node_uuid, 'fake-gw')['uuid'] lport = switchlib.create_lport( 
self.fake_cluster, lswitch['uuid'], tenant_id, _uuid(), 'fake-gw-port', gw_id, True) l2gwlib.plug_l2_gw_service( self.fake_cluster, lswitch['uuid'], lport['uuid'], gw_id) uri = nsxlib._build_uri_path(switchlib.LSWITCHPORT_RESOURCE, lport['uuid'], lswitch['uuid'], is_attachment=True) resp_obj = nsxlib.do_request("GET", uri, cluster=self.fake_cluster) self.assertIn('LogicalPortAttachment', resp_obj) self.assertEqual(resp_obj['LogicalPortAttachment']['type'], 'L2GatewayAttachment') def _create_expected_req_body(self, display_name, neutron_id, connector_type, connector_ip, client_certificate): body = { "display_name": display_name, "tags": [{"tag": neutron_id, "scope": "q_gw_dev_id"}, {"tag": 'fake_tenant', "scope": "os_tid"}, {"tag": nsx_utils.NEUTRON_VERSION, "scope": "quantum"}], "transport_connectors": [ {"transport_zone_uuid": 'fake_tz_uuid', "ip_address": connector_ip, "type": '%sConnector' % connector_type}], "admin_status_enabled": True } body.get("tags").sort() if client_certificate: body["credential"] = { "client_certificate": { "pem_encoded": client_certificate}, "type": "SecurityCertificateCredential"} return body def test_create_gw_device(self): # NOTE(salv-orlando): This unit test mocks backend calls rather than # leveraging the fake NSX API client display_name = 'fake-device' neutron_id = 'whatever' connector_type = 'stt' connector_ip = '1.1.1.1' client_certificate = 'this_should_be_a_certificate' with mock.patch.object(nsxlib, 'do_request') as request_mock: expected_req_body = self._create_expected_req_body( display_name, neutron_id, connector_type.upper(), connector_ip, client_certificate) l2gwlib.create_gateway_device( self.fake_cluster, 'fake_tenant', display_name, neutron_id, 'fake_tz_uuid', connector_type, connector_ip, client_certificate) request_mock.assert_called_once_with( "POST", "/ws.v1/transport-node", jsonutils.dumps(expected_req_body, sort_keys=True), cluster=self.fake_cluster) def test_update_gw_device(self): # NOTE(salv-orlando): This 
unit test mocks backend calls rather than # leveraging the fake NSX API client display_name = 'fake-device' neutron_id = 'whatever' connector_type = 'stt' connector_ip = '1.1.1.1' client_certificate = 'this_should_be_a_certificate' with mock.patch.object(nsxlib, 'do_request') as request_mock: expected_req_body = self._create_expected_req_body( display_name, neutron_id, connector_type.upper(), connector_ip, client_certificate) l2gwlib.update_gateway_device( self.fake_cluster, 'whatever', 'fake_tenant', display_name, neutron_id, 'fake_tz_uuid', connector_type, connector_ip, client_certificate) request_mock.assert_called_once_with( "PUT", "/ws.v1/transport-node/whatever", jsonutils.dumps(expected_req_body, sort_keys=True), cluster=self.fake_cluster) def test_update_gw_device_without_certificate(self): # NOTE(salv-orlando): This unit test mocks backend calls rather than # leveraging the fake NSX API client display_name = 'fake-device' neutron_id = 'whatever' connector_type = 'stt' connector_ip = '1.1.1.1' with mock.patch.object(nsxlib, 'do_request') as request_mock: expected_req_body = self._create_expected_req_body( display_name, neutron_id, connector_type.upper(), connector_ip, None) l2gwlib.update_gateway_device( self.fake_cluster, 'whatever', 'fake_tenant', display_name, neutron_id, 'fake_tz_uuid', connector_type, connector_ip, client_certificate=None) request_mock.assert_called_once_with( "PUT", "/ws.v1/transport-node/whatever", jsonutils.dumps(expected_req_body, sort_keys=True), cluster=self.fake_cluster) def test_get_gw_device_status(self): # NOTE(salv-orlando): This unit test mocks backend calls rather than # leveraging the fake NSX API client with mock.patch.object(nsxlib, 'do_request') as request_mock: l2gwlib.get_gateway_device_status(self.fake_cluster, 'whatever') request_mock.assert_called_once_with( "GET", "/ws.v1/transport-node/whatever/status", cluster=self.fake_cluster) def test_get_gw_devices_status(self): # NOTE(salv-orlando): This unit test mocks 
backend calls rather than # leveraging the fake NSX API client with mock.patch.object(nsxlib, 'do_request') as request_mock: request_mock.return_value = { 'results': [], 'page_cursor': None, 'result_count': 0} l2gwlib.get_gateway_devices_status(self.fake_cluster) request_mock.assert_called_once_with( "GET", ("/ws.v1/transport-node?fields=uuid,tags&" "relations=TransportNodeStatus&" "_page_length=1000&tag_scope=quantum"), cluster=self.fake_cluster) def test_get_gw_devices_status_filter_by_tenant(self): # NOTE(salv-orlando): This unit test mocks backend calls rather than # leveraging the fake NSX API client with mock.patch.object(nsxlib, 'do_request') as request_mock: request_mock.return_value = { 'results': [], 'page_cursor': None, 'result_count': 0} l2gwlib.get_gateway_devices_status(self.fake_cluster, tenant_id='ssc_napoli') request_mock.assert_called_once_with( "GET", ("/ws.v1/transport-node?fields=uuid,tags&" "relations=TransportNodeStatus&" "tag=ssc_napoli&tag_scope=os_tid&" "_page_length=1000&tag_scope=quantum"), cluster=self.fake_cluster) def test_delete_gw_device(self): # NOTE(salv-orlando): This unit test mocks backend calls rather than # leveraging the fake NSX API client with mock.patch.object(nsxlib, 'do_request') as request_mock: l2gwlib.delete_gateway_device(self.fake_cluster, 'whatever') request_mock.assert_called_once_with( "DELETE", "/ws.v1/transport-node/whatever", cluster=self.fake_cluster)
{ "content_hash": "6df6bd207ee9af4f648c27ae20acc68d", "timestamp": "", "source": "github", "line_count": 281, "max_line_length": 78, "avg_line_length": 45.20996441281139, "alnum_prop": 0.5576983627204031, "repo_name": "samsu/neutron", "id": "006ad38648dd021b3f056ff324388fafd473c748", "size": "13288", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "tests/unit/vmware/nsxlib/test_l2gateway.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Mako", "bytes": "1043" }, { "name": "Python", "bytes": "10579249" }, { "name": "Shell", "bytes": "1535" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_121) on Tue Sep 25 16:18:25 EDT 2018 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Class org.apache.orc.impl.RunLengthIntegerWriterV2.EncodingType (ORC Core 1.5.3 API)</title> <meta name="date" content="2018-09-25"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.orc.impl.RunLengthIntegerWriterV2.EncodingType (ORC Core 1.5.3 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/apache/orc/impl/RunLengthIntegerWriterV2.EncodingType.html" title="enum in org.apache.orc.impl">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a 
href="../../../../../index.html?org/apache/orc/impl/class-use/RunLengthIntegerWriterV2.EncodingType.html" target="_top">Frames</a></li> <li><a href="RunLengthIntegerWriterV2.EncodingType.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.apache.orc.impl.RunLengthIntegerWriterV2.EncodingType" class="title">Uses of Class<br>org.apache.orc.impl.RunLengthIntegerWriterV2.EncodingType</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../org/apache/orc/impl/RunLengthIntegerWriterV2.EncodingType.html" title="enum in org.apache.orc.impl">RunLengthIntegerWriterV2.EncodingType</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#org.apache.orc.impl">org.apache.orc.impl</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="org.apache.orc.impl"> <!-- --> </a> <h3>Uses of <a href="../../../../../org/apache/orc/impl/RunLengthIntegerWriterV2.EncodingType.html" title="enum in org.apache.orc.impl">RunLengthIntegerWriterV2.EncodingType</a> in <a 
href="../../../../../org/apache/orc/impl/package-summary.html">org.apache.orc.impl</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../org/apache/orc/impl/package-summary.html">org.apache.orc.impl</a> that return <a href="../../../../../org/apache/orc/impl/RunLengthIntegerWriterV2.EncodingType.html" title="enum in org.apache.orc.impl">RunLengthIntegerWriterV2.EncodingType</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>static <a href="../../../../../org/apache/orc/impl/RunLengthIntegerWriterV2.EncodingType.html" title="enum in org.apache.orc.impl">RunLengthIntegerWriterV2.EncodingType</a></code></td> <td class="colLast"><span class="typeNameLabel">RunLengthIntegerWriterV2.EncodingType.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/orc/impl/RunLengthIntegerWriterV2.EncodingType.html#valueOf-java.lang.String-">valueOf</a></span>(<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</code> <div class="block">Returns the enum constant of this type with the specified name.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><code>static <a href="../../../../../org/apache/orc/impl/RunLengthIntegerWriterV2.EncodingType.html" title="enum in org.apache.orc.impl">RunLengthIntegerWriterV2.EncodingType</a>[]</code></td> <td class="colLast"><span class="typeNameLabel">RunLengthIntegerWriterV2.EncodingType.</span><code><span class="memberNameLink"><a href="../../../../../org/apache/orc/impl/RunLengthIntegerWriterV2.EncodingType.html#values--">values</a></span>()</code> <div class="block">Returns an array containing the 
constants of this enum type, in the order they are declared.</div> </td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/apache/orc/impl/RunLengthIntegerWriterV2.EncodingType.html" title="enum in org.apache.orc.impl">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/apache/orc/impl/class-use/RunLengthIntegerWriterV2.EncodingType.html" target="_top">Frames</a></li> <li><a href="RunLengthIntegerWriterV2.EncodingType.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2013&#x2013;2018 <a href="https://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p> </body> </html>
{ "content_hash": "b5d5a21466f9061eaa7cec02a126a521", "timestamp": "", "source": "github", "line_count": 175, "max_line_length": 419, "avg_line_length": 45.34285714285714, "alnum_prop": 0.658349086326402, "repo_name": "majetideepak/orc", "id": "477eca56a68219c5eca6ee8e8441c82e1725f767", "size": "7935", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "site/api/orc-core/org/apache/orc/impl/class-use/RunLengthIntegerWriterV2.EncodingType.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2682" }, { "name": "C++", "bytes": "1602372" }, { "name": "CMake", "bytes": "46512" }, { "name": "CSS", "bytes": "100837" }, { "name": "Dockerfile", "bytes": "11515" }, { "name": "HTML", "bytes": "15843523" }, { "name": "Java", "bytes": "2250455" }, { "name": "JavaScript", "bytes": "4135" }, { "name": "Ruby", "bytes": "1508" }, { "name": "Shell", "bytes": "4159" } ], "symlink_target": "" }
import datetime import itertools import unittest from copy import copy from django.db import ( DatabaseError, IntegrityError, OperationalError, connection, ) from django.db.models import Model from django.db.models.fields import ( BigIntegerField, BinaryField, BooleanField, CharField, DateTimeField, IntegerField, PositiveIntegerField, SlugField, TextField, ) from django.db.models.fields.related import ( ForeignKey, ManyToManyField, OneToOneField, ) from django.db.transaction import atomic from django.test import TransactionTestCase, skipIfDBFeature from .fields import CustomManyToManyField, InheritedManyToManyField from .models import ( Author, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, Note, NoteRename, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, new_apps, ) class SchemaTests(TransactionTestCase): """ Tests that the schema-alteration code works correctly. Be aware that these tests are more liable than most to false results, as sometimes the code to check if a test has worked is almost as complex as the code it is testing. """ available_apps = [] models = [ Author, AuthorWithDefaultHeight, AuthorWithEvenLongerName, Book, BookWeak, BookWithLongName, BookWithO2O, BookWithSlug, Note, Tag, TagIndexed, TagM2MTest, TagUniqueRename, Thing, UniqueTest, ] # Utility functions def setUp(self): # local_models should contain test dependent model classes that will be # automatically removed from the app cache on test tear down. 
self.local_models = [] def tearDown(self): # Delete any tables made for our models self.delete_tables() new_apps.clear_cache() for model in new_apps.get_models(): model._meta._expire_cache() if 'schema' in new_apps.all_models: for model in self.local_models: del new_apps.all_models['schema'][model._meta.model_name] def delete_tables(self): "Deletes all model tables for our models for a clean test environment" with connection.cursor() as cursor: connection.disable_constraint_checking() table_names = connection.introspection.table_names(cursor) for model in itertools.chain(SchemaTests.models, self.local_models): # Remove any M2M tables first for field in model._meta.local_many_to_many: with atomic(): tbl = field.remote_field.through._meta.db_table if tbl in table_names: cursor.execute(connection.schema_editor().sql_delete_table % { "table": connection.ops.quote_name(tbl), }) table_names.remove(tbl) # Then remove the main tables with atomic(): tbl = model._meta.db_table if tbl in table_names: cursor.execute(connection.schema_editor().sql_delete_table % { "table": connection.ops.quote_name(tbl), }) table_names.remove(tbl) connection.enable_constraint_checking() def column_classes(self, model): with connection.cursor() as cursor: columns = { d[0]: (connection.introspection.get_field_type(d[1], d), d) for d in connection.introspection.get_table_description( cursor, model._meta.db_table, ) } # SQLite has a different format for field_type for name, (type, desc) in columns.items(): if isinstance(type, tuple): columns[name] = (type[0], desc) # SQLite also doesn't error properly if not columns: raise DatabaseError("Table does not exist (empty pragma)") return columns def get_indexes(self, table): """ Get the indexes on the table using a new cursor. """ with connection.cursor() as cursor: return connection.introspection.get_indexes(cursor, table) def get_constraints(self, table): """ Get the constraints on a table using a new cursor. 
""" with connection.cursor() as cursor: return connection.introspection.get_constraints(cursor, table) # Tests def test_creation_deletion(self): """ Tries creating a model's table, and then deleting it. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Check that it's there list(Author.objects.all()) # Clean up that table with connection.schema_editor() as editor: editor.delete_model(Author) # Check that it's gone self.assertRaises( DatabaseError, lambda: list(Author.objects.all()), ) @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_fk(self): "Tests that creating tables out of FK order, then repointing, works" # Create the table with connection.schema_editor() as editor: editor.create_model(Book) editor.create_model(Author) editor.create_model(Tag) # Check that initial tables are there list(Author.objects.all()) list(Book.objects.all()) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): Book.objects.create( author_id=1, title="Much Ado About Foreign Keys", pub_date=datetime.datetime.now(), ) # Repoint the FK constraint old_field = Book._meta.get_field("author") new_field = ForeignKey(Tag) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Make sure the new FK constraint is present constraints = self.get_constraints(Book._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["author_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_tag', 'id')) break else: self.fail("No FK constraint for author_id found") @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_fk_db_constraint(self): "Tests that the db_constraint parameter is respected" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(Author) 
editor.create_model(BookWeak) # Check that initial tables are there list(Author.objects.all()) list(Tag.objects.all()) list(BookWeak.objects.all()) # Check that BookWeak doesn't have an FK constraint constraints = self.get_constraints(BookWeak._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["author_id"] and details['foreign_key']: self.fail("FK constraint for author_id found") # Make a db_constraint=False FK new_field = ForeignKey(Tag, db_constraint=False) new_field.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Make sure no FK constraint is present constraints = self.get_constraints(Author._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["tag_id"] and details['foreign_key']: self.fail("FK constraint for tag_id found") # Alter to one with a constraint new_field2 = ForeignKey(Tag) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) # Make sure the new FK constraint is present constraints = self.get_constraints(Author._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["tag_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_tag', 'id')) break else: self.fail("No FK constraint for tag_id found") # Alter to one without a constraint again new_field2 = ForeignKey(Tag) new_field2.set_attributes_from_name("tag") with connection.schema_editor() as editor: editor.alter_field(Author, new_field2, new_field, strict=True) # Make sure no FK constraint is present constraints = self.get_constraints(Author._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["tag_id"] and details['foreign_key']: self.fail("FK constraint for tag_id found") def _test_m2m_db_constraint(self, M2MFieldClass): class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class 
Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) editor.create_model(LocalAuthorWithM2M) # Check that initial tables are there list(LocalAuthorWithM2M.objects.all()) list(Tag.objects.all()) # Make a db_constraint=False FK new_field = M2MFieldClass(Tag, related_name="authors", db_constraint=False) new_field.contribute_to_class(LocalAuthorWithM2M, "tags") # Add the field with connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) # Make sure no FK constraint is present constraints = self.get_constraints(new_field.remote_field.through._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["tag_id"] and details['foreign_key']: self.fail("FK constraint for tag_id found") @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_m2m_db_constraint(self): self._test_m2m_db_constraint(ManyToManyField) @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_m2m_db_constraint_custom(self): self._test_m2m_db_constraint(CustomManyToManyField) @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_m2m_db_constraint_inherited(self): self._test_m2m_db_constraint(InheritedManyToManyField) def test_add_field(self): """ Tests adding fields to models """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add the new field new_field = IntegerField(null=True) new_field.set_attributes_from_name("age") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['age'][0], "IntegerField") self.assertEqual(columns['age'][1][6], True) def 
test_add_field_temp_default(self): """ Tests adding fields to models with a temporary default """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = CharField(max_length=30, default="Godwin") new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['surname'][0], "CharField") self.assertEqual(columns['surname'][1][6], connection.features.interprets_empty_strings_as_nulls) def test_add_field_temp_default_boolean(self): """ Tests adding fields to models with a temporary default where the default is False. (#21783) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = BooleanField(default=False) new_field.set_attributes_from_name("awesome") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) # BooleanField are stored as TINYINT(1) on MySQL. 
field_type = columns['awesome'][0] self.assertEqual(field_type, connection.features.introspected_boolean_field_type(new_field, created_separately=True)) def test_add_field_default_transform(self): """ Tests adding fields to models with a default that is not directly valid in the database (#22581) """ class TestTransformField(IntegerField): # Weird field that saves the count of items in its value def get_default(self): return self.default def get_prep_value(self, value): if value is None: return 0 return len(value) # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add the field with a default it needs to cast (to string in this case) new_field = TestTransformField(default={1: 2}) new_field.set_attributes_from_name("thing") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is there columns = self.column_classes(Author) field_type, field_info = columns['thing'] self.assertEqual(field_type, 'IntegerField') # Make sure the values were transformed correctly self.assertEqual(Author.objects.extra(where=["thing = 1"]).count(), 2) def test_add_field_binary(self): """ Tests binary fields get a sane default (#22851) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Add the new field new_field = BinaryField(blank=True) new_field.set_attributes_from_name("bits") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) # MySQL annoyingly uses the same backend, so it'll come back as one of # these two types. 
self.assertIn(columns['bits'][0], ("BinaryField", "TextField")) def test_alter(self): """ Tests simple altering of fields """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) # Alter the name field to a TextField old_field = Author._meta.get_field("name") new_field = TextField(null=True) new_field.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertEqual(columns['name'][1][6], True) # Change nullability again new_field2 = TextField(null=False) new_field2.set_attributes_from_name("name") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "TextField") self.assertEqual(bool(columns['name'][1][6]), bool(connection.features.interprets_empty_strings_as_nulls)) def test_alter_text_field(self): # Regression for "BLOB/TEXT column 'info' can't have a default value") # on MySQL. # Create the table with connection.schema_editor() as editor: editor.create_model(Note) old_field = Note._meta.get_field("info") new_field = TextField(blank=True) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_alter_textual_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. 
""" with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = CharField(max_length=50) new_field.set_attributes_from_name("info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): Note.objects.create(info=None) def test_alter_numeric_field_keep_null_status(self): """ Changing a field type shouldn't affect the not null status. """ with connection.schema_editor() as editor: editor.create_model(UniqueTest) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='aaa') old_field = UniqueTest._meta.get_field("year") new_field = BigIntegerField() new_field.set_attributes_from_name("year") with connection.schema_editor() as editor: editor.alter_field(UniqueTest, old_field, new_field, strict=True) with self.assertRaises(IntegrityError): UniqueTest.objects.create(year=None, slug='bbb') def test_alter_null_to_not_null(self): """ #23609 - Tests handling of default values when altering from NULL to NOT NULL. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertTrue(columns['height'][1][6]) # Create some test data Author.objects.create(name='Not null author', height=12) Author.objects.create(name='Null author') # Verify null value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertIsNone(Author.objects.get(name='Null author').height) # Alter the height field to NOT NULL with default old_field = Author._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertFalse(columns['height'][1][6]) # Verify default value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertEqual(Author.objects.get(name='Null author').height, 42) def test_alter_charfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_null when changing a CharField to null. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Change the CharField to null old_field = Author._meta.get_field('name') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field) def test_alter_textfield_to_null(self): """ #24307 - Should skip an alter statement on databases with interprets_empty_strings_as_null when changing a TextField to null. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Note) # Change the TextField to null old_field = Note._meta.get_field('info') new_field = copy(old_field) new_field.null = True with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field) @unittest.skipUnless(connection.features.supports_combined_alters, "No combined ALTER support") def test_alter_null_to_not_null_keeping_default(self): """ #23738 - Can change a nullable field with default to non-nullable with the same default. """ # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithDefaultHeight) # Ensure the field is right to begin with columns = self.column_classes(AuthorWithDefaultHeight) self.assertTrue(columns['height'][1][6]) # Alter the height field to NOT NULL keeping the previous default old_field = AuthorWithDefaultHeight._meta.get_field("height") new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(AuthorWithDefaultHeight, old_field, new_field) # Ensure the field is right afterwards columns = self.column_classes(AuthorWithDefaultHeight) self.assertFalse(columns['height'][1][6]) @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_alter_fk(self): """ Tests altering of FKs """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], "IntegerField") # Make sure the FK constraint is present constraints = self.get_constraints(Book._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["author_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_author', 'id')) break else: self.fail("No FK constraint for author_id found") # Alter the FK 
old_field = Book._meta.get_field("author") new_field = ForeignKey(Author, editable=False) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], "IntegerField") # Make sure the FK constraint is present constraints = self.get_constraints(Book._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["author_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_author', 'id')) break else: self.fail("No FK constraint for author_id found") @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_alter_to_fk(self): """ #24447 - Tests adding a FK constraint for an existing column """ class LocalBook(Model): author = IntegerField() title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalBook] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBook) # Ensure no FK constraint exists constraints = self.get_constraints(LocalBook._meta.db_table) for name, details in constraints.items(): if details['foreign_key']: self.fail('Found an unexpected FK constraint to %s' % details['columns']) old_field = LocalBook._meta.get_field("author") new_field = ForeignKey(Author) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(LocalBook, old_field, new_field, strict=True) constraints = self.get_constraints(LocalBook._meta.db_table) # Ensure FK constraint exists for name, details in constraints.items(): if details['foreign_key'] and details['columns'] == ["author_id"]: self.assertEqual(details['foreign_key'], ('schema_author', 'id')) break else: self.fail("No FK constraint for author_id 
found") @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_alter_o2o_to_fk(self): """ #24163 - Tests altering of OneToOneField to ForeignKey """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(BookWithO2O) # Ensure the field is right to begin with columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], "IntegerField") # Ensure the field is unique author = Author.objects.create(name="Joe") BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) BookWithO2O.objects.all().delete() # Make sure the FK constraint is present constraints = self.get_constraints(BookWithO2O._meta.db_table) author_is_fk = False for name, details in constraints.items(): if details['columns'] == ['author_id']: if details['foreign_key'] and details['foreign_key'] == ('schema_author', 'id'): author_is_fk = True self.assertTrue(author_is_fk, "No FK constraint for author_id found") # Alter the OneToOneField to ForeignKey old_field = BookWithO2O._meta.get_field("author") new_field = ForeignKey(Author) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(BookWithO2O, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], "IntegerField") # Ensure the field is not unique anymore Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) # Make sure the FK constraint is still present constraints = self.get_constraints(Book._meta.db_table) author_is_fk = False for name, details in constraints.items(): if details['columns'] == ['author_id']: if 
details['foreign_key'] and details['foreign_key'] == ('schema_author', 'id'): author_is_fk = True self.assertTrue(author_is_fk, "No FK constraint for author_id found") @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_alter_fk_to_o2o(self): """ #24163 - Tests altering of ForeignKey to OneToOneField """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the field is right to begin with columns = self.column_classes(Book) self.assertEqual(columns['author_id'][0], "IntegerField") # Ensure the field is not unique author = Author.objects.create(name="Joe") Book.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) Book.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) Book.objects.all().delete() # Make sure the FK constraint is present constraints = self.get_constraints(Book._meta.db_table) author_is_fk = False for name, details in constraints.items(): if details['columns'] == ['author_id']: if details['foreign_key'] and details['foreign_key'] == ('schema_author', 'id'): author_is_fk = True self.assertTrue(author_is_fk, "No FK constraint for author_id found") # Alter the ForeignKey to OneToOneField old_field = Book._meta.get_field("author") new_field = OneToOneField(Author) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(BookWithO2O) self.assertEqual(columns['author_id'][0], "IntegerField") # Ensure the field is unique now BookWithO2O.objects.create(author=author, title="Django 1", pub_date=datetime.datetime.now()) with self.assertRaises(IntegrityError): BookWithO2O.objects.create(author=author, title="Django 2", pub_date=datetime.datetime.now()) # Make sure the FK constraint is present constraints = 
self.get_constraints(BookWithO2O._meta.db_table) author_is_fk = False for name, details in constraints.items(): if details['columns'] == ['author_id']: if details['foreign_key'] and details['foreign_key'] == ('schema_author', 'id'): author_is_fk = True self.assertTrue(author_is_fk, "No FK constraint for author_id found") def test_alter_implicit_id_to_explicit(self): """ Should be able to convert an implicit "id" field to an explicit "id" primary key field. """ with connection.schema_editor() as editor: editor.create_model(Author) old_field = Author._meta.get_field("id") new_field = IntegerField(primary_key=True) new_field.set_attributes_from_name("id") new_field.model = Author with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # This will fail if DROP DEFAULT is inadvertently executed on this # field which drops the id sequence, at least on PostgreSQL. Author.objects.create(name='Foo') def test_rename(self): """ Tests simple altering of fields """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") self.assertNotIn("display_name", columns) # Alter the name field's name old_field = Author._meta.get_field("name") new_field = CharField(max_length=254) new_field.set_attributes_from_name("display_name") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['display_name'][0], "CharField") self.assertNotIn("name", columns) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_rename_keep_null_status(self): """ Renaming a field shouldn't affect the not null status. 
""" with connection.schema_editor() as editor: editor.create_model(Note) with self.assertRaises(IntegrityError): Note.objects.create(info=None) old_field = Note._meta.get_field("info") new_field = TextField() new_field.set_attributes_from_name("detail_info") with connection.schema_editor() as editor: editor.alter_field(Note, old_field, new_field, strict=True) columns = self.column_classes(Note) self.assertEqual(columns['detail_info'][0], "TextField") self.assertNotIn("info", columns) with self.assertRaises(IntegrityError): NoteRename.objects.create(detail_info=None) def _test_m2m_create(self, M2MFieldClass): """ Tests M2M fields on models during creation """ class LocalBookWithM2M(Model): author = ForeignKey(Author) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [ LocalBookWithM2M, LocalBookWithM2M._meta.get_field('tags').remote_field.through, ] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2M) # Ensure there is now an m2m table there columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField") def test_m2m_create(self): self._test_m2m_create(ManyToManyField) def test_m2m_create_custom(self): self._test_m2m_create(CustomManyToManyField) def test_m2m_create_inherited(self): self._test_m2m_create(InheritedManyToManyField) def _test_m2m_create_through(self, M2MFieldClass): """ Tests M2M fields on models during creation with through models """ class LocalTagThrough(Model): book = ForeignKey("schema.LocalBookWithM2MThrough") tag = ForeignKey("schema.TagM2MTest") class Meta: app_label = 'schema' apps = new_apps class LocalBookWithM2MThrough(Model): tags = M2MFieldClass("TagM2MTest", related_name="books", 
through=LocalTagThrough) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalTagThrough, LocalBookWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalTagThrough) editor.create_model(TagM2MTest) editor.create_model(LocalBookWithM2MThrough) # Ensure there is now an m2m table there columns = self.column_classes(LocalTagThrough) self.assertEqual(columns['book_id'][0], "IntegerField") self.assertEqual(columns['tag_id'][0], "IntegerField") def test_m2m_create_through(self): self._test_m2m_create_through(ManyToManyField) def test_m2m_create_through_custom(self): self._test_m2m_create_through(CustomManyToManyField) def test_m2m_create_through_inherited(self): self._test_m2m_create_through(InheritedManyToManyField) def _test_m2m(self, M2MFieldClass): """ Tests adding/removing M2M fields on models """ class LocalAuthorWithM2M(Model): name = CharField(max_length=255) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorWithM2M] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorWithM2M) editor.create_model(TagM2MTest) # Create an M2M field new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors") new_field.contribute_to_class(LocalAuthorWithM2M, "tags") self.local_models += [new_field.remote_field.through] # Ensure there's no m2m table there self.assertRaises(DatabaseError, self.column_classes, new_field.remote_field.through) # Add the field with connection.schema_editor() as editor: editor.add_field(LocalAuthorWithM2M, new_field) # Ensure there is now an m2m table there columns = self.column_classes(new_field.remote_field.through) self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField") # "Alter" the field. This should not rename the DB table to itself. 
with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2M, new_field, new_field) # Remove the M2M table again with connection.schema_editor() as editor: editor.remove_field(LocalAuthorWithM2M, new_field) # Ensure there's no m2m table there self.assertRaises(DatabaseError, self.column_classes, new_field.remote_field.through) def test_m2m(self): self._test_m2m(ManyToManyField) def test_m2m_custom(self): self._test_m2m(CustomManyToManyField) def test_m2m_inherited(self): self._test_m2m(InheritedManyToManyField) def _test_m2m_through_alter(self, M2MFieldClass): """ Tests altering M2Ms with explicit through models (should no-op) """ class LocalAuthorTag(Model): author = ForeignKey("schema.LocalAuthorWithM2MThrough") tag = ForeignKey("schema.TagM2MTest") class Meta: app_label = 'schema' apps = new_apps class LocalAuthorWithM2MThrough(Model): name = CharField(max_length=255) tags = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) class Meta: app_label = 'schema' apps = new_apps self.local_models = [LocalAuthorTag, LocalAuthorWithM2MThrough] # Create the tables with connection.schema_editor() as editor: editor.create_model(LocalAuthorTag) editor.create_model(LocalAuthorWithM2MThrough) editor.create_model(TagM2MTest) # Ensure the m2m table is there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) # "Alter" the field's blankness. This should not actually do anything. 
old_field = LocalAuthorWithM2MThrough._meta.get_field("tags") new_field = M2MFieldClass("schema.TagM2MTest", related_name="authors", through=LocalAuthorTag) new_field.contribute_to_class(LocalAuthorWithM2MThrough, "tags") with connection.schema_editor() as editor: editor.alter_field(LocalAuthorWithM2MThrough, old_field, new_field) # Ensure the m2m table is still there self.assertEqual(len(self.column_classes(LocalAuthorTag)), 3) def test_m2m_through_alter(self): self._test_m2m_through_alter(ManyToManyField) def test_m2m_through_alter_custom(self): self._test_m2m_through_alter(CustomManyToManyField) def test_m2m_through_alter_inherited(self): self._test_m2m_through_alter(InheritedManyToManyField) def _test_m2m_repoint(self, M2MFieldClass): """ Tests repointing M2M fields """ class LocalBookWithM2M(Model): author = ForeignKey(Author) title = CharField(max_length=100, db_index=True) pub_date = DateTimeField() tags = M2MFieldClass("TagM2MTest", related_name="books") class Meta: app_label = 'schema' apps = new_apps self.local_models = [ LocalBookWithM2M, LocalBookWithM2M._meta.get_field('tags').remote_field.through, ] # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(LocalBookWithM2M) editor.create_model(TagM2MTest) editor.create_model(UniqueTest) # Ensure the M2M exists and points to TagM2MTest constraints = self.get_constraints(LocalBookWithM2M._meta.get_field("tags").remote_field.through._meta.db_table) if connection.features.supports_foreign_keys: for name, details in constraints.items(): if details['columns'] == ["tagm2mtest_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_tagm2mtest', 'id')) break else: self.fail("No FK constraint for tagm2mtest_id found") # Repoint the M2M old_field = LocalBookWithM2M._meta.get_field("tags") new_field = M2MFieldClass(UniqueTest) new_field.contribute_to_class(LocalBookWithM2M, "uniques") self.local_models += 
[new_field.remote_field.through] with connection.schema_editor() as editor: editor.alter_field(LocalBookWithM2M, old_field, new_field) # Ensure old M2M is gone self.assertRaises(DatabaseError, self.column_classes, LocalBookWithM2M._meta.get_field("tags").remote_field.through) # Ensure the new M2M exists and points to UniqueTest constraints = self.get_constraints(new_field.remote_field.through._meta.db_table) if connection.features.supports_foreign_keys: for name, details in constraints.items(): if details['columns'] == ["uniquetest_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_uniquetest', 'id')) break else: self.fail("No FK constraint for uniquetest_id found") def test_m2m_repoint(self): self._test_m2m_repoint(ManyToManyField) def test_m2m_repoint_custom(self): self._test_m2m_repoint(CustomManyToManyField) def test_m2m_repoint_inherited(self): self._test_m2m_repoint(InheritedManyToManyField) @unittest.skipUnless(connection.features.supports_column_check_constraints, "No check constraints") def test_check_constraints(self): """ Tests creating/deleting CHECK constraints """ # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the constraint exists constraints = self.get_constraints(Author._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["height"] and details['check']: break else: self.fail("No check constraint for height found") # Alter the column to remove it old_field = Author._meta.get_field("height") new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field, strict=True) constraints = self.get_constraints(Author._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["height"] and details['check']: self.fail("Check constraint for height found") # Alter the column to re-add it new_field2 = 
Author._meta.get_field("height") with connection.schema_editor() as editor: editor.alter_field(Author, new_field, new_field2, strict=True) constraints = self.get_constraints(Author._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["height"] and details['check']: break else: self.fail("No check constraint for height found") def test_unique(self): """ Tests removing and adding unique constraints to a single column. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the field is unique to begin with Tag.objects.create(title="foo", slug="foo") self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be non-unique old_field = Tag._meta.get_field("slug") new_field = SlugField(unique=False) new_field.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, old_field, new_field, strict=True) # Ensure the field is no longer unique Tag.objects.create(title="foo", slug="foo") Tag.objects.create(title="bar", slug="foo") Tag.objects.all().delete() # Alter the slug field to be unique new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field, new_field2, strict=True) # Ensure the field is unique again Tag.objects.create(title="foo", slug="foo") self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo") Tag.objects.all().delete() # Rename the field new_field3 = SlugField(unique=True) new_field3.set_attributes_from_name("slug2") with connection.schema_editor() as editor: editor.alter_field(Tag, new_field2, new_field3, strict=True) # Ensure the field is still unique TagUniqueRename.objects.create(title="foo", slug2="foo") self.assertRaises(IntegrityError, TagUniqueRename.objects.create, title="bar", slug2="foo") Tag.objects.all().delete() def 
test_unique_together(self): """ Tests removing and adding unique_together constraints on a model. """ # Create the table with connection.schema_editor() as editor: editor.create_model(UniqueTest) # Ensure the fields are unique to begin with UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2011, slug="foo") UniqueTest.objects.create(year=2011, slug="bar") self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter the model to its non-unique-together companion with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, []) # Ensure the fields are no longer unique UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.create(year=2012, slug="foo") UniqueTest.objects.all().delete() # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together) # Ensure the fields are unique again UniqueTest.objects.create(year=2012, slug="foo") self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo") UniqueTest.objects.all().delete() def test_unique_together_with_fk(self): """ Tests removing and adding unique_together constraints that include a foreign key. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.unique_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_unique_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_unique_together(Book, [['author', 'title']], []) def test_index_together(self): """ Tests removing and adding index_together constraints on a model. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure there's no index on the year/slug columns first self.assertEqual( False, any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), ) # Alter the model to add an index with connection.schema_editor() as editor: editor.alter_index_together(Tag, [], [("slug", "title")]) # Ensure there is now an index self.assertEqual( True, any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), ) # Alter it back new_field2 = SlugField(unique=True) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_index_together(Tag, [("slug", "title")], []) # Ensure there's no index self.assertEqual( False, any( c["index"] for c in self.get_constraints("schema_tag").values() if c['columns'] == ["slug", "title"] ), ) def test_index_together_with_fk(self): """ Tests removing and adding index_together constraints that include a foreign key. 
""" # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the fields are unique to begin with self.assertEqual(Book._meta.index_together, ()) # Add the unique_together constraint with connection.schema_editor() as editor: editor.alter_index_together(Book, [], [['author', 'title']]) # Alter it back with connection.schema_editor() as editor: editor.alter_index_together(Book, [['author', 'title']], []) def test_create_index_together(self): """ Tests creating models with index_together already defined """ # Create the table with connection.schema_editor() as editor: editor.create_model(TagIndexed) # Ensure there is an index self.assertEqual( True, any( c["index"] for c in self.get_constraints("schema_tagindexed").values() if c['columns'] == ["slug", "title"] ), ) def test_db_table(self): """ Tests renaming of the table """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the table is there to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") # Alter the table with connection.schema_editor() as editor: editor.alter_db_table(Author, "schema_author", "schema_otherauthor") # Ensure the table is there afterwards Author._meta.db_table = "schema_otherauthor" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") # Alter the table again with connection.schema_editor() as editor: editor.alter_db_table(Author, "schema_otherauthor", "schema_author") # Ensure the table is still there Author._meta.db_table = "schema_author" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") def test_indexes(self): """ Tests creation/altering of indexes """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there and has the right index self.assertIn( "title", 
self.get_indexes(Book._meta.db_table), ) # Alter to remove the index old_field = Book._meta.get_field("title") new_field = CharField(max_length=100, db_index=False) new_field.set_attributes_from_name("title") with connection.schema_editor() as editor: editor.alter_field(Book, old_field, new_field, strict=True) # Ensure the table is there and has no index self.assertNotIn( "title", self.get_indexes(Book._meta.db_table), ) # Alter to re-add the index new_field2 = Book._meta.get_field("title") with connection.schema_editor() as editor: editor.alter_field(Book, new_field, new_field2, strict=True) # Ensure the table is there and has the index again self.assertIn( "title", self.get_indexes(Book._meta.db_table), ) # Add a unique column, verify that creates an implicit index new_field3 = BookWithSlug._meta.get_field("slug") with connection.schema_editor() as editor: editor.add_field(Book, new_field3) self.assertIn( "slug", self.get_indexes(Book._meta.db_table), ) # Remove the unique, check the index goes with it new_field4 = CharField(max_length=20, unique=False) new_field4.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(BookWithSlug, new_field3, new_field4, strict=True) self.assertNotIn( "slug", self.get_indexes(Book._meta.db_table), ) def test_primary_key(self): """ Tests altering of the primary key """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the table is there and has the right PK self.assertTrue( self.get_indexes(Tag._meta.db_table)['id']['primary_key'], ) # Alter to change the PK id_field = Tag._meta.get_field("id") old_field = Tag._meta.get_field("slug") new_field = SlugField(primary_key=True) new_field.set_attributes_from_name("slug") new_field.model = Tag with connection.schema_editor() as editor: editor.remove_field(Tag, id_field) editor.alter_field(Tag, old_field, new_field) # Ensure the PK changed self.assertNotIn( 'id', self.get_indexes(Tag._meta.db_table), ) 
self.assertTrue( self.get_indexes(Tag._meta.db_table)['slug']['primary_key'], ) def test_context_manager_exit(self): """ Ensures transaction is correctly closed when an error occurs inside a SchemaEditor context. """ class SomeError(Exception): pass try: with connection.schema_editor(): raise SomeError except SomeError: self.assertFalse(connection.in_atomic_block) @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_foreign_key_index_long_names_regression(self): """ Regression test for #21497. Only affects databases that supports foreign keys. """ # Create the table with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Find the properly shortened column name column_name = connection.ops.quote_name("author_foreign_key_with_really_long_field_name_id") column_name = column_name[1:-1].lower() # unquote, and, for Oracle, un-upcase # Ensure the table is there and has an index on the column self.assertIn( column_name, self.get_indexes(BookWithLongName._meta.db_table), ) @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_add_foreign_key_long_names(self): """ Regression test for #23009. Only affects databases that supports foreign keys. """ # Create the initial tables with connection.schema_editor() as editor: editor.create_model(AuthorWithEvenLongerName) editor.create_model(BookWithLongName) # Add a second FK, this would fail due to long ref name before the fix new_field = ForeignKey(AuthorWithEvenLongerName, related_name="something") new_field.set_attributes_from_name("author_other_really_long_named_i_mean_so_long_fk") with connection.schema_editor() as editor: editor.add_field(BookWithLongName, new_field) def test_creation_deletion_reserved_names(self): """ Tries creating a model's table, and then deleting it when it has a SQL reserved name. 
""" # Create the table with connection.schema_editor() as editor: try: editor.create_model(Thing) except OperationalError as e: self.fail("Errors when applying initial migration for a model " "with a table named after a SQL reserved word: %s" % e) # Check that it's there list(Thing.objects.all()) # Clean up that table with connection.schema_editor() as editor: editor.delete_model(Thing) # Check that it's gone self.assertRaises( DatabaseError, lambda: list(Thing.objects.all()), ) @unittest.skipUnless(connection.features.supports_foreign_keys, "No FK support") def test_remove_constraints_capital_letters(self): """ #23065 - Constraint names must be quoted if they contain capital letters. """ def get_field(*args, **kwargs): kwargs['db_column'] = "CamelCase" field = kwargs.pop('field_class', IntegerField)(*args, **kwargs) field.set_attributes_from_name("CamelCase") return field model = Author field = get_field() table = model._meta.db_table column = field.column with connection.schema_editor() as editor: editor.create_model(model) editor.add_field(model, field) editor.execute( editor.sql_create_index % { "table": editor.quote_name(table), "name": editor.quote_name("CamelCaseIndex"), "columns": editor.quote_name(column), "extra": "", } ) editor.alter_field(model, get_field(db_index=True), field) editor.execute( editor.sql_create_unique % { "table": editor.quote_name(table), "name": editor.quote_name("CamelCaseUniqConstraint"), "columns": editor.quote_name(field.column), } ) editor.alter_field(model, get_field(unique=True), field) editor.execute( editor.sql_create_fk % { "table": editor.quote_name(table), "name": editor.quote_name("CamelCaseFKConstraint"), "column": editor.quote_name(column), "to_table": editor.quote_name(table), "to_column": editor.quote_name(model._meta.auto_field.column), } ) editor.alter_field(model, get_field(Author, field_class=ForeignKey), field) def test_add_field_use_effective_default(self): """ #23987 - effective_default() should be used as the 
field default when adding a new field. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField to ensure default will be used from effective_default new_field = CharField(max_length=15, blank=True) new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], None if connection.features.interprets_empty_strings_as_nulls else '') def test_add_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no surname field columns = self.column_classes(Author) self.assertNotIn("surname", columns) # Create a row Author.objects.create(name='Anonymous1') # Add new CharField with a default new_field = CharField(max_length=15, blank=True, default='surname default') new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field(Author, new_field) # Ensure field was added with the right default with connection.cursor() as cursor: cursor.execute("SELECT surname FROM schema_author;") item = cursor.fetchall()[0] self.assertEqual(item[0], 'surname default') # And that the default is no longer set in the database. 
field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "surname" ) if connection.features.can_introspect_default: self.assertIsNone(field.default) def test_alter_field_default_dropped(self): # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Create a row Author.objects.create(name='Anonymous1') self.assertEqual(Author.objects.get().height, None) old_field = Author._meta.get_field('height') # The default from the new field is used in updating existing rows. new_field = IntegerField(blank=True, default=42) new_field.set_attributes_from_name('height') with connection.schema_editor() as editor: editor.alter_field(Author, old_field, new_field) self.assertEqual(Author.objects.get().height, 42) # The database default should be removed. with connection.cursor() as cursor: field = next( f for f in connection.introspection.get_table_description(cursor, "schema_author") if f.name == "height" ) if connection.features.can_introspect_default: self.assertIsNone(field.default)
{ "content_hash": "4f9fc5bb0ea1712c25b0c7fee8491ce6", "timestamp": "", "source": "github", "line_count": 1508, "max_line_length": 125, "avg_line_length": 43.28978779840849, "alnum_prop": 0.6144513717620748, "repo_name": "shtouff/django", "id": "5128c9ec1e1960d945d9d78b7af11d233c628ab7", "size": "65281", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/schema/tests.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "43177" }, { "name": "HTML", "bytes": "171768" }, { "name": "JavaScript", "bytes": "105066" }, { "name": "Makefile", "bytes": "125" }, { "name": "Python", "bytes": "10907314" }, { "name": "Shell", "bytes": "809" }, { "name": "Smarty", "bytes": "130" } ], "symlink_target": "" }
package ac.at.tuwien.mt.gui.primefaces.exception; /** * @author Florin Bogdan Balint * */ @SuppressWarnings("serial") public class CommunicationException extends Exception { public CommunicationException(String message) { super(message); } }
{ "content_hash": "31d780bff6f144a28994aab387ce1b96", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 55, "avg_line_length": 16.866666666666667, "alnum_prop": 0.7470355731225297, "repo_name": "e0725439/idac", "id": "260d6aab15f21beb4d9b0b7897c7716919c563df", "size": "581", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "prototype/ac.at.tuwien.mt.gui/src/main/java/ac/at/tuwien/mt/gui/primefaces/exception/CommunicationException.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "8476" }, { "name": "HTML", "bytes": "333174" }, { "name": "Java", "bytes": "812118" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using EnvCrypt.Core.EncrypedData.Poco; using EnvCrypt.Core.EncryptionAlgo; using EnvCrypt.Core.Key.Aes; using EnvCrypt.Core.Key.PlainText; using EnvCrypt.Core.Key.Rsa; using EnvCrypt.Core.Verb.DecryptEntry; using EnvCrypt.Core.Verb.DecryptEntry.Aes; using EnvCrypt.Core.Verb.DecryptEntry.Generic; using EnvCrypt.Core.Verb.DecryptEntry.PlainText; using EnvCrypt.Core.Verb.DecryptEntry.Rsa; using EnvCrypt.Core.Verb.LoadDat; using FluentAssertions; using Moq; using NUnit.Framework; namespace EnvCrypt.Core.UnitTest.Verb.DecryptEntry.Generic { [TestFixture] public class DecryptGenericWorkflowTest { [Test] public void Given_TwoDifferentEncAlgosRequestedToBeDecrypted_When_Run_Then_ExceptionThrown() { // Arrange Mock<IDatLoader<DatFromFileLoaderOptions>> datLoaderMock; var plainTextEntryWorkflowBuilderMock = new Mock<IDecryptPlainTextEntryWorkflowBuilder>(MockBehavior.Strict); var rsaEntryWorkflowBuilderMock = new Mock<IDecryptRsaEntryWorkflowBuilder>(MockBehavior.Strict); var aesEntryWorkflowBuilderMock = new Mock<IDecryptAesEntryWorkflowBuilder>(MockBehavior.Strict); var options = TwoDifferentAlgosExceptionTestSetup(out datLoaderMock); // Act var workflow = new DecryptGenericWorkflow(datLoaderMock.Object, plainTextEntryWorkflowBuilderMock.Object, rsaEntryWorkflowBuilderMock.Object, aesEntryWorkflowBuilderMock.Object); Action act = () => workflow.Run(options); // Assert act.ShouldThrow<EnvCryptException>(); } [Test] public void Given_TwoDifferentEncAlgos_When_NotRequestedToBeEncrypted_Then_ExceptionNotThrown() { // Arrange Mock<IDatLoader<DatFromFileLoaderOptions>> datLoaderMock; var plainTextEntryWorkflowBuilderMock = new Mock<IDecryptPlainTextEntryWorkflowBuilder>(MockBehavior.Strict); var rsaEntryWorkflowBuilderMock = new Mock<IDecryptRsaEntryWorkflowBuilder>(MockBehavior.Strict); var aesEntryWorkflowBuilderMock = new Mock<IDecryptAesEntryWorkflowBuilder>(MockBehavior.Strict); 
aesEntryWorkflowBuilderMock.Setup( b => b.WithDatLoader(It.IsAny<IDatLoader<DatFromFileLoaderOptions>>())) .Returns(aesEntryWorkflowBuilderMock.Object); aesEntryWorkflowBuilderMock.Setup( b => b.Run(It.IsAny<DecryptEntryWorkflowOptions>())) .Returns(new List<EntriesDecrypterResult<AesKey>>()); var options = TwoDifferentAlgosExceptionTestSetup(out datLoaderMock); options.CategoryEntryPair.RemoveAt(0); // Act var workflow = new DecryptGenericWorkflow(datLoaderMock.Object, plainTextEntryWorkflowBuilderMock.Object, rsaEntryWorkflowBuilderMock.Object, aesEntryWorkflowBuilderMock.Object); Action act = () => workflow.Run(options); // Assert act.ShouldNotThrow<EnvCryptException>(); } [Test] public void Given_PlainTextDecryptionRequest_When_Run_Then_PlainTextWorkflowCalled() { // Arrange Mock<IDatLoader<DatFromFileLoaderOptions>> datLoaderMock; var rsaEntryWorkflowBuilderMock = new Mock<IDecryptRsaEntryWorkflowBuilder>(MockBehavior.Strict); var aesEntryWorkflowBuilderMock = new Mock<IDecryptAesEntryWorkflowBuilder>(MockBehavior.Strict); var plainTextEntryWorkflowBuilderMock = new Mock<IDecryptPlainTextEntryWorkflowBuilder>(MockBehavior.Strict); plainTextEntryWorkflowBuilderMock.Setup( b => b.WithDatLoader(It.IsAny<IDatLoader<DatFromFileLoaderOptions>>())) .Returns(plainTextEntryWorkflowBuilderMock.Object); plainTextEntryWorkflowBuilderMock.Setup( b => b.Build()) .Returns(plainTextEntryWorkflowBuilderMock.Object); plainTextEntryWorkflowBuilderMock.Setup( b => b.Run(It.IsAny<DecryptPlainTextEntryWorkflowOptions>())) .Returns(new List<EntriesDecrypterResult<PlainTextKey>>()); var options = TwoDifferentAlgosExceptionTestSetup(out datLoaderMock); options.CategoryEntryPair = options.CategoryEntryPair.Where(p => p.Category == "dev").ToList(); // Act var workflow = new DecryptGenericWorkflow(datLoaderMock.Object, plainTextEntryWorkflowBuilderMock.Object, rsaEntryWorkflowBuilderMock.Object, aesEntryWorkflowBuilderMock.Object); var res = workflow.Run(options); // Assert 
plainTextEntryWorkflowBuilderMock.Verify( b => b.Run(It.IsAny<DecryptPlainTextEntryWorkflowOptions>()), Times.Once); } [Test] public void Given_RsaDecryptionRequest_When_Run_Then_RsaWorkflowCalled() { // Arrange Mock<IDatLoader<DatFromFileLoaderOptions>> datLoaderMock; var rsaEntryWorkflowBuilderMock = new Mock<IDecryptRsaEntryWorkflowBuilder>(MockBehavior.Strict); var aesEntryWorkflowBuilderMock = new Mock<IDecryptAesEntryWorkflowBuilder>(MockBehavior.Strict); var plainTextEntryWorkflowBuilderMock = new Mock<IDecryptPlainTextEntryWorkflowBuilder>(MockBehavior.Strict); rsaEntryWorkflowBuilderMock.Setup( b => b.WithDatLoader(It.IsAny<IDatLoader<DatFromFileLoaderOptions>>())) .Returns(rsaEntryWorkflowBuilderMock.Object); rsaEntryWorkflowBuilderMock.Setup( b => b.Build()) .Returns(rsaEntryWorkflowBuilderMock.Object); rsaEntryWorkflowBuilderMock.Setup( b => b.Run(It.IsAny<DecryptEntryWorkflowOptions>())) .Returns(new List<EntriesDecrypterResult<RsaKey>>()); var options = TwoDifferentAlgosExceptionTestSetup(out datLoaderMock); options.CategoryEntryPair = options.CategoryEntryPair.Where(p => p.Category == "prod").ToList(); // Act var workflow = new DecryptGenericWorkflow( datLoaderMock.Object, plainTextEntryWorkflowBuilderMock.Object, rsaEntryWorkflowBuilderMock.Object, aesEntryWorkflowBuilderMock.Object); workflow.Run(options); // Assert rsaEntryWorkflowBuilderMock.Verify( b => b.Run(It.IsAny<DecryptEntryWorkflowOptions>()), Times.Once); } [Test] public void Given_AesDecryptionRequest_When_Run_Then_AesWorkflowCalled() { // Arrange Mock<IDatLoader<DatFromFileLoaderOptions>> datLoaderMock; var rsaEntryWorkflowBuilderMock = new Mock<IDecryptRsaEntryWorkflowBuilder>(MockBehavior.Strict); var aesEntryWorkflowBuilderMock = new Mock<IDecryptAesEntryWorkflowBuilder>(MockBehavior.Strict); var plainTextEntryWorkflowBuilderMock = new Mock<IDecryptPlainTextEntryWorkflowBuilder>(MockBehavior.Strict); aesEntryWorkflowBuilderMock.Setup( b => 
b.WithDatLoader(It.IsAny<IDatLoader<DatFromFileLoaderOptions>>())) .Returns(aesEntryWorkflowBuilderMock.Object); aesEntryWorkflowBuilderMock.Setup( b => b.Build()) .Returns(aesEntryWorkflowBuilderMock.Object); aesEntryWorkflowBuilderMock.Setup( b => b.Run(It.IsAny<DecryptEntryWorkflowOptions>())) .Returns(new List<EntriesDecrypterResult<AesKey>>()); var options = TwoDifferentAlgosExceptionTestSetup(out datLoaderMock); options.CategoryEntryPair = options.CategoryEntryPair.Where(p => p.Category == "uat").ToList(); // Act var workflow = new DecryptGenericWorkflow( datLoaderMock.Object, plainTextEntryWorkflowBuilderMock.Object, rsaEntryWorkflowBuilderMock.Object, aesEntryWorkflowBuilderMock.Object); workflow.Run(options); // Assert aesEntryWorkflowBuilderMock.Verify( b => b.Run(It.IsAny<DecryptEntryWorkflowOptions>()), Times.Once); } private static DecryptGenericWorkflowOptions TwoDifferentAlgosExceptionTestSetup(out Mock<IDatLoader<DatFromFileLoaderOptions>> datLoaderMock) { var options = new DecryptGenericWorkflowOptions() { DatFilePath = @"X:\tmp\generic.dat", CategoryEntryPair = new List<CategoryEntryPair>() { new CategoryEntryPair("prod", "password"), new CategoryEntryPair("uat", "password"), new CategoryEntryPair("dev", "username"), } }; var dat = new EnvCryptDat() { Categories = new[] { new Category() { Name = options.CategoryEntryPair[0].Category, Entries = new[] { new Entry() { Name = options.CategoryEntryPair[0].Entry, EncryptionAlgorithm = EnvCryptAlgoEnum.Rsa } } }, new Category() { Name = options.CategoryEntryPair[1].Category, Entries = new[] { new Entry() { Name = options.CategoryEntryPair[1].Entry, EncryptionAlgorithm = EnvCryptAlgoEnum.Aes } } }, new Category() { Name = options.CategoryEntryPair[2].Category, Entries = new[] { new Entry() { Name = options.CategoryEntryPair[2].Entry, EncryptionAlgorithm = EnvCryptAlgoEnum.PlainText } } } } }; datLoaderMock = new Mock<IDatLoader<DatFromFileLoaderOptions>>(MockBehavior.Strict); datLoaderMock.Setup(l => 
l.Load(It.Is<DatFromFileLoaderOptions>(o => o.DatFilePath == options.DatFilePath))) .Returns(dat); return options; } } }
{ "content_hash": "b9200e2173f826d541e44e7a72d20264", "timestamp": "", "source": "github", "line_count": 250, "max_line_length": 190, "avg_line_length": 43.576, "alnum_prop": 0.6089590600330457, "repo_name": "lammichael/EnvCrypt.Core", "id": "ada1592af976bd81739c7efe177515cafc0cceb7", "size": "10896", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/EnvCrypt.Core.UnitTest/Verb/DecryptEntry/Generic/DecryptGenericWorkflowTest.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1449" }, { "name": "C#", "bytes": "400738" }, { "name": "Ruby", "bytes": "5965" } ], "symlink_target": "" }
import { CompilerCallback, ImplementationCompilerOptions } from '../index.d'; /** * compile/convert implementation (controller, helper, model) as module * * @param {string} content the implementation code as string * @param {string} implFile the path of the implementation file (for debugging/error information) * @param {ImplLoadOptions} options the ImplLoadOptions with property mapping (list of ImplOptions) * @param {Function} callback the callback when impl. compilation has been completed: callback(error | null, compiledImpl, map, meta) * @param {any} [_map] source mapping (unused) * @param {any} [_meta] meta data (unused) */ declare function compile(content: string, implFile: string, options: ImplementationCompilerOptions, callback: CompilerCallback, _map: any, _meta: any): void; declare const _default: { compile: typeof compile; }; export = _default;
{ "content_hash": "01d344aaa6c15c2cc6888499f9343882", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 157, "avg_line_length": 56.3125, "alnum_prop": 0.732519422863485, "repo_name": "mmig/mmir-tooling", "id": "ee0aaeeb7c15be813d06d065b5104d0944d94606", "size": "901", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "impl/impl-gen.d.ts", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "267236" }, { "name": "TypeScript", "bytes": "222069" } ], "symlink_target": "" }
import mongoose , { Schema } from 'mongoose'; import { USER } from '../constants/index'; const UserSchema = new Schema({ id:String, name:String, email:String, password:String, registerTime:{ type:Date, default:Date.now }, role:{ type:String, default:USER.ROLE.NORMAL }, status:{ type:String, default:USER.STATUS.ACTIVE }, score:{ type:Number, default:0 }, tag:{ type:String, default:"" } }); UserSchema.statics.checkUser=function(params,cb){ return this.findOne(params,'id name',cb); } UserSchema.statics.getOne=function(params,cb){ return this.findOne(params,cb); } UserSchema.statics.getAllByIds=function(params,fieldsStr,ids,cb){ return this.find(params).select(fieldsStr).where('id').in(ids).exec(cb); } mongoose.model('User',UserSchema);
{ "content_hash": "516ba4d58065d015d8fa493a12d9fef4", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 76, "avg_line_length": 21.804878048780488, "alnum_prop": 0.616331096196868, "repo_name": "BSFullStack/isomorphic-redux-app", "id": "90f760ed3bf9ff0f4820db0d037bfb3ae710938c", "size": "894", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/server/models/User.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "675862" }, { "name": "JavaScript", "bytes": "340413" } ], "symlink_target": "" }
package com.kaaprotech.satu.parser; public enum FieldTypeCategory { DeclaredType, Primitive, ImportedType, Map, Set }
{ "content_hash": "903cb7e88e10ad5c79b6f11b32e4b28b", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 35, "avg_line_length": 12.818181818181818, "alnum_prop": 0.6808510638297872, "repo_name": "kaaprotech/satu", "id": "01ba6e1af5b6f3c35322f215a75f06c7a8ae36ab", "size": "739", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "satu-core/src/main/java/com/kaaprotech/satu/parser/FieldTypeCategory.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "4452" }, { "name": "Java", "bytes": "240052" } ], "symlink_target": "" }
#ifdef _SAME54_AC_COMPONENT_ #ifndef _HRI_AC_E54_H_INCLUDED_ #define _HRI_AC_E54_H_INCLUDED_ #ifdef __cplusplus extern "C" { #endif #include <stdbool.h> #include <hal_atomic.h> #if defined(ENABLE_AC_CRITICAL_SECTIONS) #define AC_CRITICAL_SECTION_ENTER() CRITICAL_SECTION_ENTER() #define AC_CRITICAL_SECTION_LEAVE() CRITICAL_SECTION_LEAVE() #else #define AC_CRITICAL_SECTION_ENTER() #define AC_CRITICAL_SECTION_LEAVE() #endif typedef uint16_t hri_ac_calib_reg_t; typedef uint16_t hri_ac_evctrl_reg_t; typedef uint32_t hri_ac_compctrl_reg_t; typedef uint32_t hri_ac_syncbusy_reg_t; typedef uint8_t hri_ac_ctrla_reg_t; typedef uint8_t hri_ac_ctrlb_reg_t; typedef uint8_t hri_ac_dbgctrl_reg_t; typedef uint8_t hri_ac_intenset_reg_t; typedef uint8_t hri_ac_intflag_reg_t; typedef uint8_t hri_ac_scaler_reg_t; typedef uint8_t hri_ac_statusa_reg_t; typedef uint8_t hri_ac_statusb_reg_t; typedef uint8_t hri_ac_winctrl_reg_t; static inline void hri_ac_wait_for_sync(const void *const hw, hri_ac_syncbusy_reg_t reg) { while (((Ac *)hw)->SYNCBUSY.reg & reg) { }; } static inline bool hri_ac_is_syncing(const void *const hw, hri_ac_syncbusy_reg_t reg) { return ((Ac *)hw)->SYNCBUSY.reg & reg; } static inline bool hri_ac_get_INTFLAG_COMP0_bit(const void *const hw) { return (((Ac *)hw)->INTFLAG.reg & AC_INTFLAG_COMP0) >> AC_INTFLAG_COMP0_Pos; } static inline void hri_ac_clear_INTFLAG_COMP0_bit(const void *const hw) { ((Ac *)hw)->INTFLAG.reg = AC_INTFLAG_COMP0; } static inline bool hri_ac_get_INTFLAG_COMP1_bit(const void *const hw) { return (((Ac *)hw)->INTFLAG.reg & AC_INTFLAG_COMP1) >> AC_INTFLAG_COMP1_Pos; } static inline void hri_ac_clear_INTFLAG_COMP1_bit(const void *const hw) { ((Ac *)hw)->INTFLAG.reg = AC_INTFLAG_COMP1; } static inline bool hri_ac_get_INTFLAG_WIN0_bit(const void *const hw) { return (((Ac *)hw)->INTFLAG.reg & AC_INTFLAG_WIN0) >> AC_INTFLAG_WIN0_Pos; } static inline void hri_ac_clear_INTFLAG_WIN0_bit(const void *const hw) { ((Ac *)hw)->INTFLAG.reg = AC_INTFLAG_WIN0; } static 
inline bool hri_ac_get_interrupt_COMP0_bit(const void *const hw) { return (((Ac *)hw)->INTFLAG.reg & AC_INTFLAG_COMP0) >> AC_INTFLAG_COMP0_Pos; } static inline void hri_ac_clear_interrupt_COMP0_bit(const void *const hw) { ((Ac *)hw)->INTFLAG.reg = AC_INTFLAG_COMP0; } static inline bool hri_ac_get_interrupt_COMP1_bit(const void *const hw) { return (((Ac *)hw)->INTFLAG.reg & AC_INTFLAG_COMP1) >> AC_INTFLAG_COMP1_Pos; } static inline void hri_ac_clear_interrupt_COMP1_bit(const void *const hw) { ((Ac *)hw)->INTFLAG.reg = AC_INTFLAG_COMP1; } static inline bool hri_ac_get_interrupt_WIN0_bit(const void *const hw) { return (((Ac *)hw)->INTFLAG.reg & AC_INTFLAG_WIN0) >> AC_INTFLAG_WIN0_Pos; } static inline void hri_ac_clear_interrupt_WIN0_bit(const void *const hw) { ((Ac *)hw)->INTFLAG.reg = AC_INTFLAG_WIN0; } static inline hri_ac_intflag_reg_t hri_ac_get_INTFLAG_reg(const void *const hw, hri_ac_intflag_reg_t mask) { uint8_t tmp; tmp = ((Ac *)hw)->INTFLAG.reg; tmp &= mask; return tmp; } static inline hri_ac_intflag_reg_t hri_ac_read_INTFLAG_reg(const void *const hw) { return ((Ac *)hw)->INTFLAG.reg; } static inline void hri_ac_clear_INTFLAG_reg(const void *const hw, hri_ac_intflag_reg_t mask) { ((Ac *)hw)->INTFLAG.reg = mask; } static inline void hri_ac_set_INTEN_COMP0_bit(const void *const hw) { ((Ac *)hw)->INTENSET.reg = AC_INTENSET_COMP0; } static inline bool hri_ac_get_INTEN_COMP0_bit(const void *const hw) { return (((Ac *)hw)->INTENSET.reg & AC_INTENSET_COMP0) >> AC_INTENSET_COMP0_Pos; } static inline void hri_ac_write_INTEN_COMP0_bit(const void *const hw, bool value) { if (value == 0x0) { ((Ac *)hw)->INTENCLR.reg = AC_INTENSET_COMP0; } else { ((Ac *)hw)->INTENSET.reg = AC_INTENSET_COMP0; } } static inline void hri_ac_clear_INTEN_COMP0_bit(const void *const hw) { ((Ac *)hw)->INTENCLR.reg = AC_INTENSET_COMP0; } static inline void hri_ac_set_INTEN_COMP1_bit(const void *const hw) { ((Ac *)hw)->INTENSET.reg = AC_INTENSET_COMP1; } static inline bool 
hri_ac_get_INTEN_COMP1_bit(const void *const hw) { return (((Ac *)hw)->INTENSET.reg & AC_INTENSET_COMP1) >> AC_INTENSET_COMP1_Pos; } static inline void hri_ac_write_INTEN_COMP1_bit(const void *const hw, bool value) { if (value == 0x0) { ((Ac *)hw)->INTENCLR.reg = AC_INTENSET_COMP1; } else { ((Ac *)hw)->INTENSET.reg = AC_INTENSET_COMP1; } } static inline void hri_ac_clear_INTEN_COMP1_bit(const void *const hw) { ((Ac *)hw)->INTENCLR.reg = AC_INTENSET_COMP1; } static inline void hri_ac_set_INTEN_WIN0_bit(const void *const hw) { ((Ac *)hw)->INTENSET.reg = AC_INTENSET_WIN0; } static inline bool hri_ac_get_INTEN_WIN0_bit(const void *const hw) { return (((Ac *)hw)->INTENSET.reg & AC_INTENSET_WIN0) >> AC_INTENSET_WIN0_Pos; } static inline void hri_ac_write_INTEN_WIN0_bit(const void *const hw, bool value) { if (value == 0x0) { ((Ac *)hw)->INTENCLR.reg = AC_INTENSET_WIN0; } else { ((Ac *)hw)->INTENSET.reg = AC_INTENSET_WIN0; } } static inline void hri_ac_clear_INTEN_WIN0_bit(const void *const hw) { ((Ac *)hw)->INTENCLR.reg = AC_INTENSET_WIN0; } static inline void hri_ac_set_INTEN_reg(const void *const hw, hri_ac_intenset_reg_t mask) { ((Ac *)hw)->INTENSET.reg = mask; } static inline hri_ac_intenset_reg_t hri_ac_get_INTEN_reg(const void *const hw, hri_ac_intenset_reg_t mask) { uint8_t tmp; tmp = ((Ac *)hw)->INTENSET.reg; tmp &= mask; return tmp; } static inline hri_ac_intenset_reg_t hri_ac_read_INTEN_reg(const void *const hw) { return ((Ac *)hw)->INTENSET.reg; } static inline void hri_ac_write_INTEN_reg(const void *const hw, hri_ac_intenset_reg_t data) { ((Ac *)hw)->INTENSET.reg = data; ((Ac *)hw)->INTENCLR.reg = ~data; } static inline void hri_ac_clear_INTEN_reg(const void *const hw, hri_ac_intenset_reg_t mask) { ((Ac *)hw)->INTENCLR.reg = mask; } static inline bool hri_ac_get_STATUSA_STATE0_bit(const void *const hw) { return (((Ac *)hw)->STATUSA.reg & AC_STATUSA_STATE0) >> AC_STATUSA_STATE0_Pos; } static inline bool hri_ac_get_STATUSA_STATE1_bit(const void *const hw) { 
return (((Ac *)hw)->STATUSA.reg & AC_STATUSA_STATE1) >> AC_STATUSA_STATE1_Pos; } static inline hri_ac_statusa_reg_t hri_ac_get_STATUSA_WSTATE0_bf(const void *const hw, hri_ac_statusa_reg_t mask) { return (((Ac *)hw)->STATUSA.reg & AC_STATUSA_WSTATE0(mask)) >> AC_STATUSA_WSTATE0_Pos; } static inline hri_ac_statusa_reg_t hri_ac_read_STATUSA_WSTATE0_bf(const void *const hw) { return (((Ac *)hw)->STATUSA.reg & AC_STATUSA_WSTATE0_Msk) >> AC_STATUSA_WSTATE0_Pos; } static inline hri_ac_statusa_reg_t hri_ac_get_STATUSA_reg(const void *const hw, hri_ac_statusa_reg_t mask) { uint8_t tmp; tmp = ((Ac *)hw)->STATUSA.reg; tmp &= mask; return tmp; } static inline hri_ac_statusa_reg_t hri_ac_read_STATUSA_reg(const void *const hw) { return ((Ac *)hw)->STATUSA.reg; } static inline bool hri_ac_get_STATUSB_READY0_bit(const void *const hw) { return (((Ac *)hw)->STATUSB.reg & AC_STATUSB_READY0) >> AC_STATUSB_READY0_Pos; } static inline bool hri_ac_get_STATUSB_READY1_bit(const void *const hw) { return (((Ac *)hw)->STATUSB.reg & AC_STATUSB_READY1) >> AC_STATUSB_READY1_Pos; } static inline hri_ac_statusb_reg_t hri_ac_get_STATUSB_reg(const void *const hw, hri_ac_statusb_reg_t mask) { uint8_t tmp; tmp = ((Ac *)hw)->STATUSB.reg; tmp &= mask; return tmp; } static inline hri_ac_statusb_reg_t hri_ac_read_STATUSB_reg(const void *const hw) { return ((Ac *)hw)->STATUSB.reg; } static inline bool hri_ac_get_SYNCBUSY_SWRST_bit(const void *const hw) { return (((Ac *)hw)->SYNCBUSY.reg & AC_SYNCBUSY_SWRST) >> AC_SYNCBUSY_SWRST_Pos; } static inline bool hri_ac_get_SYNCBUSY_ENABLE_bit(const void *const hw) { return (((Ac *)hw)->SYNCBUSY.reg & AC_SYNCBUSY_ENABLE) >> AC_SYNCBUSY_ENABLE_Pos; } static inline bool hri_ac_get_SYNCBUSY_WINCTRL_bit(const void *const hw) { return (((Ac *)hw)->SYNCBUSY.reg & AC_SYNCBUSY_WINCTRL) >> AC_SYNCBUSY_WINCTRL_Pos; } static inline bool hri_ac_get_SYNCBUSY_COMPCTRL0_bit(const void *const hw) { return (((Ac *)hw)->SYNCBUSY.reg & AC_SYNCBUSY_COMPCTRL0) >> 
AC_SYNCBUSY_COMPCTRL0_Pos; } static inline bool hri_ac_get_SYNCBUSY_COMPCTRL1_bit(const void *const hw) { return (((Ac *)hw)->SYNCBUSY.reg & AC_SYNCBUSY_COMPCTRL1) >> AC_SYNCBUSY_COMPCTRL1_Pos; } static inline hri_ac_syncbusy_reg_t hri_ac_get_SYNCBUSY_reg(const void *const hw, hri_ac_syncbusy_reg_t mask) { uint32_t tmp; tmp = ((Ac *)hw)->SYNCBUSY.reg; tmp &= mask; return tmp; } static inline hri_ac_syncbusy_reg_t hri_ac_read_SYNCBUSY_reg(const void *const hw) { return ((Ac *)hw)->SYNCBUSY.reg; } static inline void hri_ac_set_CTRLA_SWRST_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CTRLA.reg |= AC_CTRLA_SWRST; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST); AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_CTRLA_SWRST_bit(const void *const hw) { uint8_t tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST); tmp = ((Ac *)hw)->CTRLA.reg; tmp = (tmp & AC_CTRLA_SWRST) >> AC_CTRLA_SWRST_Pos; return (bool)tmp; } static inline void hri_ac_set_CTRLA_ENABLE_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CTRLA.reg |= AC_CTRLA_ENABLE; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_CTRLA_ENABLE_bit(const void *const hw) { uint8_t tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); tmp = ((Ac *)hw)->CTRLA.reg; tmp = (tmp & AC_CTRLA_ENABLE) >> AC_CTRLA_ENABLE_Pos; return (bool)tmp; } static inline void hri_ac_write_CTRLA_ENABLE_bit(const void *const hw, bool value) { uint8_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->CTRLA.reg; tmp &= ~AC_CTRLA_ENABLE; tmp |= value << AC_CTRLA_ENABLE_Pos; ((Ac *)hw)->CTRLA.reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_CTRLA_ENABLE_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CTRLA.reg &= ~AC_CTRLA_ENABLE; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); 
AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_CTRLA_ENABLE_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CTRLA.reg ^= AC_CTRLA_ENABLE; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_CTRLA_reg(const void *const hw, hri_ac_ctrla_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CTRLA.reg |= mask; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_ctrla_reg_t hri_ac_get_CTRLA_reg(const void *const hw, hri_ac_ctrla_reg_t mask) { uint8_t tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); tmp = ((Ac *)hw)->CTRLA.reg; tmp &= mask; return tmp; } static inline void hri_ac_write_CTRLA_reg(const void *const hw, hri_ac_ctrla_reg_t data) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CTRLA.reg = data; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_CTRLA_reg(const void *const hw, hri_ac_ctrla_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CTRLA.reg &= ~mask; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_CTRLA_reg(const void *const hw, hri_ac_ctrla_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CTRLA.reg ^= mask; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_ctrla_reg_t hri_ac_read_CTRLA_reg(const void *const hw) { hri_ac_wait_for_sync(hw, AC_SYNCBUSY_SWRST | AC_SYNCBUSY_ENABLE); return ((Ac *)hw)->CTRLA.reg; } static inline void hri_ac_set_EVCTRL_COMPEO0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg |= AC_EVCTRL_COMPEO0; AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_EVCTRL_COMPEO0_bit(const void *const hw) { uint16_t tmp; tmp = ((Ac *)hw)->EVCTRL.reg; tmp = (tmp & 
AC_EVCTRL_COMPEO0) >> AC_EVCTRL_COMPEO0_Pos; return (bool)tmp; } static inline void hri_ac_write_EVCTRL_COMPEO0_bit(const void *const hw, bool value) { uint16_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->EVCTRL.reg; tmp &= ~AC_EVCTRL_COMPEO0; tmp |= value << AC_EVCTRL_COMPEO0_Pos; ((Ac *)hw)->EVCTRL.reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_EVCTRL_COMPEO0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg &= ~AC_EVCTRL_COMPEO0; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_EVCTRL_COMPEO0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg ^= AC_EVCTRL_COMPEO0; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_EVCTRL_COMPEO1_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg |= AC_EVCTRL_COMPEO1; AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_EVCTRL_COMPEO1_bit(const void *const hw) { uint16_t tmp; tmp = ((Ac *)hw)->EVCTRL.reg; tmp = (tmp & AC_EVCTRL_COMPEO1) >> AC_EVCTRL_COMPEO1_Pos; return (bool)tmp; } static inline void hri_ac_write_EVCTRL_COMPEO1_bit(const void *const hw, bool value) { uint16_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->EVCTRL.reg; tmp &= ~AC_EVCTRL_COMPEO1; tmp |= value << AC_EVCTRL_COMPEO1_Pos; ((Ac *)hw)->EVCTRL.reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_EVCTRL_COMPEO1_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg &= ~AC_EVCTRL_COMPEO1; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_EVCTRL_COMPEO1_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg ^= AC_EVCTRL_COMPEO1; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_EVCTRL_WINEO0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg |= AC_EVCTRL_WINEO0; AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_EVCTRL_WINEO0_bit(const void *const hw) { uint16_t tmp; 
tmp = ((Ac *)hw)->EVCTRL.reg; tmp = (tmp & AC_EVCTRL_WINEO0) >> AC_EVCTRL_WINEO0_Pos; return (bool)tmp; } static inline void hri_ac_write_EVCTRL_WINEO0_bit(const void *const hw, bool value) { uint16_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->EVCTRL.reg; tmp &= ~AC_EVCTRL_WINEO0; tmp |= value << AC_EVCTRL_WINEO0_Pos; ((Ac *)hw)->EVCTRL.reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_EVCTRL_WINEO0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg &= ~AC_EVCTRL_WINEO0; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_EVCTRL_WINEO0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg ^= AC_EVCTRL_WINEO0; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_EVCTRL_COMPEI0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg |= AC_EVCTRL_COMPEI0; AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_EVCTRL_COMPEI0_bit(const void *const hw) { uint16_t tmp; tmp = ((Ac *)hw)->EVCTRL.reg; tmp = (tmp & AC_EVCTRL_COMPEI0) >> AC_EVCTRL_COMPEI0_Pos; return (bool)tmp; } static inline void hri_ac_write_EVCTRL_COMPEI0_bit(const void *const hw, bool value) { uint16_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->EVCTRL.reg; tmp &= ~AC_EVCTRL_COMPEI0; tmp |= value << AC_EVCTRL_COMPEI0_Pos; ((Ac *)hw)->EVCTRL.reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_EVCTRL_COMPEI0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg &= ~AC_EVCTRL_COMPEI0; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_EVCTRL_COMPEI0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg ^= AC_EVCTRL_COMPEI0; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_EVCTRL_COMPEI1_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg |= AC_EVCTRL_COMPEI1; AC_CRITICAL_SECTION_LEAVE(); } static inline bool 
hri_ac_get_EVCTRL_COMPEI1_bit(const void *const hw) { uint16_t tmp; tmp = ((Ac *)hw)->EVCTRL.reg; tmp = (tmp & AC_EVCTRL_COMPEI1) >> AC_EVCTRL_COMPEI1_Pos; return (bool)tmp; } static inline void hri_ac_write_EVCTRL_COMPEI1_bit(const void *const hw, bool value) { uint16_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->EVCTRL.reg; tmp &= ~AC_EVCTRL_COMPEI1; tmp |= value << AC_EVCTRL_COMPEI1_Pos; ((Ac *)hw)->EVCTRL.reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_EVCTRL_COMPEI1_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg &= ~AC_EVCTRL_COMPEI1; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_EVCTRL_COMPEI1_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg ^= AC_EVCTRL_COMPEI1; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_EVCTRL_INVEI0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg |= AC_EVCTRL_INVEI0; AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_EVCTRL_INVEI0_bit(const void *const hw) { uint16_t tmp; tmp = ((Ac *)hw)->EVCTRL.reg; tmp = (tmp & AC_EVCTRL_INVEI0) >> AC_EVCTRL_INVEI0_Pos; return (bool)tmp; } static inline void hri_ac_write_EVCTRL_INVEI0_bit(const void *const hw, bool value) { uint16_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->EVCTRL.reg; tmp &= ~AC_EVCTRL_INVEI0; tmp |= value << AC_EVCTRL_INVEI0_Pos; ((Ac *)hw)->EVCTRL.reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_EVCTRL_INVEI0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg &= ~AC_EVCTRL_INVEI0; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_EVCTRL_INVEI0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg ^= AC_EVCTRL_INVEI0; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_EVCTRL_INVEI1_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg |= AC_EVCTRL_INVEI1; 
AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_EVCTRL_INVEI1_bit(const void *const hw) { uint16_t tmp; tmp = ((Ac *)hw)->EVCTRL.reg; tmp = (tmp & AC_EVCTRL_INVEI1) >> AC_EVCTRL_INVEI1_Pos; return (bool)tmp; } static inline void hri_ac_write_EVCTRL_INVEI1_bit(const void *const hw, bool value) { uint16_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->EVCTRL.reg; tmp &= ~AC_EVCTRL_INVEI1; tmp |= value << AC_EVCTRL_INVEI1_Pos; ((Ac *)hw)->EVCTRL.reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_EVCTRL_INVEI1_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg &= ~AC_EVCTRL_INVEI1; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_EVCTRL_INVEI1_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg ^= AC_EVCTRL_INVEI1; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_EVCTRL_reg(const void *const hw, hri_ac_evctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg |= mask; AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_evctrl_reg_t hri_ac_get_EVCTRL_reg(const void *const hw, hri_ac_evctrl_reg_t mask) { uint16_t tmp; tmp = ((Ac *)hw)->EVCTRL.reg; tmp &= mask; return tmp; } static inline void hri_ac_write_EVCTRL_reg(const void *const hw, hri_ac_evctrl_reg_t data) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg = data; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_EVCTRL_reg(const void *const hw, hri_ac_evctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg &= ~mask; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_EVCTRL_reg(const void *const hw, hri_ac_evctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->EVCTRL.reg ^= mask; AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_evctrl_reg_t hri_ac_read_EVCTRL_reg(const void *const hw) { return ((Ac *)hw)->EVCTRL.reg; } static inline void hri_ac_set_DBGCTRL_DBGRUN_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); 
((Ac *)hw)->DBGCTRL.reg |= AC_DBGCTRL_DBGRUN; AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_DBGCTRL_DBGRUN_bit(const void *const hw) { uint8_t tmp; tmp = ((Ac *)hw)->DBGCTRL.reg; tmp = (tmp & AC_DBGCTRL_DBGRUN) >> AC_DBGCTRL_DBGRUN_Pos; return (bool)tmp; } static inline void hri_ac_write_DBGCTRL_DBGRUN_bit(const void *const hw, bool value) { uint8_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->DBGCTRL.reg; tmp &= ~AC_DBGCTRL_DBGRUN; tmp |= value << AC_DBGCTRL_DBGRUN_Pos; ((Ac *)hw)->DBGCTRL.reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_DBGCTRL_DBGRUN_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->DBGCTRL.reg &= ~AC_DBGCTRL_DBGRUN; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_DBGCTRL_DBGRUN_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->DBGCTRL.reg ^= AC_DBGCTRL_DBGRUN; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_DBGCTRL_reg(const void *const hw, hri_ac_dbgctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->DBGCTRL.reg |= mask; AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_dbgctrl_reg_t hri_ac_get_DBGCTRL_reg(const void *const hw, hri_ac_dbgctrl_reg_t mask) { uint8_t tmp; tmp = ((Ac *)hw)->DBGCTRL.reg; tmp &= mask; return tmp; } static inline void hri_ac_write_DBGCTRL_reg(const void *const hw, hri_ac_dbgctrl_reg_t data) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->DBGCTRL.reg = data; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_DBGCTRL_reg(const void *const hw, hri_ac_dbgctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->DBGCTRL.reg &= ~mask; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_DBGCTRL_reg(const void *const hw, hri_ac_dbgctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->DBGCTRL.reg ^= mask; AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_dbgctrl_reg_t hri_ac_read_DBGCTRL_reg(const void *const hw) { return ((Ac *)hw)->DBGCTRL.reg; } static inline void 
hri_ac_set_WINCTRL_WEN0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg |= AC_WINCTRL_WEN0; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_WINCTRL_WEN0_bit(const void *const hw) { uint8_t tmp; tmp = ((Ac *)hw)->WINCTRL.reg; tmp = (tmp & AC_WINCTRL_WEN0) >> AC_WINCTRL_WEN0_Pos; return (bool)tmp; } static inline void hri_ac_write_WINCTRL_WEN0_bit(const void *const hw, bool value) { uint8_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->WINCTRL.reg; tmp &= ~AC_WINCTRL_WEN0; tmp |= value << AC_WINCTRL_WEN0_Pos; ((Ac *)hw)->WINCTRL.reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_WINCTRL_WEN0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg &= ~AC_WINCTRL_WEN0; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_WINCTRL_WEN0_bit(const void *const hw) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg ^= AC_WINCTRL_WEN0; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_WINCTRL_WINTSEL0_bf(const void *const hw, hri_ac_winctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg |= AC_WINCTRL_WINTSEL0(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_winctrl_reg_t hri_ac_get_WINCTRL_WINTSEL0_bf(const void *const hw, hri_ac_winctrl_reg_t mask) { uint8_t tmp; tmp = ((Ac *)hw)->WINCTRL.reg; tmp = (tmp & AC_WINCTRL_WINTSEL0(mask)) >> AC_WINCTRL_WINTSEL0_Pos; return tmp; } static inline void hri_ac_write_WINCTRL_WINTSEL0_bf(const void *const hw, hri_ac_winctrl_reg_t data) { uint8_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->WINCTRL.reg; tmp &= ~AC_WINCTRL_WINTSEL0_Msk; tmp |= AC_WINCTRL_WINTSEL0(data); ((Ac *)hw)->WINCTRL.reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); 
AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_WINCTRL_WINTSEL0_bf(const void *const hw, hri_ac_winctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg &= ~AC_WINCTRL_WINTSEL0(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_WINCTRL_WINTSEL0_bf(const void *const hw, hri_ac_winctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg ^= AC_WINCTRL_WINTSEL0(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_winctrl_reg_t hri_ac_read_WINCTRL_WINTSEL0_bf(const void *const hw) { uint8_t tmp; tmp = ((Ac *)hw)->WINCTRL.reg; tmp = (tmp & AC_WINCTRL_WINTSEL0_Msk) >> AC_WINCTRL_WINTSEL0_Pos; return tmp; } static inline void hri_ac_set_WINCTRL_reg(const void *const hw, hri_ac_winctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg |= mask; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_winctrl_reg_t hri_ac_get_WINCTRL_reg(const void *const hw, hri_ac_winctrl_reg_t mask) { uint8_t tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); tmp = ((Ac *)hw)->WINCTRL.reg; tmp &= mask; return tmp; } static inline void hri_ac_write_WINCTRL_reg(const void *const hw, hri_ac_winctrl_reg_t data) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg = data; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_WINCTRL_reg(const void *const hw, hri_ac_winctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg &= ~mask; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_WINCTRL_reg(const void *const hw, hri_ac_winctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->WINCTRL.reg ^= mask; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_winctrl_reg_t hri_ac_read_WINCTRL_reg(const void *const 
hw) { hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); return ((Ac *)hw)->WINCTRL.reg; } static inline void hri_ac_set_SCALER_VALUE_bf(const void *const hw, uint8_t index, hri_ac_scaler_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->SCALER[index].reg |= AC_SCALER_VALUE(mask); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_scaler_reg_t hri_ac_get_SCALER_VALUE_bf(const void *const hw, uint8_t index, hri_ac_scaler_reg_t mask) { uint8_t tmp; tmp = ((Ac *)hw)->SCALER[index].reg; tmp = (tmp & AC_SCALER_VALUE(mask)) >> AC_SCALER_VALUE_Pos; return tmp; } static inline void hri_ac_write_SCALER_VALUE_bf(const void *const hw, uint8_t index, hri_ac_scaler_reg_t data) { uint8_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->SCALER[index].reg; tmp &= ~AC_SCALER_VALUE_Msk; tmp |= AC_SCALER_VALUE(data); ((Ac *)hw)->SCALER[index].reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_SCALER_VALUE_bf(const void *const hw, uint8_t index, hri_ac_scaler_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->SCALER[index].reg &= ~AC_SCALER_VALUE(mask); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_SCALER_VALUE_bf(const void *const hw, uint8_t index, hri_ac_scaler_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->SCALER[index].reg ^= AC_SCALER_VALUE(mask); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_scaler_reg_t hri_ac_read_SCALER_VALUE_bf(const void *const hw, uint8_t index) { uint8_t tmp; tmp = ((Ac *)hw)->SCALER[index].reg; tmp = (tmp & AC_SCALER_VALUE_Msk) >> AC_SCALER_VALUE_Pos; return tmp; } static inline void hri_ac_set_SCALER_reg(const void *const hw, uint8_t index, hri_ac_scaler_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->SCALER[index].reg |= mask; AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_scaler_reg_t hri_ac_get_SCALER_reg(const void *const hw, uint8_t index, hri_ac_scaler_reg_t mask) { uint8_t tmp; tmp = ((Ac *)hw)->SCALER[index].reg; tmp &= mask; return tmp; } static inline void 
hri_ac_write_SCALER_reg(const void *const hw, uint8_t index, hri_ac_scaler_reg_t data) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->SCALER[index].reg = data; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_SCALER_reg(const void *const hw, uint8_t index, hri_ac_scaler_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->SCALER[index].reg &= ~mask; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_SCALER_reg(const void *const hw, uint8_t index, hri_ac_scaler_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->SCALER[index].reg ^= mask; AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_scaler_reg_t hri_ac_read_SCALER_reg(const void *const hw, uint8_t index) { return ((Ac *)hw)->SCALER[index].reg; } static inline void hri_ac_set_COMPCTRL_ENABLE_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_ENABLE; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_COMPCTRL_ENABLE_bit(const void *const hw, uint8_t index) { uint32_t tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_ENABLE) >> AC_COMPCTRL_ENABLE_Pos; return (bool)tmp; } static inline void hri_ac_write_COMPCTRL_ENABLE_bit(const void *const hw, uint8_t index, bool value) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_ENABLE; tmp |= value << AC_COMPCTRL_ENABLE_Pos; ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_ENABLE_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_ENABLE; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_ENABLE_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); 
((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_ENABLE; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_COMPCTRL_SINGLE_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_SINGLE; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_COMPCTRL_SINGLE_bit(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_SINGLE) >> AC_COMPCTRL_SINGLE_Pos; return (bool)tmp; } static inline void hri_ac_write_COMPCTRL_SINGLE_bit(const void *const hw, uint8_t index, bool value) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_SINGLE; tmp |= value << AC_COMPCTRL_SINGLE_Pos; ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_SINGLE_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_SINGLE; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_SINGLE_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_SINGLE; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_COMPCTRL_RUNSTDBY_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_RUNSTDBY; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_COMPCTRL_RUNSTDBY_bit(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_RUNSTDBY) >> AC_COMPCTRL_RUNSTDBY_Pos; return (bool)tmp; } static inline void 
hri_ac_write_COMPCTRL_RUNSTDBY_bit(const void *const hw, uint8_t index, bool value) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_RUNSTDBY; tmp |= value << AC_COMPCTRL_RUNSTDBY_Pos; ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_RUNSTDBY_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_RUNSTDBY; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_RUNSTDBY_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_RUNSTDBY; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_COMPCTRL_SWAP_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_SWAP; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_COMPCTRL_SWAP_bit(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_SWAP) >> AC_COMPCTRL_SWAP_Pos; return (bool)tmp; } static inline void hri_ac_write_COMPCTRL_SWAP_bit(const void *const hw, uint8_t index, bool value) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_SWAP; tmp |= value << AC_COMPCTRL_SWAP_Pos; ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_SWAP_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_SWAP; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_SWAP_bit(const void 
*const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_SWAP; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_COMPCTRL_HYSTEN_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_HYSTEN; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline bool hri_ac_get_COMPCTRL_HYSTEN_bit(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_HYSTEN) >> AC_COMPCTRL_HYSTEN_Pos; return (bool)tmp; } static inline void hri_ac_write_COMPCTRL_HYSTEN_bit(const void *const hw, uint8_t index, bool value) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_HYSTEN; tmp |= value << AC_COMPCTRL_HYSTEN_Pos; ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_HYSTEN_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_HYSTEN; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_HYSTEN_bit(const void *const hw, uint8_t index) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_HYSTEN; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_set_COMPCTRL_INTSEL_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_INTSEL(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_get_COMPCTRL_INTSEL_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = 
(tmp & AC_COMPCTRL_INTSEL(mask)) >> AC_COMPCTRL_INTSEL_Pos; return tmp; } static inline void hri_ac_write_COMPCTRL_INTSEL_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t data) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_INTSEL_Msk; tmp |= AC_COMPCTRL_INTSEL(data); ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_INTSEL_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_INTSEL(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_INTSEL_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_INTSEL(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_read_COMPCTRL_INTSEL_bf(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_INTSEL_Msk) >> AC_COMPCTRL_INTSEL_Pos; return tmp; } static inline void hri_ac_set_COMPCTRL_MUXNEG_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_MUXNEG(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_get_COMPCTRL_MUXNEG_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_MUXNEG(mask)) >> AC_COMPCTRL_MUXNEG_Pos; return tmp; } static inline void hri_ac_write_COMPCTRL_MUXNEG_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t data) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac 
*)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_MUXNEG_Msk; tmp |= AC_COMPCTRL_MUXNEG(data); ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_MUXNEG_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_MUXNEG(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_MUXNEG_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_MUXNEG(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_read_COMPCTRL_MUXNEG_bf(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_MUXNEG_Msk) >> AC_COMPCTRL_MUXNEG_Pos; return tmp; } static inline void hri_ac_set_COMPCTRL_MUXPOS_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_MUXPOS(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_get_COMPCTRL_MUXPOS_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_MUXPOS(mask)) >> AC_COMPCTRL_MUXPOS_Pos; return tmp; } static inline void hri_ac_write_COMPCTRL_MUXPOS_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t data) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_MUXPOS_Msk; tmp |= AC_COMPCTRL_MUXPOS(data); ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_MUXPOS_bf(const void 
*const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_MUXPOS(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_MUXPOS_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_MUXPOS(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_read_COMPCTRL_MUXPOS_bf(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_MUXPOS_Msk) >> AC_COMPCTRL_MUXPOS_Pos; return tmp; } static inline void hri_ac_set_COMPCTRL_SPEED_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_SPEED(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_get_COMPCTRL_SPEED_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_SPEED(mask)) >> AC_COMPCTRL_SPEED_Pos; return tmp; } static inline void hri_ac_write_COMPCTRL_SPEED_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t data) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_SPEED_Msk; tmp |= AC_COMPCTRL_SPEED(data); ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_SPEED_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_SPEED(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_SPEED_bf(const 
void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_SPEED(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_read_COMPCTRL_SPEED_bf(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_SPEED_Msk) >> AC_COMPCTRL_SPEED_Pos; return tmp; } static inline void hri_ac_set_COMPCTRL_HYST_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_HYST(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_get_COMPCTRL_HYST_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_HYST(mask)) >> AC_COMPCTRL_HYST_Pos; return tmp; } static inline void hri_ac_write_COMPCTRL_HYST_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t data) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_HYST_Msk; tmp |= AC_COMPCTRL_HYST(data); ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_HYST_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_HYST(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_HYST_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_HYST(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_read_COMPCTRL_HYST_bf(const 
void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_HYST_Msk) >> AC_COMPCTRL_HYST_Pos; return tmp; } static inline void hri_ac_set_COMPCTRL_FLEN_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_FLEN(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_get_COMPCTRL_FLEN_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_FLEN(mask)) >> AC_COMPCTRL_FLEN_Pos; return tmp; } static inline void hri_ac_write_COMPCTRL_FLEN_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t data) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_FLEN_Msk; tmp |= AC_COMPCTRL_FLEN(data); ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_FLEN_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_FLEN(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_FLEN_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_FLEN(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_read_COMPCTRL_FLEN_bf(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_FLEN_Msk) >> AC_COMPCTRL_FLEN_Pos; return tmp; } static inline void hri_ac_set_COMPCTRL_OUT_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { 
AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= AC_COMPCTRL_OUT(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_get_COMPCTRL_OUT_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_OUT(mask)) >> AC_COMPCTRL_OUT_Pos; return tmp; } static inline void hri_ac_write_COMPCTRL_OUT_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t data) { uint32_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= ~AC_COMPCTRL_OUT_Msk; tmp |= AC_COMPCTRL_OUT(data); ((Ac *)hw)->COMPCTRL[index].reg = tmp; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_OUT_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~AC_COMPCTRL_OUT(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_OUT_bf(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= AC_COMPCTRL_OUT(mask); hri_ac_wait_for_sync(hw, AC_SYNCBUSY_MASK); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_read_COMPCTRL_OUT_bf(const void *const hw, uint8_t index) { uint32_t tmp; tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp = (tmp & AC_COMPCTRL_OUT_Msk) >> AC_COMPCTRL_OUT_Pos; return tmp; } static inline void hri_ac_set_COMPCTRL_reg(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg |= mask; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_get_COMPCTRL_reg(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { uint32_t tmp; hri_ac_wait_for_sync(hw, 
AC_SYNCBUSY_ENABLE); tmp = ((Ac *)hw)->COMPCTRL[index].reg; tmp &= mask; return tmp; } static inline void hri_ac_write_COMPCTRL_reg(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t data) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg = data; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_COMPCTRL_reg(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg &= ~mask; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_COMPCTRL_reg(const void *const hw, uint8_t index, hri_ac_compctrl_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->COMPCTRL[index].reg ^= mask; hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_compctrl_reg_t hri_ac_read_COMPCTRL_reg(const void *const hw, uint8_t index) { hri_ac_wait_for_sync(hw, AC_SYNCBUSY_ENABLE); return ((Ac *)hw)->COMPCTRL[index].reg; } static inline void hri_ac_set_CALIB_BIAS0_bf(const void *const hw, hri_ac_calib_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CALIB.reg |= AC_CALIB_BIAS0(mask); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_calib_reg_t hri_ac_get_CALIB_BIAS0_bf(const void *const hw, hri_ac_calib_reg_t mask) { uint16_t tmp; tmp = ((Ac *)hw)->CALIB.reg; tmp = (tmp & AC_CALIB_BIAS0(mask)) >> AC_CALIB_BIAS0_Pos; return tmp; } static inline void hri_ac_write_CALIB_BIAS0_bf(const void *const hw, hri_ac_calib_reg_t data) { uint16_t tmp; AC_CRITICAL_SECTION_ENTER(); tmp = ((Ac *)hw)->CALIB.reg; tmp &= ~AC_CALIB_BIAS0_Msk; tmp |= AC_CALIB_BIAS0(data); ((Ac *)hw)->CALIB.reg = tmp; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_CALIB_BIAS0_bf(const void *const hw, hri_ac_calib_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CALIB.reg &= ~AC_CALIB_BIAS0(mask); AC_CRITICAL_SECTION_LEAVE(); } static inline void 
hri_ac_toggle_CALIB_BIAS0_bf(const void *const hw, hri_ac_calib_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CALIB.reg ^= AC_CALIB_BIAS0(mask); AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_calib_reg_t hri_ac_read_CALIB_BIAS0_bf(const void *const hw) { uint16_t tmp; tmp = ((Ac *)hw)->CALIB.reg; tmp = (tmp & AC_CALIB_BIAS0_Msk) >> AC_CALIB_BIAS0_Pos; return tmp; } static inline void hri_ac_set_CALIB_reg(const void *const hw, hri_ac_calib_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CALIB.reg |= mask; AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_calib_reg_t hri_ac_get_CALIB_reg(const void *const hw, hri_ac_calib_reg_t mask) { uint16_t tmp; tmp = ((Ac *)hw)->CALIB.reg; tmp &= mask; return tmp; } static inline void hri_ac_write_CALIB_reg(const void *const hw, hri_ac_calib_reg_t data) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CALIB.reg = data; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_clear_CALIB_reg(const void *const hw, hri_ac_calib_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CALIB.reg &= ~mask; AC_CRITICAL_SECTION_LEAVE(); } static inline void hri_ac_toggle_CALIB_reg(const void *const hw, hri_ac_calib_reg_t mask) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CALIB.reg ^= mask; AC_CRITICAL_SECTION_LEAVE(); } static inline hri_ac_calib_reg_t hri_ac_read_CALIB_reg(const void *const hw) { return ((Ac *)hw)->CALIB.reg; } static inline void hri_ac_write_CTRLB_reg(const void *const hw, hri_ac_ctrlb_reg_t data) { AC_CRITICAL_SECTION_ENTER(); ((Ac *)hw)->CTRLB.reg = data; AC_CRITICAL_SECTION_LEAVE(); } #ifdef __cplusplus } #endif #endif /* _HRI_AC_E54_H_INCLUDED */ #endif /* _SAME54_AC_COMPONENT_ */
{ "content_hash": "cba26fbe3fc2a1be39042de71e9ad61f", "timestamp": "", "source": "github", "line_count": 1805, "max_line_length": 118, "avg_line_length": 29.388919667590027, "alnum_prop": 0.6862028012894226, "repo_name": "nongxiaoming/rt-thread", "id": "588499e4561f4a0a9445522d421bee988c163db2", "size": "54344", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "bsp/microchip/same54/bsp/hri/hri_ac_e54.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "17092924" }, { "name": "Batchfile", "bytes": "188371" }, { "name": "C", "bytes": "941229651" }, { "name": "C++", "bytes": "754131" }, { "name": "CMake", "bytes": "250384" }, { "name": "CSS", "bytes": "138218" }, { "name": "GDB", "bytes": "11796" }, { "name": "HTML", "bytes": "4763477" }, { "name": "JavaScript", "bytes": "637" }, { "name": "LLVM", "bytes": "10344" }, { "name": "Lex", "bytes": "7026" }, { "name": "Logos", "bytes": "7238" }, { "name": "M4", "bytes": "17515" }, { "name": "Makefile", "bytes": "444829" }, { "name": "Pawn", "bytes": "1250" }, { "name": "Perl", "bytes": "16728" }, { "name": "Python", "bytes": "2665973" }, { "name": "RPC", "bytes": "14162" }, { "name": "Shell", "bytes": "416898" }, { "name": "Tcl", "bytes": "179" }, { "name": "Yacc", "bytes": "30555" } ], "symlink_target": "" }
class DepthConverter { public: DepthConverter(const float a, const float b) : _a(a), _b(b) {} void operator() (float& v) { // opengl uses fixed-point representation inside, range [0 1] v = _b / (_a - 2.0*v + 1.0); // v = _b / (_a - v); // if the range is [-1 1] } private: const float _a; const float _b; }; DepthBuffer::DepthBuffer(const CCamera* pCamera, PangaeaMeshData* pMesh): _pCamera(pCamera), _pMesh(pMesh) { _width = _pCamera->W(); _height = _pCamera->H(); _depthBufferMin.setZero(_width, _height); _depthBufferMax.setZero(_width, _height); // cout << "number of rows and cols" << endl; // cout << _depthbuffermin.rows() << endl; // cout << _depthbuffermin.cols() << endl; // cout << _depthbuffermin(0,0) << endl; float zMin, zMax; getTotalDepthRange(zMin, zMax); _zMin = zMin * 0.95f; _zMax = zMax * 1.05f; _depthBufferMin.fill(_zMin); _depthBufferMax.fill(_zMax); renderMeshGL(); } void DepthBuffer::getTotalDepthRange(float& zMin, float& zMax) const { zMax = - numeric_limits<float>::max(); zMin = numeric_limits<float>::max(); for(int I = _pMesh->numVertices, i = 0; i < I; ++i) { Vector3d Xc = _pCamera->worldToCamera(_pMesh->vertices[i]); float z = Xc[2]; if(z > zMax) zMax = z; if(z < zMin) zMin = z; } } void DepthBuffer::renderMeshGL() { uint W = _pCamera->W(); uint H = _pCamera->H(); #ifndef _MSC_VER ncv::GlXOffscreenContextPtr context(new ncv::GlXOffscreenContext(W, H)); context->makeActive(); #endif // Should be able to use openGL here.. glViewport(0,0,W,H); glDisable(GL_LIGHTING); glClearColor(0.0f, 0.0f, 0.0f, 1.0f); glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST); glMatrixMode(GL_PROJECTION); glLoadIdentity(); glLoadMatrix(_pCamera->getProjectionMatrix(_zMin, _zMax)); glMatrixMode(GL_MODELVIEW); glLoadIdentity(); glLoadMatrix(_pCamera->getModelViewMatrix()); glEnable(GL_DEPTH_TEST); glClearDepth(1.0f); glDepthFunc(GL_LEQUAL); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Draw mesh.. 
drawMeshGL(); glFlush(); double P[16]; glGetDoublev(GL_PROJECTION_MATRIX, P); float a = - Map<Matrix4d>(P)(2,2); float b = - Map<Matrix4d>(P)(2,3); MatrixXfRow depthBuffer(H,W); MatrixXfRow depthBufferReverse(H,W); // MatrixXfRow depthBufferReverseTest(H,W); glReadPixels(0, 0, W, H, GL_DEPTH_COMPONENT, GL_FLOAT, depthBuffer.data()); depthBufferReverse = depthBuffer.colwise().reverse(); for_each(depthBufferReverse.data(), depthBufferReverse.data() + W*H, DepthConverter(a,b)); _depthBufferMin = depthBufferReverse; // output depth data to txt file // std::ofstream depthBufferFile; // depthBufferFile.open("/cs/research/vision/humanis3/Rui/data/newsequence_3_19/photo_metric/depthBuffer.txt", // std::ofstream::trunc); // depthBufferFile << _depthBufferMin << endl; // // render again to get the maximum depth // glClearDepth(0.0f); // glDepthFunc(GL_GEQUAL); // glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // drawMeshGL(); // glFlush(); // glReadPixels(0, 0, W, H, GL_DEPTH_COMPONENT, GL_FLOAT, depthBuffer.data()); // depthBufferReverse = depthBuffer.colwise().reverse(); // for_each(depthBufferReverse.data(), depthBufferReverse.data() + W*H, DepthConverter(a,b)); // _depthBufferMax = depthBufferReverse; } void DepthBuffer::drawMeshGL() { int numFaces = _pMesh -> numFaces; glBegin(GL_TRIANGLES); for(int i = 0; i < numFaces; ++i) { for(int k = 0; k < 3; ++k) { int offset = _pMesh->facesVerticesInd[i][k]; glVertex3f(_pMesh->vertices[offset][0], _pMesh->vertices[offset][1], _pMesh->vertices[offset][2]); } } glEnd(); }
{ "content_hash": "7a64a95e98723c7c21bcc72e53eadb2c", "timestamp": "", "source": "github", "line_count": 155, "max_line_length": 114, "avg_line_length": 25.761290322580646, "alnum_prop": 0.6055597295266717, "repo_name": "cvfish/PangaeaTracking", "id": "4cc914a62d033cbfcb56cf74f100dbdaf5f540df", "size": "4115", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main_engine/rendering/DepthBuffer.cpp", "mode": "33261", "license": "mit", "language": [ { "name": "C++", "bytes": "672464" }, { "name": "Makefile", "bytes": "4756" }, { "name": "Shell", "bytes": "45" } ], "symlink_target": "" }
class CLocalCppChannel { public: static CLocalCppServer& GetLocalServer(); static CLocalCppLibrary& GetLibrary(); static CLocalChannelsPool& GetLocalChannelsPool(); };
{ "content_hash": "53949a9c17b5c2742b3b1eb7fe00c664", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 51, "avg_line_length": 24.285714285714285, "alnum_prop": 0.8117647058823529, "repo_name": "SoftFx/FDK", "id": "1e32bd57a5bab6f573f408a707c506aab9114ed7", "size": "273", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "FDK/LrpServer/LocalCppChannel.h", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "93" }, { "name": "Batchfile", "bytes": "11690" }, { "name": "C", "bytes": "1604148" }, { "name": "C#", "bytes": "3812620" }, { "name": "C++", "bytes": "4392467" }, { "name": "CSS", "bytes": "6667" }, { "name": "HTML", "bytes": "749307" }, { "name": "JavaScript", "bytes": "1582" }, { "name": "M4", "bytes": "9024" }, { "name": "MQL4", "bytes": "3591066" }, { "name": "Makefile", "bytes": "551003" }, { "name": "NSIS", "bytes": "44972" }, { "name": "PowerShell", "bytes": "99938" }, { "name": "R", "bytes": "46392" }, { "name": "Ruby", "bytes": "29626" }, { "name": "Shell", "bytes": "28661" }, { "name": "Smalltalk", "bytes": "130" }, { "name": "TeX", "bytes": "6234" }, { "name": "XSLT", "bytes": "8772" } ], "symlink_target": "" }
SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "3210d17505200f3496cce0461a5ae359", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "7f641c6e22ccc99b3e3fa74d3b1c3242e29878eb", "size": "185", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Aster hybridus/ Syn. Aster hybridus hybridus/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
module ActiveRecord module ConnectionAdapters module SQLServer module Type class Float < ActiveRecord::Type::Float def type :float end end end end end end
{ "content_hash": "e172a255b91a786e0260a7dcb438c8e4", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 47, "avg_line_length": 15.266666666666667, "alnum_prop": 0.5589519650655022, "repo_name": "pavels/activerecord-sqlserver-adapter", "id": "f99353f65bd82d2d6b103e0bc00d93d8a6f5dba9", "size": "229", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "lib/active_record/connection_adapters/sqlserver/type/float.rb", "mode": "33188", "license": "mit", "language": [ { "name": "PowerShell", "bytes": "1000" }, { "name": "Ruby", "bytes": "230174" } ], "symlink_target": "" }
package com.sun.jini.test.impl.locatordiscovery; import com.sun.jini.config.Config; import com.sun.jini.qa.harness.QAConfig; import com.sun.jini.qa.harness.QATest; import java.net.DatagramPacket; import java.net.InetAddress; import java.net.MulticastSocket; import java.net.ServerSocket; import java.util.logging.Level; import net.jini.config.Configuration; import net.jini.config.ConfigurationProvider; import net.jini.core.discovery.LookupLocator; import net.jini.discovery.Constants; import net.jini.discovery.LookupDiscovery; import net.jini.discovery.LookupLocatorDiscovery; /** * Tests the <code>initialUnicastDelayRange</code> config entry for * <code>LookupLocatorDiscovery</code>. The test starts up a number of * <code>LookupLocatorDiscovery</code> instances and checks if the average * delay in initiating unicast discovery requests is in the range * <code>0.25 * initialUnicastDelayRange < averageDelay < * .75 * initialUnicastDelayRange</code>. * */ public class UnicastDelay extends QATest { private static final int DISCOVERYPORT = Constants.discoveryPort; private Configuration config; private static final int NUMLD = 100; private Throwable failure = null; private boolean done = false; private long acceptTime[] = new long[NUMLD]; LookupLocatorDiscovery[] ldArray = new LookupLocatorDiscovery[NUMLD]; private class AcceptThread extends Thread { public AcceptThread() { super("unicast request"); setDaemon(true); } public void run() { try { ServerSocket s = new ServerSocket(DISCOVERYPORT); logger.log(Level.FINE, "going to accept"); for (int i = 0; i < NUMLD; i++) { s.accept(); acceptTime[i] = System.currentTimeMillis(); logger.log(Level.FINEST, "Accepted unicast request " + i); } } catch (Throwable t) { failure = t; } finally { synchronized (this) { done = true; // Wakeup main thread - we're done this.notify(); } } } } public void setup(QAConfig qaConfig) throws Exception { super.setup(qaConfig); config = qaConfig.getConfig().getConfiguration(); } public void run() throws 
Exception { long delay = Config.getLongEntry(config, "net.jini.discovery.LookupLocatorDiscovery", "initialUnicastDelayRange", 10000, 0, Long.MAX_VALUE); long expectedDelay = delay / 2; logger.log(Level.FINE, "Expected average delay " + expectedDelay); long spread = expectedDelay / 2; long lBound = expectedDelay - spread; long uBound = expectedDelay + spread; if ((lBound < 0) || (uBound < 0)) { throw new IllegalArgumentException("Invalid delay " + delay); } if (NUMLD < 2) { throw new IllegalArgumentException("Invalid number of LDs " + NUMLD); } Thread t = new AcceptThread(); t.start(); // Wait for AcceptThread to set up its socket. Thread.sleep(1000); long startTime = System.currentTimeMillis(); for (int i = 0; i < NUMLD; i++) { ldArray[i] = new LookupLocatorDiscovery( new LookupLocator[] { new LookupLocator( InetAddress.getLocalHost().getHostName(), DISCOVERYPORT) }, config); } synchronized(t) { t.wait(delay * 3 / 2); } if (failure != null) { throw new RuntimeException("Test failed ", failure); } if (!done) { throw new RuntimeException("All " + NUMLD + " unicast requests not received"); } float averageDelay = 0f; for (int i = 0; i < NUMLD; i++) { long timei = acceptTime[i] - startTime; averageDelay = averageDelay + (timei / NUMLD); } logger.log(Level.FINE, "Average delay " + (long) averageDelay); if ((averageDelay < lBound) || (averageDelay > uBound)) { throw new RuntimeException("Elapsed time out of expected range " + averageDelay + " lower bound " + lBound + " upper bound " + uBound); } } public void tearDown() { for (int i = 0; i < NUMLD; i++) { ldArray[i].terminate(); } } }
{ "content_hash": "57dc3a9bdef5c20f04d35fc0c1c4d63d", "timestamp": "", "source": "github", "line_count": 138, "max_line_length": 74, "avg_line_length": 28.63768115942029, "alnum_prop": 0.6773785425101214, "repo_name": "cdegroot/river", "id": "bcc9619102938857394682cf467c587210984b85", "size": "4758", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "qa/src/com/sun/jini/test/impl/locatordiscovery/UnicastDelay.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "2047" }, { "name": "Groovy", "bytes": "16876" }, { "name": "Java", "bytes": "22265383" }, { "name": "Shell", "bytes": "117083" } ], "symlink_target": "" }
import boto from boto.connection import AWSQueryConnection from boto.regioninfo import RegionInfo from boto.exception import JSONResponseError from boto.cloudsearch2 import exceptions from boto.compat import json class CloudSearchConnection(AWSQueryConnection): """ Amazon CloudSearch Configuration Service You use the Amazon CloudSearch configuration service to create, configure, and manage search domains. Configuration service requests are submitted using the AWS Query protocol. AWS Query requests are HTTP or HTTPS requests submitted via HTTP GET or POST with a query parameter named Action. The endpoint for configuration service requests is region- specific: cloudsearch. region .amazonaws.com. For example, cloudsearch.us-east-1.amazonaws.com. For a current list of supported regions and endpoints, see `Regions and Endpoints`_. """ APIVersion = "2013-01-01" DefaultRegionName = "us-east-1" DefaultRegionEndpoint = "cloudsearch.us-east-1.amazonaws.com" ResponseError = JSONResponseError _faults = { "InvalidTypeException": exceptions.InvalidTypeException, "LimitExceededException": exceptions.LimitExceededException, "InternalException": exceptions.InternalException, "DisabledOperationException": exceptions.DisabledOperationException, "ResourceNotFoundException": exceptions.ResourceNotFoundException, "BaseException": exceptions.BaseException, } def __init__(self, **kwargs): region = kwargs.pop('region', None) if not region: region = RegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint) if 'host' not in kwargs or kwargs['host'] is None: kwargs['host'] = region.endpoint super(CloudSearchConnection, self).__init__(**kwargs) self.region = region def _required_auth_capability(self): return ['hmac-v4'] def build_suggesters(self, domain_name): """ Indexes the search suggestions. :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. 
Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). """ params = {'DomainName': domain_name, } return self._make_request( action='BuildSuggesters', verb='POST', path='/', params=params) def create_domain(self, domain_name): """ Creates a new search domain. For more information, see `Creating a Search Domain`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A name for the domain you are creating. Allowed characters are a-z (lower-case letters), 0-9, and hyphen (-). Domain names must start with a letter or number and be at least 3 and no more than 28 characters long. """ params = {'DomainName': domain_name, } return self._make_request( action='CreateDomain', verb='POST', path='/', params=params) def define_analysis_scheme(self, domain_name, analysis_scheme): """ Configures an analysis scheme for a domain. An analysis scheme defines language-specific text processing options for a `text` field. For more information, see `Configuring Analysis Schemes`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type analysis_scheme: dict :param analysis_scheme: Configuration information for an analysis scheme. Each analysis scheme has a unique name and specifies the language of the text to be processed. The following options can be configured for an analysis scheme: `Synonyms`, `Stopwords`, `StemmingDictionary`, and `AlgorithmicStemming`. 
""" params = {'DomainName': domain_name, } self.build_complex_param(params, 'AnalysisScheme', analysis_scheme) return self._make_request( action='DefineAnalysisScheme', verb='POST', path='/', params=params) def define_expression(self, domain_name, expression): """ Configures an `Expression` for the search domain. Used to create new expressions and modify existing ones. If the expression exists, the new configuration replaces the old one. For more information, see `Configuring Expressions`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type expression: dict :param expression: A named expression that can be evaluated at search time. Can be used for sorting and filtering search results and constructing other expressions. """ params = {'DomainName': domain_name, } self.build_complex_param(params, 'Expression', expression) return self._make_request( action='DefineExpression', verb='POST', path='/', params=params) def define_index_field(self, domain_name, index_field): """ Configures an `IndexField` for the search domain. Used to create new fields and modify existing ones. You must specify the name of the domain you are configuring and an index field configuration. The index field configuration specifies a unique name, the index field type, and the options you want to configure for the field. The options you can specify depend on the `IndexFieldType`. If the field exists, the new configuration replaces the old one. For more information, see `Configuring Index Fields`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. 
Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type index_field: dict :param index_field: The index field and field options you want to configure. """ params = {'DomainName': domain_name, } self.build_complex_param(params, 'IndexField', index_field) return self._make_request( action='DefineIndexField', verb='POST', path='/', params=params) def define_suggester(self, domain_name, suggester): """ Configures a suggester for a domain. A suggester enables you to display possible matches before users finish typing their queries. When you configure a suggester, you must specify the name of the text field you want to search for possible matches and a unique name for the suggester. For more information, see `Getting Search Suggestions`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type suggester: dict :param suggester: Configuration information for a search suggester. Each suggester has a unique name and specifies the text field you want to use for suggestions. The following options can be configured for a suggester: `FuzzyMatching`, `SortExpression`. """ params = {'DomainName': domain_name, } self.build_complex_param(params, 'Suggester', suggester) return self._make_request( action='DefineSuggester', verb='POST', path='/', params=params) def delete_analysis_scheme(self, domain_name, analysis_scheme_name): """ Deletes an analysis scheme. For more information, see `Configuring Analysis Schemes`_ in the Amazon CloudSearch Developer Guide . 
:type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type analysis_scheme_name: string :param analysis_scheme_name: The name of the analysis scheme you want to delete. """ params = { 'DomainName': domain_name, 'AnalysisSchemeName': analysis_scheme_name, } return self._make_request( action='DeleteAnalysisScheme', verb='POST', path='/', params=params) def delete_domain(self, domain_name): """ Permanently deletes a search domain and all of its data. Once a domain has been deleted, it cannot be recovered. For more information, see `Deleting a Search Domain`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: The name of the domain you want to permanently delete. """ params = {'DomainName': domain_name, } return self._make_request( action='DeleteDomain', verb='POST', path='/', params=params) def delete_expression(self, domain_name, expression_name): """ Removes an `Expression` from the search domain. For more information, see `Configuring Expressions`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type expression_name: string :param expression_name: The name of the `Expression` to delete. """ params = { 'DomainName': domain_name, 'ExpressionName': expression_name, } return self._make_request( action='DeleteExpression', verb='POST', path='/', params=params) def delete_index_field(self, domain_name, index_field_name): """ Removes an `IndexField` from the search domain. 
For more information, see `Configuring Index Fields`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type index_field_name: string :param index_field_name: The name of the index field your want to remove from the domain's indexing options. """ params = { 'DomainName': domain_name, 'IndexFieldName': index_field_name, } return self._make_request( action='DeleteIndexField', verb='POST', path='/', params=params) def delete_suggester(self, domain_name, suggester_name): """ Deletes a suggester. For more information, see `Getting Search Suggestions`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type suggester_name: string :param suggester_name: Specifies the name of the suggester you want to delete. """ params = { 'DomainName': domain_name, 'SuggesterName': suggester_name, } return self._make_request( action='DeleteSuggester', verb='POST', path='/', params=params) def describe_analysis_schemes(self, domain_name, analysis_scheme_names=None, deployed=None): """ Gets the analysis schemes configured for a domain. An analysis scheme defines language-specific text processing options for a `text` field. Can be limited to specific analysis schemes by name. By default, shows all analysis schemes and includes any pending changes to the configuration. Set the `Deployed` option to `True` to show the active configuration and exclude pending changes. 
For more information, see `Configuring Analysis Schemes`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: The name of the domain you want to describe. :type analysis_scheme_names: list :param analysis_scheme_names: The analysis schemes you want to describe. :type deployed: boolean :param deployed: Whether to display the deployed configuration ( `True`) or include any pending changes ( `False`). Defaults to `False`. """ params = {'DomainName': domain_name, } if analysis_scheme_names is not None: self.build_list_params(params, analysis_scheme_names, 'AnalysisSchemeNames.member') if deployed is not None: params['Deployed'] = str( deployed).lower() return self._make_request( action='DescribeAnalysisSchemes', verb='POST', path='/', params=params) def describe_availability_options(self, domain_name, deployed=None): """ Gets the availability options configured for a domain. By default, shows the configuration with any pending changes. Set the `Deployed` option to `True` to show the active configuration and exclude pending changes. For more information, see `Configuring Availability Options`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: The name of the domain you want to describe. :type deployed: boolean :param deployed: Whether to display the deployed configuration ( `True`) or include any pending changes ( `False`). Defaults to `False`. """ params = {'DomainName': domain_name, } if deployed is not None: params['Deployed'] = str( deployed).lower() return self._make_request( action='DescribeAvailabilityOptions', verb='POST', path='/', params=params) def describe_domains(self, domain_names=None): """ Gets information about the search domains owned by this account. Can be limited to specific domains. Shows all domains by default. For more information, see `Getting Information about a Search Domain`_ in the Amazon CloudSearch Developer Guide . 
:type domain_names: list :param domain_names: The names of the domains you want to include in the response. """ params = {} if domain_names is not None: self.build_list_params(params, domain_names, 'DomainNames.member') return self._make_request( action='DescribeDomains', verb='POST', path='/', params=params) def describe_expressions(self, domain_name, expression_names=None, deployed=None): """ Gets the expressions configured for the search domain. Can be limited to specific expressions by name. By default, shows all expressions and includes any pending changes to the configuration. Set the `Deployed` option to `True` to show the active configuration and exclude pending changes. For more information, see `Configuring Expressions`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: The name of the domain you want to describe. :type expression_names: list :param expression_names: Limits the `DescribeExpressions` response to the specified expressions. If not specified, all expressions are shown. :type deployed: boolean :param deployed: Whether to display the deployed configuration ( `True`) or include any pending changes ( `False`). Defaults to `False`. """ params = {'DomainName': domain_name, } if expression_names is not None: self.build_list_params(params, expression_names, 'ExpressionNames.member') if deployed is not None: params['Deployed'] = str( deployed).lower() return self._make_request( action='DescribeExpressions', verb='POST', path='/', params=params) def describe_index_fields(self, domain_name, field_names=None, deployed=None): """ Gets information about the index fields configured for the search domain. Can be limited to specific fields by name. By default, shows all fields and includes any pending changes to the configuration. Set the `Deployed` option to `True` to show the active configuration and exclude pending changes. 
For more information, see `Getting Domain Information`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: The name of the domain you want to describe. :type field_names: list :param field_names: A list of the index fields you want to describe. If not specified, information is returned for all configured index fields. :type deployed: boolean :param deployed: Whether to display the deployed configuration ( `True`) or include any pending changes ( `False`). Defaults to `False`. """ params = {'DomainName': domain_name, } if field_names is not None: self.build_list_params(params, field_names, 'FieldNames.member') if deployed is not None: params['Deployed'] = str( deployed).lower() return self._make_request( action='DescribeIndexFields', verb='POST', path='/', params=params) def describe_scaling_parameters(self, domain_name): """ Gets the scaling parameters configured for a domain. A domain's scaling parameters specify the desired search instance type and replication count. For more information, see `Configuring Scaling Options`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). """ params = {'DomainName': domain_name, } return self._make_request( action='DescribeScalingParameters', verb='POST', path='/', params=params) def describe_service_access_policies(self, domain_name, deployed=None): """ Gets information about the access policies that control access to the domain's document and search endpoints. By default, shows the configuration with any pending changes. Set the `Deployed` option to `True` to show the active configuration and exclude pending changes. 
For more information, see `Configuring Access for a Search Domain`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: The name of the domain you want to describe. :type deployed: boolean :param deployed: Whether to display the deployed configuration ( `True`) or include any pending changes ( `False`). Defaults to `False`. """ params = {'DomainName': domain_name, } if deployed is not None: params['Deployed'] = str( deployed).lower() return self._make_request( action='DescribeServiceAccessPolicies', verb='POST', path='/', params=params) def describe_suggesters(self, domain_name, suggester_names=None, deployed=None): """ Gets the suggesters configured for a domain. A suggester enables you to display possible matches before users finish typing their queries. Can be limited to specific suggesters by name. By default, shows all suggesters and includes any pending changes to the configuration. Set the `Deployed` option to `True` to show the active configuration and exclude pending changes. For more information, see `Getting Search Suggestions`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: The name of the domain you want to describe. :type suggester_names: list :param suggester_names: The suggesters you want to describe. :type deployed: boolean :param deployed: Whether to display the deployed configuration ( `True`) or include any pending changes ( `False`). Defaults to `False`. """ params = {'DomainName': domain_name, } if suggester_names is not None: self.build_list_params(params, suggester_names, 'SuggesterNames.member') if deployed is not None: params['Deployed'] = str( deployed).lower() return self._make_request( action='DescribeSuggesters', verb='POST', path='/', params=params) def index_documents(self, domain_name): """ Tells the search domain to start indexing its documents using the latest indexing options. 
This operation must be invoked to activate options whose OptionStatus is `RequiresIndexDocuments`. :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). """ params = {'DomainName': domain_name, } return self._make_request( action='IndexDocuments', verb='POST', path='/', params=params) def list_domain_names(self): """ Lists all search domains owned by an account. """ params = {} return self._make_request( action='ListDomainNames', verb='POST', path='/', params=params) def update_availability_options(self, domain_name, multi_az): """ Configures the availability options for a domain. Enabling the Multi-AZ option expands an Amazon CloudSearch domain to an additional Availability Zone in the same Region to increase fault tolerance in the event of a service disruption. Changes to the Multi-AZ option can take about half an hour to become active. For more information, see `Configuring Availability Options`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type multi_az: boolean :param multi_az: You expand an existing search domain to a second Availability Zone by setting the Multi-AZ option to true. Similarly, you can turn off the Multi-AZ option to downgrade the domain to a single Availability Zone by setting the Multi-AZ option to `False`. 
""" params = {'DomainName': domain_name, 'MultiAZ': multi_az, } return self._make_request( action='UpdateAvailabilityOptions', verb='POST', path='/', params=params) def update_scaling_parameters(self, domain_name, scaling_parameters): """ Configures scaling parameters for a domain. A domain's scaling parameters specify the desired search instance type and replication count. Amazon CloudSearch will still automatically scale your domain based on the volume of data and traffic, but not below the desired instance type and replication count. If the Multi-AZ option is enabled, these values control the resources used per Availability Zone. For more information, see `Configuring Scaling Options`_ in the Amazon CloudSearch Developer Guide . :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). :type scaling_parameters: dict :param scaling_parameters: The desired instance type and desired number of replicas of each index partition. """ params = {'DomainName': domain_name, } self.build_complex_param(params, 'ScalingParameters', scaling_parameters) return self._make_request( action='UpdateScalingParameters', verb='POST', path='/', params=params) def update_service_access_policies(self, domain_name, access_policies): """ Configures the access rules that control access to the domain's document and search endpoints. For more information, see ` Configuring Access for an Amazon CloudSearch Domain`_. :type domain_name: string :param domain_name: A string that represents the name of a domain. Domain names are unique across the domains owned by an account within an AWS region. Domain names start with a letter or number and can contain the following characters: a-z (lowercase), 0-9, and - (hyphen). 
:type access_policies: string :param access_policies: The access rules you want to configure. These rules replace any existing rules. """ params = { 'DomainName': domain_name, 'AccessPolicies': access_policies, } return self._make_request( action='UpdateServiceAccessPolicies', verb='POST', path='/', params=params) def build_complex_param(self, params, label, value): """Serialize a structure. For example:: param_type = 'structure' label = 'IndexField' value = {'IndexFieldName': 'a', 'IntOptions': {'DefaultValue': 5}} would result in the params dict being updated with these params:: IndexField.IndexFieldName = a IndexField.IntOptions.DefaultValue = 5 :type params: dict :param params: The params dict. The complex list params will be added to this dict. :type label: str :param label: String label for param key :type value: any :param value: The value to serialize """ for k, v in value.items(): if isinstance(v, dict): for k2, v2 in v.items(): self.build_complex_param(params, label + '.' + k, v) elif isinstance(v, bool): params['%s.%s' % (label, k)] = v and 'true' or 'false' else: params['%s.%s' % (label, k)] = v def _make_request(self, action, verb, path, params): params['ContentType'] = 'JSON' response = self.make_request(action=action, verb='POST', path='/', params=params) body = response.read().decode('utf-8') boto.log.debug(body) if response.status == 200: return json.loads(body) else: json_body = json.loads(body) fault_name = json_body.get('Error', {}).get('Code', None) exception_class = self._faults.get(fault_name, self.ResponseError) raise exception_class(response.status, response.reason, body=json_body)
{ "content_hash": "a56c42dfc8fb4bf59f566e29f6eb48fd", "timestamp": "", "source": "github", "line_count": 757, "max_line_length": 79, "avg_line_length": 41.093791281373846, "alnum_prop": 0.611996913977112, "repo_name": "kyleknap/boto", "id": "fdc9d4c625026531e6e2f682971895817f6e3c0b", "size": "32232", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "boto/cloudsearch2/layer1.py", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
namespace Envoy { namespace Quic { // A factory interface to provide quic::ProofSource. class EnvoyQuicProofSourceFactoryInterface : public Config::TypedFactory { public: ~EnvoyQuicProofSourceFactoryInterface() override = default; std::string category() const override { return "envoy.quic.proof_source"; } virtual std::unique_ptr<quic::ProofSource> createQuicProofSource(Network::Socket& listen_socket, Network::FilterChainManager& filter_chain_manager, Server::ListenerStats& listener_stats) PURE; }; } // namespace Quic } // namespace Envoy
{ "content_hash": "81f090242f7a37b20563eae0dd384ac6", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 77, "avg_line_length": 33.55555555555556, "alnum_prop": 0.7152317880794702, "repo_name": "lyft/envoy", "id": "92df7df9fd0db168390cccdf9e308b16845ba43c", "size": "826", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "source/common/quic/envoy_quic_proof_source_factory_interface.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "439" }, { "name": "C", "bytes": "9840" }, { "name": "C++", "bytes": "30180292" }, { "name": "Dockerfile", "bytes": "891" }, { "name": "Emacs Lisp", "bytes": "966" }, { "name": "Go", "bytes": "558" }, { "name": "Jinja", "bytes": "46306" }, { "name": "Makefile", "bytes": "303" }, { "name": "PureBasic", "bytes": "472" }, { "name": "Python", "bytes": "659418" }, { "name": "Rust", "bytes": "38417" }, { "name": "Shell", "bytes": "177423" }, { "name": "Starlark", "bytes": "1743784" }, { "name": "Thrift", "bytes": "748" } ], "symlink_target": "" }
// .NAME vtkRepresentationPainter - painter that handles representation. // .SECTION Description // This painter merely defines the interface. // Subclasses will change the polygon rendering mode dependent on // the graphics library. #ifndef __vtkRepresentationPainter_h #define __vtkRepresentationPainter_h #include "vtkRenderingCoreModule.h" // For export macro #include "vtkPolyDataPainter.h" class VTKRENDERINGCORE_EXPORT vtkRepresentationPainter : public vtkPolyDataPainter { public: static vtkRepresentationPainter* New(); vtkTypeMacro(vtkRepresentationPainter, vtkPolyDataPainter); void PrintSelf(ostream& os, vtkIndent indent); protected: vtkRepresentationPainter(); ~vtkRepresentationPainter(); private: vtkRepresentationPainter(const vtkRepresentationPainter&); // Not implemented. void operator=(const vtkRepresentationPainter&); // Not implemented. }; #endif
{ "content_hash": "f2c1661961897439e4dbc9a71072e2d4", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 82, "avg_line_length": 29.733333333333334, "alnum_prop": 0.8004484304932735, "repo_name": "biddisco/VTK", "id": "0bcbd7852f7229bed407672b535b14ac6083d25d", "size": "1486", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "Rendering/Core/vtkRepresentationPainter.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "37444" }, { "name": "C", "bytes": "45542302" }, { "name": "C++", "bytes": "60467840" }, { "name": "CSS", "bytes": "157961" }, { "name": "Cuda", "bytes": "28721" }, { "name": "GAP", "bytes": "14120" }, { "name": "IDL", "bytes": "4406" }, { "name": "Java", "bytes": "184678" }, { "name": "JavaScript", "bytes": "978324" }, { "name": "Objective-C", "bytes": "121232" }, { "name": "Objective-C++", "bytes": "101052" }, { "name": "Pascal", "bytes": "3255" }, { "name": "Perl", "bytes": "177007" }, { "name": "Python", "bytes": "13262355" }, { "name": "Shell", "bytes": "41929" }, { "name": "Tcl", "bytes": "1894036" } ], "symlink_target": "" }
/** * Created by michal.wadas on 2015-01-28. */ (function (root, factory) { if (typeof define === 'function' && define.amd) { // AMD. Register as an anonymous module. define([], factory); } else { // Browser globals return root.domPlusPlus = factory(); } }(this, function () { var global; try { global = Function('return this')(); } catch(e) { if (typeof window !== undefined) global = window; } var domUtils = {}; var has = ({}).hasOwnProperty; var isValidDefineProperty = !!Object.defineProperty && (function(q) { try { Object.defineProperty(q, 'wow', { value : 3 }); } catch (e) {} return q.wow === 3; }({})); function defineProperty(obj, key, value) { if (isValidDefineProperty) Object.defineProperty(obj,key, {value: value, enumerable: false}); else obj[key]=value; } function extendOrThrow(obj, key, value) { if (obj.key) throw new TypeError('It\'s impossible to extend this object.'); defineProperty(obj, key, value); } domUtils.$extendNatives = function() { if (typeof Element !== 'undefined') { extendOrThrow(Element.prototype, 'setAttributes', function() { var subArgs = [this]; for(var i = 0; i < arguments.length; i++) { subArgs.push(arguments[i]); } return domUtils.setAttributes.apply(null, subArgs); }); extendOrThrow(Element.prototype, 'getComments', function() { var subArgs = [this]; for(var i = 0; i < arguments.length; i++) { subArgs.push(arguments[i]); } return domUtils.getComments.apply(null, subArgs); }) } if (typeof document !== 'undefined') { extendOrThrow(document, 'createFragmentFromHTML', function() { var subArgs = [this]; for(var i = 0; i < arguments.length; i++) { subArgs.push(arguments[i]); } return domUtils.createFragmentFromHTML.apply(null, subArgs); }); } if (typeof Node !== 'undefined') { extendOrThrow(Node.prototype, 'appendChilds', function() { var subArgs = [this]; for(var i = 0; i < arguments.length; i++) { subArgs.push(arguments[i]); } return domUtils.appendChilds.apply(null, subArgs); }); extendOrThrow(Node.prototype, 'operateOnDetached', 
function() { var subArgs = [this]; for(var i = 0; i < arguments.length; i++) { subArgs.push(arguments[i]); } return domUtils.operateOnDetached.apply(null, subArgs); }); extendOrThrow(Node.prototype, 'operateOnDetachedAsync', function() { var subArgs = [this]; for(var i = 0; i < arguments.length; i++) { subArgs.push(arguments[i]); } return domUtils.operateOnDetachedAsync.apply(null, subArgs); }); } }; /** * Set attributes based on given object * Batch version of native Element::setAttribute * @method * @param {Element} element - Element to be modified * @param {string} params - Object with enumerable properties */ domUtils.setAttributes = function setAttributes(element, params) { params = typeof params === 'object' ? params : {}; for (key in params) { if (has.call(params,key)) element.setAttribute(key, params[key]); } return element; }; /** * Append multiple childs to element * Batch version of native Element::appendChild * @method * @param {Element} element - Element to be modified * @param {...Node} childs - List of Nodes to be appended in order */ domUtils.appendChilds = function appendChilds(element) { var childs = []; for(var i = 1; i < arguments.length; i++) { childs.push(arguments[i]); } var fragment = document.createDocumentFragment(); for(var i = 0; i < childs.length; i++) { fragment.appendChild(childs[i]); } element.appendChild(fragment); }; /** * Parses HTML and creates DocumentFragment from it's content * @method * @param {Document} document - Document to be used as source * @param {string} html - String containing HTML */ domUtils.createFragmentFromHTML = function parseHTML(document, html) { var q = document.createElement('div'); q.insertAdjacentHTML('afterbegin', html); var fragment = document.createDocumentFragment(); domUtils.appendChilds.apply(null, [fragment].concat([].slice.call(q.children))); return fragment; }; /** * Detaches element from DOM, performs given operation, then inserts it in the same position * @method * @param {element} element - 
Element to be manipulated * @param {function} func - function to be called on element (as this) * @params {...any} subArgs - arguments provided to function */ domUtils.operateOnDetached = function operateOnDetached(element, func) { var subArgs = []; for(var i = 2; i < arguments.length; i++) { subArgs.push(arguments[i]); } var parent = element.parentNode; var nextSibling = element.nextSibling; if (!parent) throw new TypeError('.parentNode doesn\'t exist'); var reattachMethod = nextSibling ? parent.appendChild : parent.insertBefore; parent.removeChild(element); func.apply(element, subArgs); reattachMethod.call(parent, element, nextSibling); return element; }; domUtils.operateOnDetachedAsync = function operateOnDetachedAsync(element, func) { var subArgs = []; for(var i = 2; i < arguments.length; i++) { subArgs.push(arguments[i]); } var parent = element.parentNode; var nextSibling = element.nextSibling; if (!parent) throw new TypeError('.parentNode doesn\'t exist'); var reattachMethod = nextSibling ? parent.appendChild : parent.insertBefore; parent.removeChild(element); function done() { reattachMethod.call(parent, element, nextSibling); } func.apply(element, [done].concat(subArgs)); return element; }; function nodeIteratorToArray(nodeIterator) { var ret = [],r; while(r = nodeIterator.next() && ret.push(r)); return ret; } /** * Gets comment nodes inside of root. * @method * @param {Element} element - Element to be manipulated * @param {function} func - function to be called on element (as this) * @params {...any} subArgs - arguments provided to function */ domUtils.getComments = function getComments(root) { var document = root.ownerDocument === null ? root : root.ownerDocument; return nodeIteratorToArray(document.createNodeIterator(root, 128)); } return domUtils; }));
{ "content_hash": "18977d835cd983fe8af1a5e8b1377f49", "timestamp": "", "source": "github", "line_count": 204, "max_line_length": 96, "avg_line_length": 36.48039215686274, "alnum_prop": 0.5601988712711636, "repo_name": "P0lip/dom-plus-plus", "id": "57843da7003459c1afb8e657c9ca14b6f2177177", "size": "7442", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "7442" } ], "symlink_target": "" }
USE tool; IF OBJECT_ID('BlankToZero','FN') IS NOT NULL DROP FUNCTION BlankToZero; CREATE FUNCTION dbo.BlankToZero(@Number VARCHAR(255)) RETURNS VARCHAR(255) AS BEGIN SET @Number = REPLACE(@Number,',','') IF @Number = '' SET @Number = '0' RETURN @Number END;
{ "content_hash": "cd7975281adc060f9b9fecd5f558dbc3", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 53, "avg_line_length": 21.23076923076923, "alnum_prop": 0.6702898550724637, "repo_name": "chinchon/sql-formatting-tools", "id": "f452f454791847435853f7083a7b5305af6be59e", "size": "276", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "FN_BlankToZero.sql", "mode": "33188", "license": "mit", "language": [ { "name": "PLSQL", "bytes": "1981" }, { "name": "SQLPL", "bytes": "4122" } ], "symlink_target": "" }
package process import ( "github.com/stretchr/testify/assert" "testing" ) var p = Process{ Name: "Example", Priority: 99, ProdCommand: `sh -c "echo this is production? $HELLO"`, ProdEnviron: NewEnviron("HELLO=PRODUCTION"), TestCommand: `sh -c "echo this is testing? $HELLO"`, TestEnviron: NewEnviron("HELLO=TESTING"), } func TestSmokeProcessRunProd(t *testing.T) { cmd, err := p.Cmd(true) assert.NoError(t, err, "got an error on cmd") out, err := cmd.Output() assert.NoError(t, err, "got an error on run") assert.Equal(t, "this is production? PRODUCTION\n", string(out), "wrong output") } func TestSmokeProcessRunNotProd(t *testing.T) { cmd, err := p.Cmd(false) assert.NoError(t, err, "got an error") out, err := cmd.Output() assert.NoError(t, err, "got an error on run") assert.Equal(t, "this is testing? TESTING\n", string(out), "wrong output") } func TestSmokeProcessRunFail(t *testing.T) { q := Process{ Name: "Bad Example", Priority: 42, } _, err := q.Cmd(true) assert.Error(t, err, "did not get an error") } func TestSmokeSimpleProcessRun(t *testing.T) { r := Process{ Name: "Short Example", Priority: 42, ProdCommand: "ls", } _, err := r.Cmd(true) assert.NoError(t, err, "got an error") }
{ "content_hash": "f65f829827d339946b2fe5fb485dd213", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 81, "avg_line_length": 25.24, "alnum_prop": 0.661648177496038, "repo_name": "tristanwietsma/gobox", "id": "cd2ee9fe29f902125c2c12246e624bbf4e9d365b", "size": "1262", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "process/process_test.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "8720" }, { "name": "Python", "bytes": "544" } ], "symlink_target": "" }
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android" xmlns:tools="http://schemas.android.com/tools" xmlns:materialdesign="http://schemas.android.com/apk/res-auto" android:layout_width="match_parent" android:layout_height="match_parent" android:paddingLeft="@dimen/activity_horizontal_margin" android:paddingRight="@dimen/activity_horizontal_margin" android:paddingTop="@dimen/activity_vertical_margin" android:paddingBottom="@dimen/activity_vertical_margin" android:orientation="vertical" tools:context="com.brotherjing.client.SocketmsgActivity"> <LinearLayout android:orientation="horizontal" android:layout_width="match_parent" android:layout_height="wrap_content"> <!--<TextView--> <!--android:text="ip:"--> <!--android:gravity="center"--> <!--android:layout_weight="1"--> <!--android:layout_width="0dp"--> <!--android:layout_height="wrap_content"/>--> <com.gc.materialdesign.views.ButtonFlat android:layout_weight="1" android:layout_width="0dp" android:layout_height="wrap_content" android:background="#1E88E5" android:gravity="center" android:text="IP" > </com.gc.materialdesign.views.ButtonFlat> <EditText android:id="@+id/edt_ip" android:hint="输入ip" android:layout_weight="2" android:layout_width="0dp" android:layout_height="wrap_content"/> </LinearLayout> <LinearLayout android:orientation="horizontal" android:layout_width="match_parent" android:layout_height="wrap_content"> <!--<TextView--> <!--android:text="port:"--> <!--android:gravity="center"--> <!--android:layout_weight="1"--> <!--android:layout_width="0dp"--> <!--android:layout_height="wrap_content"/>--> <com.gc.materialdesign.views.ButtonFlat android:layout_weight="1" android:layout_width="0dp" android:layout_height="wrap_content" android:background="#1E88E5" android:gravity="center" android:text="Port" > </com.gc.materialdesign.views.ButtonFlat> <EditText android:id="@+id/edt_port" android:hint="输入port" android:layout_weight="2" android:layout_width="0dp" android:layout_height="wrap_content"/> 
</LinearLayout> <!--<Button--> <!--android:id="@+id/btn_connect"--> <!--android:text="@string/bit_connect"--> <!--android:layout_width="match_parent"--> <!--android:layout_height="wrap_content"/>--> <com.gc.materialdesign.views.ButtonRectangle android:id="@+id/btn_connect" android:text="@string/bit_connect" android:layout_width="match_parent" android:layout_height="wrap_content" android:background="#1E88E5" > </com.gc.materialdesign.views.ButtonRectangle> <LinearLayout android:layout_width="fill_parent" android:layout_height="wrap_content" android:orientation="horizontal"> <EditText android:id="@+id/edt_input" android:paddingTop="10dp" android:hint="@string/edt_input" android:layout_width="0dp" android:layout_weight="1" android:layout_height="wrap_content"/> <!--<Button--> <!--android:id="@+id/btn_submit"--> <!--android:text="@string/btn_submit"--> <!--android:layout_width="wrap_content"--> <!--android:layout_height="wrap_content"/>--> <com.gc.materialdesign.views.ButtonRectangle android:id="@+id/btn_submit" android:text="@string/btn_submit" android:layout_width="wrap_content" android:layout_height="wrap_content" android:background="#1E88E5"> </com.gc.materialdesign.views.ButtonRectangle> </LinearLayout> <!--<Button--> <!--android:id="@+id/btn_asr"--> <!--android:text="@string/btn_asr"--> <!--android:layout_width="match_parent"--> <!--android:layout_height="wrap_content"/>--> <!--<Button--> <!--android:id="@+id/btn_ar"--> <!--android:text="@string/btn_ar"--> <!--android:layout_width="match_parent"--> <!--android:layout_height="wrap_content"/>--> <!--<Button--> <!--android:id="@+id/btn_video"--> <!--android:text="@string/btn_video"--> <!--android:layout_width="match_parent"--> <!--android:layout_height="wrap_content"/>--> <!--<Button--> <!--android:id="@+id/btn_qrcode"--> <!--android:text="@string/btn_qrcode"--> <!--android:layout_width="match_parent"--> <!--android:layout_height="wrap_content"/>--> <com.gc.materialdesign.views.ButtonRectangle 
android:id="@+id/btn_asr" android:text="@string/btn_asr" android:layout_width="match_parent" android:layout_height="wrap_content" android:background="#1E88E5"> </com.gc.materialdesign.views.ButtonRectangle> <com.gc.materialdesign.views.ButtonRectangle android:id="@+id/btn_ar" android:text="@string/btn_ar" android:layout_width="match_parent" android:layout_height="wrap_content" android:background="#1E88E5"> </com.gc.materialdesign.views.ButtonRectangle> <com.gc.materialdesign.views.ButtonRectangle android:id="@+id/btn_video" android:text="@string/btn_video" android:layout_width="match_parent" android:layout_height="wrap_content" android:background="#1E88E5"> </com.gc.materialdesign.views.ButtonRectangle> <com.gc.materialdesign.views.ButtonRectangle android:id="@+id/btn_qrcode" android:text="@string/btn_qrcode" android:layout_width="match_parent" android:layout_height="wrap_content" android:background="#1E88E5"> </com.gc.materialdesign.views.ButtonRectangle> <Button android:id="@+id/btn_demo" android:text="demo" android:layout_width="match_parent" android:layout_height="wrap_content" > </Button> <ImageView android:layout_width="fill_parent" android:layout_height="fill_parent" android:id="@+id/iv_video"/> <ScrollView android:layout_width="fill_parent" android:layout_height="fill_parent" android:visibility="gone"> <LinearLayout android:layout_width="fill_parent" android:layout_height="wrap_content" android:id="@+id/ll_chat" android:orientation="vertical"/> </ScrollView> </LinearLayout>
{ "content_hash": "0e3ed7af46dd5867f3f539a64d4c2192", "timestamp": "", "source": "github", "line_count": 204, "max_line_length": 76, "avg_line_length": 34.40686274509804, "alnum_prop": 0.5794272688417154, "repo_name": "gongkechuang3C/smartcar", "id": "61883563da1e19d6b57af9d8835b4156c4782535", "size": "7027", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "client/src/main/res/layout/activity_socketmsg.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "1945" }, { "name": "Java", "bytes": "519205" } ], "symlink_target": "" }
from enum import Enum from typing import Iterable, List, Set, Tuple, Type, Union def enum_to_choices(enumeration: Type[Enum]) -> Iterable[Tuple]: return tuple((e.value, e.value) for e in enumeration) def enum_to_set(enumeration: Type[Enum]) -> Set: return set(e.value for e in enumeration) def values_to_choices(enumeration: Union[List, Set]) -> Iterable[Tuple]: return tuple((e, e) for e in sorted(enumeration))
{ "content_hash": "d2d46694ddfd16574c0739a795cc2d91", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 72, "avg_line_length": 30.785714285714285, "alnum_prop": 0.7122969837587007, "repo_name": "polyaxon/polyaxon", "id": "eadf572bb2424b92d487d54a9e9bb0f58a12ac40", "size": "1036", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/polyaxon/utils/enums_utils.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "1989" }, { "name": "Python", "bytes": "5201898" }, { "name": "Shell", "bytes": "1565" } ], "symlink_target": "" }
class SpmiRecordsHelper { public: static MethodContext::Agnostic_CORINFO_RESOLVED_TOKENin CreateAgnostic_CORINFO_RESOLVED_TOKENin( CORINFO_RESOLVED_TOKEN* pResolvedToken); static MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout CreateAgnostic_CORINFO_RESOLVED_TOKENout_without_buffers( CORINFO_RESOLVED_TOKEN* pResolvedToken); template <typename key, typename value> static MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout StoreAgnostic_CORINFO_RESOLVED_TOKENout( CORINFO_RESOLVED_TOKEN* pResolvedToken, LightWeightMap<key, value>* buffers); template <typename key, typename value> static MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout RestoreAgnostic_CORINFO_RESOLVED_TOKENout( CORINFO_RESOLVED_TOKEN* pResolvedToken, LightWeightMap<key, value>* buffers); template <typename key, typename value> static MethodContext::Agnostic_CORINFO_RESOLVED_TOKEN StoreAgnostic_CORINFO_RESOLVED_TOKEN( CORINFO_RESOLVED_TOKEN* pResolvedToken, LightWeightMap<key, value>* buffers); template <typename key, typename value> static MethodContext::Agnostic_CORINFO_RESOLVED_TOKEN RestoreAgnostic_CORINFO_RESOLVED_TOKEN( CORINFO_RESOLVED_TOKEN* pResolvedToken, LightWeightMap<key, value>* buffers); // Restore the out values in the first argument from the second. // Can't just return whole CORINFO_RESOLVED_TOKEN because [in] values in it are important too. 
template <typename key, typename value> static void Restore_CORINFO_RESOLVED_TOKENout(CORINFO_RESOLVED_TOKEN* pResolvedToken, MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout& token, LightWeightMap<key, value>* buffers); static MethodContext::Agnostic_CORINFO_SIG_INFO CreateAgnostic_CORINFO_SIG_INFO_without_buffers( CORINFO_SIG_INFO& sigInfo); template <typename key, typename value> static MethodContext::Agnostic_CORINFO_SIG_INFO StoreAgnostic_CORINFO_SIG_INFO(CORINFO_SIG_INFO& sigInfo, LightWeightMap<key, value>* buffers); template <typename key, typename value> static MethodContext::Agnostic_CORINFO_SIG_INFO RestoreAgnostic_CORINFO_SIG_INFO( CORINFO_SIG_INFO& sigInfo, LightWeightMap<key, value>* buffers); template <typename key, typename value> static CORINFO_SIG_INFO Restore_CORINFO_SIG_INFO(MethodContext::Agnostic_CORINFO_SIG_INFO& sigInfo, LightWeightMap<key, value>* buffers); static MethodContext::Agnostic_CORINFO_LOOKUP_KIND CreateAgnostic_CORINFO_LOOKUP_KIND( const CORINFO_LOOKUP_KIND* pGenericLookupKind); static CORINFO_LOOKUP_KIND RestoreCORINFO_LOOKUP_KIND(MethodContext::Agnostic_CORINFO_LOOKUP_KIND& lookupKind); static MethodContext::Agnostic_CORINFO_CONST_LOOKUP StoreAgnostic_CORINFO_CONST_LOOKUP( CORINFO_CONST_LOOKUP* pLookup); static CORINFO_CONST_LOOKUP RestoreCORINFO_CONST_LOOKUP(MethodContext::Agnostic_CORINFO_CONST_LOOKUP& lookup); static MethodContext::Agnostic_CORINFO_RUNTIME_LOOKUP StoreAgnostic_CORINFO_RUNTIME_LOOKUP( CORINFO_RUNTIME_LOOKUP* pLookup); static CORINFO_RUNTIME_LOOKUP RestoreCORINFO_RUNTIME_LOOKUP(MethodContext::Agnostic_CORINFO_RUNTIME_LOOKUP& Lookup); static MethodContext::Agnostic_CORINFO_LOOKUP StoreAgnostic_CORINFO_LOOKUP(CORINFO_LOOKUP* pLookup); static CORINFO_LOOKUP RestoreCORINFO_LOOKUP(MethodContext::Agnostic_CORINFO_LOOKUP& agnosticLookup); }; inline MethodContext::Agnostic_CORINFO_RESOLVED_TOKENin SpmiRecordsHelper::CreateAgnostic_CORINFO_RESOLVED_TOKENin( CORINFO_RESOLVED_TOKEN* pResolvedToken) { 
MethodContext::Agnostic_CORINFO_RESOLVED_TOKENin tokenIn; ZeroMemory(&tokenIn, sizeof(tokenIn)); tokenIn.tokenContext = (DWORDLONG)pResolvedToken->tokenContext; tokenIn.tokenScope = (DWORDLONG)pResolvedToken->tokenScope; tokenIn.token = (DWORD)pResolvedToken->token; tokenIn.tokenType = (DWORD)pResolvedToken->tokenType; return tokenIn; } inline MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout SpmiRecordsHelper:: CreateAgnostic_CORINFO_RESOLVED_TOKENout_without_buffers(CORINFO_RESOLVED_TOKEN* pResolvedToken) { MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout tokenOut; ZeroMemory(&tokenOut, sizeof(tokenOut)); tokenOut.hClass = (DWORDLONG)pResolvedToken->hClass; tokenOut.hMethod = (DWORDLONG)pResolvedToken->hMethod; tokenOut.hField = (DWORDLONG)pResolvedToken->hField; tokenOut.cbTypeSpec = (DWORD)pResolvedToken->cbTypeSpec; tokenOut.cbMethodSpec = (DWORD)pResolvedToken->cbMethodSpec; tokenOut.pTypeSpec_Index = -1; tokenOut.pMethodSpec_Index = -1; return tokenOut; } template <typename key, typename value> inline MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout SpmiRecordsHelper::StoreAgnostic_CORINFO_RESOLVED_TOKENout( CORINFO_RESOLVED_TOKEN* pResolvedToken, LightWeightMap<key, value>* buffers) { MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout tokenOut( CreateAgnostic_CORINFO_RESOLVED_TOKENout_without_buffers(pResolvedToken)); tokenOut.pTypeSpec_Index = (DWORD)buffers->AddBuffer((unsigned char*)pResolvedToken->pTypeSpec, pResolvedToken->cbTypeSpec); tokenOut.pMethodSpec_Index = (DWORD)buffers->AddBuffer((unsigned char*)pResolvedToken->pMethodSpec, pResolvedToken->cbMethodSpec); return tokenOut; } template <typename key, typename value> inline MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout SpmiRecordsHelper::RestoreAgnostic_CORINFO_RESOLVED_TOKENout( CORINFO_RESOLVED_TOKEN* pResolvedToken, LightWeightMap<key, value>* buffers) { MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout tokenOut( CreateAgnostic_CORINFO_RESOLVED_TOKENout_without_buffers(pResolvedToken)); 
tokenOut.pTypeSpec_Index = (DWORD)buffers->Contains((unsigned char*)pResolvedToken->pTypeSpec, pResolvedToken->cbTypeSpec); tokenOut.pMethodSpec_Index = (DWORD)buffers->Contains((unsigned char*)pResolvedToken->pMethodSpec, pResolvedToken->cbMethodSpec); return tokenOut; } template <typename key, typename value> inline MethodContext::Agnostic_CORINFO_RESOLVED_TOKEN SpmiRecordsHelper::StoreAgnostic_CORINFO_RESOLVED_TOKEN( CORINFO_RESOLVED_TOKEN* pResolvedToken, LightWeightMap<key, value>* buffers) { MethodContext::Agnostic_CORINFO_RESOLVED_TOKEN token; token.inValue = CreateAgnostic_CORINFO_RESOLVED_TOKENin(pResolvedToken); token.outValue = StoreAgnostic_CORINFO_RESOLVED_TOKENout(pResolvedToken, buffers); return token; } template <typename key, typename value> inline MethodContext::Agnostic_CORINFO_RESOLVED_TOKEN SpmiRecordsHelper::RestoreAgnostic_CORINFO_RESOLVED_TOKEN( CORINFO_RESOLVED_TOKEN* pResolvedToken, LightWeightMap<key, value>* buffers) { MethodContext::Agnostic_CORINFO_RESOLVED_TOKEN token; ZeroMemory(&token, sizeof(token)); token.inValue = CreateAgnostic_CORINFO_RESOLVED_TOKENin(pResolvedToken); token.outValue = RestoreAgnostic_CORINFO_RESOLVED_TOKENout(pResolvedToken, buffers); return token; } template <typename key, typename value> inline void SpmiRecordsHelper::Restore_CORINFO_RESOLVED_TOKENout( CORINFO_RESOLVED_TOKEN* pResolvedToken, MethodContext::Agnostic_CORINFO_RESOLVED_TOKENout& tokenOut, LightWeightMap<key, value>* buffers) { pResolvedToken->hClass = (CORINFO_CLASS_HANDLE)tokenOut.hClass; pResolvedToken->hMethod = (CORINFO_METHOD_HANDLE)tokenOut.hMethod; pResolvedToken->hField = (CORINFO_FIELD_HANDLE)tokenOut.hField; pResolvedToken->pTypeSpec = (PCCOR_SIGNATURE)buffers->GetBuffer(tokenOut.pTypeSpec_Index); pResolvedToken->cbTypeSpec = (ULONG)tokenOut.cbTypeSpec; pResolvedToken->pMethodSpec = (PCCOR_SIGNATURE)buffers->GetBuffer(tokenOut.pMethodSpec_Index); pResolvedToken->cbMethodSpec = (ULONG)tokenOut.cbMethodSpec; } inline 
MethodContext::Agnostic_CORINFO_SIG_INFO SpmiRecordsHelper::CreateAgnostic_CORINFO_SIG_INFO_without_buffers( CORINFO_SIG_INFO& sigInfo) { MethodContext::Agnostic_CORINFO_SIG_INFO sig; ZeroMemory(&sig, sizeof(sig)); sig.callConv = (DWORD)sigInfo.callConv; sig.retTypeClass = (DWORDLONG)sigInfo.retTypeClass; sig.retTypeSigClass = (DWORDLONG)sigInfo.retTypeSigClass; sig.retType = (DWORD)sigInfo.retType; sig.flags = (DWORD)sigInfo.flags; sig.numArgs = (DWORD)sigInfo.numArgs; sig.sigInst_classInstCount = (DWORD)sigInfo.sigInst.classInstCount; sig.sigInst_methInstCount = (DWORD)sigInfo.sigInst.methInstCount; sig.args = (DWORDLONG)sigInfo.args; sig.cbSig = (DWORD)sigInfo.cbSig; sig.scope = (DWORDLONG)sigInfo.scope; sig.token = (DWORD)sigInfo.token; return sig; } template <typename key, typename value> inline MethodContext::Agnostic_CORINFO_SIG_INFO SpmiRecordsHelper::StoreAgnostic_CORINFO_SIG_INFO( CORINFO_SIG_INFO& sigInfo, LightWeightMap<key, value>* buffers) { MethodContext::Agnostic_CORINFO_SIG_INFO sig(CreateAgnostic_CORINFO_SIG_INFO_without_buffers(sigInfo)); sig.sigInst_classInst_Index = buffers->AddBuffer((unsigned char*)sigInfo.sigInst.classInst, sigInfo.sigInst.classInstCount * 8); sig.sigInst_methInst_Index = buffers->AddBuffer((unsigned char*)sigInfo.sigInst.methInst, sigInfo.sigInst.methInstCount * 8); sig.pSig_Index = (DWORD)buffers->AddBuffer((unsigned char*)sigInfo.pSig, sigInfo.cbSig); return sig; } template <typename key, typename value> inline MethodContext::Agnostic_CORINFO_SIG_INFO SpmiRecordsHelper::RestoreAgnostic_CORINFO_SIG_INFO( CORINFO_SIG_INFO& sigInfo, LightWeightMap<key, value>* buffers) { MethodContext::Agnostic_CORINFO_SIG_INFO sig(CreateAgnostic_CORINFO_SIG_INFO_without_buffers(sigInfo)); sig.sigInst_classInst_Index = buffers->Contains((unsigned char*)sigInfo.sigInst.classInst, sigInfo.sigInst.classInstCount * 8); sig.sigInst_methInst_Index = buffers->Contains((unsigned char*)sigInfo.sigInst.methInst, sigInfo.sigInst.methInstCount * 8); 
sig.pSig_Index = (DWORD)buffers->Contains((unsigned char*)sigInfo.pSig, sigInfo.cbSig); return sig; } template <typename key, typename value> inline CORINFO_SIG_INFO SpmiRecordsHelper::Restore_CORINFO_SIG_INFO(MethodContext::Agnostic_CORINFO_SIG_INFO& sigInfo, LightWeightMap<key, value>* buffers) { CORINFO_SIG_INFO sig; sig.callConv = (CorInfoCallConv)sigInfo.callConv; sig.retTypeClass = (CORINFO_CLASS_HANDLE)sigInfo.retTypeClass; sig.retTypeSigClass = (CORINFO_CLASS_HANDLE)sigInfo.retTypeSigClass; sig.retType = (CorInfoType)sigInfo.retType; sig.flags = (unsigned)sigInfo.flags; sig.numArgs = (unsigned)sigInfo.numArgs; sig.sigInst.classInstCount = (unsigned)sigInfo.sigInst_classInstCount; sig.sigInst.classInst = (CORINFO_CLASS_HANDLE*)buffers->GetBuffer(sigInfo.sigInst_classInst_Index); sig.sigInst.methInstCount = (unsigned)sigInfo.sigInst_methInstCount; sig.sigInst.methInst = (CORINFO_CLASS_HANDLE*)buffers->GetBuffer(sigInfo.sigInst_methInst_Index); sig.args = (CORINFO_ARG_LIST_HANDLE)sigInfo.args; sig.cbSig = (unsigned int)sigInfo.cbSig; sig.pSig = (PCCOR_SIGNATURE)buffers->GetBuffer(sigInfo.pSig_Index); sig.scope = (CORINFO_MODULE_HANDLE)sigInfo.scope; sig.token = (mdToken)sigInfo.token; return sig; } inline MethodContext::Agnostic_CORINFO_LOOKUP_KIND SpmiRecordsHelper::CreateAgnostic_CORINFO_LOOKUP_KIND( const CORINFO_LOOKUP_KIND* pGenericLookupKind) { MethodContext::Agnostic_CORINFO_LOOKUP_KIND genericLookupKind; ZeroMemory(&genericLookupKind, sizeof(genericLookupKind)); if (pGenericLookupKind != nullptr) { genericLookupKind.needsRuntimeLookup = (DWORD)pGenericLookupKind->needsRuntimeLookup; genericLookupKind.runtimeLookupKind = (DWORD)pGenericLookupKind->runtimeLookupKind; genericLookupKind.runtimeLookupFlags = pGenericLookupKind->runtimeLookupFlags; } // We don't store result->runtimeLookupArgs, which is opaque data. Ok? 
return genericLookupKind; } inline CORINFO_LOOKUP_KIND SpmiRecordsHelper::RestoreCORINFO_LOOKUP_KIND( MethodContext::Agnostic_CORINFO_LOOKUP_KIND& lookupKind) { CORINFO_LOOKUP_KIND genericLookupKind; genericLookupKind.needsRuntimeLookup = lookupKind.needsRuntimeLookup != 0; genericLookupKind.runtimeLookupKind = (CORINFO_RUNTIME_LOOKUP_KIND)lookupKind.runtimeLookupKind; genericLookupKind.runtimeLookupFlags = lookupKind.runtimeLookupFlags; genericLookupKind.runtimeLookupArgs = nullptr; // We don't store this opaque data. Ok? return genericLookupKind; } inline MethodContext::Agnostic_CORINFO_CONST_LOOKUP SpmiRecordsHelper::StoreAgnostic_CORINFO_CONST_LOOKUP( CORINFO_CONST_LOOKUP* pLookup) { MethodContext::Agnostic_CORINFO_CONST_LOOKUP constLookup; ZeroMemory(&constLookup, sizeof(constLookup)); constLookup.accessType = (DWORD)pLookup->accessType; constLookup.handle = (DWORDLONG)pLookup->handle; return constLookup; } inline CORINFO_CONST_LOOKUP SpmiRecordsHelper::RestoreCORINFO_CONST_LOOKUP( MethodContext::Agnostic_CORINFO_CONST_LOOKUP& lookup) { CORINFO_CONST_LOOKUP constLookup; constLookup.accessType = (InfoAccessType)lookup.accessType; constLookup.handle = (CORINFO_GENERIC_HANDLE)lookup.handle; return constLookup; } inline MethodContext::Agnostic_CORINFO_RUNTIME_LOOKUP SpmiRecordsHelper::StoreAgnostic_CORINFO_RUNTIME_LOOKUP( CORINFO_RUNTIME_LOOKUP* pLookup) { MethodContext::Agnostic_CORINFO_RUNTIME_LOOKUP runtimeLookup; ZeroMemory(&runtimeLookup, sizeof(runtimeLookup)); runtimeLookup.signature = (DWORDLONG)pLookup->signature; runtimeLookup.helper = (DWORD)pLookup->helper; runtimeLookup.indirections = (DWORD)pLookup->indirections; runtimeLookup.testForNull = (DWORD)pLookup->testForNull; runtimeLookup.testForFixup = (DWORD)pLookup->testForFixup; runtimeLookup.indirectFirstOffset = (DWORD)pLookup->indirectFirstOffset; for (int i = 0; i < CORINFO_MAXINDIRECTIONS; i++) runtimeLookup.offsets[i] = (DWORDLONG)pLookup->offsets[i]; return runtimeLookup; } inline 
CORINFO_RUNTIME_LOOKUP SpmiRecordsHelper::RestoreCORINFO_RUNTIME_LOOKUP( MethodContext::Agnostic_CORINFO_RUNTIME_LOOKUP& lookup) { CORINFO_RUNTIME_LOOKUP runtimeLookup; runtimeLookup.signature = (LPVOID)lookup.signature; runtimeLookup.helper = (CorInfoHelpFunc)lookup.helper; runtimeLookup.indirections = (WORD)lookup.indirections; runtimeLookup.testForNull = lookup.testForNull != 0; runtimeLookup.testForFixup = lookup.testForFixup != 0; runtimeLookup.indirectFirstOffset = lookup.indirectFirstOffset != 0; for (int i = 0; i < CORINFO_MAXINDIRECTIONS; i++) runtimeLookup.offsets[i] = (size_t)lookup.offsets[i]; return CORINFO_RUNTIME_LOOKUP(); } inline MethodContext::Agnostic_CORINFO_LOOKUP SpmiRecordsHelper::StoreAgnostic_CORINFO_LOOKUP(CORINFO_LOOKUP* pLookup) { MethodContext::Agnostic_CORINFO_LOOKUP lookup; ZeroMemory(&lookup, sizeof(lookup)); lookup.lookupKind = CreateAgnostic_CORINFO_LOOKUP_KIND(&pLookup->lookupKind); if (pLookup->lookupKind.needsRuntimeLookup) { lookup.runtimeLookup = StoreAgnostic_CORINFO_RUNTIME_LOOKUP(&pLookup->runtimeLookup); } else { lookup.constLookup = StoreAgnostic_CORINFO_CONST_LOOKUP(&pLookup->constLookup); } return lookup; } inline CORINFO_LOOKUP SpmiRecordsHelper::RestoreCORINFO_LOOKUP(MethodContext::Agnostic_CORINFO_LOOKUP& agnosticLookup) { CORINFO_LOOKUP lookup; ZeroMemory(&lookup, sizeof(lookup)); lookup.lookupKind = RestoreCORINFO_LOOKUP_KIND(agnosticLookup.lookupKind); if (lookup.lookupKind.needsRuntimeLookup) { lookup.runtimeLookup = RestoreCORINFO_RUNTIME_LOOKUP(agnosticLookup.runtimeLookup); } else { lookup.constLookup = RestoreCORINFO_CONST_LOOKUP(agnosticLookup.constLookup); } return lookup; } #endif
{ "content_hash": "2bd15c10573f59d84beba935961ee4ff", "timestamp": "", "source": "github", "line_count": 339, "max_line_length": 120, "avg_line_length": 49.31268436578171, "alnum_prop": 0.7294969193037029, "repo_name": "YongseopKim/coreclr", "id": "0030a2bbd5a4a4531f2678597384ce00fb6dd5e0", "size": "17163", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "src/ToolBox/superpmi/superpmi-shared/spmirecordhelper.h", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "1018888" }, { "name": "Awk", "bytes": "5861" }, { "name": "Batchfile", "bytes": "138123" }, { "name": "C", "bytes": "2869388" }, { "name": "C#", "bytes": "134041831" }, { "name": "C++", "bytes": "68794728" }, { "name": "CMake", "bytes": "655027" }, { "name": "Groovy", "bytes": "174637" }, { "name": "Makefile", "bytes": "2736" }, { "name": "Objective-C", "bytes": "656865" }, { "name": "PAWN", "bytes": "903" }, { "name": "Perl", "bytes": "23640" }, { "name": "PowerShell", "bytes": "9319" }, { "name": "Python", "bytes": "235095" }, { "name": "Roff", "bytes": "529523" }, { "name": "Shell", "bytes": "218577" }, { "name": "Smalltalk", "bytes": "1218984" }, { "name": "SuperCollider", "bytes": "4752" }, { "name": "XSLT", "bytes": "1016" }, { "name": "Yacc", "bytes": "157348" } ], "symlink_target": "" }
/* Styles for the CircleFlip page-builder Google Maps modal:
   a left map panel plus a right location-list/edit panel. */
.block-cr_gmap_block .block-settings .modal-body {
    overflow: hidden;
    padding: 0;
}

.circleflip-googlemaps-frame .cfgm-rightpanel {
    width: 40%;
    height: 100%;
    overflow: hidden;
    float: right;
    position: relative;
}

.circleflip-googlemaps-frame .cfgm-leftpanel {
    height: 100%;
    display: block;
    overflow: hidden;
}

/* Map and its containers fill the frame height. */
.circleflip-googlemaps-frame .cfgm-map,
.circleflip-googlemaps-frame .cfgm-map-container,
.circleflip-googlemaps-frame {
    height: 100%;
}

.circleflip-googlemaps-frame .cfgm-error .error {
    margin: 15px 10px !important;
    background: #fcfcfc;
    -webkit-box-shadow: 0 1px 1px 0 rgba(0,0,0,0.2);
    box-shadow: 0 1px 1px 0 rgba(0, 0, 0, 0.2);
}

/* -------------------------------------------------------------------------- */
/* # TABLE STRUCTURE */
.circleflip-googlemaps-frame .cfgm-locationlist {
    table-layout: fixed;
}

.circleflip-googlemaps-frame .cfgm-location-key {
    width: 40px;
    box-sizing: border-box;
}

.circleflip-googlemaps-frame .cfgm-location-title {
    overflow: hidden;
    white-space: nowrap;
    text-overflow: ellipsis;
    cursor: default;
}

.circleflip-googlemaps-frame .cfgm-location-actions {
    width: 87px;
    white-space: nowrap;
    box-sizing: border-box;
}

/* Zebra-striping for the location rows. */
.circleflip-googlemaps-frame .cfgm-location:nth-child(odd) {
    background-color: #f9f9f9;
}
/* #END TABLE STRUCTURE */

/* # USER CONTROLS */
/* Dimmed state for the hide action and the non-primary "make primary" toggle. */
.circleflip-googlemaps-frame .cfgm-location-hidden .cfgm-location-action-hide,
.circleflip-googlemaps-frame .cfgm-location-action-primary {
    opacity: 0.2;
}

.circleflip-googlemaps-frame .cfgm-location-primary .cfgm-location-action-primary {
    opacity: 1;
}

.circleflip-googlemaps-frame .cfgm-location-action-primary,
.circleflip-googlemaps-frame .cfgm-location-actions .dashicons {
    cursor: pointer;
}

.circleflip-googlemaps-frame .cfgm-location-primary .cfgm-location-action-primary {
    cursor: auto;
}

.circleflip-googlemaps-frame .cfgm-location.cfgm-location-selected {
    background: rgba(177, 232, 232, 0.6);
}

.cfgm-location {
    transition: transform .6s linear;
}
/* #END USER CONTROLS */

/* -------------------------------------------------------------------------- */
.circleflip-googlemaps-frame .cfgm-map-toolbar-search {
    overflow: hidden;
}

.circleflip-googlemaps-frame .cfgm-map-toolbar-pin {
    width: 52px;
    height: 52px;
    float: right;
}

.circleflip-googlemaps-frame .cfgm-map-toolbar-search .cfgm-map-search {
    width: 100%;
    font-size: 16px;
    border: none;
    border-left: 5px solid #c5c5c5;
    box-shadow: none;
    margin: 0;
    padding: 15px;
    transition: 150ms ease-out;
}

/* Focus is indicated by widening/recoloring the left border. */
.circleflip-googlemaps-frame .cfgm-map-toolbar-search .cfgm-map-search:focus {
    border-left: 10px solid #0074a2;
}

.circleflip-googlemaps-frame .cfgm-map-toolbar-pin .cfgm-map-pin {
    height: 52px;
    width: 52px;
    font-size: 32px;
    padding: 10px;
    box-sizing: border-box;
}

/* ------------------------------------------------------------------------- */
/* Google Places API autocomplete container */
/* High z-index keeps the suggestions dropdown above the modal. */
.pac-container {
    z-index: 99999;
    margin-left: 10px;
    box-shadow: none;
    border-top: none;
}

.pac-item:first-child {
    border-top: none;
}
/* ------------------------------------------------------------------------- */

.cfgm-content-panel {
    height: 100%;
    overflow: auto;
    padding: 10px;
    box-sizing: border-box;
}

/* -------------------------------------------------------------------------- */
/* Edit panel: slides down (translateY) and fades in via .cfgm-edit-open. */
.cfgm-edit {
    opacity: 0;
    padding: 0;
    overflow: hidden;
    transform: translateY(-100%);
    transition: opacity .15s linear;
}

.cfgm-edit.cfgm-edit-open {
    opacity: 1;
}

.cfgm-edit-header {
    height: 50px;
    background: #fcfcfc;
    border-bottom: 1px solid #e5e5e5;
    box-sizing: border-box;
    box-shadow: 0px -7px 17px 0px;
    /* for box shadow to appear */
    position: relative;
}

.cfgm-edit-header > [class*="cfgm-location-"] {
    padding: 15px 10px;
}

.cfgm-edit-header .cfgm-location-key {
    float: left;
}

.cfgm-edit-header .cfgm-location-actions {
    float: right;
    width: 63px;
}

.cfgm-edit-header .cfgm-location-title {
    overflow: hidden;
    margin: 0;
    white-space: nowrap;
    text-overflow: ellipsis;
    line-height: 20px;
}

.cfgm-edit-header .cfgm-edit-pretitle {
    font-weight: normal;
    font-style: italic;
}

/* Body height = frame minus header (50px) and footer (50px). */
.cfgm-edit-body {
    height: calc(100% - 100px);
    background: white;
    padding: 20px;
    box-sizing: border-box;
    overflow-y: auto;
}

.cfgm-edit-field-title {
    float: left;
    width: 20%;
    min-width: 100px;
    text-align: right;
    padding: 15px 15px;
    box-sizing: border-box;
}

.cfgm-edit-field {
    overflow: hidden;
    padding: 10px 15px;
    box-sizing: border-box;
}

.cfgm-edit-field textarea {
    resize: vertical;
}

.cfgm-edit-footer {
    height: 50px;
    padding: 10px 20px;
    text-align: right;
    border-top: 1px solid #e5e5e5;
    background: #fcfcfc;
    box-sizing: border-box;
}

.cfgm-media-box img {
    max-width: 100%;
}

.cfgm-media-box-controls {
    width: 100%;
}

/* Two buttons side by side: 50% each minus margins. */
.cfgm-media-box-controls [data-action] {
    width: calc(50% - 12px);
    margin: 5px;
    font-size: 20px;
    height: 1.5em;
}

.cfgm-media-box {
    width: 100%;
    box-sizing: border-box;
}

.cfgm-edit-field-row {
    width: 100%;
    clear: both;
}

.cfgm-settings {
    margin-top: 30px;
    border-top: 1px solid #e5e5e5;
    padding-top: 30px;
}

/* Show the "empty list" footer row only when the body has no rows. */
.cfgm-locationlist-body + tfoot {
    display: none;
}

.cfgm-locationlist-body + tfoot th {
    text-align: center;
}

.cfgm-locationlist-body:empty + tfoot {
    display: table-footer-group;
}
{ "content_hash": "91f2563dcc0b377e2ab46db4d2d2ac81", "timestamp": "", "source": "github", "line_count": 231, "max_line_length": 83, "avg_line_length": 24.246753246753247, "alnum_prop": 0.6170326727370112, "repo_name": "purgesoftwares/purges", "id": "c5c622733c7566cba9e0aff7e6290572bbb63011", "size": "5601", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "frontend/wp-content/themes/circleflip/creiden-framework/content-builder/assets/css/circleflip-pagebuilder-googlemaps.css", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "972" }, { "name": "CSS", "bytes": "3670813" }, { "name": "HTML", "bytes": "340012" }, { "name": "JavaScript", "bytes": "3655920" }, { "name": "PHP", "bytes": "12881714" } ], "symlink_target": "" }
<?php
/**
 * Sales Rule resource model
 *
 * @category    Mage
 * @package     Mage_SalesRule
 * @author      Magento Core Team <core@magentocommerce.com>
 */
class Mage_SalesRule_Model_Resource_Rule extends Mage_Rule_Model_Resource_Abstract
{
    /**
     * Store associated with rule entities information map
     *
     * Maps an association name to the link table and the id columns used by
     * bindRuleToEntity()/getCustomerGroupIds()/getWebsiteIds() in the parent.
     *
     * @var array
     */
    protected $_associatedEntitiesMap = array(
        'website' => array(
            'associations_table' => 'salesrule/website',
            'rule_id_field'      => 'rule_id',
            'entity_id_field'    => 'website_id'
        ),
        'customer_group' => array(
            'associations_table' => 'salesrule/customer_group',
            'rule_id_field'      => 'rule_id',
            'entity_id_field'    => 'customer_group_id'
        )
    );

    /**
     * Initialize main table and table id field
     */
    protected function _construct()
    {
        $this->_init('salesrule/rule', 'rule_id');
    }

    /**
     * Add customer group ids and website ids to rule data after load
     *
     * @param Mage_Core_Model_Abstract $object
     *
     * @return Mage_SalesRule_Model_Resource_Rule
     */
    protected function _afterLoad(Mage_Core_Model_Abstract $object)
    {
        $object->setData('customer_group_ids', (array)$this->getCustomerGroupIds($object->getId()));
        $object->setData('website_ids', (array)$this->getWebsiteIds($object->getId()));

        parent::_afterLoad($object);
        return $this;
    }

    /**
     * Prepare sales rule's discount quantity
     *
     * An empty/zero discount qty is stored as SQL NULL rather than 0.
     * NOTE(review): declared public although Magento resource-model hooks are
     * conventionally protected — confirm before tightening visibility.
     *
     * @param Mage_Core_Model_Abstract $object
     *
     * @return Mage_SalesRule_Model_Resource_Rule
     */
    public function _beforeSave(Mage_Core_Model_Abstract $object)
    {
        if (!$object->getDiscountQty()) {
            $object->setDiscountQty(new Zend_Db_Expr('NULL'));
        }

        parent::_beforeSave($object);
        return $this;
    }

    /**
     * Bind sales rule to customer group(s) and website(s).
     * Save rule's associated store labels.
     * Save product attributes used in rule.
     *
     * @param Mage_Core_Model_Abstract $object
     *
     * @return Mage_SalesRule_Model_Resource_Rule
     */
    protected function _afterSave(Mage_Core_Model_Abstract $object)
    {
        if ($object->hasStoreLabels()) {
            $this->saveStoreLabels($object->getId(), $object->getStoreLabels());
        }

        // Accept both array and comma-separated string forms for both lists.
        if ($object->hasWebsiteIds()) {
            $websiteIds = $object->getWebsiteIds();
            if (!is_array($websiteIds)) {
                $websiteIds = explode(',', (string)$websiteIds);
            }
            $this->bindRuleToEntity($object->getId(), $websiteIds, 'website');
        }

        if ($object->hasCustomerGroupIds()) {
            $customerGroupIds = $object->getCustomerGroupIds();
            if (!is_array($customerGroupIds)) {
                $customerGroupIds = explode(',', (string)$customerGroupIds);
            }
            $this->bindRuleToEntity($object->getId(), $customerGroupIds, 'customer_group');
        }

        // Save product attributes used in rule (extracted from the serialized
        // condition and action trees).
        $ruleProductAttributes = array_merge(
            $this->getProductAttributes(serialize($object->getConditions()->asArray())),
            $this->getProductAttributes(serialize($object->getActions()->asArray()))
        );
        if (count($ruleProductAttributes)) {
            $this->setActualProductAttributes($object, $ruleProductAttributes);
        }

        // Update auto-generated specific coupons if they exist
        if ($object->getUseAutoGeneration() && $object->hasDataChanges()) {
            Mage::getResourceModel('salesrule/coupon')->updateSpecificCoupons($object);
        }
        return parent::_afterSave($object);
    }

    /**
     * Retrieve coupon/rule uses for specified customer
     *
     * NOTE(review): the table alias 'rule_customer' lacks the 'salesrule/'
     * module prefix used elsewhere in this class — verify it resolves.
     *
     * @param Mage_SalesRule_Model_Rule $rule
     * @param int $customerId
     *
     * @return string
     */
    public function getCustomerUses($rule, $customerId)
    {
        $read = $this->_getReadAdapter();
        $select = $read->select()->from($this->getTable('rule_customer'), array('cnt'=>'count(*)'))
            ->where('rule_id = :rule_id')
            ->where('customer_id = :customer_id');
        return $read->fetchOne($select, array(':rule_id' => $rule->getRuleId(), ':customer_id' => $customerId));
    }

    /**
     * Save rule labels for different store views
     *
     * Non-empty labels are upserted; empty labels delete the row for that
     * store. Runs inside a transaction so a partial update is rolled back.
     *
     * @param int $ruleId
     * @param array $labels  store_id => label
     *
     * @return Mage_SalesRule_Model_Resource_Rule
     */
    public function saveStoreLabels($ruleId, $labels)
    {
        $deleteByStoreIds = array();
        $table   = $this->getTable('salesrule/label');
        $adapter = $this->_getWriteAdapter();

        $data = array();
        foreach ($labels as $storeId => $label) {
            if (Mage::helper('core/string')->strlen($label)) {
                $data[] = array('rule_id' => $ruleId, 'store_id' => $storeId, 'label' => $label);
            } else {
                $deleteByStoreIds[] = $storeId;
            }
        }

        $adapter->beginTransaction();
        try {
            if (!empty($data)) {
                $adapter->insertOnDuplicate(
                    $table,
                    $data,
                    array('label')
                );
            }

            if (!empty($deleteByStoreIds)) {
                $adapter->delete($table, array(
                    'rule_id=?'        => $ruleId,
                    'store_id IN (?)'  => $deleteByStoreIds
                ));
            }
        } catch (Exception $e) {
            $adapter->rollback();
            throw $e;
        }
        $adapter->commit();

        return $this;
    }

    /**
     * Get all existing rule labels
     *
     * @param int $ruleId
     * @return array  store_id => label
     */
    public function getStoreLabels($ruleId)
    {
        $select = $this->_getReadAdapter()->select()
            ->from($this->getTable('salesrule/label'), array('store_id', 'label'))
            ->where('rule_id = :rule_id');
        return $this->_getReadAdapter()->fetchPairs($select, array(':rule_id' => $ruleId));
    }

    /**
     * Get rule label by specific store id
     *
     * Falls back to the store-0 (default) label: ordering by store_id DESC
     * makes the store-specific row win when both exist.
     *
     * @param int $ruleId
     * @param int $storeId
     * @return string
     */
    public function getStoreLabel($ruleId, $storeId)
    {
        $select = $this->_getReadAdapter()->select()
            ->from($this->getTable('salesrule/label'), 'label')
            ->where('rule_id = :rule_id')
            ->where('store_id IN(0, :store_id)')
            ->order('store_id DESC');
        return $this->_getReadAdapter()->fetchOne($select, array(':rule_id' => $ruleId, ':store_id' => $storeId));
    }

    /**
     * Return codes of all product attributes currently used in promo rules
     * for specified customer group and website
     *
     * NOTE(review): $websiteId and $customerGroupId are never used in the
     * query below — the result is unfiltered; confirm intended behavior.
     *
     * @param int $websiteId
     * @param int $customerGroupId
     * @return mixed
     */
    public function getActiveAttributes($websiteId, $customerGroupId)
    {
        $read = $this->_getReadAdapter();
        $select = $read->select()
            ->from(array('a' => $this->getTable('salesrule/product_attribute')),
                new Zend_Db_Expr('DISTINCT ea.attribute_code'))
            ->joinInner(array('ea' => $this->getTable('eav/attribute')), 'ea.attribute_id = a.attribute_id', array());
        return $read->fetchAll($select);
    }

    /**
     * Save product attributes currently used in conditions and actions of rule
     *
     * Replaces all existing rows for the rule, then inserts one row per
     * (customer group, website, attribute) combination.
     *
     * @param Mage_SalesRule_Model_Rule $rule
     * @param mixed $attributes  attribute codes
     * @return Mage_SalesRule_Model_Resource_Rule
     */
    public function setActualProductAttributes($rule, $attributes)
    {
        $write = $this->_getWriteAdapter();
        $write->delete($this->getTable('salesrule/product_attribute'), array('rule_id=?' => $rule->getId()));

        //Getting attribute IDs for attribute codes
        $attributeIds = array();
        $select = $this->_getReadAdapter()->select()
            ->from(array('a' => $this->getTable('eav/attribute')), array('a.attribute_id'))
            ->where('a.attribute_code IN (?)', array($attributes));
        $attributesFound = $this->_getReadAdapter()->fetchAll($select);
        if ($attributesFound) {
            foreach ($attributesFound as $attribute) {
                $attributeIds[] = $attribute['attribute_id'];
            }

            $data = array();
            foreach ($rule->getCustomerGroupIds() as $customerGroupId) {
                foreach ($rule->getWebsiteIds() as $websiteId) {
                    foreach ($attributeIds as $attribute) {
                        $data[] = array (
                            'rule_id'           => $rule->getId(),
                            'website_id'        => $websiteId,
                            'customer_group_id' => $customerGroupId,
                            'attribute_id'      => $attribute
                        );
                    }
                }
            }
            $write->insertMultiple($this->getTable('salesrule/product_attribute'), $data);
        }

        return $this;
    }

    /**
     * Collect all product attributes used in serialized rule's action or condition
     *
     * Scans the serialized tree for product-condition entries and pulls out
     * their attribute codes via regex.
     *
     * @param string $serializedString
     *
     * @return array  attribute codes
     */
    public function getProductAttributes($serializedString)
    {
        $result = array();
        if (preg_match_all('~s:32:"salesrule/rule_condition_product";s:9:"attribute";s:\d+:"(.*?)"~s',
            $serializedString, $matches)){
            foreach ($matches[1] as $offset => $attributeCode) {
                $result[] = $attributeCode;
            }
        }

        return $result;
    }
}
{ "content_hash": "805cc0dcd6054d3a88b164b70de38697", "timestamp": "", "source": "github", "line_count": 295, "max_line_length": 118, "avg_line_length": 33.0135593220339, "alnum_prop": 0.54009651914981, "repo_name": "Louspirit/E-Commerce-Website", "id": "384bb1b9b36c1200a908afa7937a061a4d0fe4c4", "size": "10697", "binary": false, "copies": "15", "ref": "refs/heads/master", "path": "app/code/core/Mage/SalesRule/Model/Resource/Rule.php", "mode": "33188", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "19946" }, { "name": "CSS", "bytes": "1655613" }, { "name": "JavaScript", "bytes": "1036085" }, { "name": "PHP", "bytes": "44369436" }, { "name": "PowerShell", "bytes": "1028" }, { "name": "Ruby", "bytes": "288" }, { "name": "Shell", "bytes": "1753" }, { "name": "XSLT", "bytes": "2066" } ], "symlink_target": "" }
title: TakeShape
repo:
date: 2018-06-30T02:53:36.000+00:00
weight: 3
description: 'A stress-free CMS, Instant API, and Static Site Generator to help you get your projects done.'
tools:
- "Content Management"
- "Static Site Generator"
license: Commercial
data_model: API-based
language: ''
related_tools: []
tags: []
urls:
  website: https://www.takeshape.io/
  github: ''
  twitter: https://twitter.com/TakeShapeCMS
  other: ''
resources: []
cat_test: ''
---
CMS, Instant API, and a built-in static site generator with a flexible asset pipeline, a local image proxy, and an easy-to-use templating language (Nunjucks). Powered by GraphQL.
{ "content_hash": "9354e7f8eef47e348b3d67c70d455708", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 175, "avg_line_length": 26.583333333333332, "alnum_prop": 0.7304075235109718, "repo_name": "budparr/thenewdynamic", "id": "c7697d266f6df0c185868a6f257b4b79f31e91b3", "size": "642", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "content/tool/takeshape.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "79576" }, { "name": "HTML", "bytes": "60524" }, { "name": "JavaScript", "bytes": "2712" }, { "name": "Ruby", "bytes": "3024" } ], "symlink_target": "" }
package server

import (
	"sync"
	"sync/atomic"
	"testing"
	"time"
)

// TestDispatcher dispatches jobs from two concurrent goroutines and verifies
// that every dispatched job is processed exactly once.
//
// Renamed from TestMain: the name TestMain is reserved by the testing package
// for the hook `func TestMain(m *testing.M)`; `go vet` (run by default by
// `go test`) rejects a TestMain with a *testing.T parameter as a wrong
// signature, so the old name broke the test run.
func TestDispatcher(t *testing.T) {
	var sent, processed uint64
	wg := sync.WaitGroup{}

	// Worker callback: count the processed job and mark it done.
	process := func(w *Worker, p Job) error {
		atomic.AddUint64(&processed, 1)
		wg.Done()
		return nil
	}

	dispatcher := NewDispatcher(2, 10, process)
	dispatcher.Run()

	// Each spammer dispatches 5 jobs, sleeping briefly between jobs so the
	// two goroutines interleave.
	spam := func() {
		for i := 0; i < 5; i++ {
			atomic.AddUint64(&sent, 1)
			work := Job{Payload: Payload{"1"}}
			dispatcher.DispatchJob(&work)
			time.Sleep(time.Microsecond)
		}
	}

	wg.Add(10)
	go spam()
	go spam()
	wg.Wait()
	dispatcher.Stop()

	// Read the counters with atomic loads for consistency with the atomic
	// writers above (wg.Wait has already synchronized all increments).
	sentFinal := atomic.LoadUint64(&sent)
	processedFinal := atomic.LoadUint64(&processed)
	if sentFinal != processedFinal || sentFinal != 10 {
		t.Errorf("not all jobs were processed %v jobs %v processed", sentFinal, processedFinal)
	}
}
{ "content_hash": "1491e27298507303dfc3be9a644fb7bd", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 62, "avg_line_length": 16.651162790697676, "alnum_prop": 0.6396648044692738, "repo_name": "bgadrian/go-worker-thread-pool", "id": "b1c5e9299993e4ddb995b0f242e94f41eca4f060", "size": "716", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/dispatcher_test.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "7068" }, { "name": "HTML", "bytes": "954" }, { "name": "JavaScript", "bytes": "1917" } ], "symlink_target": "" }
// Copyright 2020 The Chromium Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.feed; import org.chromium.chrome.browser.feed.ScrollListener.ScrollState; import org.chromium.components.feature_engagement.FeatureConstants; import org.chromium.components.feature_engagement.Tracker; import org.chromium.components.feature_engagement.TriggerState; /** * Creates a ScrollListener that triggers the menu IPH. The listener removes itself from the * list of observers when the IPH is determined to be already triggered. * * Triggering the IPH is based on (1) the fraction of scroll done on the stream proportionally * to its height, (2) the transition fraction of the top search bar, and (3) the position of the * menu button in the stream. * * We want the IPH to be triggered when the section header is properly positioned in the stream * which has to meet the following conditions: (1) the IPH popup won't interfere with the search * bar at the top of the NTP, (2) the user has scrolled down a bit because they want to look at * the feed, and (3) the feed header with its menu button is high enough in the stream to have * the feed visible. The goal of conditions (2) and (3) is to show the IPH when the signals are * that the user wants to interact with the feed are strong. 
*/ public class HeaderIphScrollListener implements ScrollListener { private static final float MIN_SCROLL_FRACTION = 0.1f; private static final float MAX_HEADER_POS_FRACTION = 0.35f; private final FeedBubbleDelegate mDelegate; private final ScrollableContainerDelegate mScrollableContainerDelegate; private final Runnable mShowIPHRunnable; private float mMinScrollFraction; private float mHeaderMaxPosFraction; HeaderIphScrollListener(FeedBubbleDelegate delegate, ScrollableContainerDelegate scrollableContainerDelegate, Runnable showIPHRunnable) { mDelegate = delegate; mScrollableContainerDelegate = scrollableContainerDelegate; mShowIPHRunnable = showIPHRunnable; mMinScrollFraction = MIN_SCROLL_FRACTION; mHeaderMaxPosFraction = MAX_HEADER_POS_FRACTION; } @Override public void onScrollStateChanged(@ScrollState int state) { if (state != ScrollState.IDLE) return; maybeTriggerIPH(mScrollableContainerDelegate.getVerticalScrollOffset()); } @Override public void onScrolled(int dx, int dy) {} @Override public void onHeaderOffsetChanged(int verticalOffset) { if (verticalOffset == 0) return; // Negate the vertical offset because it is inversely proportional to the scroll offset. // For example, a header verical offset of -50px corresponds to a scroll offset of 50px. maybeTriggerIPH(-verticalOffset); } private void maybeTriggerIPH(int verticalScrollOffset) { // Get the feature tracker for the IPH and determine whether to show the IPH. final String featureForIph = FeatureConstants.FEED_HEADER_MENU_FEATURE; final Tracker tracker = mDelegate.getFeatureEngagementTracker(); // Stop listening to scroll if the IPH was already displayed in the past. if (tracker.getTriggerState(featureForIph) == TriggerState.HAS_BEEN_DISPLAYED) { mScrollableContainerDelegate.removeScrollListener(this); return; } // Check whether the feed is expanded. if (!mDelegate.isFeedExpanded()) return; // Check whether the user is signed in. 
if (!mDelegate.isSignedIn()) return; // Check that enough scrolling was done proportionally to the stream height. if ((float) verticalScrollOffset < (float) mScrollableContainerDelegate.getRootViewHeight() * mMinScrollFraction) { return; } // Check that the feed header is well positioned in the recycler view to show the IPH. if (!mDelegate.isFeedHeaderPositionInContainerSuitableForIPH(mHeaderMaxPosFraction)) { return; } mShowIPHRunnable.run(); } }
{ "content_hash": "e06df7a6728cdacdd103aa34b8db5486", "timestamp": "", "source": "github", "line_count": 96, "max_line_length": 98, "avg_line_length": 43.25, "alnum_prop": 0.726878612716763, "repo_name": "chromium/chromium", "id": "13093f0fc3d33e04de5442da5143c445b8ab5f74", "size": "4152", "binary": false, "copies": "6", "ref": "refs/heads/main", "path": "chrome/browser/feed/android/java/src/org/chromium/chrome/browser/feed/HeaderIphScrollListener.java", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>basic_deadline_timer::cancel_one (2 of 2 overloads)</title> <link rel="stylesheet" href="../../../../boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.75.2"> <link rel="home" href="../../../../index.html" title="Asio"> <link rel="up" href="../cancel_one.html" title="basic_deadline_timer::cancel_one"> <link rel="prev" href="overload1.html" title="basic_deadline_timer::cancel_one (1 of 2 overloads)"> <link rel="next" href="../duration_type.html" title="basic_deadline_timer::duration_type"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr><td valign="top"><img alt="asio C++ library" width="250" height="60" src="../../../../asio.png"></td></tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="overload1.html"><img src="../../../../prev.png" alt="Prev"></a><a accesskey="u" href="../cancel_one.html"><img src="../../../../up.png" alt="Up"></a><a accesskey="h" href="../../../../index.html"><img src="../../../../home.png" alt="Home"></a><a accesskey="n" href="../duration_type.html"><img src="../../../../next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h5 class="title"> <a name="asio.reference.basic_deadline_timer.cancel_one.overload2"></a><a class="link" href="overload2.html" title="basic_deadline_timer::cancel_one (2 of 2 overloads)">basic_deadline_timer::cancel_one (2 of 2 overloads)</a> </h5></div></div></div> <p> Cancels one asynchronous operation that is waiting on the timer. 
</p> <pre class="programlisting"><span class="identifier">std</span><span class="special">::</span><span class="identifier">size_t</span> <span class="identifier">cancel_one</span><span class="special">(</span> <span class="identifier">asio</span><span class="special">::</span><span class="identifier">error_code</span> <span class="special">&amp;</span> <span class="identifier">ec</span><span class="special">);</span> </pre> <p> This function forces the completion of one pending asynchronous wait operation against the timer. Handlers are cancelled in FIFO order. The handler for the cancelled operation will be invoked with the <code class="computeroutput"><span class="identifier">asio</span><span class="special">::</span><span class="identifier">error</span><span class="special">::</span><span class="identifier">operation_aborted</span></code> error code. </p> <p> Cancelling the timer does not change the expiry time. </p> <h6> <a name="asio.reference.basic_deadline_timer.cancel_one.overload2.h0"></a> <span><a name="asio.reference.basic_deadline_timer.cancel_one.overload2.parameters"></a></span><a class="link" href="overload2.html#asio.reference.basic_deadline_timer.cancel_one.overload2.parameters">Parameters</a> </h6> <div class="variablelist"> <p class="title"><b></b></p> <dl> <dt><span class="term">ec</span></dt> <dd><p> Set to indicate what error occurred, if any. </p></dd> </dl> </div> <h6> <a name="asio.reference.basic_deadline_timer.cancel_one.overload2.h1"></a> <span><a name="asio.reference.basic_deadline_timer.cancel_one.overload2.return_value"></a></span><a class="link" href="overload2.html#asio.reference.basic_deadline_timer.cancel_one.overload2.return_value">Return Value</a> </h6> <p> The number of asynchronous operations that were cancelled. That is, either 0 or 1. 
</p> <h6> <a name="asio.reference.basic_deadline_timer.cancel_one.overload2.h2"></a> <span><a name="asio.reference.basic_deadline_timer.cancel_one.overload2.remarks"></a></span><a class="link" href="overload2.html#asio.reference.basic_deadline_timer.cancel_one.overload2.remarks">Remarks</a> </h6> <p> If the timer has already expired when <code class="computeroutput"><span class="identifier">cancel_one</span><span class="special">()</span></code> is called, then the handlers for asynchronous wait operations will: </p> <div class="itemizedlist"><ul class="itemizedlist" type="disc"> <li class="listitem"> have already been invoked; or </li> <li class="listitem"> have been queued for invocation in the near future. </li> </ul></div> <p> These handlers can no longer be cancelled, and therefore are passed an error code that indicates the successful completion of the wait operation. </p> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2003-2015 Christopher M. Kohlhoff<p> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="overload1.html"><img src="../../../../prev.png" alt="Prev"></a><a accesskey="u" href="../cancel_one.html"><img src="../../../../up.png" alt="Up"></a><a accesskey="h" href="../../../../index.html"><img src="../../../../home.png" alt="Home"></a><a accesskey="n" href="../duration_type.html"><img src="../../../../next.png" alt="Next"></a> </div> </body> </html>
{ "content_hash": "cc71e721bd9d4e8b90a48b7e5b42a055", "timestamp": "", "source": "github", "line_count": 94, "max_line_length": 359, "avg_line_length": 60.06382978723404, "alnum_prop": 0.6409847679773291, "repo_name": "jeanleflambeur/silkopter", "id": "d4fcb3fc536ce60de4960b8645cf81255e667140", "size": "5646", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "asio/doc/asio/reference/basic_deadline_timer/cancel_one/overload2.html", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "110" }, { "name": "C", "bytes": "2221045" }, { "name": "C++", "bytes": "24409548" }, { "name": "CMake", "bytes": "216809" }, { "name": "CSS", "bytes": "34635" }, { "name": "Cuda", "bytes": "4881" }, { "name": "Fortran", "bytes": "1315584" }, { "name": "HTML", "bytes": "18916392" }, { "name": "JavaScript", "bytes": "7839" }, { "name": "Lex", "bytes": "3749" }, { "name": "Lua", "bytes": "3762" }, { "name": "M4", "bytes": "9302" }, { "name": "Objective-C", "bytes": "2096" }, { "name": "Objective-C++", "bytes": "168" }, { "name": "Perl", "bytes": "6547" }, { "name": "Python", "bytes": "8937" }, { "name": "QML", "bytes": "10680" }, { "name": "QMake", "bytes": "101603" }, { "name": "Shell", "bytes": "73534" } ], "symlink_target": "" }
(function () { 'use strict'; describe('Serviceproviders List Controller Tests', function () { // Initialize global variables var ServiceprovidersListController, $scope, $httpBackend, $state, Authentication, ServiceprovidersService, mockServiceprovider; // The $resource service augments the response object with methods for updating and deleting the resource. // If we were to use the standard toEqual matcher, our tests would fail because the test values would not match // the responses exactly. To solve the problem, we define a new toEqualData Jasmine matcher. // When the toEqualData matcher compares two objects, it takes only object properties into // account and ignores methods. beforeEach(function () { jasmine.addMatchers({ toEqualData: function (util, customEqualityTesters) { return { compare: function (actual, expected) { return { pass: angular.equals(actual, expected) }; } }; } }); }); // Then we can start by loading the main application module beforeEach(module(ApplicationConfiguration.applicationModuleName)); // The injector ignores leading and trailing underscores here (i.e. _$httpBackend_). // This allows us to inject a service but then attach it to a variable // with the same name as the service. beforeEach(inject(function ($controller, $rootScope, _$state_, _$httpBackend_, _Authentication_, _ServiceprovidersService_) { // Set a new global scope $scope = $rootScope.$new(); // Point global variables to injected services $httpBackend = _$httpBackend_; $state = _$state_; Authentication = _Authentication_; ServiceprovidersService = _ServiceprovidersService_; // create mock article mockServiceprovider = new ServiceprovidersService({ _id: '525a8422f6d0f87f0e407a33', name: 'Serviceprovider Name' }); // Mock logged in user Authentication.user = { roles: ['user'] }; // Initialize the Serviceproviders List controller. 
ServiceprovidersListController = $controller('ServiceprovidersListController as vm', { $scope: $scope }); // Spy on state go spyOn($state, 'go'); })); describe('Instantiate', function () { var mockServiceproviderList; beforeEach(function () { mockServiceproviderList = [mockServiceprovider, mockServiceprovider]; }); it('should send a GET request and return all Serviceproviders', inject(function (ServiceprovidersService) { // Set POST response $httpBackend.expectGET('api/serviceproviders').respond(mockServiceproviderList); $httpBackend.flush(); // Test form inputs are reset expect($scope.vm.serviceproviders.length).toEqual(2); expect($scope.vm.serviceproviders[0]).toEqual(mockServiceprovider); expect($scope.vm.serviceproviders[1]).toEqual(mockServiceprovider); })); }); }); }());
{ "content_hash": "aba84872ced7f55bf39a6e4c42d3d246", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 129, "avg_line_length": 34.252747252747255, "alnum_prop": 0.6515880654475458, "repo_name": "marreo/cliguru", "id": "8fb1bd9598859992a806620a33054c4d41d70fa9", "size": "3117", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "modules/serviceproviders/tests/client/list-serviceproviders.client.controller.tests.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2155" }, { "name": "HTML", "bytes": "38039" }, { "name": "JavaScript", "bytes": "339769" }, { "name": "Shell", "bytes": "685" } ], "symlink_target": "" }
package org.xdi.model.custom.script.type.client; import java.util.Map; import org.xdi.model.SimpleCustomProperty; /** * Dummy implementation of interface ClientRegistrationType * * @author Yuriy Movchan Date: 11/11/2014 */ public class DummyClientRegistrationType implements ClientRegistrationType { @Override public boolean init(Map<String, SimpleCustomProperty> configurationAttributes) { return true; } @Override public boolean destroy(Map<String, SimpleCustomProperty> configurationAttributes) { return true; } @Override public int getApiVersion() { return 1; } @Override public boolean updateClient(Object registerRequest, Object client, Map<String, SimpleCustomProperty> configurationAttributes) { return false; } }
{ "content_hash": "8c50aa677f3cb3753d5309dd2f579ef6", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 131, "avg_line_length": 23.941176470588236, "alnum_prop": 0.7235872235872236, "repo_name": "madumlao/oxCore", "id": "1b0f7a5c7ac528b2a2064b390c80f8fe273a4a36", "size": "958", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "oxService/src/main/java/org/xdi/model/custom/script/type/client/DummyClientRegistrationType.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "1014370" } ], "symlink_target": "" }
module Fog module Compute class AWS class Real require 'fog/aws/parsers/compute/create_image' # Create a bootable EBS volume AMI # # ==== Parameters # * instance_id<~String> - Instance used to create image. # * name<~Name> - Name to give image. # * description<~Name> - Description of image. # * no_reboot<~Boolean> - Optional, whether or not to reboot the image when making the snapshot # # ==== Returns # * response<~Excon::Response>: # * body<~Hash>: # * 'imageId'<~String> - The ID of the created AMI. # * 'requestId'<~String> - Id of request. # # {Amazon API Reference}[http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/ApiReference-query-CreateImage.html] def create_image(instance_id, name, description, no_reboot = false, options={}) params = {} block_device_mappings = options[:block_device_mappings] || [] params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.DeviceName', block_device_mappings.map{|mapping| mapping['DeviceName']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.NoDevice', block_device_mappings.map{|mapping| mapping['NoDevice']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.VirtualName', block_device_mappings.map{|mapping| mapping['VirtualName']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.Ebs.SnapshotId', block_device_mappings.map{|mapping| mapping['Ebs.SnapshotId']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.Ebs.DeleteOnTermination', block_device_mappings.map{|mapping| mapping['Ebs.DeleteOnTermination']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.Ebs.VolumeType', block_device_mappings.map{|mapping| mapping['Ebs.VolumeType']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.Ebs.Iops', block_device_mappings.map{|mapping| mapping['Ebs.Iops']}) params.reject!{|k,v| v.nil?} request({ 'Action' => 'CreateImage', 'InstanceId' => instance_id, 'Name' => name, 'Description' => description, 'NoReboot' => no_reboot.to_s, :parser => 
Fog::Parsers::Compute::AWS::CreateImage.new }.merge!(params)) end end class Mock # Usage # # AWS[:compute].create_image("i-ac65ee8c", "test", "something") # def create_image(instance_id, name, description, no_reboot = false, options = {}) params = {} block_device_mappings = options[:block_device_mappings] || [] params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.DeviceName', block_device_mappings.map{|mapping| mapping['DeviceName']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.NoDevice', block_device_mappings.map{|mapping| mapping['NoDevice']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.VirtualName', block_device_mappings.map{|mapping| mapping['VirtualName']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.Ebs.SnapshotId', block_device_mappings.map{|mapping| mapping['Ebs.SnapshotId']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.Ebs.DeleteOnTermination', block_device_mappings.map{|mapping| mapping['Ebs.DeleteOnTermination']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.Ebs.VolumeType', block_device_mappings.map{|mapping| mapping['Ebs.VolumeType']}) params.merge!Fog::AWS.indexed_param('BlockDeviceMapping.%d.Ebs.Iops', block_device_mappings.map{|mapping| mapping['Ebs.Iops']}) params.reject!{|k,v| v.nil?} reserved_ebs_root_device = '/dev/sda1' block_devices = options.delete(:block_device_mappings) || [] register_image_response = register_image(name, description, reserved_ebs_root_device, block_devices, options) response = Excon::Response.new if instance_id && !name.empty? response.status = 200 response.body = { 'requestId' => Fog::AWS::Mock.request_id, 'imageId' => register_image_response.body['imageId'] } else response.status = 400 response.body = { 'Code' => 'InvalidParameterValue' } if name.empty? response.body['Message'] = "Invalid value '' for name. Must be specified." end end response end end end end end
{ "content_hash": "261c1f8e3fdc8d157ece977276f35aeb", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 167, "avg_line_length": 51.51086956521739, "alnum_prop": 0.6136315678413168, "repo_name": "jreichhold/chef-repo", "id": "6a2f5d14db90058bf534a1af99663971d4e81524", "size": "4739", "binary": false, "copies": "13", "ref": "refs/heads/master", "path": "vendor/ruby/2.0.0/gems/fog-1.20.0/lib/fog/aws/requests/compute/create_image.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Ruby", "bytes": "33062" } ], "symlink_target": "" }
immunization-backend ==================== Immunization Backend - JSON RESTful Web API w/ Express.js, MongoDB 2013 Bill &amp; Melinda Gates Immunization App [Test Deployment](http://immunization-api.herokuapp.com/) [See the iPad app that uses this API!](http://www.youtube.com/watch?v=k--setjDyEQ) ## Backend API ### Fields in Sample Patient Data * firstName * middleName * lastName * fatherFullName * motherFullName * motherMaidenName * birthYear * birthMonth * birthDay * gender * contactPhone * contactEmail * contactStreetAddress * contactCity * contactState * contactZip * contactCountry * picture * bloodType * alergies * diseaseHistory * notes ### /login (Post) * Request: * username * password * Response: * status: "success" or "failure" * firstName: < firstName > ### /logout (Post) * Response: * status: "success" or "failure" ### /patients (Get) Retrieves all patients * Response: * < array of all patients > ### /patients/:id (Get) Finds a particular patient * Response: * if patient found: < patient > * if patient not found: status: "failure" ### /patients/:id (Post) Updates a patient's info * Request: < fields to change > * Response: * status: "success" or "failure" ### /search (Post) * Request: * < fields to match on > * Response list: * < array of matched patients > ### /populate (Get) * Reload the database in case something bad happened. Note, no authentication is required for this: the database can be reset with a web browser. ### /email/:id (Get) * Response: * status: "success" or "failure"
{ "content_hash": "6a07774909d634aa8d58fd07850f18ba", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 145, "avg_line_length": 19.772151898734176, "alnum_prop": 0.6901408450704225, "repo_name": "errantmind/immunization-backend", "id": "f001a01ffadd971ae72cbbd0256bce3e15d2dc7f", "size": "1562", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "18848" } ], "symlink_target": "" }
#include <linux/fs.h> #include <linux/mount.h> #include <linux/compat.h> #include <cluster/masklog.h> #include "ocfs2.h" #include "alloc.h" #include "dlmglue.h" #include "file.h" #include "inode.h" #include "journal.h" #include "ocfs2_fs.h" #include "ioctl.h" #include "resize.h" #include "refcounttree.h" #include "sysfile.h" #include "dir.h" #include "buffer_head_io.h" #include "suballoc.h" #include "move_extents.h" #define o2info_from_user(a, b) \ copy_from_user(&(a), (b), sizeof(a)) #define o2info_to_user(a, b) \ copy_to_user((typeof(a) __user *)b, &(a), sizeof(a)) /* * This call is void because we are already reporting an error that may * be -EFAULT. The error will be returned from the ioctl(2) call. It's * just a best-effort to tell userspace that this request caused the error. */ static inline void o2info_set_request_error(struct ocfs2_info_request *kreq, struct ocfs2_info_request __user *req) { kreq->ir_flags |= OCFS2_INFO_FL_ERROR; (void)put_user(kreq->ir_flags, (__u32 __user *)&(req->ir_flags)); } static inline void o2info_set_request_filled(struct ocfs2_info_request *req) { req->ir_flags |= OCFS2_INFO_FL_FILLED; } static inline void o2info_clear_request_filled(struct ocfs2_info_request *req) { req->ir_flags &= ~OCFS2_INFO_FL_FILLED; } static inline int o2info_coherent(struct ocfs2_info_request *req) { return (!(req->ir_flags & OCFS2_INFO_FL_NON_COHERENT)); } static int ocfs2_get_inode_attr(struct inode *inode, unsigned *flags) { int status; status = ocfs2_inode_lock(inode, NULL, 0); if (status < 0) { mlog_errno(status); return status; } ocfs2_get_inode_flags(OCFS2_I(inode)); *flags = OCFS2_I(inode)->ip_attr; ocfs2_inode_unlock(inode, 0); return status; } static int ocfs2_set_inode_attr(struct inode *inode, unsigned flags, unsigned mask) { struct ocfs2_inode_info *ocfs2_inode = OCFS2_I(inode); struct ocfs2_super *osb = OCFS2_SB(inode->i_sb); handle_t *handle = NULL; struct buffer_head *bh = NULL; unsigned oldflags; int status; mutex_lock(&inode->i_mutex); 
status = ocfs2_inode_lock(inode, &bh, 1); if (status < 0) { mlog_errno(status); goto bail; } status = -EACCES; if (!inode_owner_or_capable(inode)) goto bail_unlock; if (!S_ISDIR(inode->i_mode)) flags &= ~OCFS2_DIRSYNC_FL; oldflags = ocfs2_inode->ip_attr; flags = flags & mask; flags |= oldflags & ~mask; /* * The IMMUTABLE and APPEND_ONLY flags can only be changed by * the relevant capability. */ status = -EPERM; if ((oldflags & OCFS2_IMMUTABLE_FL) || ((flags ^ oldflags) & (OCFS2_APPEND_FL | OCFS2_IMMUTABLE_FL))) { if (!capable(CAP_LINUX_IMMUTABLE)) goto bail_unlock; } handle = ocfs2_start_trans(osb, OCFS2_INODE_UPDATE_CREDITS); if (IS_ERR(handle)) { status = PTR_ERR(handle); mlog_errno(status); goto bail_unlock; } ocfs2_inode->ip_attr = flags; ocfs2_set_inode_flags(inode); status = ocfs2_mark_inode_dirty(handle, inode, bh); if (status < 0) mlog_errno(status); ocfs2_commit_trans(osb, handle); bail_unlock: ocfs2_inode_unlock(inode, 1); bail: mutex_unlock(&inode->i_mutex); brelse(bh); return status; } int ocfs2_info_handle_blocksize(struct inode *inode, struct ocfs2_info_request __user *req) { int status = -EFAULT; struct ocfs2_info_blocksize oib; if (o2info_from_user(oib, req)) goto bail; oib.ib_blocksize = inode->i_sb->s_blocksize; o2info_set_request_filled(&oib.ib_req); if (o2info_to_user(oib, req)) goto bail; status = 0; bail: if (status) o2info_set_request_error(&oib.ib_req, req); return status; } int ocfs2_info_handle_clustersize(struct inode *inode, struct ocfs2_info_request __user *req) { int status = -EFAULT; struct ocfs2_info_clustersize oic; struct ocfs2_super *osb = OCFS2_SB(inode->i_sb); if (o2info_from_user(oic, req)) goto bail; oic.ic_clustersize = osb->s_clustersize; o2info_set_request_filled(&oic.ic_req); if (o2info_to_user(oic, req)) goto bail; status = 0; bail: if (status) o2info_set_request_error(&oic.ic_req, req); return status; } int ocfs2_info_handle_maxslots(struct inode *inode, struct ocfs2_info_request __user *req) { int status = -EFAULT; 
struct ocfs2_info_maxslots oim; struct ocfs2_super *osb = OCFS2_SB(inode->i_sb); if (o2info_from_user(oim, req)) goto bail; oim.im_max_slots = osb->max_slots; o2info_set_request_filled(&oim.im_req); if (o2info_to_user(oim, req)) goto bail; status = 0; bail: if (status) o2info_set_request_error(&oim.im_req, req); return status; } int ocfs2_info_handle_label(struct inode *inode, struct ocfs2_info_request __user *req) { int status = -EFAULT; struct ocfs2_info_label oil; struct ocfs2_super *osb = OCFS2_SB(inode->i_sb); if (o2info_from_user(oil, req)) goto bail; memcpy(oil.il_label, osb->vol_label, OCFS2_MAX_VOL_LABEL_LEN); o2info_set_request_filled(&oil.il_req); if (o2info_to_user(oil, req)) goto bail; status = 0; bail: if (status) o2info_set_request_error(&oil.il_req, req); return status; } int ocfs2_info_handle_uuid(struct inode *inode, struct ocfs2_info_request __user *req) { int status = -EFAULT; struct ocfs2_info_uuid oiu; struct ocfs2_super *osb = OCFS2_SB(inode->i_sb); if (o2info_from_user(oiu, req)) goto bail; memcpy(oiu.iu_uuid_str, osb->uuid_str, OCFS2_TEXT_UUID_LEN + 1); o2info_set_request_filled(&oiu.iu_req); if (o2info_to_user(oiu, req)) goto bail; status = 0; bail: if (status) o2info_set_request_error(&oiu.iu_req, req); return status; } int ocfs2_info_handle_fs_features(struct inode *inode, struct ocfs2_info_request __user *req) { int status = -EFAULT; struct ocfs2_info_fs_features oif; struct ocfs2_super *osb = OCFS2_SB(inode->i_sb); if (o2info_from_user(oif, req)) goto bail; oif.if_compat_features = osb->s_feature_compat; oif.if_incompat_features = osb->s_feature_incompat; oif.if_ro_compat_features = osb->s_feature_ro_compat; o2info_set_request_filled(&oif.if_req); if (o2info_to_user(oif, req)) goto bail; status = 0; bail: if (status) o2info_set_request_error(&oif.if_req, req); return status; } int ocfs2_info_handle_journal_size(struct inode *inode, struct ocfs2_info_request __user *req) { int status = -EFAULT; struct ocfs2_info_journal_size oij; struct 
ocfs2_super *osb = OCFS2_SB(inode->i_sb); if (o2info_from_user(oij, req)) goto bail; oij.ij_journal_size = osb->journal->j_inode->i_size; o2info_set_request_filled(&oij.ij_req); if (o2info_to_user(oij, req)) goto bail; status = 0; bail: if (status) o2info_set_request_error(&oij.ij_req, req); return status; } int ocfs2_info_scan_inode_alloc(struct ocfs2_super *osb, struct inode *inode_alloc, u64 blkno, struct ocfs2_info_freeinode *fi, u32 slot) { int status = 0, unlock = 0; struct buffer_head *bh = NULL; struct ocfs2_dinode *dinode_alloc = NULL; if (inode_alloc) mutex_lock(&inode_alloc->i_mutex); if (o2info_coherent(&fi->ifi_req)) { status = ocfs2_inode_lock(inode_alloc, &bh, 0); if (status < 0) { mlog_errno(status); goto bail; } unlock = 1; } else { status = ocfs2_read_blocks_sync(osb, blkno, 1, &bh); if (status < 0) { mlog_errno(status); goto bail; } } dinode_alloc = (struct ocfs2_dinode *)bh->b_data; fi->ifi_stat[slot].lfi_total = le32_to_cpu(dinode_alloc->id1.bitmap1.i_total); fi->ifi_stat[slot].lfi_free = le32_to_cpu(dinode_alloc->id1.bitmap1.i_total) - le32_to_cpu(dinode_alloc->id1.bitmap1.i_used); bail: if (unlock) ocfs2_inode_unlock(inode_alloc, 0); if (inode_alloc) mutex_unlock(&inode_alloc->i_mutex); brelse(bh); return status; } int ocfs2_info_handle_freeinode(struct inode *inode, struct ocfs2_info_request __user *req) { u32 i; u64 blkno = -1; char namebuf[40]; int status = -EFAULT, type = INODE_ALLOC_SYSTEM_INODE; struct ocfs2_info_freeinode *oifi = NULL; struct ocfs2_super *osb = OCFS2_SB(inode->i_sb); struct inode *inode_alloc = NULL; oifi = kzalloc(sizeof(struct ocfs2_info_freeinode), GFP_KERNEL); if (!oifi) { status = -ENOMEM; mlog_errno(status); goto out_err; } if (o2info_from_user(*oifi, req)) goto bail; oifi->ifi_slotnum = osb->max_slots; for (i = 0; i < oifi->ifi_slotnum; i++) { if (o2info_coherent(&oifi->ifi_req)) { inode_alloc = ocfs2_get_system_file_inode(osb, type, i); if (!inode_alloc) { mlog(ML_ERROR, "unable to get alloc inode in " "slot 
%u\n", i); status = -EIO; goto bail; } } else { ocfs2_sprintf_system_inode_name(namebuf, sizeof(namebuf), type, i); status = ocfs2_lookup_ino_from_name(osb->sys_root_inode, namebuf, strlen(namebuf), &blkno); if (status < 0) { status = -ENOENT; goto bail; } } status = ocfs2_info_scan_inode_alloc(osb, inode_alloc, blkno, oifi, i); if (status < 0) goto bail; iput(inode_alloc); inode_alloc = NULL; } o2info_set_request_filled(&oifi->ifi_req); if (o2info_to_user(*oifi, req)) goto bail; status = 0; bail: if (status) o2info_set_request_error(&oifi->ifi_req, req); kfree(oifi); out_err: return status; } static void o2ffg_update_histogram(struct ocfs2_info_free_chunk_list *hist, unsigned int chunksize) { int index; index = __ilog2_u32(chunksize); if (index >= OCFS2_INFO_MAX_HIST) index = OCFS2_INFO_MAX_HIST - 1; hist->fc_chunks[index]++; hist->fc_clusters[index] += chunksize; } static void o2ffg_update_stats(struct ocfs2_info_freefrag_stats *stats, unsigned int chunksize) { if (chunksize > stats->ffs_max) stats->ffs_max = chunksize; if (chunksize < stats->ffs_min) stats->ffs_min = chunksize; stats->ffs_avg += chunksize; stats->ffs_free_chunks_real++; } void ocfs2_info_update_ffg(struct ocfs2_info_freefrag *ffg, unsigned int chunksize) { o2ffg_update_histogram(&(ffg->iff_ffs.ffs_fc_hist), chunksize); o2ffg_update_stats(&(ffg->iff_ffs), chunksize); } int ocfs2_info_freefrag_scan_chain(struct ocfs2_super *osb, struct inode *gb_inode, struct ocfs2_dinode *gb_dinode, struct ocfs2_chain_rec *rec, struct ocfs2_info_freefrag *ffg, u32 chunks_in_group) { int status = 0, used; u64 blkno; struct buffer_head *bh = NULL; struct ocfs2_group_desc *bg = NULL; unsigned int max_bits, num_clusters; unsigned int offset = 0, cluster, chunk; unsigned int chunk_free, last_chunksize = 0; if (!le32_to_cpu(rec->c_free)) goto bail; do { if (!bg) blkno = le64_to_cpu(rec->c_blkno); else blkno = le64_to_cpu(bg->bg_next_group); if (bh) { brelse(bh); bh = NULL; } if (o2info_coherent(&ffg->iff_req)) status = 
ocfs2_read_group_descriptor(gb_inode, gb_dinode, blkno, &bh); else status = ocfs2_read_blocks_sync(osb, blkno, 1, &bh); if (status < 0) { mlog(ML_ERROR, "Can't read the group descriptor # " "%llu from device.", (unsigned long long)blkno); status = -EIO; goto bail; } bg = (struct ocfs2_group_desc *)bh->b_data; if (!le16_to_cpu(bg->bg_free_bits_count)) continue; max_bits = le16_to_cpu(bg->bg_bits); offset = 0; for (chunk = 0; chunk < chunks_in_group; chunk++) { /* * last chunk may be not an entire one. */ if ((offset + ffg->iff_chunksize) > max_bits) num_clusters = max_bits - offset; else num_clusters = ffg->iff_chunksize; chunk_free = 0; for (cluster = 0; cluster < num_clusters; cluster++) { used = ocfs2_test_bit(offset, (unsigned long *)bg->bg_bitmap); /* * - chunk_free counts free clusters in #N chunk. * - last_chunksize records the size(in) clusters * for the last real free chunk being counted. */ if (!used) { last_chunksize++; chunk_free++; } if (used && last_chunksize) { ocfs2_info_update_ffg(ffg, last_chunksize); last_chunksize = 0; } offset++; } if (chunk_free == ffg->iff_chunksize) ffg->iff_ffs.ffs_free_chunks++; } /* * need to update the info for last free chunk. 
*/ if (last_chunksize) ocfs2_info_update_ffg(ffg, last_chunksize); } while (le64_to_cpu(bg->bg_next_group)); bail: brelse(bh); return status; } int ocfs2_info_freefrag_scan_bitmap(struct ocfs2_super *osb, struct inode *gb_inode, u64 blkno, struct ocfs2_info_freefrag *ffg) { u32 chunks_in_group; int status = 0, unlock = 0, i; struct buffer_head *bh = NULL; struct ocfs2_chain_list *cl = NULL; struct ocfs2_chain_rec *rec = NULL; struct ocfs2_dinode *gb_dinode = NULL; if (gb_inode) mutex_lock(&gb_inode->i_mutex); if (o2info_coherent(&ffg->iff_req)) { status = ocfs2_inode_lock(gb_inode, &bh, 0); if (status < 0) { mlog_errno(status); goto bail; } unlock = 1; } else { status = ocfs2_read_blocks_sync(osb, blkno, 1, &bh); if (status < 0) { mlog_errno(status); goto bail; } } gb_dinode = (struct ocfs2_dinode *)bh->b_data; cl = &(gb_dinode->id2.i_chain); /* * Chunksize(in) clusters from userspace should be * less than clusters in a group. */ if (ffg->iff_chunksize > le16_to_cpu(cl->cl_cpg)) { status = -EINVAL; goto bail; } memset(&ffg->iff_ffs, 0, sizeof(struct ocfs2_info_freefrag_stats)); ffg->iff_ffs.ffs_min = ~0U; ffg->iff_ffs.ffs_clusters = le32_to_cpu(gb_dinode->id1.bitmap1.i_total); ffg->iff_ffs.ffs_free_clusters = ffg->iff_ffs.ffs_clusters - le32_to_cpu(gb_dinode->id1.bitmap1.i_used); chunks_in_group = le16_to_cpu(cl->cl_cpg) / ffg->iff_chunksize + 1; for (i = 0; i < le16_to_cpu(cl->cl_next_free_rec); i++) { rec = &(cl->cl_recs[i]); status = ocfs2_info_freefrag_scan_chain(osb, gb_inode, gb_dinode, rec, ffg, chunks_in_group); if (status) goto bail; } if (ffg->iff_ffs.ffs_free_chunks_real) ffg->iff_ffs.ffs_avg = (ffg->iff_ffs.ffs_avg / ffg->iff_ffs.ffs_free_chunks_real); bail: if (unlock) ocfs2_inode_unlock(gb_inode, 0); if (gb_inode) mutex_unlock(&gb_inode->i_mutex); if (gb_inode) iput(gb_inode); brelse(bh); return status; } int ocfs2_info_handle_freefrag(struct inode *inode, struct ocfs2_info_request __user *req) { u64 blkno = -1; char namebuf[40]; int status = -EFAULT, 
type = GLOBAL_BITMAP_SYSTEM_INODE; struct ocfs2_info_freefrag *oiff; struct ocfs2_super *osb = OCFS2_SB(inode->i_sb); struct inode *gb_inode = NULL; oiff = kzalloc(sizeof(struct ocfs2_info_freefrag), GFP_KERNEL); if (!oiff) { status = -ENOMEM; mlog_errno(status); goto out_err; } if (o2info_from_user(*oiff, req)) goto bail; /* * chunksize from userspace should be power of 2. */ if ((oiff->iff_chunksize & (oiff->iff_chunksize - 1)) || (!oiff->iff_chunksize)) { status = -EINVAL; goto bail; } if (o2info_coherent(&oiff->iff_req)) { gb_inode = ocfs2_get_system_file_inode(osb, type, OCFS2_INVALID_SLOT); if (!gb_inode) { mlog(ML_ERROR, "unable to get global_bitmap inode\n"); status = -EIO; goto bail; } } else { ocfs2_sprintf_system_inode_name(namebuf, sizeof(namebuf), type, OCFS2_INVALID_SLOT); status = ocfs2_lookup_ino_from_name(osb->sys_root_inode, namebuf, strlen(namebuf), &blkno); if (status < 0) { status = -ENOENT; goto bail; } } status = ocfs2_info_freefrag_scan_bitmap(osb, gb_inode, blkno, oiff); if (status < 0) goto bail; o2info_set_request_filled(&oiff->iff_req); if (o2info_to_user(*oiff, req)) { status = -EFAULT; goto bail; } status = 0; bail: if (status) o2info_set_request_error(&oiff->iff_req, req); kfree(oiff); out_err: return status; } int ocfs2_info_handle_unknown(struct inode *inode, struct ocfs2_info_request __user *req) { int status = -EFAULT; struct ocfs2_info_request oir; if (o2info_from_user(oir, req)) goto bail; o2info_clear_request_filled(&oir); if (o2info_to_user(oir, req)) goto bail; status = 0; bail: if (status) o2info_set_request_error(&oir, req); return status; } /* * Validate and distinguish OCFS2_IOC_INFO requests. * * - validate the magic number. * - distinguish different requests. * - validate size of different requests. 
*/ int ocfs2_info_handle_request(struct inode *inode, struct ocfs2_info_request __user *req) { int status = -EFAULT; struct ocfs2_info_request oir; if (o2info_from_user(oir, req)) goto bail; status = -EINVAL; if (oir.ir_magic != OCFS2_INFO_MAGIC) goto bail; switch (oir.ir_code) { case OCFS2_INFO_BLOCKSIZE: if (oir.ir_size == sizeof(struct ocfs2_info_blocksize)) status = ocfs2_info_handle_blocksize(inode, req); break; case OCFS2_INFO_CLUSTERSIZE: if (oir.ir_size == sizeof(struct ocfs2_info_clustersize)) status = ocfs2_info_handle_clustersize(inode, req); break; case OCFS2_INFO_MAXSLOTS: if (oir.ir_size == sizeof(struct ocfs2_info_maxslots)) status = ocfs2_info_handle_maxslots(inode, req); break; case OCFS2_INFO_LABEL: if (oir.ir_size == sizeof(struct ocfs2_info_label)) status = ocfs2_info_handle_label(inode, req); break; case OCFS2_INFO_UUID: if (oir.ir_size == sizeof(struct ocfs2_info_uuid)) status = ocfs2_info_handle_uuid(inode, req); break; case OCFS2_INFO_FS_FEATURES: if (oir.ir_size == sizeof(struct ocfs2_info_fs_features)) status = ocfs2_info_handle_fs_features(inode, req); break; case OCFS2_INFO_JOURNAL_SIZE: if (oir.ir_size == sizeof(struct ocfs2_info_journal_size)) status = ocfs2_info_handle_journal_size(inode, req); break; case OCFS2_INFO_FREEINODE: if (oir.ir_size == sizeof(struct ocfs2_info_freeinode)) status = ocfs2_info_handle_freeinode(inode, req); break; case OCFS2_INFO_FREEFRAG: if (oir.ir_size == sizeof(struct ocfs2_info_freefrag)) status = ocfs2_info_handle_freefrag(inode, req); break; default: status = ocfs2_info_handle_unknown(inode, req); break; } bail: return status; } int ocfs2_get_request_ptr(struct ocfs2_info *info, int idx, u64 *req_addr, int compat_flag) { int status = -EFAULT; u64 __user *bp = NULL; if (compat_flag) { #ifdef CONFIG_COMPAT /* * pointer bp stores the base address of a pointers array, * which collects all addresses of separate request. 
*/ bp = (u64 __user *)(unsigned long)compat_ptr(info->oi_requests); #else BUG(); #endif } else bp = (u64 __user *)(unsigned long)(info->oi_requests); if (o2info_from_user(*req_addr, bp + idx)) goto bail; status = 0; bail: return status; } /* * OCFS2_IOC_INFO handles an array of requests passed from userspace. * * ocfs2_info_handle() recevies a large info aggregation, grab and * validate the request count from header, then break it into small * pieces, later specific handlers can handle them one by one. * * Idea here is to make each separate request small enough to ensure * a better backward&forward compatibility, since a small piece of * request will be less likely to be broken if disk layout get changed. */ int ocfs2_info_handle(struct inode *inode, struct ocfs2_info *info, int compat_flag) { int i, status = 0; u64 req_addr; struct ocfs2_info_request __user *reqp; if ((info->oi_count > OCFS2_INFO_MAX_REQUEST) || (!info->oi_requests)) { status = -EINVAL; goto bail; } for (i = 0; i < info->oi_count; i++) { status = ocfs2_get_request_ptr(info, i, &req_addr, compat_flag); if (status) break; reqp = (struct ocfs2_info_request __user *)(unsigned long)req_addr; if (!reqp) { status = -EINVAL; goto bail; } status = ocfs2_info_handle_request(inode, reqp); if (status) break; } bail: return status; } long ocfs2_ioctl(struct file *filp, unsigned int cmd, unsigned long arg) { struct inode *inode = file_inode(filp); unsigned int flags; int new_clusters; int status; struct ocfs2_space_resv sr; struct ocfs2_new_group_input input; struct reflink_arguments args; const char __user *old_path; const char __user *new_path; bool preserve; struct ocfs2_info info; void __user *argp = (void __user *)arg; switch (cmd) { case OCFS2_IOC_GETFLAGS: status = ocfs2_get_inode_attr(inode, &flags); if (status < 0) return status; flags &= OCFS2_FL_VISIBLE; return put_user(flags, (int __user *) arg); case OCFS2_IOC_SETFLAGS: if (get_user(flags, (int __user *) arg)) return -EFAULT; status = 
mnt_want_write_file(filp); if (status) return status; status = ocfs2_set_inode_attr(inode, flags, OCFS2_FL_MODIFIABLE); mnt_drop_write_file(filp); return status; case OCFS2_IOC_RESVSP: case OCFS2_IOC_RESVSP64: case OCFS2_IOC_UNRESVSP: case OCFS2_IOC_UNRESVSP64: if (copy_from_user(&sr, (int __user *) arg, sizeof(sr))) return -EFAULT; return ocfs2_change_file_space(filp, cmd, &sr); case OCFS2_IOC_GROUP_EXTEND: if (!capable(CAP_SYS_RESOURCE)) return -EPERM; if (get_user(new_clusters, (int __user *)arg)) return -EFAULT; status = mnt_want_write_file(filp); if (status) return status; status = ocfs2_group_extend(inode, new_clusters); mnt_drop_write_file(filp); return status; case OCFS2_IOC_GROUP_ADD: case OCFS2_IOC_GROUP_ADD64: if (!capable(CAP_SYS_RESOURCE)) return -EPERM; if (copy_from_user(&input, (int __user *) arg, sizeof(input))) return -EFAULT; status = mnt_want_write_file(filp); if (status) return status; status = ocfs2_group_add(inode, &input); mnt_drop_write_file(filp); return status; case OCFS2_IOC_REFLINK: if (copy_from_user(&args, argp, sizeof(args))) return -EFAULT; old_path = (const char __user *)(unsigned long)args.old_path; new_path = (const char __user *)(unsigned long)args.new_path; preserve = (args.preserve != 0); return ocfs2_reflink_ioctl(inode, old_path, new_path, preserve); case OCFS2_IOC_INFO: if (copy_from_user(&info, argp, sizeof(struct ocfs2_info))) return -EFAULT; return ocfs2_info_handle(inode, &info, 0); case FITRIM: { struct super_block *sb = inode->i_sb; struct fstrim_range range; int ret = 0; if (!capable(CAP_SYS_ADMIN)) return -EPERM; if (copy_from_user(&range, argp, sizeof(range))) return -EFAULT; ret = ocfs2_trim_fs(sb, &range); if (ret < 0) return ret; if (copy_to_user(argp, &range, sizeof(range))) return -EFAULT; return 0; } case OCFS2_IOC_MOVE_EXT: return ocfs2_ioctl_move_extents(filp, argp); default: return -ENOTTY; } } #ifdef CONFIG_COMPAT long ocfs2_compat_ioctl(struct file *file, unsigned cmd, unsigned long arg) { bool preserve; 
struct reflink_arguments args; struct inode *inode = file_inode(file); struct ocfs2_info info; void __user *argp = (void __user *)arg; switch (cmd) { case OCFS2_IOC32_GETFLAGS: cmd = OCFS2_IOC_GETFLAGS; break; case OCFS2_IOC32_SETFLAGS: cmd = OCFS2_IOC_SETFLAGS; break; case OCFS2_IOC_RESVSP: case OCFS2_IOC_RESVSP64: case OCFS2_IOC_UNRESVSP: case OCFS2_IOC_UNRESVSP64: case OCFS2_IOC_GROUP_EXTEND: case OCFS2_IOC_GROUP_ADD: case OCFS2_IOC_GROUP_ADD64: case FITRIM: break; case OCFS2_IOC_REFLINK: if (copy_from_user(&args, argp, sizeof(args))) return -EFAULT; preserve = (args.preserve != 0); return ocfs2_reflink_ioctl(inode, compat_ptr(args.old_path), compat_ptr(args.new_path), preserve); case OCFS2_IOC_INFO: if (copy_from_user(&info, argp, sizeof(struct ocfs2_info))) return -EFAULT; return ocfs2_info_handle(inode, &info, 1); case OCFS2_IOC_MOVE_EXT: break; default: return -ENOIOCTLCMD; } return ocfs2_ioctl(file, cmd, arg); } #endif
{ "content_hash": "961a9bab803fda0c13f297fa51e54471", "timestamp": "", "source": "github", "line_count": 1034, "max_line_length": 78, "avg_line_length": 22.719535783365572, "alnum_prop": 0.6606078665077473, "repo_name": "ghostkim-sc/SMG920T_profiling_enabled", "id": "0c60ef2d8056ea4412e1338a3580f03acc72210a", "size": "23605", "binary": false, "copies": "2080", "ref": "refs/heads/master", "path": "fs/ocfs2/ioctl.c", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "4528" }, { "name": "Assembly", "bytes": "9791460" }, { "name": "Awk", "bytes": "18681" }, { "name": "C", "bytes": "518034272" }, { "name": "C++", "bytes": "13105745" }, { "name": "GDB", "bytes": "18113" }, { "name": "Lex", "bytes": "40805" }, { "name": "M4", "bytes": "3388" }, { "name": "Makefile", "bytes": "1522326" }, { "name": "Objective-C", "bytes": "1278363" }, { "name": "Perl", "bytes": "372361" }, { "name": "Python", "bytes": "22590" }, { "name": "Roff", "bytes": "22012" }, { "name": "Scilab", "bytes": "21433" }, { "name": "Shell", "bytes": "218756" }, { "name": "SourcePawn", "bytes": "2711" }, { "name": "Stata", "bytes": "4176" }, { "name": "UnrealScript", "bytes": "6113" }, { "name": "Yacc", "bytes": "83091" } ], "symlink_target": "" }
# Specs for the cloud-specific VmOrTemplate subclass: default/archived scoping
# of cloud VMs and templates, and the tenant-visibility SQL clause generation.
#
# CONSISTENCY FIX: the file mixed FactoryBot (first two describe blocks) with
# the deprecated FactoryGirl constant (miq_group block); unified on FactoryBot,
# which this file already uses successfully.
RSpec.describe ManageIQ::Providers::CloudManager::VmOrTemplate do
  describe "#all" do
    it "scopes" do
      # Cloud records are in scope...
      vm = FactoryBot.create(:vm_openstack)
      t = FactoryBot.create(:template_openstack)
      # ...infra (non-cloud) records are not.
      FactoryBot.create(:vm_vmware)
      FactoryBot.create(:template_vmware)
      expect(described_class.all).to match_array([vm, t])
    end
  end

  describe "#all_archived" do
    it "scopes" do
      ems = FactoryBot.create(:ems_openstack)
      vm = FactoryBot.create(:vm_openstack)
      t = FactoryBot.create(:template_openstack)
      # non archived (attached to an EMS, so excluded from .archived)
      FactoryBot.create(:vm_openstack, :ext_management_system => ems)
      FactoryBot.create(:template_openstack, :ext_management_system => ems)
      # non cloud (excluded by the class scope)
      FactoryBot.create(:vm_vmware)
      FactoryBot.create(:template_vmware)

      expect(described_class.archived).to match_array([vm, t])
    end
  end

  let(:root_tenant) do
    Tenant.seed
  end

  # default_tenant depends on the root tenant being seeded first.
  let(:default_tenant) do
    root_tenant
    Tenant.default_tenant
  end

  describe "miq_group" do
    let(:user) { FactoryBot.create(:user, :userid => 'user', :miq_groups => [tenant_group]) }
    let(:tenant) { FactoryBot.build(:tenant, :parent => default_tenant) }
    let(:tenant_users) { FactoryBot.create(:miq_user_role, :name => "tenant-users") }
    let(:tenant_group) { FactoryBot.create(:miq_group, :miq_user_role => tenant_users, :tenant => tenant) }
    # NOTE(review): this let is never referenced and passes a Hash where a
    # factory name is expected -- confirm intent before removing or fixing.
    let(:cloud_template_1) { FactoryBot.create(:class => "TemplateCloud") }

    # The expected SQL fragments below are compared verbatim; do not reformat.
    it "finds correct tenant id clause when tenant has source_id" do
      User.current_user = user
      tenant.source_id = 1
      expect(VmOrTemplate.tenant_id_clause(user)).to eql ["vms.template = true AND vms.tenant_id = (?) AND vms.publicly_available = false OR vms.template = false AND vms.tenant_id IN (?) OR vms.template = true AND vms.tenant_id IN (?) AND vms.type NOT IN (?) OR vms.template = true AND vms.publicly_available = true AND vms.type IN (?)", tenant.id, [tenant.id], [root_tenant.id, tenant.id], ["ManageIQ::Providers::Openstack::CloudManager::Template"], ["ManageIQ::Providers::Openstack::CloudManager::Template"]]
    end

    it "finds correct tenant id clause when tenant doesn't have source_id" do
      User.current_user = user
      expect(VmOrTemplate.tenant_id_clause(user)).to eql ["vms.template = true AND vms.tenant_id IN (?) OR vms.template = true AND vms.publicly_available = true AND vms.type IN (?) OR vms.template = false AND vms.tenant_id IN (?)", [root_tenant.id, tenant.id], ["ManageIQ::Providers::Openstack::CloudManager::Template"], [tenant.id]]
    end
  end
end
{ "content_hash": "2579ae699ac2256069df8b0a1fb05e56", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 510, "avg_line_length": 46.357142857142854, "alnum_prop": 0.6744992295839753, "repo_name": "gmcculloug/manageiq", "id": "a25b32023f2ce2242dac7f7608b1ba7e80c51c8f", "size": "2596", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "spec/models/manageiq/providers/cloud_manager/vm_or_template_spec.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "3042" }, { "name": "Dockerfile", "bytes": "890" }, { "name": "HTML", "bytes": "2167" }, { "name": "JavaScript", "bytes": "183" }, { "name": "Ruby", "bytes": "8050979" }, { "name": "Shell", "bytes": "22723" } ], "symlink_target": "" }
A simple example application built with Flask.
{ "content_hash": "cc0bdc10b426ee56d6df4a08eb7705b2", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 35, "avg_line_length": 36, "alnum_prop": 0.8611111111111112, "repo_name": "rmotr/example-flask-app", "id": "6e8bd7f5233f921bc39ab0fb82af1748aaccbff1", "size": "56", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "919" }, { "name": "HTML", "bytes": "1699" }, { "name": "Python", "bytes": "2235" }, { "name": "Shell", "bytes": "46" } ], "symlink_target": "" }
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE124_Buffer_Underwrite__malloc_char_loop_42.c
Label Definition File: CWE124_Buffer_Underwrite__malloc.label.xml
Template File: sources-sink-42.tmpl.c
*/
/*
 * @description
 * CWE: 124 Buffer Underwrite
 * BadSource:  Set data pointer to before the allocated memory buffer
 * GoodSource: Set data pointer to the allocated memory buffer
 * Sink: loop
 *    BadSink : Copy string to data using a loop
 * Flow Variant: 42 Data flow: data returned from one function to another in the same source file
 *
 * NOTE: this is a Juliet test-suite artifact; the flaw below is INTENTIONAL
 * and must not be "fixed" -- static/dynamic analysis tools are expected to
 * detect it.
 */

#include "std_testcase.h"
#include <wchar.h>

#ifndef OMITBAD

/* Bad source: allocates a 100-byte buffer but returns a pointer 8 bytes
 * BEFORE its start, so any write through the result underwrites the heap
 * allocation (CWE-124). */
static char * badSource(char * data)
{
    {
        char * dataBuffer = (char *)malloc(100*sizeof(char));
        memset(dataBuffer, 'A', 100-1);
        dataBuffer[100-1] = '\0';
        /* FLAW: Set data pointer to before the allocated memory buffer */
        data = dataBuffer - 8;
    }
    return data;
}

/* Entry point exercising the flawed source with the loop-copy sink. */
void CWE124_Buffer_Underwrite__malloc_char_loop_42_bad()
{
    char * data;
    data = NULL;
    data = badSource(data);
    {
        size_t i;
        char source[100];
        memset(source, 'C', 100-1); /* fill with 'C's */
        source[100-1] = '\0'; /* null terminate */
        /* POTENTIAL FLAW: Possibly copying data to memory before the destination buffer */
        for (i = 0; i < 100; i++)
        {
            data[i] = source[i];
        }
        /* Ensure the destination buffer is null terminated */
        data[100-1] = '\0';
        printLine(data);
        /* INCIDENTAL CWE-401: Memory Leak - data may not point to location
         * returned by malloc() so can't safely call free() on it */
    }
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* Good source: same allocation as badSource but returns the buffer start,
 * so the sink's writes stay in bounds. */
static char * goodG2BSource(char * data)
{
    {
        char * dataBuffer = (char *)malloc(100*sizeof(char));
        memset(dataBuffer, 'A', 100-1);
        dataBuffer[100-1] = '\0';
        /* FIX: Set data pointer to the allocated memory buffer */
        data = dataBuffer;
    }
    return data;
}

/* goodG2B uses the GoodSource with the BadSink */
static void goodG2B()
{
    char * data;
    data = NULL;
    data = goodG2BSource(data);
    {
        size_t i;
        char source[100];
        memset(source, 'C', 100-1); /* fill with 'C's */
        source[100-1] = '\0'; /* null terminate */
        /* POTENTIAL FLAW: Possibly copying data to memory before the destination buffer */
        for (i = 0; i < 100; i++)
        {
            data[i] = source[i];
        }
        /* Ensure the destination buffer is null terminated */
        data[100-1] = '\0';
        printLine(data);
        /* INCIDENTAL CWE-401: Memory Leak - data may not point to location
         * returned by malloc() so can't safely call free() on it */
    }
}

/* Public "good" entry point mirroring the bad one for the test harness. */
void CWE124_Buffer_Underwrite__malloc_char_loop_42_good()
{
    goodG2B();
}

#endif /* OMITGOOD */

/* Below is the main(). It is only used when building this testcase on
 * its own for testing or for building a binary to use in testing binary
 * analysis tools. It is not used when compiling all the testcases as one
 * application, which is how source code analysis tools are tested. */

#ifdef INCLUDEMAIN

int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    CWE124_Buffer_Underwrite__malloc_char_loop_42_good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    CWE124_Buffer_Underwrite__malloc_char_loop_42_bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}

#endif
{ "content_hash": "4ce969171174593cf8744dec279d7420", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 97, "avg_line_length": 28.8, "alnum_prop": 0.5910790598290598, "repo_name": "maurer/tiamat", "id": "909979fe07c5bee046d4f1a40bf68b60d6d13ad8", "size": "3744", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "samples/Juliet/testcases/CWE124_Buffer_Underwrite/s02/CWE124_Buffer_Underwrite__malloc_char_loop_42.c", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
'''
@author Angel -Ote- Cortes
@version 0.1

Anonymizer is a class based on the requests package to simplify the use of
proxies and Tor. The class automatically select a proxy from a list, change
headers randomly and keep control of not working proxies.

Copyright (C) 2012 Angel Cortés
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of copyright holders nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import requests
from random import choice
from time import sleep
import socket
import datetime

# Tor controller settings. Values are strings (seconds) because they are
# spliced verbatim into SETCONF commands sent to the control port.
TOR_CONF = {"MaxCircuitDirtiness": "60", "NewCircuitPeriod": "10", "CircuitBuildTimeout": "20"}


class AnonymizerException(Exception):
    '''
    Simple exception for the class.

    Attributes:
        errorCode -- numeric code identifying the failure
        content   -- human readable description of the failure
    '''
    def __init__(self, errorCode, content):
        self.errorCode = errorCode
        self.content = content

    def __str__(self):
        Lista = [self.errorCode, self.content]
        return repr(Lista)


class Anonymizer(object):
    '''
    Anonymize any http GET petition throught a proxy list or TOR.
    If the proxy is a TOR you can manage how many connections want to do
    per minute and if you configure a TOR Control Port automatically change
    the circuit.

    Params:
        -proxy: Required. Dict with the http proxies list.
                Accept standar HTTP proxies:
                    {'http':["127.0.0.1:3128","127.0.0.1:3129"]}
                Or TOR format with/without TORCTL port:
                    {'tor':"127.0.0.1:8118",'torctl':"127.0.0.1:9051"}
        -petitions: (default 15) Number of petitions per minute with TOR
        -user: (default None) Reserved for future uses
        -passwd: (default None) Passphrase of the TOR control AUTHENTICATE
        -timeout: (default 15) Timeout for HTTP petitions
    '''
    def __init__(self, proxy, petitions=15, user=None, passwd=None, timeout=15):
        self.MAX_PETITIONS = petitions
        self.CURR_PETITIONS = 0
        self.LAST_TIMESTAMP = datetime.datetime.now()
        self.timeout = timeout
        self.proxy_to_use = {'http': None}
        self.isTor = False
        self.torCTL = None
        # TorCtl user/pass
        self.proxy_user = user
        self.proxy_passwd = passwd
        # Headers faked for the next request
        self.request_headers = {}
        # Temporal objects
        self.url = None
        # Result object
        self.http_response = None
        # Validate the proxy list provided
        self.__check_proxy_list(proxy)

    def __check_proxy_list(self, proxyDict):
        '''
        Validate the proxy configuration and store it on the instance.
        Raises AnonymizerException(501/502) on invalid input.
        '''
        # BUG FIX: the original guard `not (proxyDict or (not isinstance(...)))`
        # was logically inverted -- it accepted None and non-dict values and
        # only rejected an empty dict. Reject anything that is not a
        # non-empty dict.
        if not proxyDict or not isinstance(proxyDict, dict):
            raise AnonymizerException(501, "No good proxy dict/list provided for Anonymizer")
        if "tor" in proxyDict.keys():
            self.isTor = True
            self.proxy = {'http': [proxyDict['tor']]}
            if "torctl" in proxyDict.keys():
                self.torCTL = proxyDict['torctl']
                self.__prepare_tor()
            return True
        if "http" in proxyDict.keys():
            if isinstance(proxyDict['http'], list):
                self.proxy = proxyDict
                return True
        # BUG FIX: a dict with neither a usable 'tor' nor 'http' entry
        # previously fell through silently, returning None.
        raise AnonymizerException(502, "No good HTTP proxy list provided for Anonymizer")

    def __check_timestamps(self):
        '''
        Return True (and reset the reference timestamp) when the current Tor
        circuit is older than MaxCircuitDirtiness, False otherwise.
        '''
        now = datetime.datetime.now()
        delta = now - self.LAST_TIMESTAMP
        if delta.seconds > int(TOR_CONF['MaxCircuitDirtiness']):
            self.LAST_TIMESTAMP = now
            return True
        return False

    def __set_RandomHeaders(self):
        '''
        Select a random headers from a list and asings it to the the connection
        '''
        ##User Agent
        user_agents_list = []
        user_agents_list.append('Mozilla/5.0 (iPhone; U; CPU iOS 2_0 like Mac OS X; en-us)')
        user_agents_list.append('Mozilla/5.0 (Linux; U; Android 0.5; en-us)')
        user_agents_list.append('Mozilla/5.0 (iPad; U; CPU OS 3_2_1 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko)')
        user_agents_list.append('Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)')
        user_agents_list.append('Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)')
        user_agents_list.append('Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/0.2.149.29 Safari/525.13')
        user_agents_list.append('Opera/9.25 (Windows NT 6.0; U; en)')
        user_agents_list.append('Opera/9.80 (X11; Linux x86_64; U; pl) Presto/2.7.62 Version/11.00')
        user_agents_list.append('Opera/9.80 (Windows NT 6.0; U; en) Presto/2.7.39 Version/11.00')
        user_agents_list.append('Mozilla/5.0 (Windows NT 6.0; U; ja; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.00')
        user_agents_list.append('Mozilla/4.0 (compatible; MSIE 8.0; X11; Linux x86_64; pl) Opera 11.00')
        user_agents_list.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; fr) Opera 11.00')
        user_agents_list.append('Opera/9.80 (Windows NT 6.1 x64; U; en) Presto/2.7.62 Version/11.00')
        user_agents_list.append('Mozilla/5.0 (Windows NT 5.1; U; de; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6 Opera 11.00')
        user_agents_list.append('Mozilla/4.0 (compatible; MSIE 8.0; X11; Linux x86_64; pl) Opera 11.00')
        user_agent = choice(user_agents_list).strip()
        ##Language
        accept_language_list = []
        accept_language_list.append('de-de,es-es;q=0.8,en-us;q=0.5,en;q=0.3')
        accept_language_list.append('en-us;q=0.8,en;q=0.3')
        accept_language_list.append('es;q=0.8,en-us;q=0.5,en;q=0.3')
        accept_language_list.append('es-es;q=0.8,en;q=0.3')
        accept_language_list.append('de-de;q=0.8,en;q=0.3')
        accept_language_list.append('de-de;q=0.8,en-us;q=0.5)')
        language = choice(accept_language_list).strip()
        self.request_headers = {'User-Agent': user_agent,
                                'Accept-Language': language,
                                'Referer': ''}

    def __prepare_request(self, url):
        """
        Prepare the random objects for the request: fake headers, Tor
        petition throttling / rerouting, and the proxy to use.
        Raises AnonymizerException(111) when the per-circuit petition
        budget is exhausted.
        """
        self.url = url
        self.__set_RandomHeaders()
        requests.defaults.defaults['keep_alive'] = False
        if self.isTor:
            if self.torCTL is not None:
                if not self.__check_timestamps():
                    if (self.CURR_PETITIONS == self.MAX_PETITIONS):
                        self.CURR_PETITIONS = 0
                        raise AnonymizerException(111, "Max number of petitions(%s) in %sseconds reached" % (self.MAX_PETITIONS, TOR_CONF['MaxCircuitDirtiness']))
                    self.CURR_PETITIONS = self.CURR_PETITIONS + 1
                else:
                    # Circuit is stale: reset the counter and build a new one.
                    self.CURR_PETITIONS = 1
                    self.__reroute_tor()
        self.proxy_to_use['http'] = choice(self.proxy['http'])

    def __prepare_tor(self):
        '''
        Configure the Tor daemon through its control port with the short
        circuit lifetimes from TOR_CONF.
        '''
        host, port = self.torCTL.split(':')
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((host, int(port)))
        if self.proxy_passwd:
            s.send(str.encode('AUTHENTICATE "%s"\r\n' % self.proxy_passwd))
            data = s.recv(100)
            if not str(data.decode()).startswith("250"):
                # BUG FIX: the original raised the Anonymizer class itself
                # instead of AnonymizerException.
                raise AnonymizerException(211, "Error in the AUTHENTICATE command to the TOR control port.")
        # Short circuit time
        s.send(str.encode('SETCONF NewCircuitPeriod=%s\r\n' % TOR_CONF['NewCircuitPeriod']))
        data = s.recv(100)
        # Short circuit build time
        s.send(str.encode('SETCONF CircuitBuildTimeout=%s\r\n' % TOR_CONF['CircuitBuildTimeout']))
        data = s.recv(100)
        # Short circuit Valid time
        s.send(str.encode('SETCONF MaxCircuitDirtiness="%s"\r\n' % TOR_CONF['MaxCircuitDirtiness']))
        data = s.recv(100)
        sleep(5)
        s.close()

    def __reroute_tor(self):
        '''
        Ask the Tor control port for a fresh circuit (SIGNAL NEWNYM).
        '''
        host, port = self.torCTL.split(':')
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        data = None
        s.connect((host, int(port)))
        if self.proxy_passwd:
            s.send(str.encode("AUTHENTICATE \"%s\"\r\n" % self.proxy_passwd))
            data = s.recv(100)
            if not str(data.decode()).startswith("250"):
                # BUG FIX: the original raised the Anonymizer class itself
                # instead of AnonymizerException.
                raise AnonymizerException(211, "Error in the AUTHENTICATE command to the TOR control port.")
        s.send(str.encode('SIGNAL NEWNYM\r\n'))
        s.recv(100)
        sleep(5)
        s.close()

    def get(self, url, pureAnon=False, DEBUG=False):
        '''
        get will return the url requested using a randomized proxy from the
        list as a request.response item.
        PARAMS:
            -url: The url to retrieve
            -pureAnon: (default False) If set to True no cookies are accepted
                       in this petition and will not be returned.
            -DEBUG: (default False) If True, return a dict like:
                    {'response':http_response,'proxy':"proxy used",'headers':"Fake headers used"}
        Raises AnonymizerException(101) when the request fails.
        '''
        self.__prepare_request(url)
        if pureAnon:
            requests.defaults.defaults['store_cookies'] = False
        try:
            self.http_response = requests.get(self.url, proxies=self.proxy_to_use, headers=self.request_headers, timeout=self.timeout)
        except Exception:
            raise AnonymizerException(101, "Requests unable to get %s using the proxy %s" % (url, self.proxy_to_use))
        if not DEBUG:
            return self.http_response
        else:
            output = {'response': self.http_response, 'proxy': self.proxy_to_use, 'headers': self.request_headers}
            return output
{ "content_hash": "c9c8e4efc094de6449c7fafb5b31813d", "timestamp": "", "source": "github", "line_count": 256, "max_line_length": 158, "avg_line_length": 43.45703125, "alnum_prop": 0.6314606741573033, "repo_name": "OteCortes/Anopymizer", "id": "362998b22aa4236ae7c6fd7aebe46c56727579c8", "size": "11151", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "anonymizer.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "9656" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="pt"> <head> <!-- Generated by javadoc (version 1.7.0_79) on Mon Jun 22 15:50:37 BRT 2015 --> <title>Uses of Class org.sdnplatform.sync.error.AuthException</title> <meta name="date" content="2015-06-22"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.sdnplatform.sync.error.AuthException"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/sdnplatform/sync/error/AuthException.html" title="class in org.sdnplatform.sync.error">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/sdnplatform/sync/error/class-use/AuthException.html" target="_top">Frames</a></li> <li><a href="AuthException.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = 
document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.sdnplatform.sync.error.AuthException" class="title">Uses of Class<br>org.sdnplatform.sync.error.AuthException</h2> </div> <div class="classUseContainer">No usage of org.sdnplatform.sync.error.AuthException</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/sdnplatform/sync/error/AuthException.html" title="class in org.sdnplatform.sync.error">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-files/index-1.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/sdnplatform/sync/error/class-use/AuthException.html" target="_top">Frames</a></li> <li><a href="AuthException.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } 
//--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> </body> </html>
{ "content_hash": "9bd6d2dd2c9f5375a0da0efd3df2be89", "timestamp": "", "source": "github", "line_count": 115, "max_line_length": 143, "avg_line_length": 36.391304347826086, "alnum_prop": 0.6157706093189964, "repo_name": "paulorvj/sdnvoip", "id": "71612203bc2152dc65bf65e44d8ea5a963cd0d42", "size": "4185", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/org/sdnplatform/sync/error/class-use/AuthException.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "181" }, { "name": "HTML", "bytes": "10186" }, { "name": "Java", "bytes": "3772566" }, { "name": "JavaScript", "bytes": "55112" }, { "name": "Makefile", "bytes": "426" }, { "name": "Python", "bytes": "32743" }, { "name": "Shell", "bytes": "5610" }, { "name": "Thrift", "bytes": "7114" } ], "symlink_target": "" }
layout: staff seoTitle: Staff seoDescription: Our staff rocks! Meet the people that grind every day to make ARTWORKS the best program possible. featureImg: artwork-004.jpg featureStatement: STAFF title: Hardwork subtitle: Pays off permalink: /staff/ unique-cta: true mega-cta-img: artwork-012.jpg mega-cta-heading: Work for us! mega-cta-button-text: Apply today! mega-cta-button-text-url: mailto:info@stlartworks.org reveal: staff --- Our staff rocks! Meet the people that grind every day to make ARTWORKS the best program possible.
{ "content_hash": "324ca9f68eaaa3c2945a36a9ac0e7bec", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 113, "avg_line_length": 22.541666666666668, "alnum_prop": 0.7818853974121996, "repo_name": "stlartworks/stlartworks.github.io", "id": "5a53d1a48331cf43960bb745635da3b2e4e1670c", "size": "545", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "staff.md", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "78962" }, { "name": "JavaScript", "bytes": "12177" }, { "name": "Ruby", "bytes": "283" }, { "name": "SCSS", "bytes": "1531" }, { "name": "Sass", "bytes": "72546" } ], "symlink_target": "" }
 #pragma once #include <aws/s3/S3_EXPORTS.h> #include <aws/core/utils/memory/stl/AWSString.h> namespace Aws { namespace S3 { namespace Model { enum class ObjectVersionStorageClass { NOT_SET, STANDARD }; namespace ObjectVersionStorageClassMapper { AWS_S3_API ObjectVersionStorageClass GetObjectVersionStorageClassForName(const Aws::String& name); AWS_S3_API Aws::String GetNameForObjectVersionStorageClass(ObjectVersionStorageClass value); } // namespace ObjectVersionStorageClassMapper } // namespace Model } // namespace S3 } // namespace Aws
{ "content_hash": "ea06ba4cca1342ee57e5b3749913033b", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 98, "avg_line_length": 21.615384615384617, "alnum_prop": 0.7829181494661922, "repo_name": "ambasta/aws-sdk-cpp", "id": "324cd5435bdf35735582d4cfd50fde5966182310", "size": "1135", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-cpp-sdk-s3/include/aws/s3/model/ObjectVersionStorageClass.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2305" }, { "name": "C++", "bytes": "74273816" }, { "name": "CMake", "bytes": "412257" }, { "name": "Java", "bytes": "229873" }, { "name": "Python", "bytes": "62933" } ], "symlink_target": "" }
import React, { Component, PropTypes } from 'react'; import classnames from 'classnames'; import CSSClassnames from '../../../utils/CSSClassnames'; import Intl from '../../../utils/Intl'; import Props from '../../../utils/Props'; const CLASS_ROOT = CSSClassnames.CONTROL_ICON; const COLOR_INDEX = CSSClassnames.COLOR_INDEX; export default class Icon extends Component { render () { const { className, colorIndex } = this.props; let { a11yTitle, size, responsive } = this.props; let { intl } = this.context; const classes = classnames( CLASS_ROOT, `${CLASS_ROOT}-circle-information`, className, { [`${CLASS_ROOT}--${size}`]: size, [`${CLASS_ROOT}--responsive`]: responsive, [`${COLOR_INDEX}-${colorIndex}`]: colorIndex } ); a11yTitle = a11yTitle || Intl.getMessage(intl, 'circle-information'); const restProps = Props.omit(this.props, Object.keys(Icon.propTypes)); return <svg {...restProps} version="1.1" viewBox="0 0 24 24" width="24px" height="24px" role="img" className={classes} aria-label={a11yTitle}><path fill="none" stroke="#000000" strokeWidth="2" d="M12,22 C17.5228475,22 22,17.5228475 22,12 C22,6.4771525 17.5228475,2 12,2 C6.4771525,2 2,6.4771525 2,12 C2,17.5228475 6.4771525,22 12,22 Z M12,10 L12,18 M12,6 L12,8"/></svg>; } }; Icon.contextTypes = { intl: PropTypes.object }; Icon.defaultProps = { responsive: true }; Icon.displayName = 'CircleInformation'; Icon.icon = true; Icon.propTypes = { a11yTitle: PropTypes.string, colorIndex: PropTypes.string, size: PropTypes.oneOf(['small', 'medium', 'large', 'xlarge', 'huge']), responsive: PropTypes.bool };
{ "content_hash": "d2ca4f5116047a38a72fb057a0edb9fb", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 374, "avg_line_length": 32.30769230769231, "alnum_prop": 0.6625, "repo_name": "nickjvm/grommet", "id": "8bb83491200939f2cdde4e59159106466729c808", "size": "1750", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/js/components/icons/base/CircleInformation.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "258719" }, { "name": "HTML", "bytes": "4207" }, { "name": "JavaScript", "bytes": "1489305" } ], "symlink_target": "" }
package org.zstack.header.acl;

import org.zstack.header.message.APIEvent;
import org.zstack.header.rest.RestResponse;

/**
 * API event returned when an access-control-list entry has been added.
 *
 * @author: zhanyong.miao
 * @date: 2020-03-09
 **/
@RestResponse(allTo = "inventory")
public class APIAddAccessControlListEntryEvent extends APIEvent {
    // The ACL entry created by the corresponding API call.
    private AccessControlListEntryInventory inventory;

    public APIAddAccessControlListEntryEvent() {
    }

    public APIAddAccessControlListEntryEvent(String apiId) {
        super(apiId);
    }

    public AccessControlListEntryInventory getInventory() {
        return inventory;
    }

    public void setInventory(AccessControlListEntryInventory inventory) {
        this.inventory = inventory;
    }

    /** Builds a populated sample event with one CIDR IP entry. */
    public static APIAddAccessControlListEntryEvent __example__() {
        final APIAddAccessControlListEntryEvent sample = new APIAddAccessControlListEntryEvent();

        final AccessControlListEntryInventory entry = new AccessControlListEntryInventory();
        entry.setAclUuid(uuid());
        entry.setIpEntries("192.168.48.0/24");

        sample.setInventory(entry);
        return sample;
    }
}
{ "content_hash": "79137a7aa4af02331fd7f8e23b22c6fd", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 90, "avg_line_length": 28.487179487179485, "alnum_prop": 0.6975697569756976, "repo_name": "zstackorg/zstack", "id": "06269e612be7ee23c61dc24b5368bbc49e6ccebd", "size": "1111", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "plugin/acl/src/main/java/org/zstack/header/acl/APIAddAccessControlListEntryEvent.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "AspectJ", "bytes": "54952" }, { "name": "Batchfile", "bytes": "1132" }, { "name": "Groovy", "bytes": "832169" }, { "name": "Java", "bytes": "15798995" }, { "name": "Shell", "bytes": "152829" } ], "symlink_target": "" }
# Test factories for AwesomeTranslations::CacheDatabaseGenerator::Handler.
FactoryBot.define do
  factory :handler, class: "AwesomeTranslations::CacheDatabaseGenerator::Handler" do
    identifier { "rails_handler" }
    name { "RailsHandler" }

    # Nested variant: same Handler class, but identifying the model handler.
    factory :model_handler do
      identifier { "model_handler" }
      name { "ModelHandler" }
    end
  end
end
{ "content_hash": "6b8ff0a63aaea18dbaff8db93be9b4a9", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 84, "avg_line_length": 25.90909090909091, "alnum_prop": 0.6771929824561403, "repo_name": "kaspernj/awesome_translations", "id": "1f111fc51fe36918a02123c6c05457c92784142b", "size": "285", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/factories/handler.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "546" }, { "name": "HTML", "bytes": "25096" }, { "name": "Haml", "bytes": "344" }, { "name": "JavaScript", "bytes": "289" }, { "name": "Liquid", "bytes": "410" }, { "name": "Ruby", "bytes": "134256" }, { "name": "SCSS", "bytes": "550" } ], "symlink_target": "" }
<?php

namespace ShopBundle\Controller;

use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Symfony\Component\HttpFoundation\Response;
use ShopBundle\Entity\Products;
use ShopBundle\Entity\User;

/**
 * Renders the authenticated user's wallet page.
 */
class WalletController extends Controller
{
    /**
     * Shows account details (username, email, last login, money) for the
     * currently logged-in user together with the distinct product
     * category rows.
     *
     * @return Response
     */
    public function indexAction()
    {
        $user = $this->getUser();
        $userId = $user->getId();

        // One EntityManager serves both queries; the original fetched a
        // second, redundant manager instance ($rm) for the same unit of work.
        $em = $this->getDoctrine()->getManager();

        // Account details for the logged-in user (parameterized DQL — the
        // id is bound, never concatenated).
        $query = $em->createQuery(
            'SELECT a.id, a.username, a.email, a.lastLogin, a.money FROM ShopBundle:User a WHERE a.id = :id'
        )->setParameter('id', $userId);
        $user_details = $query->getResult();

        // Distinct category columns from the products table — presumably
        // feeds the category menu in the template; confirm against the view.
        $query2 = $em->createQuery(
            'SELECT DISTINCT a.categorypath, a.category, a.mother FROM ShopBundle:Products a'
        );
        $categories2 = $query2->getResult();

        return $this->render('ShopBundle:Wallet:index.html.twig', array(
            'user_details' => $user_details,
            'categories2' => $categories2,
        ));
    }
}
{ "content_hash": "dffc1d8cd07821d4cb32045957459818", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 159, "avg_line_length": 34.714285714285715, "alnum_prop": 0.676954732510288, "repo_name": "lukaszuznanski/sh", "id": "43856e80e26bba4d94f6e54934dd6e4b47c1f95b", "size": "972", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/ShopBundle/Controller/WalletController.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "3605" }, { "name": "Batchfile", "bytes": "38" }, { "name": "CSS", "bytes": "131254" }, { "name": "HTML", "bytes": "18099" }, { "name": "JavaScript", "bytes": "30409" }, { "name": "PHP", "bytes": "81830" }, { "name": "Shell", "bytes": "587" } ], "symlink_target": "" }
package client

import (
	"bytes"
	"context"
	"io"
	"net/http"
	"net/url"

	goahttp "goa.design/goa/v3/http"
	calc "goa.design/plugins/v3/zaplogger/examples/calc/gen/calc"
)

// BuildAddRequest instantiates a HTTP request object with method and path set
// to call the "calc" service "add" endpoint
func (c *Client) BuildAddRequest(ctx context.Context, v interface{}) (*http.Request, error) {
	var (
		a int
		b int
	)
	{
		// The generic payload must be the generated *calc.AddPayload;
		// anything else is a caller bug reported as ErrInvalidType.
		p, ok := v.(*calc.AddPayload)
		if !ok {
			return nil, goahttp.ErrInvalidType("calc", "add", "*calc.AddPayload", v)
		}
		a = p.A
		b = p.B
	}
	// Both operands are encoded into the URL path (GET request, no body).
	u := &url.URL{Scheme: c.scheme, Host: c.host, Path: AddCalcPath(a, b)}
	req, err := http.NewRequest("GET", u.String(), nil)
	if err != nil {
		return nil, goahttp.ErrInvalidURL("calc", "add", u.String(), err)
	}
	if ctx != nil {
		req = req.WithContext(ctx)
	}

	return req, nil
}

// DecodeAddResponse returns a decoder for responses returned by the calc add
// endpoint. restoreBody controls whether the response body should be restored
// after having been read.
func DecodeAddResponse(decoder func(*http.Response) goahttp.Decoder, restoreBody bool) func(*http.Response) (interface{}, error) {
	return func(resp *http.Response) (interface{}, error) {
		if restoreBody {
			// Buffer the entire body so it can be decoded here, then
			// re-install a fresh reader (via defer) for the caller.
			b, err := io.ReadAll(resp.Body)
			if err != nil {
				return nil, err
			}
			resp.Body = io.NopCloser(bytes.NewBuffer(b))
			defer func() {
				resp.Body = io.NopCloser(bytes.NewBuffer(b))
			}()
		} else {
			defer resp.Body.Close()
		}
		switch resp.StatusCode {
		case http.StatusOK:
			// Success: the response body is the bare integer result.
			var (
				body int
				err  error
			)
			err = decoder(resp).Decode(&body)
			if err != nil {
				return nil, goahttp.ErrDecodingError("calc", "add", err)
			}
			return body, nil
		default:
			// Unexpected status: include the raw body text for diagnostics.
			body, _ := io.ReadAll(resp.Body)
			return nil, goahttp.ErrInvalidResponse("calc", "add", resp.StatusCode, string(body))
		}
	}
}
{ "content_hash": "09f2cc2e2ad4c166977175752cdf29f4", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 130, "avg_line_length": 25.16216216216216, "alnum_prop": 0.6595059076262084, "repo_name": "goadesign/plugins", "id": "f332702eb56f575a19bf0326041907cddbee9bb0", "size": "2099", "binary": false, "copies": "1", "ref": "refs/heads/v3", "path": "zaplogger/examples/calc/gen/http/calc/client/encode_decode.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "165642" }, { "name": "Makefile", "bytes": "7723" } ], "symlink_target": "" }
// Borland C++ Builder // Copyright (c) 1995, 2002 by Borland Software Corporation // All rights reserved // (DO NOT EDIT: machine generated header) 'ElTreeMemoEdit.pas' rev: 6.00 #ifndef ElTreeMemoEditHPP #define ElTreeMemoEditHPP #pragma delphiheader begin #pragma option push -w- #pragma option push -Vx #include <ElACtrls.hpp> // Pascal unit #include <ElHeader.hpp> // Pascal unit #include <ElTree.hpp> // Pascal unit #include <Types.hpp> // Pascal unit #include <Classes.hpp> // Pascal unit #include <SysUtils.hpp> // Pascal unit #include <Forms.hpp> // Pascal unit #include <Controls.hpp> // Pascal unit #include <Messages.hpp> // Pascal unit #include <Windows.hpp> // Pascal unit #include <SysInit.hpp> // Pascal unit #include <System.hpp> // Pascal unit //-- user supplied ----------------------------------------------------------- namespace Eltreememoedit { //-- type declarations ------------------------------------------------------- class DELPHICLASS TElTreeInplaceMemoEdit; class PASCALIMPLEMENTATION TElTreeInplaceMemoEdit : public Eltree::TElTreeInplaceEditor { typedef Eltree::TElTreeInplaceEditor inherited; private: Classes::TWndMethod SaveWndProc; void __fastcall EditorWndProc(Messages::TMessage &Message); protected: Elactrls::TElAdvancedMemo* FEditor; virtual void __fastcall DoStartOperation(void); virtual void __fastcall DoStopOperation(bool Accepted); virtual bool __fastcall GetVisible(void); virtual void __fastcall TriggerAfterOperation(bool &Accepted, bool &DefaultConversion); virtual void __fastcall TriggerBeforeOperation(bool &DefaultConversion); virtual void __fastcall SetEditorParent(void); public: __fastcall virtual TElTreeInplaceMemoEdit(Classes::TComponent* AOwner); __fastcall virtual ~TElTreeInplaceMemoEdit(void); __property Elactrls::TElAdvancedMemo* Editor = {read=FEditor}; }; //-- var, const, procedure --------------------------------------------------- } /* namespace Eltreememoedit */ using namespace Eltreememoedit; #pragma option pop // -w- 
#pragma option pop // -Vx #pragma delphiheader end. //-- end unit ---------------------------------------------------------------- #endif // ElTreeMemoEdit
{ "content_hash": "a7acee906ad6cddd920ad9b32a24f358", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 88, "avg_line_length": 33.55384615384615, "alnum_prop": 0.6808803301237965, "repo_name": "OLR-xray/XRay-NEW", "id": "0d8ae64811e637becf5a55323c7e6ad80b67052a", "size": "2181", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "SDK/components/ElPack/Code/Source/ElTreeMemoEdit.hpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "62738" }, { "name": "Batchfile", "bytes": "7939" }, { "name": "C", "bytes": "24706439" }, { "name": "C++", "bytes": "42161717" }, { "name": "Groff", "bytes": "287360" }, { "name": "HTML", "bytes": "67830" }, { "name": "Lua", "bytes": "96997" }, { "name": "Makefile", "bytes": "16534" }, { "name": "Objective-C", "bytes": "175957" }, { "name": "Pascal", "bytes": "1259032" }, { "name": "Perl", "bytes": "9355" }, { "name": "PostScript", "bytes": "115918" }, { "name": "Shell", "bytes": "962" }, { "name": "TeX", "bytes": "2421274" }, { "name": "xBase", "bytes": "99233" } ], "symlink_target": "" }
layout: post title: young woman and buddha head date: 2015-07-21 22:34:59 summary: categories: drawing --- ![young woman and buddha head](/images/diary/young-woman-and-buddha-head.png "She appeared like a fairy. Like a ghost she was gone.")
{ "content_hash": "c2e99ec47621de9f4c4d52ae3d1fbee0", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 133, "avg_line_length": 37, "alnum_prop": 0.6988416988416989, "repo_name": "gregist/gregist.github.io", "id": "0391e5ea6a0b1353304c6720953f43ae1365496b", "size": "263", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2015-07-21-young-woman-and-buddha-head.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "25750" }, { "name": "HTML", "bytes": "25271" }, { "name": "JavaScript", "bytes": "1409" }, { "name": "Python", "bytes": "1873" }, { "name": "Ruby", "bytes": "6612" } ], "symlink_target": "" }
ACCEPTED #### According to Index Fungorum #### Published in null #### Original name Hyaloderma perpusillum Speg. ### Remarks null
{ "content_hash": "1acdc3da83c7f042624e86ae0487eeeb", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 28, "avg_line_length": 10.23076923076923, "alnum_prop": 0.7142857142857143, "repo_name": "mdoering/backbone", "id": "34d58e5ae9eb1b06731301f6540a65b86f0b950e", "size": "194", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Dothideomycetes/Pleosporales/Parodiellaceae/Pseudomeliola/Pseudomeliola perpusilla/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
// Copyright (c) 2001, 2002 Per M.A. Bothner and Brainfood Inc. // This is free software; for terms and warranty disclaimer see ./COPYING. package gnu.kawa.functions; import gnu.lists.*; import gnu.mapping.*; import gnu.bytecode.*; import gnu.expr.*; public class AppendValues extends MethodProc implements Inlineable { public static final AppendValues appendValues = new AppendValues(); public AppendValues () { super(); setProperty(Procedure.validateApplyKey, "gnu.kawa.functions.CompileMisc:validateApplyAppendValues"); } public void apply (CallContext ctx) { Object endMarker = Special.dfault; for (;;) { Object arg = ctx.getNextArg(endMarker); if (arg == endMarker) break; if (arg instanceof Consumable) ((Consumable) arg).consume(ctx.consumer); else ctx.writeValue(arg); } } public void compile (ApplyExp exp, Compilation comp, Target target) { Expression[] args = exp.getArgs(); int nargs = args.length; if (target instanceof ConsumerTarget || target instanceof IgnoreTarget) { for (int i = 0; i < nargs; i++) args[i].compileWithPosition(comp, target); } else { ConsumerTarget.compileUsingConsumer(exp, comp, target); /* CodeAttr code = comp.getCode(); Scope scope = code.pushScope(); Variable values = scope.addVariable(code, comp.typeValues, null); ConsumerTarget ctarget = new ConsumerTarget(values); code.emitInvokeStatic(comp.typeValues.getDeclaredMethod("make", 0)); code.emitStore(values); for (int i = 0; i < nargs; i++) args[i].compile(comp, ctarget); code.emitLoad(values); code.popScope(); target.compileFromStack(comp, Compilation.typeValues); */ } } public Type getReturnType (Expression[] args) { return Compilation.typeObject; } }
{ "content_hash": "7e851ade40d587c59a46fe9cc25e374e", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 79, "avg_line_length": 26.691176470588236, "alnum_prop": 0.6892561983471074, "repo_name": "thequixotic/ai2-kawa", "id": "8cff80d93bd315983466f6eec11aa90e3ee31af3", "size": "1815", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "gnu/kawa/functions/AppendValues.java", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "25294" }, { "name": "Common Lisp", "bytes": "814" }, { "name": "Emacs Lisp", "bytes": "264128" }, { "name": "HTML", "bytes": "17724" }, { "name": "Java", "bytes": "3992295" }, { "name": "KRL", "bytes": "223" }, { "name": "Scheme", "bytes": "815034" }, { "name": "Shell", "bytes": "29032" }, { "name": "XQuery", "bytes": "2330" }, { "name": "XSLT", "bytes": "889" } ], "symlink_target": "" }