text
stringlengths
2
1.04M
meta
dict
<?xml version="1.0" encoding="UTF-8"?> <!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.apache.camel</groupId> <artifactId>examples</artifactId> <version>2.16.1-SNAPSHOT</version> </parent> <artifactId>camel-example-twitter-websocket</artifactId> <packaging>jar</packaging> <name>Camel :: Example :: Twitter WebSocket</name> <description>An example that pushes new tweets to a web page using web-socket</description> <dependencies> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-core</artifactId> </dependency> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-twitter</artifactId> </dependency> <dependency> <groupId>org.apache.camel</groupId> <artifactId>camel-websocket</artifactId> </dependency> <!-- logging to the console --> <dependency> <groupId>org.slf4j</groupId> <artifactId>slf4j-log4j12</artifactId> </dependency> <dependency> <groupId>log4j</groupId> <artifactId>log4j</artifactId> </dependency> </dependencies> <build> <plugins> <!-- Allows the example to be run via 'mvn compile 
exec:java' --> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>exec-maven-plugin</artifactId> <configuration> <mainClass>org.apache.camel.example.websocket.CamelTwitterWebSocketMain</mainClass> <includePluginDependencies>false</includePluginDependencies> </configuration> </plugin> </plugins> </build> </project>
{ "content_hash": "05cd16d56d80add075eea875921cfeda", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 201, "avg_line_length": 34.693333333333335, "alnum_prop": 0.6933128362797848, "repo_name": "joakibj/camel", "id": "4d996cf9efdc01710d9d5d1a1b5095c51a6a28ff", "size": "2602", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "examples/camel-example-twitter-websocket/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "106" }, { "name": "CSS", "bytes": "18641" }, { "name": "Eagle", "bytes": "2898" }, { "name": "Elm", "bytes": "5970" }, { "name": "Groovy", "bytes": "40961" }, { "name": "HTML", "bytes": "173627" }, { "name": "Java", "bytes": "47614796" }, { "name": "JavaScript", "bytes": "88124" }, { "name": "Protocol Buffer", "bytes": "578" }, { "name": "Python", "bytes": "36" }, { "name": "Ruby", "bytes": "4802" }, { "name": "Scala", "bytes": "321833" }, { "name": "Shell", "bytes": "8819" }, { "name": "Tcl", "bytes": "4974" }, { "name": "XQuery", "bytes": "546" }, { "name": "XSLT", "bytes": "287663" } ], "symlink_target": "" }
<?php
namespace gossi\trixionary\serializer\base;

use gossi\trixionary\model\Group;
use gossi\trixionary\model\Object;
use gossi\trixionary\model\Position;
use gossi\trixionary\model\Skill;
use gossi\trixionary\serializer\TypeInferencer;
use keeko\framework\utils\HydrateUtils;
use Tobscure\JsonApi\Collection;
use Tobscure\JsonApi\Relationship;

/**
 * JSON-API serialization helpers for the Sport model (generated base trait).
 *
 * NOTE(review): `Object` is a reserved word from PHP 7.2 onward; the imported
 * model class name may need renaming on newer PHP runtimes — confirm.
 */
trait SportSerializerTrait {

	/**
	 * Relationship key => singular accessor suffix.
	 */
	private $methodNames = [
		'objects' => 'Object',
		'positions' => 'Position',
		'skills' => 'Skill',
		'groups' => 'Group'
	];

	/**
	 * Relationship key => plural accessor suffix.
	 */
	private $methodPluralNames = [
		'objects' => 'Objects',
		'positions' => 'Positions',
		'skills' => 'Skills',
		'groups' => 'Groups'
	];

	/**
	 * Builds the JSON-API attributes array from the model's getters.
	 *
	 * @param mixed $model
	 * @param array $fields
	 */
	public function getAttributes($model, array $fields = null) {
		return [
			'title' => $model->getTitle(),
			'slug' => $model->getSlug(),
			'athlete-label' => $model->getAthleteLabel(),
			'object-slug' => $model->getObjectSlug(),
			'object-label' => $model->getObjectLabel(),
			'object-plural-label' => $model->getObjectPluralLabel(),
			'skill-slug' => $model->getSkillSlug(),
			'skill-label' => $model->getSkillLabel(),
			'skill-plural-label' => $model->getSkillPluralLabel(),
			'skill-picture-url' => $model->getSkillPictureUrl(),
			'group-slug' => $model->getGroupSlug(),
			'group-label' => $model->getGroupLabel(),
			'group-plural-label' => $model->getGroupPluralLabel(),
			'transition-label' => $model->getTransitionLabel(),
			'transition-plural-label' => $model->getTransitionPluralLabel(),
			'transitions-slug' => $model->getTransitionsSlug(),
			'position-slug' => $model->getPositionSlug(),
			'position-label' => $model->getPositionLabel(),
			'feature-composition' => $model->getFeatureComposition(),
			'feature-tester' => $model->getFeatureTester(),
			'is-default' => $model->getIsDefault()
		];
	}

	/**
	 * Lists every serializable field name.
	 */
	public function getFields() {
		return ['title', 'slug', 'athlete-label', 'object-slug', 'object-label', 'object-plural-label', 'skill-slug', 'skill-label', 'skill-plural-label', 'skill-picture-url', 'group-slug', 'group-label', 'group-plural-label', 'transition-label', 'transition-plural-label', 'transitions-slug', 'position-slug', 'position-label', 'feature-composition', 'feature-tester', 'is-default'];
	}

	/**
	 * Returns the model's id, or null when no model is given.
	 *
	 * @param mixed $model
	 * @return string
	 */
	public function getId($model) {
		return $model === null ? null : $model->getId();
	}

	/**
	 * Maps each relationship key to the related serializer's type string.
	 */
	public function getRelationships() {
		return [
			'objects' => Object::getSerializer()->getType(null),
			'positions' => Position::getSerializer()->getType(null),
			'skills' => Skill::getSerializer()->getType(null),
			'groups' => Group::getSerializer()->getType(null)
		];
	}

	/**
	 * Lists every field that may be used for sorting.
	 */
	public function getSortFields() {
		return ['title', 'slug', 'athlete-label', 'object-slug', 'object-label', 'object-plural-label', 'skill-slug', 'skill-label', 'skill-plural-label', 'skill-picture-url', 'group-slug', 'group-label', 'group-plural-label', 'transition-label', 'transition-plural-label', 'transitions-slug', 'position-slug', 'position-label', 'feature-composition', 'feature-tester', 'is-default'];
	}

	/**
	 * Returns the JSON-API resource type for Sport.
	 *
	 * @param mixed $model
	 * @return string
	 */
	public function getType($model) {
		return 'gossi.trixionary/sport';
	}

	/**
	 * Builds the `groups` to-many relationship with a self link.
	 *
	 * @param mixed $model
	 * @return Relationship
	 */
	public function groups($model) {
		$getter = 'get' . $this->getCollectionMethodPluralName('groups');
		$rel = new Relationship(new Collection($model->$getter(), Group::getSerializer()));
		return $this->addRelationshipSelfLink($rel, $model, 'group');
	}

	/**
	 * Copies the payload's attributes onto the model.
	 *
	 * @param mixed $model
	 * @param mixed $data
	 * @return mixed The model
	 */
	public function hydrate($model, $data) {
		// attributes
		$attributes = isset($data['attributes']) ? $data['attributes'] : [];
		$model = HydrateUtils::hydrate($attributes, $model, ['id', 'title', 'slug', 'athlete-label', 'object-slug', 'object-label', 'object-plural-label', 'skill-slug', 'skill-label', 'skill-plural-label', 'skill-picture-url', 'group-slug', 'group-label', 'group-plural-label', 'transition-label', 'transition-plural-label', 'transitions-slug', 'position-slug', 'position-label', 'feature-composition', 'feature-tester', 'is-default']);

		// relationships
		//$this->hydrateRelationships($model, $data);

		return $model;
	}

	/**
	 * Builds the `objects` to-many relationship with a self link.
	 *
	 * @param mixed $model
	 * @return Relationship
	 */
	public function objects($model) {
		$getter = 'get' . $this->getCollectionMethodPluralName('objects');
		$rel = new Relationship(new Collection($model->$getter(), Object::getSerializer()));
		return $this->addRelationshipSelfLink($rel, $model, 'object');
	}

	/**
	 * Builds the `positions` to-many relationship with a self link.
	 *
	 * @param mixed $model
	 * @return Relationship
	 */
	public function positions($model) {
		$getter = 'get' . $this->getCollectionMethodPluralName('positions');
		$rel = new Relationship(new Collection($model->$getter(), Position::getSerializer()));
		return $this->addRelationshipSelfLink($rel, $model, 'position');
	}

	/**
	 * Builds the `skills` to-many relationship with a self link.
	 *
	 * @param mixed $model
	 * @return Relationship
	 */
	public function skills($model) {
		$getter = 'get' . $this->getCollectionMethodPluralName('skills');
		$rel = new Relationship(new Collection($model->$getter(), Skill::getSerializer()));
		return $this->addRelationshipSelfLink($rel, $model, 'skill');
	}

	/**
	 * Decorates a relationship with its self link (implemented by the host class).
	 *
	 * @param Relationship $relationship
	 * @param mixed $model
	 * @param string $related
	 * @return Relationship
	 */
	abstract protected function addRelationshipSelfLink(Relationship $relationship, $model, $related);

	/**
	 * Resolves the singular accessor suffix for a relationship key, or null.
	 *
	 * @param mixed $relatedName
	 */
	protected function getCollectionMethodName($relatedName) {
		return isset($this->methodNames[$relatedName]) ? $this->methodNames[$relatedName] : null;
	}

	/**
	 * Resolves the plural accessor suffix for a relationship key, or null.
	 *
	 * @param mixed $relatedName
	 */
	protected function getCollectionMethodPluralName($relatedName) {
		return isset($this->methodPluralNames[$relatedName]) ? $this->methodPluralNames[$relatedName] : null;
	}

	/**
	 * Shared TypeInferencer singleton.
	 */
	protected function getTypeInferencer() {
		return TypeInferencer::getInstance();
	}
}
{ "content_hash": "470034197c3dccbad4782147e4ccf645", "timestamp": "", "source": "github", "line_count": 198, "max_line_length": 427, "avg_line_length": 31.045454545454547, "alnum_prop": 0.6720351390922401, "repo_name": "gossi/trixionary", "id": "feb1c5e6acb6576b366f90933613dc3039b09553", "size": "6147", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/serializer/base/SportSerializerTrait.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "3872582" } ], "symlink_target": "" }
from vitrage.common.constants import VertexProperties as VProps


class PropsConverter(object):
    """Translate legacy (un-prefixed) property keys to Vitrage names.

    Keys not present in ``PROPS_CONVERSION`` pass through unchanged.
    """

    # Legacy key -> canonical VITRAGE_* property name.
    PROPS_CONVERSION = {
        'category': VProps.VITRAGE_CATEGORY,
        'type': VProps.VITRAGE_TYPE,
        'resource_id': VProps.VITRAGE_RESOURCE_ID,
        'sample_timestamp': VProps.VITRAGE_SAMPLE_TIMESTAMP,
        'is_deleted': VProps.VITRAGE_IS_DELETED,
        'is_placeholder': VProps.VITRAGE_IS_PLACEHOLDER,
        'aggregated_state': VProps.VITRAGE_AGGREGATED_STATE,
        'operational_state': VProps.VITRAGE_OPERATIONAL_STATE,
        'aggregated_severity': VProps.VITRAGE_AGGREGATED_SEVERITY,
        'operational_severity': VProps.VITRAGE_OPERATIONAL_SEVERITY
    }

    @classmethod
    def convert_props_with_set(cls, properties):
        """Convert an iterable of ``(key, value)`` pairs.

        :param properties: iterable of 2-tuples ``(key, value)``
        :returns: set of ``(converted_key, value)`` tuples
        """
        # dict.get(key, key) returns the mapped name, or the key unchanged.
        return {(cls.PROPS_CONVERSION.get(key, key), value)
                for key, value in properties}

    @classmethod
    def convert_props_with_dictionary(cls, properties):
        """Convert the keys of a properties dict.

        :param properties: mapping of ``key -> value``
        :returns: new dict with converted keys and the same values
        """
        return {cls.PROPS_CONVERSION.get(key, key): value
                for key, value in properties.items()}
{ "content_hash": "c7d5ac9034726f43cfb9bd5ad9aee206", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 67, "avg_line_length": 39.2, "alnum_prop": 0.6574344023323615, "repo_name": "openstack/vitrage", "id": "c18dda51d93a0335f3a5b23dd1d454c01729cfe2", "size": "1945", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vitrage/evaluator/template_loading/props_converter.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "26541" }, { "name": "Mako", "bytes": "896" }, { "name": "Python", "bytes": "2074427" }, { "name": "Shell", "bytes": "17668" } ], "symlink_target": "" }
/* IBM_PROLOG_BEGIN_TAG */ /* This is an automatically generated prolog. */ /* */ /* $Source: src/import/generic/memory/lib/dimm/ddr5/ddr5_mr12.H $ */ /* */ /* OpenPOWER HostBoot Project */ /* */ /* Contributors Listed Below - COPYRIGHT 2022 */ /* [+] International Business Machines Corp. */ /* */ /* */ /* Licensed under the Apache License, Version 2.0 (the "License"); */ /* you may not use this file except in compliance with the License. */ /* You may obtain a copy of the License at */ /* */ /* http://www.apache.org/licenses/LICENSE-2.0 */ /* */ /* Unless required by applicable law or agreed to in writing, software */ /* distributed under the License is distributed on an "AS IS" BASIS, */ /* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or */ /* implied. See the License for the specific language governing */ /* permissions and limitations under the License. */ /* */ /* IBM_PROLOG_END_TAG */ // EKB-Mirror-To: hostboot /// /// @file ddr5_mr12.H /// @brief Run and manage the DDR5 MR12 loading /// // *HWP HWP Owner: Stephen Glancy <sglancy@us.ibm.com> // *HWP HWP Backup: Louis Stermole <stermole@us.ibm.com> // *HWP Team: Memory // *HWP Level: 3 // *HWP Consumed by: FSP:HB #ifndef _GENERIC_DDR5_MR12_H_ #define _GENERIC_DDR5_MR12_H_ #include <fapi2.H> #include <mss_generic_attribute_getters.H> #include <mss_generic_attribute_setters.H> #include <generic/memory/lib/utils/shared/mss_generic_consts.H> #include <generic/memory/lib/utils/c_str.H> #include <generic/memory/lib/utils/mss_rank.H> #include <generic/memory/lib/utils/mss_generic_check.H> #include <generic/memory/lib/utils/num.H> namespace mss { namespace ddr5 { /// /// @brief Data structure for DDR5 MR12 /// @tparam MC the memory controller type /// template<mss::mc_type MC> class mr12_data { public: // Needed as we need to know what MR for the CCS instruction created by the lab tooling static constexpr uint64_t iv_mr = 12; /// /// @brief mr12_data ctor /// @param[in] a fapi2::TARGET_TYPE_DIMM target 
/// @param[in,out] fapi2::ReturnCode FAPI2_RC_SUCCESS iff ok /// mr12_data( const fapi2::Target<fapi2::TARGET_TYPE_DIMM>& i_target, fapi2::ReturnCode& io_rc ) { if (io_rc != fapi2::FAPI2_RC_SUCCESS) { return; } FAPI_TRY( mss::attr::get_ddr5_dram_vrefcs(i_target, iv_vrefcs_value), "Error in ddr5::mr12_data()" ); io_rc = fapi2::FAPI2_RC_SUCCESS; return; fapi_try_exit: io_rc = fapi2::current_err; FAPI_ERR(TARGTIDFORMAT " unable to get attributes for ddr5::mr12", TARGTID); return; } /// /// @brief Default constructor /// @note Default constructor is defined to allow for the use of STL data structures /// mr12_data() = default; /// /// @brief Checks to ensure the DRAM is in bounds /// @param[in] a fapi2::TARGET_TYPE_DIMM target /// @param[in] i_dram the value to check /// @return FAPI2_RC_SUCCESS if and only if ok /// fapi2::ReturnCode check_dram(const fapi2::Target<fapi2::TARGET_TYPE_DIMM>& i_target, const uint8_t i_dram) const { constexpr uint64_t MAX = 19; FAPI_ASSERT( (i_dram <= MAX), fapi2::MSS_DDR5_MR_BAD_DRAM() .set_DIMM_IN_ERROR(i_target) .set_MAX(MAX) .set_MR(12) .set_DRAM(i_dram), TARGTIDFORMAT " DDR5 MR12. Bad dram input: %u <= %u", TARGTID, i_dram, MAX); return fapi2::FAPI2_RC_SUCCESS; fapi_try_exit: return fapi2::current_err; } /// /// @brief Checks to ensure the VREF for a specific rank and DRAM is in bounds /// @param[in] a fapi2::TARGET_TYPE_DIMM target /// @param[in] i_rank the rank /// @param[in] i_dram the dram /// @param[in] i_dram the vref value to be checked /// @return FAPI2_RC_SUCCESS if and only if ok /// fapi2::ReturnCode check_vref_value(const fapi2::Target<fapi2::TARGET_TYPE_DIMM>& i_target, const uint64_t i_rank, const uint8_t i_dram, const uint8_t i_vref) const { constexpr uint64_t MAX = 0b01111101; FAPI_ASSERT( (i_vref <= MAX), fapi2::MSS_DDR5_MR12_BAD_VREFCS() .set_DIMM_IN_ERROR(i_target) .set_MAX(MAX) .set_RANK(i_rank) .set_DRAM(i_dram), TARGTIDFORMAT " DDR5 MR12. rank%u, dram:%u. 
Bad VREFCS input: %u", TARGTID, i_rank, i_dram, i_vref ); return fapi2::FAPI2_RC_SUCCESS; fapi_try_exit: return fapi2::current_err; } /// /// @brief Assembles the MR based upon the passed in rank info and DRAM number /// @param[in] i_rank_info the rank information class /// @param[out] o_mr_data the mode register OP data in [7:0] format /// @param[in] i_dram_number the DRAM number - not used for MR12 /// @return FAPI2_RC_SUCCESS if and only if ok /// @note DRAM number is not used for MR12 but is kept to keep the interface common across all MR /// fapi2::ReturnCode assemble_data(const mss::rank::info<MC>& i_rank_info, uint8_t& o_mr_data, const uint8_t i_dram_number = 0) const { FAPI_TRY(check_dram(i_rank_info.get_dimm_target(), i_dram_number)); // Note: not checking the rank input as it is already checked by the rank info class FAPI_TRY(check_vref_value(i_rank_info.get_dimm_target(), i_rank_info.get_dimm_rank(), i_dram_number, iv_vrefcs_value[i_rank_info.get_dimm_rank()][i_dram_number])); // Check DRAM checks the boundaries for the array bounds below // Linux on power compiles do not acknowledge that the assert is doing this check // As such, using pragmas to avoid the warning causing the error #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Warray-bounds" o_mr_data = iv_vrefcs_value[i_rank_info.get_dimm_rank()][i_dram_number]; #pragma GCC diagnostic pop fapi_try_exit: return fapi2::current_err; } /// /// @brief Displays the information that makes up the MR /// @param[in] i_rank_info the rank on which to operate /// @param[in] i_dram_number the DRAM on which to operate - not used for MR12 but included to keep the API common /// @return FAPI2_RC_SUCCESS if and only if ok /// fapi2::ReturnCode display(const mss::rank::info<MC>& i_rank_info, const uint8_t i_dram_number = 0) const { FAPI_TRY(check_dram(i_rank_info.get_dimm_target(), i_dram_number)); // Note: not checking the rank input as it is already checked by the rank info class 
this->display(i_rank_info.get_dimm_target(), i_rank_info.get_dimm_rank(), i_dram_number); return fapi2::FAPI2_RC_SUCCESS; fapi_try_exit: return fapi2::current_err; } /// /// @brief Reads in the MR information into this data class /// @param[in] i_rank_info the rank on which to operate - unused for MR12 but keeping to match existing API /// @param[in] i_mr_data the mode register OP data in [7:0] format /// @param[in] i_dram_number the DRAM on which to operate - not used for MR12 but included to keep the API common /// @return FAPI2_RC_SUCCESS if and only if ok /// @note This can be used for decoding mode register reads /// fapi2::ReturnCode read_from_data(const mss::rank::info<MC>& i_rank_info, const uint8_t i_mr_data, const uint8_t i_dram_number = 0) { FAPI_TRY(check_dram(i_rank_info.get_dimm_target(), i_dram_number)); // Note: not checking the rank input as it is already checked by the rank info class FAPI_TRY(check_vref_value(i_rank_info.get_dimm_target(), i_rank_info.get_dimm_rank(), i_dram_number, i_mr_data)); // Check DRAM checks the boundaries for the array bounds below // Linux on power compiles do not acknowledge that the assert is doing this check // As such, using pragmas to avoid the warning causing the error #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Warray-bounds" iv_vrefcs_value[i_rank_info.get_dimm_rank()][i_dram_number] = i_mr_data; #pragma GCC diagnostic pop fapi_try_exit: return fapi2::current_err; } /// /// @brief Sets the attribute for this MR /// @param[in] a fapi2::TARGET_TYPE_DIMM target /// @return FAPI2_RC_SUCCESS if and only if ok /// fapi2::ReturnCode set_attribute(const fapi2::Target<fapi2::TARGET_TYPE_DIMM>& i_target) { return mss::attr::set_ddr5_dram_vrefcs(i_target, iv_vrefcs_value); } // All 8 bits are used, no need for bit enumerators uint8_t iv_vrefcs_value[mr::ATTR_RANKS][mr::ATTR_DRAM] = {}; private: /// /// @brief Displays the information that makes up the MR /// @param[in] a fapi2::TARGET_TYPE_DIMM target /// 
@param[in] i_rank the input rank to print /// @param[in] i_dram_number the input DRAM number /// void display(const fapi2::Target<fapi2::TARGET_TYPE_DIMM>& i_target, const uint64_t i_rank, const uint8_t i_dram_number) const { // Check DRAM checks the boundaries for the array bounds below // Linux on power compiles do not acknowledge that the assert is doing this check // As such, using pragmas to avoid the warning causing the error #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Warray-bounds" FAPI_INF(TARGTIDFORMAT " DDR5 MR10. rank%u, DRAM%u VREFCS: 0x%02", TARGTID, i_rank, i_dram_number, iv_vrefcs_value[i_rank][i_dram_number]); #pragma GCC diagnostic pop } }; } // ns ddr5 } // ns mss #endif
{ "content_hash": "80bbe3d05cf7b6a9dc073612f4a81ead", "timestamp": "", "source": "github", "line_count": 261, "max_line_length": 126, "avg_line_length": 44.559386973180075, "alnum_prop": 0.5307824591573517, "repo_name": "open-power/hostboot", "id": "ab916aaa7ea03c1349549dfa48cc7912f5c04199", "size": "11630", "binary": false, "copies": "1", "ref": "refs/heads/master-p10", "path": "src/import/generic/memory/lib/dimm/ddr5/ddr5_mr12.H", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "84276" }, { "name": "C", "bytes": "29945981" }, { "name": "C++", "bytes": "126613976" }, { "name": "CMake", "bytes": "1852" }, { "name": "Lex", "bytes": "8996" }, { "name": "M4", "bytes": "5738" }, { "name": "Makefile", "bytes": "772285" }, { "name": "Meson", "bytes": "23911" }, { "name": "Perl", "bytes": "2605582" }, { "name": "Python", "bytes": "2602753" }, { "name": "Shell", "bytes": "290164" }, { "name": "Tcl", "bytes": "76031" }, { "name": "XSLT", "bytes": "9553" }, { "name": "Yacc", "bytes": "29440" } ], "symlink_target": "" }
package scala.lms
package epfl
package test10

import common._
import internal._
import test1._
import test7.{Print,PrintExp,ScalaGenPrint}
import test7.{ArrayLoops,ArrayLoopsExp,ScalaGenArrayLoops}
import test8._
import util.OverloadHack

import java.io.{PrintWriter,StringWriter,FileOutputStream}
import scala.reflect.SourceContext

// File-diff test suite exercising a forward IR transformation: the graph is
// emitted before and after running MyTransformer over a staged program.
class TestTransform extends FileDiffSuite {

  // Output prefix for the checked files produced by withOutFileChecked.
  val prefix = home + "test-out/epfl/test10-"

  // The staged-program interface: each test program implements `test`.
  trait DSL extends VectorOps with LiftPrimitives with PrimitiveOps with OrderingOps with BooleanOps with LiftVariables with IfThenElse with While with RangeOps with Print {
    def test(x: Rep[Int]): Rep[Unit]
  }

  // IR implementation mix-in; constructing an Impl immediately builds and
  // runs the test pipeline via the Runner created in its body.
  trait Impl extends DSL with VectorExp with PrimitiveOpsExp with OrderingOpsExpOpt with BooleanOpsExp with EqualExpOpt with ArrayMutationExp with IfThenElseFatExp with LoopsFatExp with WhileExpOptSpeculative with StringOpsExp with SeqOpsExp with RangeOpsExp with PrintExp with FatExpressions { self =>
    override val verbosity = 1
    val runner = new Runner { val p: self.type = self }
    runner.run()
  }

  // Scala code generator matching the IR traits mixed into Impl.
  trait Codegen extends ScalaGenVector with ScalaGenArrayMutation with ScalaGenPrimitiveOps with ScalaGenOrderingOps with ScalaGenVariables with ScalaGenEqual with ScalaGenIfThenElse with ScalaGenWhileOptSpeculative with ScalaGenRangeOps with ScalaGenPrint {
    val IR: Impl
  }

  // Drives one run: stage the program, print the graph and generated code,
  // apply MyTransformer, then print the transformed code.
  trait Runner {
    val p: Impl
    def run() = {
      import p.{intTyp,unitTyp}
      // Stage the program with a fresh symbolic input.
      val x = p.fresh[Int]
      val y = p.reifyEffects(p.test(x))

      val codegen = new Codegen { val IR: p.type = p }

      val graph = p.globalDefs
      println("-- full graph")
      graph foreach println

      println("-- before transformation")
      codegen.withStream(new PrintWriter(System.out)) {
        codegen.emitBlock(y)
      }

      val trans = new MyTransformer { val IR: p.type = p }
      try {
        val z = trans.transformBlock(y)
        println("-- after transformation")
        codegen.withStream(new PrintWriter(System.out)) {
          codegen.emitBlock(z)
        }
      } catch {
        // Transformation failures are reported but do not fail staging.
        case ex => println("error: " + ex)
      }
      println("-- done")
    }
  }

  // The transformation under test: swap vector-plus operands, but only for
  // statements encountered inside the then-branch of an if-then-else.
  trait MyTransformer extends ForwardTransformer {
    val IR: Impl
    import IR.{__newVar => _, _}

    // a + b --> b + a, but only in then-branches of an if-then-else
    var isInThenBranch = false

    override def transformStm(stm: Stm): Exp[Any] = stm match {
      case TP(s,VectorPlus(a,b)) if isInThenBranch =>
        println("replacing " + stm)
        vplus(apply(b),apply(a))
      case TP(s,Reflect(IfThenElse(c,a,b), u, es)) =>
        println("encountering if then else " + stm)
        __ifThenElse(apply(c), {
          // Save/restore the flag so nested if-then-else nodes work.
          val saveFlag = isInThenBranch
          isInThenBranch = true
          val r = reflectBlock(a)
          isInThenBranch = saveFlag
          r
        }, {
          reflectBlock(b)
        })(mtype(s.tp),mpos(s.pos))
      case _ => super.transformStm(stm)
    }
  }

  // test simple block transform
  def testTransform1 = withOutFileChecked(prefix+"transform1") {
    trait Prog extends DSL with Impl {
      def test(x: Rep[Int]) = {
        val z = vzeros(100)
        val y = vzeros(100)
        val a = vplus(z,y)
        val b = vplus(z,a)
        print(b)
      }
    }
    new Prog with Impl
  }

  def testTransform2 = withOutFileChecked(prefix+"transform2") {
    trait Prog extends DSL with Impl {
      def test(x: Rep[Int]) = {
        val a = vzeros(100) // will be moved into branches
        val b = vzeros(50)
        val c = vplus(a,b)
        if (x == 0) { // dynamic condition
          print(vlength(c))
        } else {
          print(vlength(c))
        }
      }
    }
    new Prog with Impl
  }

  def testTransform3 = withOutFileChecked(prefix+"transform3") {
    trait Prog extends DSL with Impl {
      def test(x: Rep[Int]) = {
        val a = vzeros(100) // will be moved into branches
        val b = vzeros(50)
        val c = vplus(a,b)
        if (x == 0) { // dynamic condition
          print(vlength(c))
        } else {
          print(vlength(c))
        }
        if (x == 1) { // dynamic condition
          print(vlength(c))
        } else {
          print(vlength(c))
        }
      }
    }
    new Prog with Impl
  }
}
{ "content_hash": "e00e7e11625ae21fc86a929cfbfd9b4f", "timestamp": "", "source": "github", "line_count": 162, "max_line_length": 120, "avg_line_length": 26.808641975308642, "alnum_prop": 0.6076444853787705, "repo_name": "cedricbastin/virtualization-lms-core", "id": "bcfb85b2c065f4e3c6247e5d1c9c94a164a8e36c", "size": "4343", "binary": false, "copies": "5", "ref": "refs/heads/develop-1.0.x", "path": "test-src/epfl/test10-transform/TestTransform.scala", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Scala", "bytes": "809916" } ], "symlink_target": "" }
// Public API barrel for the Formio auth package: re-exports the auth
// config, service, components, routes and module from their source files.
export { FormioAuthConfig } from './auth.config';
export { FormioAuthService } from './auth.service';
export { FormioAuthComponent } from './auth.component';
export { FormioAuthLoginComponent } from './login/login.component';
export { FormioAuthRegisterComponent } from './register/register.component';
export { FormioResetPassComponent } from './resetpass/resetpass.component';
export { FormioAuthRoutes } from './auth.routes';
export { FormioAuth } from './auth.module';
{ "content_hash": "a40c44bc3d81daecd3c8166cfd988e32", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 76, "avg_line_length": 59.25, "alnum_prop": 0.7552742616033755, "repo_name": "formio/angular-formio", "id": "ddc0d1ab85190a198baf0daf99d2d15c1d2c947c", "size": "474", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "projects/angular-formio/auth/src/index.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1379" }, { "name": "Dockerfile", "bytes": "531" }, { "name": "HTML", "bytes": "14330" }, { "name": "JavaScript", "bytes": "140094" }, { "name": "TypeScript", "bytes": "105126" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.example.userlogin" android:versionCode="1" android:versionName="1.0" > <uses-sdk android:minSdkVersion="8" android:targetSdkVersion="18" /> <uses-permission android:name="android.permission.INTERNET" /> <application android:allowBackup="true" android:icon="@drawable/ic_launcher" android:label="@string/app_name" android:theme="@style/AppTheme" > <activity android:name="com.example.userlogin.MainActivity" android:label="@string/app_name" > <intent-filter> <action android:name="android.intent.action.MAIN" /> <category android:name="android.intent.category.LAUNCHER" /> </intent-filter> </activity> <activity android:name="com.example.userlogin.ListActivity" android:label="@string/title_activity_second" > </activity> <activity android:name="com.example.userlogin.RegisterActivity" android:label="@string/title_activity_login" > </activity> </application> </manifest>
{ "content_hash": "44ac2ffafab43417befbbd45e062afaa", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 76, "avg_line_length": 34.888888888888886, "alnum_prop": 0.6090764331210191, "repo_name": "Javacppc/userlogin", "id": "83e3615229eef061466cd3911b8594c8fb8aeb43", "size": "1256", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "bin/AndroidManifest.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "21074" } ], "symlink_target": "" }
package io.corbel.oauth.api; import io.corbel.lib.token.reader.TokenReader; import io.corbel.lib.ws.api.error.ErrorResponseFactory; import io.corbel.lib.ws.model.Error; import io.corbel.oauth.model.Client; import io.corbel.oauth.model.Role; import io.corbel.oauth.model.User; import io.corbel.oauth.repository.CreateUserException; import io.corbel.oauth.service.ClientService; import io.corbel.oauth.service.UserService; import io.dropwizard.auth.Auth; import javax.validation.Valid; import javax.ws.rs.*; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriInfo; import java.net.URI; import java.net.URISyntaxException; import java.util.Optional; /** * @author Francisco Sanchez */ @Path(ApiVersion.CURRENT + "/user") public class UserResource { private static final String ME = "me"; private final UserService userService; private final ClientService clientService; public UserResource(UserService userService, ClientService clientService) { this.userService = userService; this.clientService = clientService; } @POST @Consumes(MediaType.APPLICATION_JSON) public Response create(@Context UriInfo uriInfo, @Auth Client client, @Valid User user) { try { user.setRole(Role.USER); user.setId(null); String id = userService.createUser(user, client); return Response.created(uriInfo.getAbsolutePathBuilder().path(id).build()).build(); } catch (CreateUserException.DuplicatedUser duplicatedUser) { return ErrorResponseFactory.getInstance().conflict(new Error("entity_exists", "User already exists")); } } @Path("/{id}") @GET @Produces(MediaType.APPLICATION_JSON) public Response get(@PathParam("id") String id, @Auth TokenReader token) { User user = getUserFromIdAliases(id, token); return Response.ok().type(MediaType.APPLICATION_JSON_TYPE).entity(user.getUser()).build(); } @Path("/{id}/profile") @GET @Produces(MediaType.APPLICATION_JSON) public Response getUserProfile(@PathParam("id") String id, @Auth TokenReader token) { 
User user = getUserFromIdAliases(id, token); return Response.ok().type(MediaType.APPLICATION_JSON_TYPE).entity(user.getUserProfile()).build(); } @Path("/{id}/avatar") @GET public Response getAvatar(@PathParam("id") String id, @Auth TokenReader token) { User user = getUserFromIdAliases(id, token); return Optional.ofNullable(user.getAvatarUri()).map(avatarUriAsString -> { try { return new URI(avatarUriAsString); } catch (URISyntaxException e) { return null; } }).map(avatarUri -> Response.temporaryRedirect(avatarUri).build()) .orElseGet(() -> ErrorResponseFactory.getInstance() .notfound(new Error("not_found", "User " + id + " has no avatar."))); } @Path("/{id}") @PUT @Consumes(MediaType.APPLICATION_JSON) public Response update(@PathParam("id") String id, @Auth TokenReader token, User userUpdatedData) { if (userUpdatedData == null) { return ErrorResponseFactory.getInstance().badRequest(new Error("bad_request", "Invalid update data")); } try { // We don't allow updating email validation status via this endpoint Client client = clientService.findByName(token.getInfo().getClientId()) .orElseThrow(() -> new WebApplicationException(ErrorResponseFactory.getInstance().unauthorized())); userUpdatedData.setEmailValidated(null); User user = getUserFromIdAliases(id, token); checkUpdateUserRolePermissions(token.getInfo().getUserId(), userUpdatedData.getRole()); userService.updateUser(user, userUpdatedData, client); return Response.noContent().build(); } catch (CreateUserException.DuplicatedUser duplicatedUser) { return ErrorResponseFactory.getInstance().conflict(new Error("entity_exists", "User already exists")); } } @Path("/{id}/emailConfirmation") @PUT @Consumes(MediaType.APPLICATION_JSON) public Response confirmEmail(@PathParam("id") String id, @Auth TokenReader token) { // Email address is expected in token state. 
String state = token.getInfo().getState(); if (state == null) { return ErrorResponseFactory.getInstance().badRequest(); } userService.confirmEmail(state); return Response.noContent().build(); } @Path("/{id}") @DELETE public Response delete(@PathParam("id") String id, @Auth TokenReader token) { User user = getUserFromIdAliases(id, token); userService.deleteUser(user.getId()); return Response.noContent().build(); } @Path("/{id}/validate") @GET public Response generateValidationEmail(@PathParam("id") String id, @Auth TokenReader token) { User user = getUserFromIdAliases(id, token); return clientService.findByName(token.getInfo().getClientId()).map(client -> { userService.sendValidationEmail(user, client); return Response.ok().build(); }).orElse(ErrorResponseFactory.getInstance().notFound()); } @Path("/resetPassword") @GET public Response generateResetPasswordEmail(@Auth Client client, @QueryParam("email") String email) { userService.sendMailResetPassword(email, client); return Response.noContent().build(); } private User getUserFromIdAliases(String id, TokenReader token) { String userId = token.getInfo().getUserId(); User authenticatedUser = userService.getUser(userId); if (ME.equals(id) || id.equals(userId)) { return authenticatedUser; } User user = userService.getUser(id); if (user != null && authenticatedUser.getRole().canUpdate(user.getRole()) && user.getDomain().equals(authenticatedUser.getDomain())) { return user; } throw new WebApplicationException(ErrorResponseFactory.getInstance().notFound()); } private void checkUpdateUserRolePermissions(String userId, Role role) { if (role != null) { User authenticatedUser = userService.getUser(userId); if (!authenticatedUser.getRole().canChangeRoleTo(role)) { throw new WebApplicationException(ErrorResponseFactory.getInstance().forbidden()); } } } }
{ "content_hash": "74db5c1f5ce02b79e3ad41543a643a4b", "timestamp": "", "source": "github", "line_count": 165, "max_line_length": 119, "avg_line_length": 40.14545454545455, "alnum_prop": 0.6622886473429952, "repo_name": "corbel-platform/corbel", "id": "bc0e3f7e41f5e6d67755c51ec593ba9f61814e74", "size": "6624", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "oauth-server/src/main/java/io/corbel/oauth/api/UserResource.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "1019" }, { "name": "Groovy", "bytes": "30271" }, { "name": "Java", "bytes": "1666221" }, { "name": "Shell", "bytes": "1213" } ], "symlink_target": "" }
{-# LANGUAGE Haskell98, BangPatterns #-} {-# LINE 1 "Data/ByteString/Lazy/Search/KarpRabin.hs" #-} {-# LANGUAGE BangPatterns #-} -- | -- Module : Data.ByteString.Lazy.Search.KarpRabin -- Copyright : (c) 2010 Daniel Fischer -- Licence : BSD3 -- Maintainer : Daniel Fischer <daniel.is.fischer@googlemail.com> -- Stability : Provisional -- Portability : non-portable (BangPatterns) -- -- Simultaneous search for multiple patterns in a lazy 'L.ByteString' -- using the Karp-Rabin algorithm. -- -- A description of the algorithm for a single pattern can be found at -- <http://www-igm.univ-mlv.fr/~lecroq/string/node5.html#SECTION0050>. module Data.ByteString.Lazy.Search.KarpRabin ( -- * Overview -- $overview -- ** Caution -- $caution -- * Function indicesOfAny ) where import qualified Data.ByteString as S import qualified Data.ByteString.Lazy as L import Data.ByteString.Unsafe (unsafeIndex) import qualified Data.IntMap as IM import Data.Array import Data.Array.Base (unsafeAt) import Data.Word (Word8) import Data.Int (Int64) import Data.Bits import Data.List (foldl') -- $overview -- -- The Karp-Rabin algorithm works by calculating a hash of the pattern and -- comparing that hash with the hash of a slice of the target string with -- the same length as the pattern. If the hashes are equal, the slice of the -- target is compared to the pattern character by character (since the hash -- function generally isn't injective). -- -- For a single pattern, this tends to be more efficient than the na&#239;ve -- algorithm, but it cannot compete with algorithms like -- Knuth-Morris-Pratt or Boyer-Moore. -- -- However, the algorithm can be generalised to search for multiple patterns -- simultaneously. If the shortest pattern has length @k@, hash the prefix of -- length @k@ of all patterns and compare the hash of the target's slices of -- length @k@ to them. If there's a match, check whether the slice is part -- of an occurrence of the corresponding pattern. 
-- -- With a hash-function that -- -- * allows to compute the hash of one slice in constant time from the hash -- of the previous slice, the new and the dropped character, and -- -- * produces few spurious matches, -- -- searching for occurrences of any of @n@ patterns has a best-case complexity -- of /O/(@targetLength@ * @lookup n@). The worst-case complexity is -- /O/(@targetLength@ * @lookup n@ * @sum patternLengths@), the average is -- not much worse than the best case. -- -- The functions in this module store the hashes of the patterns in an -- 'IM.IntMap', so the lookup is /O/(@log n@). Re-hashing is done in constant -- time and spurious matches of the hashes /should be/ sufficiently rare. -- The maximal length of the prefixes to be hashed is 32. -- $caution -- -- Unfortunately, the constant factors are high, so these functions are slow. -- Unless the number of patterns to search for is high (larger than 50 at -- least), repeated search for single patterns using Boyer-Moore or DFA and -- manual merging of the indices is faster. /Much/ faster for less than 40 -- or so patterns. -- -- 'indicesOfAny' has the advantage over multiple single-pattern searches that -- it doesn't hold on to large parts of the string (which is likely to happen -- for multiple searches), however, so in contrast to the strict version, it -- may be useful for relatively few patterns already. -- -- Nevertheless, this module seems more of an interesting curiosity than -- anything else. -- | @'indicesOfAny'@ finds all occurrences of any of several non-empty strict -- patterns in a lazy target string. If no non-empty patterns are given, -- the result is an empty list. Otherwise the result list contains -- the pairs of all indices where any of the (non-empty) patterns start -- and the list of all patterns starting at that index, the patterns being -- represented by their (zero-based) position in the pattern list. -- Empty patterns are filtered out before processing begins. 
{-# INLINE indicesOfAny #-} indicesOfAny :: [S.ByteString] -- ^ List of non-empty patterns -> L.ByteString -- ^ String to search -> [(Int64,[Int])] -- ^ List of matches indicesOfAny pats | null nepats = const [] | otherwise = lazyMatcher nepats . L.toChunks where nepats = filter (not . S.null) pats ------------------------------------------------------------------------------ -- Workers -- ------------------------------------------------------------------------------ {-# INLINE rehash1 #-} rehash1 :: Int -> Int -> Word8 -> Word8 -> Int rehash1 out h o n = (h `shiftL` 1 - (fromIntegral o `shiftL` out)) + fromIntegral n {-# INLINE rehash2 #-} rehash2 :: Int -> Int -> Word8 -> Word8 -> Int rehash2 out h o n = (h `shiftL` 2 - (fromIntegral o `shiftL` out)) + fromIntegral n {-# INLINE rehash3 #-} rehash3 :: Int -> Int -> Word8 -> Word8 -> Int rehash3 out h o n = (h `shiftL` 3 - (fromIntegral o `shiftL` out)) + fromIntegral n {-# INLINE rehash4 #-} rehash4 :: Int -> Int -> Word8 -> Word8 -> Int rehash4 out h o n = (h `shiftL` 4 - (fromIntegral o `shiftL` out)) + fromIntegral n lazyMatcher :: [S.ByteString] -> [S.ByteString] -> [(Int64,[Int])] lazyMatcher pats = search 0 hLen S.empty where !hLen = minimum (32 : map S.length pats) !shDi = case 32 `quot` hLen of q | q < 4 -> q | otherwise -> 4 !outS = shDi*hLen !patNum = length pats !patArr = listArray (0, patNum - 1) pats {-# INLINE rehash #-} rehash :: Int -> Word8 -> Word8 -> Int rehash = case shDi of 1 -> rehash1 hLen 2 -> rehash2 outS 3 -> rehash3 outS _ -> rehash4 outS hash :: S.ByteString -> Int hash = S.foldl' (\h w -> (h `shiftL` shDi) + fromIntegral w) 0 . S.take hLen !hashMap = foldl' (\mp (h,i) -> IM.insertWith (flip (++)) h [i] mp) IM.empty $ zip (map hash pats) [0 :: Int .. 
] search _ _ _ [] = [] search !h !rm !prev (!str : rest) | strLen < rm = let !h' = S.foldl' (\o w -> (o `shiftL` 1) + fromIntegral w) h str !prev' = S.append prev str in search h' (rm - strLen) prev' rest | otherwise = let !h' = S.foldl' (\o w -> (o `shiftL` 1) + fromIntegral w) h (S.take rm str) in if S.null prev then noPast 0 rest str h' else past 0 rest prev 0 str rm h' where !strLen = S.length str noPast !prior rest !str hsh = go hsh 0 where !strLen = S.length str !maxIdx = strLen - hLen {-# INLINE strAt #-} strAt !i = unsafeIndex str i go !h sI = case IM.lookup h hashMap of Nothing -> if sI == maxIdx then case rest of [] -> [] (nxt : more) -> let !h' = rehash h (strAt sI) (unsafeIndex nxt 0) !prior' = prior + fromIntegral strLen !prev = S.drop (sI + 1) str in if hLen == 1 then noPast prior' more nxt h' else past prior' more prev 0 nxt 1 h' else go (rehash h (strAt sI) (strAt (sI + hLen))) (sI + 1) Just ps -> let !rst = S.drop sI str !rLen = strLen - sI {-# INLINE hd #-} hd = strAt sI {-# INLINE more #-} more = if sI == maxIdx then case rest of [] -> [] (nxt : fut) -> let !h' = rehash h hd (unsafeIndex nxt 0) !prior' = prior + fromIntegral strLen in if hLen == 1 then noPast prior' fut nxt h' else past prior' fut rst 1 nxt 1 h' else go (rehash h hd (strAt (sI + hLen))) (sI + 1) okay bs | rLen < S.length bs = S.isPrefixOf rst bs && checkFut (S.drop rLen bs) rest | otherwise = S.isPrefixOf bs rst in case filter (okay . 
(patArr `unsafeAt`)) ps of [] -> more qs -> seq (length qs) $ (prior + fromIntegral sI,qs) : more past !prior rest !prev !pI !str !sI !hsh | strLen < 4040 = let !prior' = prior - 1 + fromIntegral (sI - hLen) !curr = S.append (S.drop pI prev) str in noPast prior' rest curr hsh | otherwise = go hsh pI sI where !strLen = S.length str {-# INLINE strAt #-} strAt !i = unsafeIndex str i {-# INLINE prevAt #-} prevAt !i = unsafeIndex prev i go !h !p !s | s == hLen = noPast prior rest str h | otherwise = case IM.lookup h hashMap of Nothing -> let {-# INLINE h' #-} h' = rehash h (prevAt p) (strAt s) in go h' (p + 1) (s + 1) Just ps -> let !prst = S.drop p prev {-# INLINE more #-} more = go (rehash h (prevAt p) (strAt s)) (p + 1) (s + 1) okay bs = checkFut bs (prst : str : rest) in case filter (okay . (unsafeAt patArr)) ps of [] -> more qs -> seq (length qs) $ (prior + fromIntegral (s - hLen), qs) : more {-# INLINE checkFut #-} checkFut :: S.ByteString -> [S.ByteString] -> Bool checkFut _ [] = False checkFut !bs (!h : t) | hLen < S.length bs = S.isPrefixOf h bs && checkFut (S.drop hLen bs) t | otherwise = S.isPrefixOf bs h where !hLen = S.length h
{ "content_hash": "390e898f1a1bcc32d97450af5959d22a", "timestamp": "", "source": "github", "line_count": 253, "max_line_length": 80, "avg_line_length": 41.50197628458498, "alnum_prop": 0.5303809523809524, "repo_name": "phischu/fragnix", "id": "790e6327f8e41f5db89489319a1d77394883c524", "size": "10500", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/packages/application/Data.ByteString.Lazy.Search.KarpRabin.hs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "264094" }, { "name": "EQ", "bytes": "192" }, { "name": "HTML", "bytes": "784" }, { "name": "Haskell", "bytes": "15415812" }, { "name": "Io", "bytes": "10015" }, { "name": "Lex", "bytes": "1519" }, { "name": "Objective-C", "bytes": "14787" }, { "name": "Shell", "bytes": "351" } ], "symlink_target": "" }
<html xmlns:saxon="http://icl.com/saxon"> <head> <link rel="stylesheet" type="text/css" href="doc.css"/> <link rel="stylesheet" type="text/css" href=""/> <meta author="The MathWorks Ltd."/> <meta copyright="2020 The MathWorks Ltd."/> <title>Layout basics</title> </head> <body> <table class="header" width="100%" border="0" cellspacing="0" cellpadding="0"> <tr> <td bgcolor="#e4f0f8"><A href="User_guide.html"><font face="Arial" bgcolor="#e4f0f8" size="+0" underline="0" color="#000000"><b>User_guide</b></font></A></td> <td width="36" bgcolor="#e4f0f8"><A HREF="User_guide1.html"><IMG SRC="Images/leftarrow.png" BORDER="0" ALT="previous page"/></A><A HREF="User_guide1_2.html"><IMG SRC="Images/rightarrow.png" BORDER="0" ALT="next page"/></A></td> </tr> </table> <br clear="all"/> <h2>1.1: Layout basics&nbsp;<a href="User_guide1.html"><img src="Images/uparrow.png" border="0" align="top" alt="Go back up one level"/></a></h2> <p>To see how layouts work, let's use the most basic layout, a horizontal list (or box). We first create a window:</p> <example> <pre style="background-color: #eeeeff; margin-left: 20px; margin-right: 20px"><font color="#000011">f = <a href="matlab:doc figure"><code class="FUNCTION">figure</code></a>();</font></pre> </example> <p>Now let's create the horizontal layout and add it to the figure. Note that in common with other MATLAB graphics objects, one object is added to another by setting the <code>Parent</code> property - this will automatically adjust the list of <code>Children</code> in the parent object. The job of a horizontal box layout is to arrange its contents in a horizontal line, setting the position of each element to best fill the space:</p> <example> <pre style="background-color: #eeeeff; margin-left: 20px; margin-right: 20px"><font color="#000011">layout = <a href="uix.HBox.html"><code class="FUNCTION">uix.HBox</code></a>( 'Parent', f );</font></pre> </example> <p>Nothing's changed! 
That's because the layout is for arranging other user-interface components - it doesn't draw anything itself. Let's add some buttons. Note how after creating each button the existing contents of the box make room for the new addition; we don't need to set the position of any user-interface component!</p> <example> <pre style="background-color: #eeeeff; margin-left: 20px; margin-right: 20px"><font color="#000011"><a href="matlab:doc uicontrol"><code class="FUNCTION">uicontrol</code></a>( 'String', 'Button 1', 'Parent', layout ); <a href="matlab:doc uicontrol"><code class="FUNCTION">uicontrol</code></a>( 'String', 'Button 2', 'Parent', layout ); <a href="matlab:doc uicontrol"><code class="FUNCTION">uicontrol</code></a>( 'String', 'Button 3', 'Parent', layout );</font></pre> <p style="background-color: #ddddee; margin-left: 20px; margin-right: 20px"><font color="#000022"><center><img src="Images/basics_example2.png"/>.<img src="Images/basics_example3.png"/>.<img src="Images/basics_example4.png"/></center></font></p> </example> <p>Other layouts work in exactly the same way, although visually the end-result is quite different:</p> <example> <pre style="background-color: #eeeeff; margin-left: 20px; margin-right: 20px"><font color="#000011">f = <a href="matlab:doc figure"><code class="FUNCTION">figure</code></a>(); layout = <a href="uix.VBox.html"><code class="FUNCTION">uix.VBox</code></a>( 'Parent', f ); <a href="matlab:doc uicontrol"><code class="FUNCTION">uicontrol</code></a>( 'String', 'Button 1', 'Parent', layout ); <a href="matlab:doc uicontrol"><code class="FUNCTION">uicontrol</code></a>( 'String', 'Button 2', 'Parent', layout ); <a href="matlab:doc uicontrol"><code class="FUNCTION">uicontrol</code></a>( 'String', 'Button 3', 'Parent', layout );</font></pre> <p style="background-color: #ddddee; margin-left: 20px; margin-right: 20px"><font color="#000022"><center><img src="Images/basics_example_vbox.png"/></center></font></p> </example> <example> <pre 
style="background-color: #eeeeff; margin-left: 20px; margin-right: 20px"><font color="#000011">f = <a href="matlab:doc figure"><code class="FUNCTION">figure</code></a>(); layout = <a href="uix.TabPanel.html"><code class="FUNCTION">uix.TabPanel</code></a>( 'Parent', f ); <a href="matlab:doc uicontrol"><code class="FUNCTION">uicontrol</code></a>( 'String', 'Button 1', 'Parent', layout ); <a href="matlab:doc uicontrol"><code class="FUNCTION">uicontrol</code></a>( 'String', 'Button 2', 'Parent', layout ); <a href="matlab:doc uicontrol"><code class="FUNCTION">uicontrol</code></a>( 'String', 'Button 3', 'Parent', layout );</font></pre> <p style="background-color: #ddddee; margin-left: 20px; margin-right: 20px"><font color="#000022"><center><img src="Images/basics_example_tab.png"/></center></font></p> </example> <br clear="ALL"/> <table class="footer" width="100%" border="0" cellspacing="0" cellpadding="0"> <tr> <td width="18" height="15" bgcolor="#e4f0f8" align="left"><a href="User_guide1.html"><img src="Images/leftarrow.png" border="0" alt="previous page"/></a></td> <td width="40%" height="15" bgcolor="#e4f0f8" align="left"><a href="User_guide1.html"><font face="arial" bgcolor="#e4f0f8" size="normal" underline="0" color="#000000">Understanding layouts</font></a></td> <td width="20%" height="15" bgcolor="#e4f0f8" align="center"><a href="index.html"><font face="arial" bgcolor="#e4f0f8" size="normal" underline="0" color="#000000">[Top]</font></a></td> <td width="40%" height="15" bgcolor="#e4f0f8" align="right"><a href="User_guide1_2.html"><font face="arial" bgcolor="#e4f0f8" size="normal" underline="0" color="#000000">Types of layout</font></a></td> <td width="18" height="15" bgcolor="#e4f0f8" align="right"><a href="User_guide1_2.html"><img src="Images/rightarrow.png" border="0" alt="next page"/></a></td> </tr> </table> <font face="Arial" bgcolor="#e4f0f8" size="normal" underline="0" color="#000000">&copy; 2020 The MathWorks Ltd</font> <TT>&#149; </TT><a href="matlab: 
termsOfUse">Terms of Use</a> <TT>&#149; </TT><a href="matlab: helpview([matlabroot,'/patents.txt'])">Patents</a> <TT>&#149; </TT><a href="matlab: helpview([matlabroot,'/trademarks.txt'])">Trademarks</a> </body> </html>
{ "content_hash": "442dde780dfd3bdd0a4d4cc61baf3b3b", "timestamp": "", "source": "github", "line_count": 88, "max_line_length": 261, "avg_line_length": 79.19318181818181, "alnum_prop": 0.6091261300043048, "repo_name": "yzontov/pls-da", "id": "f12c0c16329a03d7432fce02644f15b6d6af9a1c", "size": "6969", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "utils/GUI Layout Toolbox/layoutdoc/User_guide1_1.html", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "CSS", "bytes": "1948" }, { "name": "HTML", "bytes": "446187" }, { "name": "MATLAB", "bytes": "755277" } ], "symlink_target": "" }
using System.Windows.Controls; using System.Windows.Input; using Cirrious.MvvmCross.WindowsPhone.Views; using BristolNightlife.Core.ViewModels; namespace BristolNightlife.Phone.Views { public partial class PivotView : MvxPhonePage { public PivotView() { InitializeComponent(); } private void EventsName_OnTap(object sender, GestureEventArgs e) { var listBox = ((ListBox)sender); var eventSummaryViewModel = (EventSummaryViewModel)listBox.SelectedItems[0]; eventSummaryViewModel.GoToEventCommand.Execute(sender); _ClearListItemAfterItHasBeenClicked(listBox); } private static void _ClearListItemAfterItHasBeenClicked(ListBox listBox) { listBox.SelectionMode = SelectionMode.Multiple; listBox.SelectedItems.Clear(); } } }
{ "content_hash": "a948846737eb658e2bea96c7f7b0b705", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 79, "avg_line_length": 26.6, "alnum_prop": 0.7481203007518797, "repo_name": "CBurbidge/BristolNightlife", "id": "d71e53cb14af3be4c410621d0e7b2cc7a1c4b37e", "size": "800", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "BristolNightlife.Phone/Views/PivotView.xaml.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "39878" } ], "symlink_target": "" }
membership-sites-on-wordpress ============================ An example of using Grunt for presentations. 1. `npm install -g grunt-cli` 1. clone the repo 1. open shell in the root of the repo 1. `npm install` 1. wait for it... 1. `grunt` 1. `cd public/` 1. `open index.html` 1. stand in awe (you don't actually *have* to stand, though)
{ "content_hash": "395f8d8d149946a81108fb810ca7d8b7", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 60, "avg_line_length": 24, "alnum_prop": 0.6458333333333334, "repo_name": "morganestes/opinionated-git-workflows", "id": "66c845d75d3aaa9727be16202a96bf3b1c4fa6c1", "size": "336", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6351" }, { "name": "HTML", "bytes": "798" }, { "name": "JavaScript", "bytes": "5056" } ], "symlink_target": "" }
There are typically two feature releases a year, around June and December, and one to three bug fix release between feature releases. Releasing ITK has many steps. This document is a central location for all of the tribal knowledge around it. Current Maintainers ------------------- The current ITK maintainers with a trusted GPG key are: * Matt McCormick (@thewtex) <matt.mccormick@kitware.com> * François Budin (@fbudin69500) <francois.budin@kitware.com> * Brad King (@bradking) <brad.king@kitware.com> * Jean-Christophe Fillion-Robin (@jcfr) <jchris.fillionr@kitware.com> Release Life Cycle ------------------ The first release candidate (RC) is the initial branch point, so it does not have special steps to create. However, as `master` moves fairly quickly, branches need to be corralled into the `release` branch afterwards. When releasing a new ITK version, the following steps are be taken: ### Bug fix release * **Before the release**: post a topic on the [ITK discussion] requesting additional bug fix patches that should be merged to the `release` branch. * **Create the release**. * Update content links * Bump ITK's version * Tag the ITK repository * Bump ITKPythonPackage's version * **Generate tarballs** * Generate the *InsightToolkit* and *InsightData* tarballs. * Tarballs are tested locally. * Generate Python packages and ITKPythonBuilds. * Tarballs are posted to [ITK GitHub Releases]. * Tarballs are archived on the [ITK data.kitware.com Releases]. * Tarballs are linked from the ITK [download page]. * Python packages are uploaded to PyPI * conda-forge libitk and itk Packages are updated * Announcement * GitHub Release * Discourse * Kitware Blog ### Feature release * **Before the RC announcement**: last period for adding classes and features. * New features and new methods can be added during this period. 
* **Feature freeze** * Increase code coverage * Address any **UNTESTED** files * Address files with code coverage lower than 80% * **Address run-time memory issues** * Purify reports * Valgrind reports * **RC process**: two to three release candidates are generally used before releasing a new version. A one-week notice on the [ITK discussion] should be sufficient as an initial deadline for RCs. * No new features should merged during the feature freeze, i.e. `ENH:` commits, although they can be prepared in Gerrit. * Release candidates (RC's) will be tagged weekly. * RC's will be tagged after [Dashboard] examination and discussion at the Friday ITK Confab. * The repository will be hard frozen to only allow merging by gatekeepers on Wednesday evening before the [Dashboard] builds start. The freeze will be released after tagging. * For the final RC, only gatekeeper merges will occur. * **Create the release**. * **Updating documentation, guides and websites**. * **Generate tarballs** * Tarballs are posted to [ITK GitHub Releases]. * Tarballs are linked from the ITK [download page]. * Announcement Initial steps ------------- Check the [ITK issue tracking] for critical bugs, or [GitHub] for critical fixes. ### Announcements Announcements should be sent to the community first and Linux distribution maintainers. #### ITK Discussion Announce to the [community] that a release is being planned. Template: Hi, Does anyone have work in progress that should delay the branch point for $version? If so, please add the $version_rc1 milestone on any relevant merge requests and @mention $maintainers so they can be more easily tracked. We are hoping to get $version started by $date. Thanks, The ITK Maintenance Team #### Distributions Patches can accumulate over time in distributions as time goes on. An email asking if anything needs to go into the next release should be sent to maintainers of the packages. This ITK [blog post] describes the Linux distributions that package ITK. 
Integrate bug fixes in the release branch ----------------------------------------- Update `master` and `release` branches: ```sh git fetch upstream git checkout master git reset --hard upstream/master git checkout release git reset --hard upstream/release ``` List differences between last release and current release branch: ```sh git shortlog --no-merges $release_version..release ``` Merge bug fix commits in release. The topic branch should be named `<bug-name>-for-release`: * If topic branch was created from the `release` branch, `checkout` topic in new branch. * If topic branch was created on `master`, `cherry-pick` commit (see command line on [GitHub]) on a topic branch created off `release`. The commit will be visible twice in the history once release in merged into `master`. * Merge new branch on `release`: ```sh $ git merge <bug-name>-for-release --no-ff ``` Merge `release-4.13` into `release` (so that `release` keeps track of release history): Similarly for the `release-4.13` branch: ```sh git checkout release-4.13 git pull upstream release-4.13 git checkout release git merge release-4.13 git push origin release release-4.13 ``` Similarly for the `master` and `release` branches: ```sh git checkout master git pull git merge release git push origin master release ``` For patches that need to be merged to the `release-3.20` branch, they are first merged to `release-3.20`, then `release-3.20` is merged to the release branch with `git merge -s ours` to avoid a huge number of merge conflicts. Then, `release` is merged into `master`. Pre-tag activities ------------------ The following must be ensured before tagging the ITK repository: * Check the [Dashboard]. * Make sure to **update the versions** in `ITK/CMake/itkVersion.cmake`. * For bugfix releases, this is done before the tag. For feature releases, this is done after the final tag. * Make sure **all new remote modules are built in by the Doxygen build**. * **Update** the `SphinxExamples` remote modules. 
### Increment the version number If the version number in `ITK/CMake/itkVersion.cmake` is not already set accordingly, submit a pull request to update ITK's version number in the `master` branch to what the new release is called. Any point beyond that in the `master` branch could serve as the start of the new release branch. After creating the release branch, submit another merge request to update the master branch's minor version number. Update Zenodo Citation Configuration ------------------------------------ Install the python packages: ```sh python -m pip install gitpython python-Levenshtein fuzzywuzzy ``` Run the update script: ```sh ./Utilities/Maintenance/UpdateZenodo.py ``` Commit the result: ```sh git add -- .zenodo.json git commit -m "DOC: Update .zenodo" ``` Archive ExternalData -------------------- Set the environmental or CMake variable `ExternalData_OBJECT_STORES` to a local directory. e.g. ```sh export ExternalData_OBJECT_STORES=${HOME}/data ``` Pre-populate the store with the contents of the 'InsightData' tarballs from a previous release. Once the tarball extracted, move the content of its subfolder called `.ExternalData` in your local `ExternalData_OBJECT_STORES` directory. Then, from the ITK build directory, configure ITK enabling the flags: * `ITK_WRAP_PYTHON` * `ITK_LEGACY_SILENT` * `BUILD_TESTING` * `BUILD_EXAMPLES` If you have previously enabled remote modules using the same ITK source directory, either verify that they are enabled in your current build, or remove their source directory that has been added inside ITK source directory (i.e. `./Modules/Remote/$name_of_remote_module`). Build the `ITKData` target ```sh make ITKData ``` This will download new testing data since the previous release. 
Next, run the script from within the ITK source directory: ```sh ./Utilities/Maintenance/ContentLinkSynchronization.sh ${ExternalData_OBJECT_STORES} ``` Do not use `--cleanup` as for the purpose of the GitHub resource, it is important to keep the older files: some are from older revisions of ITK, and people continue to use the older versions of ITK and request the testing data. This is will verify all contents, fully populate the `MD5/` and `SHA512/` directories in the object store, and create any missing `.md5` or `.sha512` content links. If any new content link files are created, commit the result. Next, archive the data on data.kitware.com. Create a folder, e.g. `$MAJOR_VERSION.$MINOR_VERSION`, in `ITK/ITKTestingData`, and run ```sh python -m pip install girder-client python ./Utilities/Maintenance/ArchiveTestingDataOnGirder.py --object-store ${ExternalData_OBJECT_STORES} --parent-id <the-girder-id-of-the-folder-created> --api-key <your-girder-user-api-key> ``` This script requires the girder-client Python package install from Girder master, November 2016 or later, (Girder > 2.0.0). Archive the `InsightData` contents on ITK's file server at Kitware: ```sh rsync -vrt ${ExternalData_OBJECT_STORES}/MD5/ kitware@public:ITKExternalData/MD5/ ``` Update the data archive at https://github.com/InsightSoftwareConsortium/ITKTestingData. Tag the ITK repository ---------------------- Tagging the repository should only be done with the agreement of the maintainers after an ITK Confab. ### Update the repository Use the commmand: ```sh git checkout master git pull ``` to make sure that the source tree is updated. This must correspond to a source tree that has been fully tested in the [Dashboard]. * When tagging a **bugfix release** on the `release` branch, make sure to bump the `ITK_VERSION_PATCH` variable in the `CMake/itkVersion.cmake` file before tagging. 
* When tagging a **feature release**, make sure to bump the `ITK_VERSION_MINOR` version on the `master` branch after tagging. ### Tag with a branch point reference In the source tree that was just updated, use the command ```sh git tag -m "ITK $version" -s v$version $commit_hash_to_be_tagged ``` where, of course, `$version` must be changed to the correct release number and `$commit_hash_to_be_tagged` to the correct commit hash. Push it to the repository ```sh git push upstream v$version ``` Note that only trusted GPG key holders may do this step. ### Update the release branch Update the `release` branch only during feature releases after the tag for the release. Perform a `fast-forward` merge of `master` into release: ```sh git checkout release git reset --hard upstream/release git merge --ff-only v$version git push upstream release git checkout master ``` This will not create a new commit, only move the release branch to the tag, i.e. it will be fast forwarded. For minor releases, merge the release branch into `master` branch as for a normal commit, and resolve conflicts (arising from mismatch in version number) by keeping `master` branch versions. ### Remote modules Add any new remote modules to nightly builds. Some builds may be difficult to add due to third-party dependencies. Update Remote Modules --------------------- In order to have the latest versions for all remote modules, and have them use the latest ITK tag, the following steps should be performed: 1. Update the ITK tag used in the `azure-pipelines.yml` CI configuration and the `setup.py` Python setup files, and update the remote module Python package version to a new major version using the [UpdateRequiredITKVersionInRemoteModules.sh](https://github.com/InsightSoftwareConsortium/ITK/tree/master/Utilities/Maintenance/UpdateRequiredITKVersionInRemoteModules.sh) script. This will involve merging a new pull request to each remote module repository. 2.
Upload the new remote module Python wheel to [PyPI]. 3. Update the remote modules to their latest commits using the [UpdateRemoteModules.sh](https://github.com/InsightSoftwareConsortium/ITK/tree/master/Utilities/Maintenance/UpdateRemoteModules.sh) script. Create Tarballs --------------- Tarballs need to be created and uploaded for each release. The source tarballs should be generated in both `.tar.gz` format and `.zip` format. The `.zip` files are for Windows users and the non-data files contain Windows newline endings. The `InsightData` tarballs are generated along with the source code tarballs. Once the repository has been tagged, we use the following script in the repository to create the tarballs: ### Unix Run: ```sh ./Utilities/Maintenance/SourceTarball.bash --tgz ``` This will generate tarballs for the source and testing data. ### Windows From a Git Bash shell, run: ```sh ./Utilities/Maintenance/SourceTarball.bash --zip ``` This should be done on Windows so that the sources have Windows-style newline endings. **Note**: tarballs can be created from a specific commit. The user can manually specify the version of ITK used to name the output files: ```sh ./Utilities/Maintenance/SourceTarball.bash -v $version $commit_hash_to_be_tagged ``` where, of course, `$version` must be changed to the correct release number and `$commit_hash_to_be_tagged` to the correct commit hash. Alternatively, ```sh ./Utilities/Maintenance/SourceTarball.bash -v $version v$version ``` can be used to specify the version starting with `v`. Once all tarballs have been collected for upload to GitHub, create *MD5SUMS* and *SHA512SUMS* checksum files. These checksums are used by clients downloading the source tarballs to verify their contents, e.g. with `sha512sum -c SHA512SUMS`.
```sh md5sum ./Insight* > MD5SUMS sha512sum ./Insight* > SHA512SUMS ``` Generate Python Packages ------------------------ The [ITKPythonPackage](https://itkpythonpackage.readthedocs.io/en/latest/) website describes how to [build ITK Python wheels](https://itkpythonpackage.readthedocs.io/en/latest/Build_ITK_Python_packages.html). Python packages are currently generated nightly by the systems, `metroplex`, `misty`, and `overload` at Kitware and uploaded to the [ITKPythonPackage GitHub Release page](https://github.com/InsightSoftwareConsortium/ITKPythonPackage/releases/tag/latest). Additionally, external module wheels can also be generated. Please, visit the [ITK module Python packages](https://itkpythonpackage.readthedocs.io/en/latest/Build_ITK_Module_Python_packages.html) documentation for further information. ### Generate release ITK Python wheels First, merge the [ITKPythonPackage](https://github.com/InsightSoftwareConsortium/ITKPythonPackage) `master` branch into the `release` branch. Next, update the `VERSION` variable in *ITKPythonPackage/itkVersion.py* and `ITK_GIT_TAG` in *ITKPythonPackage/CMakeLists.txt*. Commit the update locally to the release branch and push to GitHub `upstream`. Then [build the wheels](https://itkpythonpackage.readthedocs.io/en/latest/Build_ITK_Python_packages.html) from the `release` branch locally. Build the sdist and wheels for Linux: ```sh ssh blaster cd ~/Packaging/ITKPythonPackage git reset --hard HEAD git checkout release git pull origin release git clean -fdx /home/kitware/Support/skbuild-venv/bin/python setup.py sdist --formats=gztar,zip ./scripts/dockcross-manylinux-build-wheels.sh tar cvzf /tmp/dist-linux.tar.gz ./dist rm dist/* cd .. 
./ITKPythonPackage/scripts/dockcross-manylinux-build-tarball.sh ``` Build the wheels for macOS: ```sh ssh misty cd ~/Dashboards/ITK/ITKPythonPackage git reset --hard HEAD git checkout release git pull git clean -fdx ./scripts/macpython-build-wheels.sh tar cvzf /tmp/dist-macos.tar.gz ./dist rm dist/* cd .. ./ITKPythonPackage/scripts/macpython-build-tarball.sh ``` Build the wheels for Windows: ```sh vncviewer overload # Open Git Bash shell cd /c/P/IPP git reset --hard HEAD git checkout release git pull git clean -fdx # Open a x64 Native Tools Command Prompt for VS 2019 cd C:\P\IPP set PATH=C:\P\doxygen;%PATH% C:\Python36-x64\python.exe ./scripts/windows_build_wheels.py # Back in Git Bash... tar cvzf /c/P/dist-windows.tar.gz ./dist rm dist/* cd .. rm -f ./ITKPythonBuilds-windows.zip powershell "IPP/scripts/windows-build-tarball.ps1" ``` Next, tag the release branch `HEAD` and push to GitHub: ```sh git tag -m "ITKPythonPackage $version" -s v$version HEAD git push upstream release v$version ``` ### Upload the wheels to PyPI Next, [upload the wheels to the Python Package Index (PyPI)](https://itkpythonpackage.readthedocs.io/en/latest/Build_ITK_Module_Python_packages.html#upload-the-packages-to-pypi). ### Verify the binaries Run `pip install itk` in a fresh virtualenv and run all the [ITKExamples](https://github.com/InsightSoftwareConsortium/ITKExamples) Python tests against this Python. For example, ```sh virtualenv itk-venv ./itk-venv/bin/python -m pip install itk git clone https://github.com/InsightSoftwareConsortium/ITKExamples mkdir ITKExamples-build cd ITKExamples-build cmake -DITK_DIR=/path/to/ITK-build -DPython3_ROOT_DIR=../itk-venv/bin/python -DPython3_FIND_VIRTUALENV=ONLY ../ITKExamples ctest -R Python ``` ### Upload the ITKPythonBuilds Create a new GitHub Release from the new git tag in the [ITKPythonPackage repository](https://github.com/InsightSoftwareConsortium/ITKPythonPackage/releases), and upload the wheels there. 
Also, create a corresponding GitHub Release in the [ITKPythonBuilds](https://github.com/InsightSoftwareConsortium/ITKPythonBuilds) repository. Upload builds tarballs created from the build trees with scripts found in *ITKPythonPackage/scripts/*, i.e. *ITKPythonPackage/scripts/macpython-build-tarball.sh*, etc.. Update the *ITKPythonPackage/scripts/*download-cache-and-build-module-wheels** scripts to use the new version of *ITKPythonBuilds*. ### Verify external module GitHub CI builds Re-run [TravisCI](https://travis-ci.org/InsightSoftwareConsortium/ITKModuleTemplate), [AppveyorCI](https://ci.appveyor.com/project/itkrobot/itkmoduletemplate), and [CircleCI](https://circleci.com/gh/InsightSoftwareConsortium/ITKModuleTemplate) in the [ITKModuleTemplate](https://github.com/InsightSoftwareConsortium/ITKModuleTemplate) repository to ensure the new *ITKPythonBuilds* and external module package build scripts are functioning properly. ### Update the conda-forge package Create a PR to update [conda-forge/itk-feedstock](https://github.com/conda-forge/itk-feedstock) to the new version. This conda recipe downloads the wheel binary packages and re-packages them as conda packages. Note: A `post1` wheel corresponds to bumping the *build.number* field in *recipe/meta.yml*. `POST` variables in *bld.bat* and *build.sh* are available to specify an optional wheel post version. Generate Doxygen Documentation ------------------------------ Note: links to the nightly generated Doxygen can be found in the footer of the Doxygen HTML pages. Use the files to upload and create: * `InsightDoxygenDocTag-MAJOR_VERSION.$MINOR_VERSION.$PATCH_VERSION.gz` * `InsightDoxygenXml-MAJOR_VERSION.$MINOR_VERSION.$PATCH_VERSION.tar.gz` * `InsightDoxygenDocHtml-MAJOR_VERSION.$MINOR_VERSION.$PATCH_VERSION.tar.gz.` Prior to the release, new `Remote` modules should be enabled in the Doxygen build's configuration. 
Run CMake in the binary build and enable `BUILD_DOXYGEN`, configure and generate, then: ```sh cd Binaries/ITK make Documentation cd Utilities mv old_doxygen_directory DoxygenInsightToolkit-$MAJOR_VERSION.$MINOR_VERSION.$PATCH_VERSION tar -cf DoxygenInsightToolkit-$MAJOR_VERSION.$MINOR_VERSION.$PATCH_VERSION.tar DoxygenInsightToolkit-$MAJOR_VERSION.$MINOR_VERSION.$PATCH_VERSION gzip -9 DoxygenInsightToolkit-$MAJOR_VERSION.$MINOR_VERSION.$PATCH_VERSION.tar ``` Historical note: Before ITK 3.8, the documentation used to be generated in a directory called `Documentation/Doxygen`. In `Public`, copy the documentation to `/projects/Insight/Doxygen`, and create a subdirectory `Insight34-doxygen/Documentation/Doxygen`. The final directory will look like `/projects/Insight/Doxygen/Insight34-doxygen/Documentation/Doxygen`, and at that level copy the `html` directory and the `InsightToolkit.tag` file. Finally, create a symbolic link at `/projects/Insight/WWW/InsightDocuments/Web`. Update the ITK Software Guide ----------------------------- The [ITK Sofware Guide] is available in both electronic and printed formats. Every time a new ITK version is released, the following steps must be taken to update the guide and make it available: * Add the necessary ITK contents (i.e. new modules, etc.). * Update the compiler and necessary tool (e.g. CMake minimum version) information if necessary. * Bump the ITK version in `Superbuild/External_ITK.cmake`. * Set the `DRAFT_WATERMARK` CMake variable to `OFF` to remove the draft watermark. * Set the `PDF_QUALITY_LEVEL` CMake configuration option to `Screen` for the electronic version and `Printer` for the print version. * Generate the cover page for the concatenated, single PDF by exporting a PDF from *SoftwareGuide/Cover/ITKSoftwareGuideSinglePDFCoverPage.odt*.
To create `ItkSoftwareGuide.pdf` to deposit at itk.org/ItkSoftwareGuide.pdf from `InsightSoftwareGuide-Book{1,2}-5.X.0.pdf`, use `pdftk`: ``` pdftk ITKSoftwareGuideSinglePDFCoverPage.pdf ITKSoftwareGuide-Book1.pdf ITKSoftwareGuide-Book2.pdf cat output /tmp/ItkSoftwareGuide.pdf ``` Update *ItkSoftwareGuide.pdf* hosted at itk.org. Many links point at this resource. ```sh scp /tmp/ItkSoftwareGuide.pdf public.kitware.com:/tmp/ cd /projects/Insight/WWW/InsightWeb rm ItkSoftwareGuide.pdf mv /tmp/ItkSoftwareGuide.pdf ./ItkSoftwareGuide.pdf ``` ### Prepare the print version Set the `PDF_QUALITY_LEVEL` to `Printer`, and rebuild. Remove the junk initial page, and then also one of the blank pages so pages fall on *left-side*/*right-side* as formatted. Then, run: ```sh pdftk ITKSoftwareGuide-Book1.pdf cat 2-3 5-end output /tmp/ITKSoftwareGuide-Book1.pdf pdftk ITKSoftwareGuide-Book2.pdf cat 2-3 5-end output /tmp/ITKSoftwareGuide-Book2.pdf ``` Update ITK Sphinx Examples -------------------------- In order to update the [ITK Sphinx examples], bump the `Superbuild` ITK version in `Superbuild/External-ITK.cmake`. Update the CMake minimum version in the example files if necessary. Rendered versions (epub, pdf, html) can be downloaded from the download page and rename them. Set the prefix and tag: ```sh tag = $(git describe) prefix = InsightSphinxExamples-$version ``` where `$version` is the appropriate release number, e.g., `4.12.0`. Generate the `.tar.gz` and `.zip` tarballs: ```sh git archive --format=tar --prefix=${prefix}/ --output=${prefix}.tar ${tag} gzip -9 ${prefix}.tar git archive --format=zip -9 --prefix=${prefix}/ --output=${prefix}.zip ${tag} ``` Upload the release artifacts to GitHub -------------------------------------- [GitHub Releases](https://help.github.com/articles/creating-releases/) are how we distribute project release artifacts from ITK 5 and onward. Prior to ITK 5, ITK releases were [hosted on Sourceforge.net](https://sourceforge.net/projects/itk/). 
Visit the [ITK GitHub Releases](https://github.com/InsightSoftwareConsortium/ITK/releases) page. There will be a new release that was generated by pushing the Git tag. Click the tag's link to start creating the GitHub Release. Then, click the *Edit Tag* link. Set the release title to "ITK $version", e.g. *ITK 5.0.0* or *ITK 5.0 Release Candidate 1*. Add the release notes (described below). Note: Do not publish the release until the release notes and all artifacts have been added. The Zenodo citation is created at the time of the release publication and will not include amended information. Upload the release artifacts. These include: - InsightToolkit-$version.tar.gz - InsightToolkit-$version.zip - InsightData-$version.tar.gz - InsightData-$version.zip - MD5SUMS - SHA512SUMS - InsightDoxygenDocHtml-$version.tar.gz - InsightDoxygenDocTag-$version.gz - InsightDoxygenDocXml-$version.tar.gz - InsightSoftwareGuide-Book1-$version.pdf - InsightSoftwareGuide-Book2-$version.pdf - InsightSphinxExamples-5.1.0.tar.gz - InsightSphinxExamples-5.1.0.zip - InsightSphinxExamplesEpub-5.1.0.epub - InsightSphinxExamplesHtml-5.1.0.zip - InsightSphinxExamplesPdf-5.1.0.pdf If this is an alpha, beta, or release candidate release, check the *This is a pre-release* box. Click *Update release*. Upload the release artifacts to data.kitware.com ------------------------------------------------ Backup and archive the release artifacts in the [data.kitware.com ITK Collection Releases folder](https://data.kitware.com/#collection/57b5c9e58d777f126827f5a1/folder/5b1ec0378d777f2e622561e9). This should include 1. GitHub Release artifacts 2. Python packages 3. Python builds Update the testing data cache used for CI testing ------------------------------------------------- In the *ITK/Testing/ContinuousIntegration* Azure Pipelines continuous integration testing configuration script. Update the `ExternalDataVersion` to point to data archive for the most recently created release. Commit and create a PR. 
Update the Website ------------------ The website is managed by Kitware folks. Access is currently granted to the ITK maintainer group. * Add or modify the `Current Release` entries on the [download page](https://itk.org/ITK/resources/software.html). * Update the [documentation page](https://itk.org/ITK/help/documentation.html) with a link to the Doxygen files for the new release. * Verify that the links work ! Contact Communications at <comm@kitware.com> in order to update the above pages and to produce a press release. Update Issue Tracker -------------------- In the [ITK GitHub Milestones](https://github.com/InsightSoftwareConsortium/ITK/milestones), create a new milestone for the next release. Migrate issues to the new milestone, and close the current release's milestone. Delete the `kwrobot` time stamp commits. Further Testing --------------- The purpose of this testing is to replicate the experience that a user may have when trying the new release. This means that a number of ITK developers should download the tarballs or do Git checkouts with the release tag, and build the toolkit in as many configurations as possible. Release Notes Posts ------------------- To get started with the release notes, first use the download link cookiecutter to generate Markdown and webpage Download page HTML: ``` pip install cookiecutter cookiecutter ~/src/ITK/Utilities/Maintenance/DownloadLinksCookieCutter/ ``` Start with the previous GitHub Release markdown content to produce the release notes. To generate the changelog by running ```sh cd ITK ./Utilities/Maintenance/AuthorsChangesSince.py $old_version ``` The log is generated at */tmp/AuthorsChangesSince/Changelog.md*. The count of recent authors is found in the script output, and a list of new authors are found at */tmp/AuthorsChangesSince/NewAuthors.txt*. 
Announcements ------------- For the final release, the release notes produced should be used to * Provide the release notes in the [ITK GitHub Releases] * Post a message in the [ITK discussion] * Create a post in the [Kitware blog] * Add a release note doc in [ITK/Documentation/ReleaseNotes](https://github.com/InsightSoftwareConsortium/ITK/tree/master/Documentation/ReleaseNotes) * Create a post in ITK project on [ResearchGate] * Update [ITK's Wikipedia page](https://en.wikipedia.org/wiki/Insight_Segmentation_and_Registration_Toolkit). Finally, inform Communications at <comm@kitware.com>. Send Contributor Momentos ------------------------- This file: https://github.com/thewtex/vtkGEB/blob/itk/itkgeb.stl can be ordered from ShapeWays and sent to contributors. Deb Howell has generated excellent packaging. [Kitware blog]: https://blog.kitware.com/ [blog post]: https://blog.kitware.com/itk-packages-in-linux-distributions/ [Dashboard]: https://open.cdash.org/index.php?project=Insight [community]: https://discourse.itk.org/ [documentation page]: http://www.itk.org/ITK/help/documentation.html [download page]: https://itk.org/ITK/resources/software.html [GitHub]: http://github.com/InsightSoftwareConsortium/ITK [ITKPythonPackage]: https://itkpythonpackage.readthedocs.io/en/latest/index.html [ITK discussion]: https://discourse.itk.org/ [ITK issue tracking]: http://issues.itk.org/ [ITK Sofware Guide]: https://itk.org/ItkSoftwareGuide.pdf [ITK wiki]: https://itk.org/Wiki/ITK [ITK Sphinx examples]: https://itk.org/ITKExamples/ [releases page]: https://itk.org/Wiki/ITK/Releases [release schedule]: https://itk.org/Wiki/ITK/Release_Schedule [Software Guide]: http://itk.org/ItkSoftwareGuide.pdf [kitware]: https://www.kitware.com/ [public.kitware.com]: public.kitware.com [Doxygen]: http://www.stack.nl/~dimitri/doxygen/ [PyPi]: https://pypi.python.org/pypi [ResearchGate]: https://www.researchgate.net/project/Insight-Toolkit-ITK [SourceForge]: https://sourceforge.net/downloads/itk/itk/ 
[ITK GitHub Releases]: https://github.com/InsightSoftwareConsortium/ITK/releases [ITK data.kitware.com Releases]: https://data.kitware.com/#item/5b22a47f8d777f2e622564d8 [ITK GitHub Milestones]: https://github.com/InsightSoftwareConsortium/ITK/milestones
{ "content_hash": "0c1e30b0194eb825ccce763a5d750cbe", "timestamp": "", "source": "github", "line_count": 876, "max_line_length": 212, "avg_line_length": 33.5148401826484, "alnum_prop": 0.7480840628086788, "repo_name": "malaterre/ITK", "id": "2a15779ccc2355457a8fd1dbb2c86f555d1b64ea", "size": "29360", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Documentation/Maintenance/Release.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "435417" }, { "name": "C++", "bytes": "34591024" }, { "name": "CMake", "bytes": "1452219" }, { "name": "CSS", "bytes": "17428" }, { "name": "HTML", "bytes": "8263" }, { "name": "Java", "bytes": "28585" }, { "name": "JavaScript", "bytes": "1522" }, { "name": "Objective-C++", "bytes": "5773" }, { "name": "Perl", "bytes": "6029" }, { "name": "Python", "bytes": "448031" }, { "name": "Ruby", "bytes": "296" }, { "name": "Shell", "bytes": "162676" }, { "name": "Tcl", "bytes": "72988" }, { "name": "XSLT", "bytes": "8634" } ], "symlink_target": "" }
package protobuf

// TODO mappings:
//
// Wrapper types   various types   2, "2", "foo", true, "true", null, 0, …
//                 Wrappers use the same representation in JSON as the wrapped
//                 primitive type, except that null is allowed and preserved
//                 during data conversion and transfer.
// FieldMask       string          "f.fooBar,h"      See field_mask.proto.
// Any             {"@type":"url",                   See struct.proto.
//                  f1: value,
//                  ...}

import (
	"os"
	"path/filepath"
	"sort"
	"strings"

	"github.com/mpvl/unique"

	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/build"
	"cuelang.org/go/cue/errors"
	"cuelang.org/go/cue/format"
	"cuelang.org/go/cue/parser"
	"cuelang.org/go/cue/token"
	"cuelang.org/go/internal"

	// Generated protobuf CUE may use builtins. Ensure that these can always be
	// found, even if the user does not use cue/load or another package that
	// triggers its loading.
	//
	// TODO: consider whether just linking in the necessary packages suffices.
	// It probably does, but this may reorder some of the imports, which may,
	// in turn, change the numbering, which can be confusing while debugging.
	_ "cuelang.org/go/pkg"
)

// Config specifies the environment into which to parse a proto definition
// file.
type Config struct {
	// Root specifies the root of the CUE project, which typically coincides
	// with, for example, a version control repository root or the Go module.
	// Any imports of proto files within the directory tree of this root
	// are considered to be "project files" and are generated at the
	// corresponding location with this hierarchy. Any other imports are
	// considered to be external. Files for such imports are rooted under the
	// $Root/pkg/, using the Go package path specified in the .proto file.
	Root string

	// Module is the Go package import path of the module root. It is the value
	// as after "module" in a cue.mod/modules.cue file, if a module file is
	// present.
	Module string // TODO: determine automatically if unspecified.

	// Paths defines the include directory in which to search for imports.
	Paths []string

	// PkgName specifies the package name for a generated CUE file. A value
	// will be derived from the Go package name if undefined.
	PkgName string

	// EnumMode defines whether enums should be set as integer values, instead
	// of strings.
	//
	//    json    value is a string, corresponding to the standard JSON mapping
	//            of Protobuf. The value is associated with a #enumValue
	//            to allow the json+pb interpretation to interpret integers
	//            as well.
	//
	//    int     value is an integer associated with an #enumValue definition.
	//            The json+pb interpreter uses the definition names in the
	//            disjunction of the enum to interpret strings.
	//
	EnumMode string
}

// An Extractor converts a collection of proto files, typically belonging to one
// repo or module, to CUE. It thereby observes the CUE package layout.
//
// CUE observes the same package layout as Go and requires .proto files to have
// the go_package directive. Generated CUE files are put in the same directory
// as their corresponding .proto files if the .proto files are located in the
// specified Root (or current working directory if none is specified).
// All other imported files are assigned to the CUE pkg dir ($Root/pkg)
// according to their Go package import path.
//
type Extractor struct {
	root     string // absolute project root; defaults to the cwd
	cwd      string // working directory at construction time
	module   string // Go module path from Config.Module
	paths    []string
	pkgName  string
	enumMode string

	// fileCache maps a proto filename to its (possibly failed) parse result,
	// so each file is converted at most once.
	fileCache map[string]result
	// imports maps a qualified CUE import path to its build instance.
	imports map[string]*build.Instance

	errs errors.Error // accumulated errors; reported via Err
	done bool         // set once Files/Instances has been called
}

// result pairs a converted proto file with the error, if any, that occurred
// while parsing it.
type result struct {
	p   *protoConverter
	err error
}

// NewExtractor creates an Extractor. If the configuration contained any errors
// it will be observable by the Err method of the Extractor. It is safe,
// however, to only check errors after building the output.
func NewExtractor(c *Config) *Extractor {
	cwd, _ := os.Getwd()
	b := &Extractor{
		root:      c.Root,
		cwd:       cwd,
		paths:     c.Paths,
		pkgName:   c.PkgName,
		module:    c.Module,
		enumMode:  c.EnumMode,
		fileCache: map[string]result{},
		imports:   map[string]*build.Instance{},
	}

	// Fall back to the current working directory when no root is configured.
	if b.root == "" {
		b.root = b.cwd
	}

	return b
}

// Err returns the errors accumulated during extraction. The returned error may
// be of type cuelang.org/go/cue/errors.List.
func (b *Extractor) Err() error {
	return b.errs
}

// addErr records err in the Extractor's accumulated error list.
func (b *Extractor) addErr(err error) {
	b.errs = errors.Append(b.errs, errors.Promote(err, "unknown error"))
}

// AddFile adds a proto definition file to be converted into CUE by the builder.
// Relative paths are always taken relative to the Root with which the
// Extractor is configured.
//
// AddFile assumes that the proto file compiles with protoc and may not report
// an error if it does not. Imports are resolved using the paths defined in
// Config.
//
func (b *Extractor) AddFile(filename string, src interface{}) error {
	// Adding files after Files/Instances has been called is an error: the
	// output has already been assembled.
	if b.done {
		err := errors.Newf(token.NoPos,
			"protobuf: cannot call AddFile: Instances was already called")
		b.errs = errors.Append(b.errs, err)
		return err
	}
	if b.root != b.cwd && !filepath.IsAbs(filename) {
		filename = filepath.Join(b.root, filename)
	}
	_, err := b.parse(filename, src)
	return err
}

// TODO: some way of (recursively) adding multiple proto files with filter.

// Files returns a File for each proto file that was added or imported,
// recursively.
func (b *Extractor) Files() (files []*ast.File, err error) {
	defer func() { err = b.Err() }()
	b.done = true

	instances, err := b.Instances()
	if err != nil {
		return nil, err
	}

	// Flatten the per-instance file lists into a single slice.
	for _, p := range instances {
		for _, f := range p.Files {
			files = append(files, f)
		}
	}
	return files, nil
}

// Instances creates a build.Instances for every package for which a proto file
// was added to the builder. This includes transitive dependencies. It does not
// write the generated files to disk.
//
// The returned instances can be passed to cue.Build to generate the
// corresponding CUE instances.
//
// All import paths are located within the specified Root, where external
// packages are located under $Root/pkg. Instances for builtin (like time)
// packages may be omitted, and if not will have no associated files.
func (b *Extractor) Instances() (instances []*build.Instance, err error) {
	defer func() { err = b.Err() }()
	b.done = true

	// Convert each cached parse result into a CUE file and attach it to the
	// build instance for its package.
	for _, r := range b.fileCache {
		if r.err != nil {
			b.addErr(r.err)
			continue
		}
		inst := b.getInst(r.p)
		if inst == nil {
			continue
		}

		// Set canonical CUE path for generated file.
		f := r.p.file
		base := filepath.Base(f.Filename)
		base = base[:len(base)-len(".proto")] + "_proto_gen.cue"
		f.Filename = filepath.Join(inst.Dir, base)
		// Round-trip through the formatter/parser so positions and comments
		// in the returned AST match the generated file contents.
		buf, err := format.Node(f)
		if err != nil {
			b.addErr(err)
			// return nil, err
			continue
		}
		f, err = parser.ParseFile(f.Filename, buf, parser.ParseComments)
		if err != nil {
			b.addErr(err)
			continue
		}

		inst.Files = append(inst.Files, f)

		for pkg := range r.p.imported {
			inst.ImportPaths = append(inst.ImportPaths, pkg)
		}
	}

	// Normalize each instance: deduplicate and sort import paths, link the
	// corresponding instances, and sort files for deterministic output.
	for _, p := range b.imports {
		instances = append(instances, p)
		sort.Strings(p.ImportPaths)
		unique.Strings(&p.ImportPaths)
		for _, i := range p.ImportPaths {
			if imp := b.imports[i]; imp != nil {
				p.Imports = append(p.Imports, imp)
			}
		}

		sort.Slice(p.Files, func(i, j int) bool {
			return p.Files[i].Filename < p.Files[j].Filename
		})
	}
	sort.Slice(instances, func(i, j int) bool {
		return instances[i].ImportPath < instances[j].ImportPath
	})

	if err != nil {
		return instances, err
	}
	return instances, nil
}

// getInst returns (creating if necessary) the build instance for the package
// converted by p, or nil if an error was recorded.
func (b *Extractor) getInst(p *protoConverter) *build.Instance {
	if b.errs != nil {
		return nil
	}
	importPath := p.qualifiedImportPath()
	if importPath == "" {
		err := errors.Newf(token.NoPos,
			"no package clause for proto package %q in file %s", p.id, p.file.Filename)
		b.errs = errors.Append(b.errs, err)
		// TODO: find an alternative. Is proto package good enough?
		return nil
	}

	dir := b.root
	path := p.importPath()
	file := p.file.Filename
	if !filepath.IsAbs(file) {
		file = filepath.Join(b.root, p.file.Filename)
	}
	// Determine whether the generated file should be included in place, or
	// within cue.mod.
	inPlace := strings.HasPrefix(file, b.root)
	if !strings.HasPrefix(path, b.module) {
		// b.module is either "", in which case we assume the setting for
		// inPlace, or not, in which case the module in the protobuf must
		// correspond with that of the proto package.
		inPlace = false
	}
	if !inPlace {
		dir = filepath.Join(internal.GenPath(dir), path)
	} else {
		dir = filepath.Dir(p.file.Filename)
	}

	// TODO: verify module name from go_package option against that of actual
	// CUE module. Maybe keep this old code for some strict mode?
	// want := filepath.Dir(p.file.Filename)
	// dir = filepath.Join(dir, path[len(b.module)+1:])
	// if !filepath.IsAbs(want) {
	// 	want = filepath.Join(b.root, want)
	// }
	// if dir != want {
	// 	err := errors.Newf(token.NoPos,
	// 		"file %s mapped to inconsistent path %s; module name %q may be inconsistent with root dir %s",
	// 		want, dir, b.module, b.root,
	// 	)
	// 	b.errs = errors.Append(b.errs, err)
	// }

	inst := b.imports[importPath]
	if inst == nil {
		inst = &build.Instance{
			Root:        b.root,
			Dir:         dir,
			ImportPath:  importPath,
			PkgName:     p.shortPkgName,
			DisplayPath: p.protoPkg,
		}
		b.imports[importPath] = inst
	}
	return inst
}

// Extract parses a single proto file and returns its contents translated to a CUE
// file. If src is not nil, it will use this as the contents of the file. It may
// be a string, []byte or io.Reader. Otherwise Extract will open the given file
// name at the fully qualified path.
//
// Extract assumes the proto file compiles with protoc and may not report an error
// if it does not. Imports are resolved using the paths defined in Config.
// func Extract(filename string, src interface{}, c *Config) (f *ast.File, err error) { if c == nil { c = &Config{} } b := NewExtractor(c) p, err := b.parse(filename, src) if err != nil { return nil, err } p.file.Filename = filename[:len(filename)-len(".proto")] + "_gen.cue" return p.file, b.Err() } // TODO // func GenDefinition // func MarshalText(cue.Value) (string, error) { // return "", nil // } // func MarshalBytes(cue.Value) ([]byte, error) { // return nil, nil // } // func UnmarshalText(descriptor cue.Value, b string) (ast.Expr, error) { // return nil, nil // } // func UnmarshalBytes(descriptor cue.Value, b []byte) (ast.Expr, error) { // return nil, nil // }
{ "content_hash": "e26b4134d9f0fbbf2150e958c9bf3021", "timestamp": "", "source": "github", "line_count": 355, "max_line_length": 225, "avg_line_length": 29.909859154929578, "alnum_prop": 0.6822377095498211, "repo_name": "tektoncd/operator", "id": "48f8667664343369ce32cd942ee40b3b0cb9a730", "size": "14439", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "vendor/cuelang.org/go/encoding/protobuf/protobuf.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "1972" }, { "name": "Go", "bytes": "1049401" }, { "name": "Makefile", "bytes": "7852" }, { "name": "Python", "bytes": "14477" }, { "name": "Shell", "bytes": "42938" }, { "name": "Smarty", "bytes": "4243" } ], "symlink_target": "" }
from django.views.generic.base import TemplateView


class HomePageView(TemplateView):
    """Render the site home page from the static ``home.html`` template."""

    # Template rendered by Django's TemplateView; no extra context is added.
    template_name = "home.html"
{ "content_hash": "2730c53abd4a94764912fee03e62403b", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 50, "avg_line_length": 29.5, "alnum_prop": 0.788135593220339, "repo_name": "tomp/food_pantry", "id": "b4f1ee8eebccd76afe08d366469127d8a4b2e7a9", "size": "118", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pantry/views.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "19728" }, { "name": "HTML", "bytes": "10272" }, { "name": "JavaScript", "bytes": "380502" }, { "name": "Python", "bytes": "30598" } ], "symlink_target": "" }
namespace OpenApoc
{

// Construct a tile-map view stage. Loads the selection-marker images and the
// initial palette up front; camera offsets and scroll speeds start at zero,
// and only the bottom 10 Z-levels are drawn until PGUP/PGDN changes maxZDraw.
TileView::TileView(Framework &fw, TileMap &map, Vec3<int> isoTileSize, Vec2<int> stratTileSize,
                   TileViewMode initialMode)
    : Stage(fw), map(map), isoTileSize(isoTileSize), stratTileSize(stratTileSize),
      viewMode(initialMode), maxZDraw(10), offsetX(0), offsetY(0), cameraScrollX(0),
      cameraScrollY(0), selectedTilePosition(0, 0, 0),
      selectedTileImageBack(fw.data->load_image("CITY/SELECTED-CITYTILE-BACK.PNG")),
      selectedTileImageFront(fw.data->load_image("CITY/SELECTED-CITYTILE-FRONT.PNG")),
      pal(fw.data->load_palette("xcom3/ufodata/PAL_01.DAT"))
{
}

TileView::~TileView() {}

// Stage lifecycle hooks — this view has no per-stage setup/teardown work.
void TileView::Begin() {}
void TileView::Pause() {}
void TileView::Resume() {}
void TileView::Finish() {}

// Handle keyboard/mouse input:
//  - arrow keys start/stop camera scrolling (one eighth of a tile per frame);
//  - PGUP/PGDN change the number of Z-levels drawn (debug mode only);
//  - WASD moves the selected tile in X/Y, R/F in Z, clamped to map bounds;
//  - 1/2/3 switch the active palette.
void TileView::EventOccurred(Event *e)
{
	bool selectionChanged = false;
	if (e->Type == EVENT_KEY_DOWN)
	{
		switch (e->Data.Keyboard.KeyCode)
		{
			case ALLEGRO_KEY_UP:
				// offsetY += tileSize.y;
				cameraScrollY = isoTileSize.y / 8;
				break;
			case ALLEGRO_KEY_DOWN:
				// offsetY -= tileSize.y;
				cameraScrollY = -isoTileSize.y / 8;
				break;
			case ALLEGRO_KEY_LEFT:
				// offsetX += tileSize.x;
				cameraScrollX = isoTileSize.x / 8;
				break;
			case ALLEGRO_KEY_RIGHT:
				// offsetX -= tileSize.x;
				cameraScrollX = -isoTileSize.x / 8;
				break;
			case ALLEGRO_KEY_PGDN:
				// Reduce visible Z-levels; never below 1.
				if (fw.gamecore->DebugModeEnabled && maxZDraw > 1)
				{
					maxZDraw--;
				}
				break;
			case ALLEGRO_KEY_PGUP:
				// Increase visible Z-levels; never above the map height.
				if (fw.gamecore->DebugModeEnabled && maxZDraw < map.size.z)
				{
					maxZDraw++;
				}
				break;
			// Tile-selection movement, clamped to [0, size-1] per axis.
			case ALLEGRO_KEY_S:
				selectionChanged = true;
				if (selectedTilePosition.y < (map.size.y - 1))
					selectedTilePosition.y++;
				break;
			case ALLEGRO_KEY_W:
				selectionChanged = true;
				if (selectedTilePosition.y > 0)
					selectedTilePosition.y--;
				break;
			case ALLEGRO_KEY_A:
				selectionChanged = true;
				if (selectedTilePosition.x > 0)
					selectedTilePosition.x--;
				break;
			case ALLEGRO_KEY_D:
				selectionChanged = true;
				if (selectedTilePosition.x < (map.size.x - 1))
					selectedTilePosition.x++;
				break;
			case ALLEGRO_KEY_R:
				selectionChanged = true;
				if (selectedTilePosition.z < (map.size.z - 1))
					selectedTilePosition.z++;
				break;
			case ALLEGRO_KEY_F:
				selectionChanged = true;
				if (selectedTilePosition.z > 0)
					selectedTilePosition.z--;
				break;
			// Palette hot-swap for debugging/comparison.
			case ALLEGRO_KEY_1:
				pal = fw.data->load_palette("xcom3/ufodata/PAL_01.DAT");
				break;
			case ALLEGRO_KEY_2:
				pal = fw.data->load_palette("xcom3/ufodata/PAL_02.DAT");
				break;
			case ALLEGRO_KEY_3:
				pal = fw.data->load_palette("xcom3/ufodata/PAL_03.DAT");
				break;
		}
	}
	else if (e->Type == EVENT_MOUSE_DOWN)
	{
		// FIXME: Object selection
	}
	else if (e->Type == EVENT_KEY_UP)
	{
		// Releasing an arrow key stops scrolling on that axis. Note this
		// zeroes the axis regardless of which of the two opposing keys was
		// released.
		switch (e->Data.Keyboard.KeyCode)
		{
			case ALLEGRO_KEY_UP:
			case ALLEGRO_KEY_DOWN:
				cameraScrollY = 0;
				break;
			case ALLEGRO_KEY_LEFT:
			case ALLEGRO_KEY_RIGHT:
				cameraScrollX = 0;
				break;
		}
	}
	if (fw.gamecore->DebugModeEnabled && selectionChanged)
	{
		LogInfo("Selected tile {%d,%d,%d}", selectedTilePosition.x, selectedTilePosition.y,
		        selectedTilePosition.z);
	}
}

// Draw the visible portion of the map. Applies the per-frame camera scroll,
// computes the tile range covered by the screen, then draws tiles layer by
// layer from z=0 up to maxZDraw, bracketing the selected tile (debug mode)
// with back/front marker images.
void TileView::Render()
{
	int dpyWidth = fw.Display_GetWidth();
	int dpyHeight = fw.Display_GetHeight();
	Renderer &r = *fw.renderer;
	r.clear();
	r.setPalette(this->pal);
	// Advance the camera by the current scroll velocity each frame.
	offsetX += cameraScrollX;
	offsetY += cameraScrollY;
	// offsetX/offsetY is the 'amount added to the tile coords' - so we want
	// the inverse to tell which tiles are at the screen bounds
	auto topLeft =
	    screenToTileCoords(Vec2<int>{-offsetX - isoTileSize.x, -offsetY - isoTileSize.y}, 0);
	auto topRight = screenToTileCoords(Vec2<int>{-offsetX + dpyWidth, -offsetY - isoTileSize.y}, 0);
	auto bottomLeft =
	    screenToTileCoords(Vec2<int>{-offsetX - isoTileSize.x, -offsetY + dpyHeight}, map.size.z);
	auto bottomRight =
	    screenToTileCoords(Vec2<int>{-offsetX + dpyWidth, -offsetY + dpyHeight}, map.size.z);
	// Clamp the visible tile range to the map bounds.
	int minX = std::max(0, topLeft.x);
	int maxX = std::min(map.size.x, bottomRight.x);
	int minY = std::max(0, topRight.y);
	int maxY = std::min(map.size.y, bottomLeft.y);
	for (int z = 0; z < maxZDraw; z++)
	{
		for (int layer = 0; layer < map.getLayerCount(); layer++)
		{
			for (int y = minY; y < maxY; y++)
			{
				for (int x = minX; x < maxX; x++)
				{
					// NOTE(review): showOrigin is read but never used in this
					// function — possibly leftover debug code; confirm before
					// removing.
					bool showOrigin = fw.state->showTileOrigin;
					bool showSelected =
					    (fw.gamecore->DebugModeEnabled && z == selectedTilePosition.z &&
					     y == selectedTilePosition.y && x == selectedTilePosition.x);
					auto tile = map.getTile(x, y, z);
					auto screenPos = tileToScreenCoords(Vec3<float>{
					    static_cast<float>(x), static_cast<float>(y), static_cast<float>(z)});
					screenPos.x += offsetX;
					screenPos.y += offsetY;
					// Selection marker back half is drawn under the tile's
					// objects, the front half over them.
					if (showSelected)
						r.draw(selectedTileImageBack, screenPos);
					for (auto obj : tile->drawnObjects[layer])
					{
						Vec2<float> pos = tileToScreenCoords(obj->getPosition());
						pos.x += offsetX;
						pos.y += offsetY;
						obj->draw(r, *this, pos, this->viewMode);
					}
					if (showSelected)
						r.draw(selectedTileImageFront, screenPos);
				}
			}
		}
	}
}

// A plain view stage: never acts as a transition.
bool TileView::IsTransition() { return false; }

void TileView::setViewMode(TileViewMode newMode) { this->viewMode = newMode; }

TileViewMode TileView::getViewMode() const { return this->viewMode; }

}; // namespace OpenApoc
{ "content_hash": "ff651f55bfb43fc6dd2deba49f4f4e54", "timestamp": "", "source": "github", "line_count": 198, "max_line_length": 97, "avg_line_length": 27.474747474747474, "alnum_prop": 0.6461397058823529, "repo_name": "AndO3131/OpenApoc", "id": "6be61c01e720a65abf4f30130e5de9a24c2be7ab", "size": "5734", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "game/tileview/tileview.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "278" }, { "name": "C", "bytes": "9116" }, { "name": "C++", "bytes": "1076389" }, { "name": "CMake", "bytes": "17606" }, { "name": "QMake", "bytes": "4376" }, { "name": "Shell", "bytes": "89" } ], "symlink_target": "" }
package org.opencloudb.mpp.model;

import java.util.ArrayList;
import java.util.List;

import org.opencloudb.net.mysql.RowDataPacket;
import org.opencloudb.route.RouteResultsetNode;

/**
 * Holds the row packets received from one route node, grouped into ranges
 * ({@link RangRowDataPacket}). Ranges live in {@code rangRDPacketList} until
 * they are "trimmed" (reduced to head/tail rows) and moved into
 * {@code trimRangRDPacketList}, which keeps memory bounded while the overall
 * totals remain computable.
 */
public class NodeRowDataPacket {

	// Route node this data belongs to.
	private RouteResultsetNode node;
	// Cached sum of trimmed-range sizes; recomputed by loadTrimTotal().
	private long trimTotal = 0;
	// Range size at which a range qualifies for trimming (set at construction).
	private int trimSize = 0;
	// Ranges that have been moved out of the active list for trimming.
	private List<RangRowDataPacket> trimRangRDPacketList = new ArrayList<RangRowDataPacket>();
	// Active (untrimmed) ranges, in arrival order.
	private List<RangRowDataPacket> rangRDPacketList = new ArrayList<RangRowDataPacket>();

	public NodeRowDataPacket(RouteResultsetNode node, int trimSize) {
		this.node = node;
		this.trimSize = trimSize;
	}

	/** Start a new (empty) range; subsequent addPacket() calls append to it. */
	public void newRang() {
		RangRowDataPacket rangPacket = new RangRowDataPacket();
		rangRDPacketList.add(rangPacket);
	}

	/** Total row count across both trimmed and untrimmed ranges. */
	public long loadTotal() {
		return this.loadTrimTotal() + this.loadNotTrimTotal();
	}

	/**
	 * Recompute and return the total size of ranges in the trim list that are
	 * actually marked trimmed. Also updates the {@code trimTotal} cache.
	 */
	public long loadTrimTotal() {
		this.trimTotal = 0;
		for (RangRowDataPacket packet : trimRangRDPacketList) {
			if (packet.isTrim()) {
				this.trimTotal += packet.allSize();
			}
		}
		return this.trimTotal;
	}

	/** Total size of the untrimmed (active) ranges. */
	public long loadNotTrimTotal() {
		long total = 0;
		for (RangRowDataPacket packet : rangRDPacketList) {
			total += packet.allSize();
		}
		return total;
	}

	/**
	 * Move the head range into the trim list; if it is exactly trimSize rows,
	 * reduce it to its head/tail rows.
	 */
	public void moveToTrim() {
		RangRowDataPacket head = this.loadHeadPacket();
		if (head != null) {
			if (this.rangRDPacketList.remove(head)) {
				this.trimRangRDPacketList.add(head);
				if (head.allSize() == this.trimSize) {
					head.leftHeadTail();
				}
			}
		}
	}

	/**
	 * Drain head ranges into the trim list until only 3 active ranges remain.
	 * NOTE(review): the value assigned to {@code m} is only used as the loop
	 * condition; the outer size() >= 3 check is redundant with it.
	 */
	public void moveHeadTail3ToTrim() {
		if (this.rangRDPacketList.size() >= 3) {
			int m = 0;
			while ((m = this.rangRDPacketList.size()) > 3) {
				RangRowDataPacket packet = this.rangRDPacketList.remove(0);
				//this.trimRangRDPacketList.add(packet);
				if (packet.allSize() == this.trimSize) {
					packet.leftHeadTail();
				}
				addTrimWithCombine(packet);
			}
		}
	}

	/**
	 * Add a full-size range to the trim list, merging it into the previous
	 * trimmed range when possible. Ranges whose size differs from trimSize are
	 * silently dropped here — presumably they were handled by the caller;
	 * TODO confirm.
	 */
	private void addTrimWithCombine(RangRowDataPacket packet) {
		if (packet.allSize() == this.trimSize) {
			if (this.trimRangRDPacketList.isEmpty()) {
				this.trimRangRDPacketList.add(packet);
			} else {
				int last = this.trimRangRDPacketList.size() - 1;
				RangRowDataPacket lastPacket = this.trimRangRDPacketList.get(last);
				if (lastPacket.isTrim()) {
					lastPacket.combine(packet);
				} else {
					// unexpected state: last packet in the trim list is not trimmed
				}
			}
		}
	}

	/** Drain every active range into the trim list (end-of-stream path). */
	public void moveAllToTrim() {
		int m = 0;
		while ((m = this.rangRDPacketList.size()) > 0) {
			RangRowDataPacket packet = this.rangRDPacketList.remove(0);
			//this.trimRangRDPacketList.add(packet);
			if (packet.getRowDataPacketList().size() == this.trimSize) {
				packet.leftHeadTail();
			}
			addTrimWithCombine(packet);
		}
	}

	/**
	 * Append a row to the current (last) range. Requires newRang() to have
	 * been called at least once, otherwise get(-1) throws.
	 */
	public void addPacket(RowDataPacket packet) {
		RangRowDataPacket rangPacket = rangRDPacketList.get(rangRDPacketList.size() - 1);
		rangPacket.appendPacket(packet);
	}

	public RouteResultsetNode getNode() {
		return node;
	}

	/**
	 * Collect all still-available rows: every active range plus any trim-list
	 * ranges that were never actually trimmed. Rows of trimmed ranges are
	 * intentionally excluded.
	 */
	public List<RowDataPacket> loadData() {
		List<RowDataPacket> result = new ArrayList<RowDataPacket>();
		for (RangRowDataPacket packet : rangRDPacketList) {
			result.addAll(packet.getRowDataPacketList());
		}
		for (RangRowDataPacket packet : trimRangRDPacketList) {
			if (!packet.isTrim()) {
				result.addAll(packet.getRowDataPacketList());
			}
		}
		return result;
	}

	/** First active range, or null when there is none. */
	public RangRowDataPacket loadHeadPacket() {
		if (rangRDPacketList.size() > 0) {
			return rangRDPacketList.get(0);
		}
		return null;
	}

	/** Last active range (1-based from the tail), or null when empty. */
	public RangRowDataPacket loadTailPacket() {
		return this.loadTailPacket(1);
	}

	/**
	 * Range at {@code tailIndex} positions from the tail (1 = last element).
	 * NOTE(review): tailIndex == 0 makes size equal to the list length and
	 * get(size) would throw IndexOutOfBoundsException — callers appear to use
	 * tailIndex >= 1; confirm before relying on other values.
	 */
	public RangRowDataPacket loadTailPacket(int tailIndex) {
		int size = rangRDPacketList.size() - tailIndex;
		if (size >= 0) {
			return rangRDPacketList.get(size);
		}
		return null;
	}
}
{ "content_hash": "da818e972bf80593c945fcf02dad0212", "timestamp": "", "source": "github", "line_count": 150, "max_line_length": 91, "avg_line_length": 25.473333333333333, "alnum_prop": 0.6691965454069615, "repo_name": "youngor/openclouddb", "id": "767eed11092f1a1794eb976a3abdc8dcdfba38f2", "size": "3825", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MyCAT/src/main/java/org/opencloudb/mpp/model/NodeRowDataPacket.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "668381" }, { "name": "Java", "bytes": "11214966" }, { "name": "JavaScript", "bytes": "660124" }, { "name": "Shell", "bytes": "34282" } ], "symlink_target": "" }
<meta http-equiv="refresh" content="1; url=https://hamburg.serverlessdays.io/"> <div id="home" class="dtc w-100"> <div class="tc tl-ns"> <div class="black mw6 mw8-ns tc center ph3 ph0-ns"> <h3 class="f2 fw6 tc code" >One Day. One Track. One Community.</h3> <p class="mw7-ns center">JeffConf is an attempt to move past the word Serverless, and focus on the use of these platforms and the value they provide.</p> </div> <div class="cf center dt-ns ph3 ph0-ns"> <div class="dt-row-ns"> <article onclick="changePage('venue')" class="dtc-ns black-60 mv1 mv4-ns w-100 w-33-ns mw5 center pointer"> <img src="/imgs/altonaer-museum-small.jpg" class="db w-100 dim" alt="JeffConf Hamburg 2017 Venue" height="143" /> <div class="pa2 ph3-ns pb3-ns hamburg-red-bg flag-bg-container"> <h3 class="mb0 mt1"> <span class="fw6 f5 f4-ns mv0 white underline-hover" >Venue</span> </h3> <p class="f6 lh-copy measure mt2 white-90" > Altonaer Museum Hamburg is a mix of tradition and modernity in Hamburg, and an iconic location to spread cultural ideas. </p> </div> </article> <article onclick="changePage('tickets')" class="dtc-ns black-60 mv1 mv4-ns w-100 w-33-ns mw5 center pointer"> <img src="/imgs/tickets.jpg" class="db w-100 dim" alt="JeffConf Hamburg Tickets" height="143" /> <div class="pa2 ph3-ns pb3-ns it-white-bg flag-bg-container bb b--gray"> <h3 class="mb0 mt1"> <span class="fw6 f5 f4-ns mv0 underline-hover" >Tickets</span> </h3> <p class="f6 lh-copy measure mt2 mid-gray" > Tickets are reasonably priced from €35 to make sure JeffConf Hamburg is financially accessible for the community. 
</p> </div> </article> <article onclick="changePage('speakers')" class="dtc-ns black-60 mv1 mv4-ns w-100 w-33-ns mw5 center pointer"> <img src="/imgs/speakers.jpg" class="db w-100 dim" alt="JeffConf Speakers" height="143" /> <div class="pa2 ph3-ns pb3-ns hamburg-red-bg flag-bg-container"> <h3 class="mb0 mt1"> <span class="fw6 f5 f4-ns mv0 white underline-hover" >Speakers</span> </h3> <p class="f6 lh-copy measure mt2 white-90 no-underline" > Real users showcasing the problems they&#39;ve solved using serverless platforms. Focus is learning through sharing. </p> </div> </article> </div> </div> <!-- Begin MailChimp Signup Form --> <link href="//cdn-images.mailchimp.com/embedcode/horizontal-slim-10_7.css" rel="stylesheet" type="text/css"> <style type="text/css"> #mc_embed_signup{background:#fff; clear:left; font:14px Helvetica,Arial,sans-serif; width:100%;} </style> <div id="mc_embed_signup" class="pt4"> <form action="https://superluminar.us17.list-manage.com/subscribe/post?u=1850848feb35c80ecb5a4a6b8&amp;id=0fad9cde9b" method="post" id="mc-embedded-subscribe-form" name="mc-embedded-subscribe-form" class="validate" target="_blank" novalidate> <div id="mc_embed_signup_scroll"> <label for="mce-EMAIL">Stay tuned and subscribe to our newsletter!</label> <input type="email" value="" name="EMAIL" class="email" id="mce-EMAIL" placeholder="email address" required> <!-- real people should not fill this in and expect good things - do not remove this or risk form bot signups--> <div style="position: absolute; left: -5000px;" aria-hidden="true"><input type="text" name="b_1850848feb35c80ecb5a4a6b8_0fad9cde9b" tabindex="-1" value=""></div> <div class="clear"><input type="submit" value="Subscribe" name="subscribe" id="mc-embedded-subscribe" class="button"></div> </div> </form> </div> <!--End mc_embed_signup--> <div class="black mw6 mw8-ns tc center ph3 ph0-ns"> <p class="mw7-ns center">JeffConf was born in the spirit of Paul Johnston’s <a 
href="https://serverless.zone/serverless-is-just-a-name-we-could-have-called-it-jeff-1958dd4c63d7">blog post</a>, “Serverless is just a name. We could have called it Jeff”, an attempt to move beyond the Serverless buzzword and focus on the practical use of function as a service platforms and the value they provide. It is a one day, community focused, single track event centred on real world Jeff (or Serverless) based solutions. It’s about fostering a community and helping all of us learn from each other as we embrace a new way of building applications. </p> </div> </div> </div>
{ "content_hash": "b6523903b727fa041998223602d09f5d", "timestamp": "", "source": "github", "line_count": 78, "max_line_length": 643, "avg_line_length": 60.32051282051282, "alnum_prop": 0.6412327311370882, "repo_name": "superluminar-io/hamburg.jeffconf.com", "id": "e7e0e31083bcac028850cb63de330a1e40f9299d", "size": "4715", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/views/home.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "111544" }, { "name": "JavaScript", "bytes": "5540" } ], "symlink_target": "" }
Just my Awesome WM configuration

* OS: Arch Linux
* WM: Awesome WM
* Themes: Multicolor (slightly modified)
* Lock screen: xautolock
* System info: conky
* Screenshot manager: escrotum
* Dynamic tagging: Eminent
* Event reminder: gcalcli + notify-send
* Scripts: shutdown dialog, screenshot, lockscreen, shutdown, reboot, suspend, logoff
* Put the scripts from `scripts/bin` into your `$HOME` (or anywhere else), and update the path inside `rc.lua`

![alt tag](https://raw.githubusercontent.com/dr-slump/conf/master/desktop-manager/wallpaper/mydesktop.png)

### Credit

* https://awesome.naquadah.org/wiki/Eminent
* https://github.com/copycat-killer/lain
* https://github.com/cedlemo/blingbling
* https://github.com/copycat-killer/awesome-copycats
{ "content_hash": "bdcd4006bf27975a84bef04b248285c7", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 106, "avg_line_length": 27.535714285714285, "alnum_prop": 0.7029831387808041, "repo_name": "dr-slump/bajawa", "id": "535d910ab513f9b04c9fa65693e320d21d220135", "size": "805", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "conf/desktop-manager/README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "4010" }, { "name": "CSS", "bytes": "6275" }, { "name": "HTML", "bytes": "225369" }, { "name": "Lua", "bytes": "544939" }, { "name": "Makefile", "bytes": "944" }, { "name": "Perl", "bytes": "37312" }, { "name": "Python", "bytes": "41406" }, { "name": "Shell", "bytes": "67914" }, { "name": "Vim script", "bytes": "3678" } ], "symlink_target": "" }
(function () {
  'use strict';

  angular
    .module('core.routes')
    .config(routeConfig);

  routeConfig.$inject = ['$stateProvider', '$urlRouterProvider'];

  /**
   * Registers the core application states and the URL-normalisation /
   * fallback rules.
   */
  function routeConfig($stateProvider, $urlRouterProvider) {
    // Normalise URLs by dropping a single trailing slash (except on "/").
    $urlRouterProvider.rule(stripTrailingSlash);

    // Any unmatched URL shows the 404 page without touching the address bar.
    $urlRouterProvider.otherwise(redirectToNotFound);

    var homeState = {
      url: '/',
      templateUrl: '/modules/core/client/views/home.client.view.html',
      controller: 'HomeController',
      controllerAs: 'vm'
    };

    var notFoundState = {
      url: '/not-found',
      templateUrl: '/modules/core/client/views/404.client.view.html',
      controller: 'ErrorController',
      controllerAs: 'vm',
      params: {
        message: readMessageParam
      },
      data: {
        ignoreState: true,
        pageTitle: 'Not Found'
      }
    };

    var badRequestState = {
      url: '/bad-request',
      templateUrl: '/modules/core/client/views/400.client.view.html',
      controller: 'ErrorController',
      controllerAs: 'vm',
      params: {
        message: readMessageParam
      },
      data: {
        ignoreState: true,
        pageTitle: 'Mauvaise requête'
      }
    };

    var forbiddenState = {
      url: '/forbidden',
      templateUrl: '/modules/core/client/views/403.client.view.html',
      data: {
        ignoreState: true,
        pageTitle: 'Interdit'
      }
    };

    $stateProvider.state('home', homeState);
    $stateProvider.state('not-found', notFoundState);
    $stateProvider.state('bad-request', badRequestState);
    $stateProvider.state('forbidden', forbiddenState);

    // Remove the trailing slash in place, keeping the browser history clean.
    function stripTrailingSlash($injector, $location) {
      var url = $location.path();
      if (url.length <= 1 || url.charAt(url.length - 1) !== '/') {
        return;
      }
      $location.replace().path(url.slice(0, url.length - 1));
    }

    function redirectToNotFound($injector, $location) {
      $injector.get('$state').transitionTo('not-found', null, { location: false });
    }

    // Dynamic state param: carries an optional error message between states.
    function readMessageParam($stateParams) {
      return $stateParams.message;
    }
  }
}());
{ "content_hash": "a3a4dc971d5d8aa685c163d92fcd58ab", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 78, "avg_line_length": 28.386666666666667, "alnum_prop": 0.5504931892907469, "repo_name": "vive-belmondo/ProjetAFPA", "id": "ff3660a146655e8682a60ed4806a2339b90c94d0", "size": "2130", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/core/client/config/core.client.routes.js", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "7297" }, { "name": "HTML", "bytes": "118402" }, { "name": "JavaScript", "bytes": "521535" }, { "name": "Shell", "bytes": "685" } ], "symlink_target": "" }
package com.gdm.aws; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Random; import java.util.concurrent.TimeUnit; import org.apache.commons.codec.binary.Base64; import org.apache.log4j.Logger; import com.amazonaws.ClientConfiguration; import com.amazonaws.regions.Region; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2Client; import com.amazonaws.services.ec2.model.Address; import com.amazonaws.services.ec2.model.AllocateAddressRequest; import com.amazonaws.services.ec2.model.AllocateAddressResult; import com.amazonaws.services.ec2.model.AssociateAddressRequest; import com.amazonaws.services.ec2.model.AssociateAddressResult; import com.amazonaws.services.ec2.model.CreateNetworkInterfaceRequest; import com.amazonaws.services.ec2.model.CreateNetworkInterfaceResult; import com.amazonaws.services.ec2.model.CreateTagsRequest; import com.amazonaws.services.ec2.model.DeleteNetworkInterfaceRequest; import com.amazonaws.services.ec2.model.DescribeAddressesRequest; import com.amazonaws.services.ec2.model.DescribeAddressesResult; import com.amazonaws.services.ec2.model.DescribeInstancesRequest; import com.amazonaws.services.ec2.model.DescribeInstancesResult; import com.amazonaws.services.ec2.model.DescribeNetworkInterfaceAttributeRequest; import com.amazonaws.services.ec2.model.Filter; import com.amazonaws.services.ec2.model.Instance; import com.amazonaws.services.ec2.model.InstanceNetworkInterface; import com.amazonaws.services.ec2.model.InstanceNetworkInterfaceSpecification; import com.amazonaws.services.ec2.model.InstanceState; import com.amazonaws.services.ec2.model.InstanceType; import com.amazonaws.services.ec2.model.NetworkInterface; import com.amazonaws.services.ec2.model.Reservation; import com.amazonaws.services.ec2.model.RunInstancesRequest; import com.amazonaws.services.ec2.model.RunInstancesResult; import com.amazonaws.services.ec2.model.Tag; 
import com.amazonaws.services.ec2.model.TerminateInstancesRequest; import com.amazonaws.services.opsworks.model.DescribeElasticIpsRequest; import com.amazonaws.services.opsworks.model.DescribeElasticIpsResult; import com.gdm.aws.auth.ConnectorCustomAWSCredentialsProvider; public class VNF { AmazonEC2 ec2; Region usWest2; public static Logger logger = Logger.getLogger(VNF.class); public static void main(String[] argas) throws IOException { System.setProperty("http.proxyHost", "10.192.222.14"); System.setProperty("http.proxyPort", "3128"); System.setProperty("https.proxyHost", "10.192.222.14"); System.setProperty("https.proxyPort", "3128"); VNF s = new VNF(); s.demo(); } private void demo() { init(); // createEC2instance(); //String instanceID = createEC2instanceWithMultipleInterfaces(); listFreeElasticIPs(); // getInstanceDetails(); // setName(); // getInstanceDetails(); //deleteInstance(instanceID); // getUserData(instanceID); // List<NetworkInterface> interfaces = null; // getUserData(interfaces); } private void listFreeElasticIPs() { DescribeAddressesRequest request = new DescribeAddressesRequest(); List<String> valuesUUID = new ArrayList<String>(); String emty = "*"; valuesUUID.add(emty); Filter filter = new Filter("association-id", valuesUUID); request.withFilters(filter); //request.putCustomQueryParameter("association-id", null); //request. */ DescribeAddressesResult result = ec2.describeAddresses(request); for ( Address address : result.getAddresses() ) { logger.info("Address : " + address.getPublicIp() ); logger.info("AllocationID : " + address.getAllocationId() ); logger.info("AssocciationID : " + address.getAssociationId() ); logger.info("OwnerID : " + address.getNetworkInterfaceOwnerId() ); logger.info("Domain: " + address.getDomain() ); if ( address.getAssociationId() != null && ! 
address.getAssociationId().isEmpty() ) { logger.info("Address used"); } else { logger.info("Address NOT used"); } } /** DescribeElasticIpsRequest elRequest = new DescribeElasticIpsRequest(); DescribeElasticIpsResult elRsult = ec2.d */ } private void deleteResourcesByTag(String applianceUUID) { String UUID_TAG_KEY = "VersaApplianceUUID"; DescribeInstancesRequest describeInstRequest = new DescribeInstancesRequest(); List<String> valuesUUID = new ArrayList<String>(); valuesUUID.add(applianceUUID); Filter filter = new Filter("tag:VersaApplianceUUID", valuesUUID); describeInstRequest.withFilters(filter); DescribeInstancesResult describeinstancesresult = ec2 .describeInstances(describeInstRequest); List<Reservation> reservatons = describeinstancesresult .getReservations(); String instanceID = null; List<InstanceNetworkInterface> interfaces = null; for (Reservation reservation : reservatons) { List<Instance> instances = reservation.getInstances(); for (Instance instance : instances) { logger.info("Tags : " + instance.getTags()); // Name will have // instance name logger.info("Instance ID: " + instance.getInstanceId()); instanceID = instance.getInstanceId(); logger.info("Image ID: " + instance.getImageId()); logger.info("Instance State: " + instance.getState().getName()); logger.info("SubnetID: " + instance.getSubnetId()); logger.info("Private Address: " + instance.getPrivateIpAddress()); logger.info("Placement and Availability: " + instance.getPlacement()); interfaces = instance.getNetworkInterfaces(); for (InstanceNetworkInterface instantinf : interfaces) { logger.info(" Interface ID:" + instantinf.getNetworkInterfaceId()); logger.info(" Private Address:" + instantinf.getPrivateIpAddress()); logger.info(" MAC Address:" + instantinf.getMacAddress()); logger.info(" Interface Description:" + instantinf.getDescription()); logger.info(" Association :" + instantinf.getAssociation()); } } } if ( instanceID != null ) { Collection<String> instanceids = new ArrayList<String>(); 
instanceids.add(instanceID); TerminateInstancesRequest terminateRequest = new TerminateInstancesRequest(); terminateRequest.setInstanceIds(instanceids); ec2.terminateInstances(terminateRequest); waitTillInstanceTerminated(instanceID); } // Delete corresponding interfaces for (InstanceNetworkInterface intf : interfaces) { DeleteNetworkInterfaceRequest deleteRequest = new DeleteNetworkInterfaceRequest(); deleteRequest.setNetworkInterfaceId(intf.getNetworkInterfaceId()); ec2.deleteNetworkInterface(deleteRequest); } } private void deleteInstance(String instanceID) { DescribeInstancesRequest describeInstRequest = new DescribeInstancesRequest(); Collection<String> instanceids = new ArrayList<String>(); instanceids.add(instanceID); // comment this line if you want to get all // instances // You can add filter to above if needed describeInstRequest.withInstanceIds(instanceids); DescribeInstancesResult describeinstancesresult = ec2 .describeInstances(describeInstRequest); List<Reservation> reservatons = describeinstancesresult .getReservations(); List<InstanceNetworkInterface> interfaces = null; for (Reservation reservation : reservatons) { List<Instance> instances = reservation.getInstances(); for (Instance instance : instances) { logger.info("Tags : " + instance.getTags()); // Name will have // instance name logger.info("Instance ID: " + instance.getInstanceId()); logger.info("Image ID: " + instance.getImageId()); logger.info("Instance State: " + instance.getState().getName()); logger.info("SubnetID: " + instance.getSubnetId()); logger.info("Private Address: " + instance.getPrivateIpAddress()); logger.info("Placement and Availability: " + instance.getPlacement()); interfaces = instance.getNetworkInterfaces(); for (InstanceNetworkInterface instantinf : interfaces) { logger.info(" Interface ID:" + instantinf.getNetworkInterfaceId()); logger.info(" Private Address:" + instantinf.getPrivateIpAddress()); logger.info(" MAC Address:" + instantinf.getMacAddress()); logger.info(" 
Interface Description:" + instantinf.getDescription()); logger.info(" Association :" + instantinf.getAssociation()); } } } // Delete Instance TerminateInstancesRequest terminateRequest = new TerminateInstancesRequest(); terminateRequest.setInstanceIds(instanceids); ec2.terminateInstances(terminateRequest); waitTillInstanceTerminated(instanceID); // Delete corresponding interfaces for (InstanceNetworkInterface intf : interfaces) { DeleteNetworkInterfaceRequest deleteRequest = new DeleteNetworkInterfaceRequest(); deleteRequest.setNetworkInterfaceId(intf.getNetworkInterfaceId()); ec2.deleteNetworkInterface(deleteRequest); } } private void waitTillInstanceTerminated(String instanceID) { final int WAIT_TIME_TO_RECHECK = 5; // seconds. final int MAX_RETRIES = 60; // 300 seconds i,e 5 minutes int i = 0; while (i < MAX_RETRIES) { Instance instance = getInstanceDetails(instanceID); InstanceState state = instance.getState(); if (state.getName().equals("terminated")) { logger.info("Instance is Terminated"); return; } i++; logger.info("Instance is still not Terminated. 
Current State = " + state.getName()); try { TimeUnit.SECONDS.sleep(WAIT_TIME_TO_RECHECK); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } } private String createEC2instanceWithMultipleInterfaces() { List<InstanceNetworkInterfaceSpecification> interfaceIDs = new ArrayList<InstanceNetworkInterfaceSpecification>(); List<NetworkInterface> nwInterfaces = new ArrayList<NetworkInterface>(); String instanceID = null; for (int i = 0; i < 4; i++) { // interface NetworkInterface inf = createNetworkInterface(i); nwInterfaces.add(inf); InstanceNetworkInterfaceSpecification spec = new InstanceNetworkInterfaceSpecification(); spec.withDeviceIndex(i); // spec.withSubnetId("subnet-ee879d8a"); spec.withNetworkInterfaceId(inf.getNetworkInterfaceId()); // spec.withDeleteOnTermination(true); // This flag is nice i,e if // instance is deleted delete port No you cannot specify /** * if ( i == 0 ) { spec.setAssociatePublicIpAddress(true); * //interfaceIDs.add(spec); //break; } else { * spec.setAssociatePublicIpAddress(false); } */ // spec.setGroups(Arrays.asList(new String[]{"sg-b24b07cb"})); interfaceIDs.add(spec); } String lines = "Hey GD I am installed from SDK"; String str = new String(Base64.encodeBase64(lines.getBytes())); // CREATE EC2 INSTANCE RunInstancesRequest runInstancesRequest = new RunInstancesRequest() .withInstanceType("c3.xlarge").withImageId("ami-a9d276c9") .withMinCount(1).withMaxCount(1) // .withSecurityGroupIds("sg-b24b07cb") // .withUserData(str) .withKeyName("gdmurali2010"); runInstancesRequest.setNetworkInterfaces(interfaceIDs); String userData = getUserData(nwInterfaces); String encoded = new String(Base64.encodeBase64(userData.getBytes())); runInstancesRequest.setUserData(encoded); // You cannot use tags while creating sorry java.util.Collection<Tag> tags = new ArrayList<Tag>(); tags.add(new Tag("Name", "GDVM-" + new Random().nextInt(10))); // runInstancesRequest.withUserData(userData); // 
runInstancesRequest.withUserData(userData) RunInstancesResult runInstances = ec2.runInstances(runInstancesRequest); logger.info("EC2Sample :-> " + runInstances.toString()); List<Instance> instances = runInstances.getReservation().getInstances(); for (Instance instance : instances) { instanceID = instance.getInstanceId(); logger.info("Instance ID: " + instance.getInstanceId()); logger.info("Image ID: " + instance.getImageId()); logger.info("Instance State: " + instance.getState().getName()); logger.info("SubnetID: " + instance.getSubnetId()); logger.info("Private Address: " + instance.getPrivateIpAddress()); } setName("VCSN-" + new Random().nextInt(100), instanceID); return instanceID; // return null; } private NetworkInterface createNetworkInterface(int i) { CreateNetworkInterfaceRequest request = new CreateNetworkInterfaceRequest(); request.setSubnetId("subnet-ee879d8a"); request.setDescription("Interface-" + i); CreateNetworkInterfaceResult result = ec2 .createNetworkInterface(request); NetworkInterface intf = result.getNetworkInterface(); setName("Interface-" + i, intf.getNetworkInterfaceId()); intf.getMacAddress(); if (i == 0) { createElasticticIPAndAssigntoInterface(intf.getNetworkInterfaceId()); } logger.info("Interface = " + intf); return intf; } private void createElasticticIPAndAssigntoInterface( String networkInterfaceId) { AllocateAddressRequest elasticIPAllocateAdressReq = new AllocateAddressRequest(); AllocateAddressResult result = ec2 .allocateAddress(elasticIPAllocateAdressReq); logger.info("Elastic Public IP = " + result.getPublicIp()); String elasticRID = result.getAllocationId(); // setName("VCSNPublicIP", elasticRID); AssociateAddressRequest associateAddressRequest = new AssociateAddressRequest(); associateAddressRequest.setAllocationId(elasticRID); associateAddressRequest.setNetworkInterfaceId(networkInterfaceId); AssociateAddressResult res = ec2 .associateAddress(associateAddressRequest); logger.info("Elastic IP Allocated Res = " + 
res.getAssociationId()); } // Utility Method private void setName(String value, String resourceid) { // TODO Auto-generated method stub String tagName = "Name"; CreateTagsRequest createTagsRequest = new CreateTagsRequest(); Collection<String> resourceids = new ArrayList<String>(); resourceids.add(resourceid); java.util.Collection<Tag> tags = new ArrayList<Tag>(); tags.add(new Tag(tagName, value)); createTagsRequest.withResources(resourceids).withTags(tags); ec2.createTags(createTagsRequest); } private void setName() { CreateTagsRequest createTagsRequest = new CreateTagsRequest(); Collection<String> instanceids = new ArrayList<String>(); instanceids.add("i-0a2fae9e7f3e6a9ae"); // comment this line if you want // to get all instances java.util.Collection<Tag> tags = new ArrayList<Tag>(); tags.add(new Tag("Name", "ChangedfromSDK")); createTagsRequest.withResources(instanceids).withTags(tags); ec2.createTags(createTagsRequest); } private void createEC2instance() { // TODO Auto-generated method stub String lines = "Hey GD I am installed from SDK"; String str = new String(Base64.encodeBase64(lines.getBytes())); // CREATE EC2 INSTANCE RunInstancesRequest runInstancesRequest = new RunInstancesRequest() .withInstanceType("t2.micro").withImageId("ami-a9d276c9") .withMinCount(1).withMaxCount(1) .withSecurityGroupIds("sg-b24b07cb").withUserData(str) .withKeyName("gdmurali2010"); // You cannot use tags while creating sorry java.util.Collection<Tag> tags = new ArrayList<Tag>(); tags.add(new Tag("Name", "GDVM-" + new Random().nextInt(10))); // runInstancesRequest.withUserData(userData); // runInstancesRequest.withUserData(userData) RunInstancesResult runInstances = ec2.runInstances(runInstancesRequest); logger.info("EC2Sample :-> " + runInstances.toString()); List<Instance> instances = runInstances.getReservation().getInstances(); for (Instance instance : instances) { logger.info("Instance ID: " + instance.getInstanceId()); logger.info("Image ID: " + instance.getImageId()); 
logger.info("Instance State: " + instance.getState().getName()); logger.info("SubnetID: " + instance.getSubnetId()); logger.info("Private Address: " + instance.getPrivateIpAddress()); } } private AmazonEC2Client getEC2() { ConnectorCustomAWSCredentialsProvider credProvider = new ConnectorCustomAWSCredentialsProvider(); ClientConfiguration client = new ClientConfiguration(); //client.setSecureRandom(secureRandom); //client.setPreemptiveBasicProxyAuth(true); //client.setProxyHost("10.192.222.14"); //client.setProxyPort(3128); AmazonEC2Client ec2 = new AmazonEC2Client(credProvider/*, client*/); Region region = Region.getRegion(credProvider.getRegion()); ec2.setRegion(region); try { ec2.dryRun(new DescribeInstancesRequest()); } catch (Exception e) { logger.error("Error in executing dryrun : ", e); } //logger.info("SuccessFully Authenticated"); return ec2; } private void init() { // TODO Auto-generated method stub ec2 = getEC2(); } private Instance getInstanceDetails(String instanceID) { DescribeInstancesRequest describeInstRequest = new DescribeInstancesRequest(); Collection<String> instanceids = new ArrayList<String>(); instanceids.add(instanceID); // comment this line if you want to get all // instances // You can add filter to above if needed describeInstRequest.withInstanceIds(instanceids); DescribeInstancesResult describeinstancesresult = ec2 .describeInstances(describeInstRequest); List<Reservation> reservatons = describeinstancesresult .getReservations(); Instance inst = null; for (Reservation reservation : reservatons) { List<Instance> instances = reservation.getInstances(); for (Instance instance : instances) { logger.info("Tags : " + instance.getTags()); // Name will have // instance name logger.info("Instance ID: " + instance.getInstanceId()); logger.info("Image ID: " + instance.getImageId()); logger.info("Instance State: " + instance.getState().getName()); logger.info("SubnetID: " + instance.getSubnetId()); logger.info("Private Address: " + 
instance.getPrivateIpAddress()); logger.info("Placement and Availability: " + instance.getPlacement()); inst = instance; break; } } return inst; } private String getUserData(String instanceID) { Instance instance = getInstanceDetails(instanceID); StringBuilder userData = new StringBuilder("#cloud-config\n\n"); StringBuilder interfaceData = new StringBuilder(); List<InstanceNetworkInterface> interfaces = instance .getNetworkInterfaces(); for (InstanceNetworkInterface inf : interfaces) { interfaceData.append("INTF1:").append(inf.getMacAddress()) .append("\n"); } /** * runcmd: - echo "Hello GD...." - echo "Hello GD..." >> /tmp/gd.txt" */ StringBuilder sshKey = new StringBuilder(); sshKey.append("ssh_authorized_keys:\n - "); sshKey.append("ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC+i8h+EOP0dAlJxuhMFgaJMPaSyJnL4y65FzEwm8YbHkqzXtD+VVaUzu1+T7gb9H8fDKSMX3G7lC53poAPXm/5bdmrwy0QO2TeK09LtYE65iNxKQ2/TYwj8bP52Wzfs0yHqhmJh/cTDwDJnJULuX+ao2VAMvHyCC0VLnyM9JJjscSgSK8HgEica3xJwn+giLKXwVuVMvNsDHxSjNHOWYwvIjVDt6DCbZ/oJ9CCYohU2kT1jqZ0jInjRpiModlG3bMeXPNoAnz2maMxU3lYY/WTk0LNXkq1moMOgxJ/ot/9NdxnpMPT3BXUpssJTERas4fosnmW19CJn18VvxsBN6Lj murali@versa-networks.com"); StringBuffer runCmd = new StringBuffer("runcmd:\n - echo "); runCmd.append(interfaceData.toString().trim()).append( " >> /tmp/interfacex.txt\n"); userData.append(runCmd); userData.append(sshKey); System.out.println(userData.toString()); return userData.toString(); } private String getUserData(List<NetworkInterface> nwInterfaces) { if (nwInterfaces == null) { nwInterfaces = new ArrayList<NetworkInterface>(); for (int i = 0; i <= 5; i++) { NetworkInterface n = new NetworkInterface(); n.setMacAddress("MAC" + new Random().nextInt(1000)); nwInterfaces.add(n); } } StringBuilder userData = new StringBuilder("#cloud-config\n\n"); StringBuilder interfaceData = new StringBuilder(); interfaceData.append("INTERFACES_SPEC=\""); for (NetworkInterface networkInterface : nwInterfaces) { interfaceData.append("INTF1:") 
.append(networkInterface.getMacAddress()).append("\\n"); } /** * runcmd: - echo "Hello GD...." - echo "Hello GD..." >> /tmp/gd.txt" */ StringBuilder sshKey = new StringBuilder(); sshKey.append("ssh_authorized_keys:\n - "); sshKey.append("ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC+i8h+EOP0dAlJxuhMFgaJMPaSyJnL4y65FzEwm8YbHkqzXtD+VVaUzu1+T7gb9H8fDKSMX3G7lC53poAPXm/5bdmrwy0QO2TeK09LtYE65iNxKQ2/TYwj8bP52Wzfs0yHqhmJh/cTDwDJnJULuX+ao2VAMvHyCC0VLnyM9JJjscSgSK8HgEica3xJwn+giLKXwVuVMvNsDHxSjNHOWYwvIjVDt6DCbZ/oJ9CCYohU2kT1jqZ0jInjRpiModlG3bMeXPNoAnz2maMxU3lYY/WTk0LNXkq1moMOgxJ/ot/9NdxnpMPT3BXUpssJTERas4fosnmW19CJn18VvxsBN6Lj murali@versa-networks.com"); StringBuffer runCmd = new StringBuffer("runcmd:\n - echo "); String trimmed = interfaceData.toString().trim(); runCmd.append(trimmed); // userData.append("\""); runCmd.append("\"").append(" >> /tmp/interfacex.txt\n"); userData.append(runCmd); userData.append(sshKey); System.out.println(userData.toString()); return userData.toString(); } }
{ "content_hash": "86ce931f0fafa7868c5c0046de6971f8", "timestamp": "", "source": "github", "line_count": 621, "max_line_length": 426, "avg_line_length": 34.069243156199676, "alnum_prop": 0.7396606324148036, "repo_name": "gdmurali/cloud-samples", "id": "630b31549bb72c7f93b524f373ec80ef5702d2ef", "size": "21157", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws/src/main/java/com/gdm/aws/VNF.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "322229" } ], "symlink_target": "" }
<?php /** * Message translations. * * This file is automatically generated by 'yiic message' command. * It contains the localizable messages extracted from source code. * You may modify this file by translating the extracted messages. * * Each array element represents the translation (value) of a message (key). * If the value is empty, the message is considered as not translated. * Messages that no longer need translation will have their translations * enclosed between a pair of '@@' marks. * * Message string can be used with plural forms format. Check i18n section * of the guide for details. * * NOTE, this file must be saved in UTF-8 encoding. * * @version $Id: $ */ return array( 'address' => 'Адрес', 'apartment' => 'Объект', 'area' => 'Район', 'city' => 'Город', 'container' => 'Контейнер?', 'description' => 'Описание', 'file' => 'Файл', 'floor' => 'Этаж', 'is_filter' => 'Фильтр', 'is_rent' => 'Аренда', 'is_special' => 'Спец. предложение', 'metro' => 'Метро', 'parent_id' => 'Родительский объект', 'room_number' => 'Кол-во комнат', 'square' => 'Общая площадь', 'square_kitchen' => 'Площадь кухни', 'square_live' => 'Жилая площадь', 'type' => 'Тип недвижимости', 'wc_number' => 'Кол-во санузлов', 'ytvideo_code' => 'Код Youtube', );
{ "content_hash": "7c667cb881d413de3d544738d3fa8a0c", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 76, "avg_line_length": 32.023809523809526, "alnum_prop": 0.6312267657992565, "repo_name": "pasitive/restate", "id": "48363e787da48d40e0ea34be4d50c6948ce4bd3c", "size": "1519", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "protected/messages/ru/apartment.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "5193" }, { "name": "PHP", "bytes": "626206" }, { "name": "Perl", "bytes": "85" }, { "name": "Ruby", "bytes": "2547" }, { "name": "Shell", "bytes": "193" } ], "symlink_target": "" }
package com.runner.sportsmeter; import android.content.Context; import android.support.multidex.MultiDex; import android.support.multidex.MultiDexApplication; import com.google.android.gms.analytics.GoogleAnalytics; import com.google.android.gms.analytics.Tracker; import com.parse.Parse; import com.parse.ParseACL; import com.parse.ParseInstallation; import com.parse.ParseObject; import com.runner.sportsmeter.common.ParseCommon; import com.runner.sportsmeter.models.Account; import com.runner.sportsmeter.models.Coordinates; import com.runner.sportsmeter.models.Segments; import com.runner.sportsmeter.models.Sessions; /** * Created by angelr on 30-Jul-15. */ public class Application extends MultiDexApplication { private Tracker mTracker; /** * Gets the default {@link Tracker} for this {@linkApplication}. * @return tracker */ synchronized public Tracker getDefaultTracker() { if (mTracker == null) { GoogleAnalytics analytics = GoogleAnalytics.getInstance(this); // To enable debug logging use: adb shell setprop log.tag.GAv4 DEBUG mTracker = analytics.newTracker(R.xml.global_tracker); } return mTracker; } @Override public void onCreate() { super.onCreate(); // ParseCrashReporting.enable(this); Parse.enableLocalDatastore(this); ParseObject.registerSubclass(Account.class); ParseObject.registerSubclass(Sessions.class); ParseObject.registerSubclass(Coordinates.class); ParseObject.registerSubclass(Segments.class); ParseCommon.ParseInitialize(this); // ParseUser.enableAutomaticUser(); ParseACL defaultACL = new ParseACL(); defaultACL.setPublicReadAccess(true); defaultACL.setPublicWriteAccess(false); ParseACL.setDefaultACL(defaultACL, true); ParseInstallation.getCurrentInstallation().saveEventually(); } @Override protected void attachBaseContext(Context base) { super.attachBaseContext(base); MultiDex.install(this); } }
{ "content_hash": "675c69071aa97a45a42643bd85c0f3c7", "timestamp": "", "source": "github", "line_count": 62, "max_line_length": 80, "avg_line_length": 33.62903225806452, "alnum_prop": 0.7194244604316546, "repo_name": "achoraev/RunnerMeter", "id": "a6e2f453c37210d9a75c16c3b03e6025b1e8c5c7", "size": "2085", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SportsMeter/src/main/java/com/runner/sportsmeter/Application.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "322478" } ], "symlink_target": "" }
using System.Collections.Generic; using System.Drawing; using BizHawk.Client.Common; using BizHawk.Emulation.Cores.Atari.Atari7800; namespace BizHawk.Client.EmuHawk { [SchemaAttributes("A78")] public class A78Schema : IVirtualPadSchema { public IEnumerable<PadSchema> GetPadSchemas() { switch ((Global.Emulator as Atari7800).ControlAdapter.ControlType.Name) { case "Atari 7800 Joystick Controller": yield return JoystickController(1); yield return JoystickController(2); break; case "Atari 7800 Paddle Controller": yield return PaddleController(1); yield return PaddleController(2); break; case "Atari 7800 Keypad Controller": break; case "Atari 7800 Driving Controller": break; case "Atari 7800 Booster Grip Controller": break; case "Atari 7800 ProLine Joystick Controller": yield return ProLineController(1); yield return ProLineController(2); break; case "Atari 7800 Light Gun Controller": yield return LightGunController(1); yield return LightGunController(2); break; } yield return ConsoleButtons(); } private static PadSchema ProLineController(int controller) { return new PadSchema { DisplayName = "Player " + controller, IsConsole = false, DefaultSize = new Size(174, 74), MaxSize = new Size(174, 74), Buttons = new[] { new PadSchema.ButtonScema { Name = "P" + controller + " Up", DisplayName = "", Icon = Properties.Resources.BlueUp, Location = new Point(23, 15), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "P" + controller + " Down", DisplayName = "", Icon = Properties.Resources.BlueDown, Location = new Point(23, 36), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "P" + controller + " Left", DisplayName = "", Icon = Properties.Resources.Back, Location = new Point(2, 24), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "P" + controller + " Right", DisplayName = "", Icon = Properties.Resources.Forward, Location = new Point(44, 24), Type = PadSchema.PadInputType.Boolean }, new 
PadSchema.ButtonScema { Name = "P" + controller + " Trigger", DisplayName = "1", Location = new Point(120, 24), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "P" + controller + " Trigger 2", DisplayName = "2", Location = new Point(145, 24), Type = PadSchema.PadInputType.Boolean } } }; } private static PadSchema JoystickController(int controller) { return new PadSchema { DisplayName = "Player " + controller, IsConsole = false, DefaultSize = new Size(174, 74), MaxSize = new Size(174, 74), Buttons = new[] { new PadSchema.ButtonScema { Name = "P" + controller + " Up", DisplayName = "", Icon = Properties.Resources.BlueUp, Location = new Point(23, 15), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "P" + controller + " Down", DisplayName = "", Icon = Properties.Resources.BlueDown, Location = new Point(23, 36), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "P" + controller + " Left", DisplayName = "", Icon = Properties.Resources.Back, Location = new Point(2, 24), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "P" + controller + " Right", DisplayName = "", Icon = Properties.Resources.Forward, Location = new Point(44, 24), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "P" + controller + " Trigger", DisplayName = "1", Location = new Point(120, 24), Type = PadSchema.PadInputType.Boolean } } }; } private static PadSchema PaddleController(int controller) { return new PadSchema { DisplayName = "Player " + controller, IsConsole = false, DefaultSize = new Size(250, 74), Buttons = new[] { new PadSchema.ButtonScema { Name = "P" + controller + " Paddle", DisplayName = "Paddle", Location = new Point(23, 15), Type = PadSchema.PadInputType.FloatSingle }, new PadSchema.ButtonScema { Name = "P" + controller + " Trigger", DisplayName = "1", Location = new Point(12, 90), Type = PadSchema.PadInputType.Boolean } } }; } private static PadSchema 
LightGunController(int controller) { return new PadSchema { DisplayName = "Light Gun", IsConsole = false, DefaultSize = new Size(356, 290), MaxSize = new Size(356, 290), Buttons = new[] { new PadSchema.ButtonScema { Name = "P" + controller + " VPos", Location = new Point(14, 17), Type = PadSchema.PadInputType.TargetedPair, TargetSize = new Size(256, 240), SecondaryNames = new [] { "P" + controller + " HPos", } }, new PadSchema.ButtonScema { Name = "P" + controller + " Trigger", DisplayName = "Trigger", Location = new Point(284, 17), Type = PadSchema.PadInputType.Boolean } } }; } private static PadSchema ConsoleButtons() { return new PadSchema { DisplayName = "Console", IsConsole = true, DefaultSize = new Size(215, 50), Buttons = new[] { new PadSchema.ButtonScema { Name = "Select", DisplayName = "Select", Location = new Point(10, 15), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "Reset", DisplayName = "Reset", Location = new Point(60, 15), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "Power", DisplayName = "Power", Location = new Point(108, 15), Type = PadSchema.PadInputType.Boolean }, new PadSchema.ButtonScema { Name = "Pause", DisplayName = "Pause", Location = new Point(158, 15), Type = PadSchema.PadInputType.Boolean } } }; } } }
{ "content_hash": "057e6ebabac626bdd7bab06d58296854", "timestamp": "", "source": "github", "line_count": 257, "max_line_length": 74, "avg_line_length": 25.101167315175097, "alnum_prop": 0.5980468144473725, "repo_name": "superusercode/RTC3", "id": "4cf53bbdedb0d3c7f192f32cdbcf480f3c53a1e0", "size": "6453", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Real-Time Corruptor/BizHawk_RTC/BizHawk.Client.EmuHawk/tools/VirtualPads/schema/A78Schema.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "273687" }, { "name": "Batchfile", "bytes": "18810" }, { "name": "C", "bytes": "26790735" }, { "name": "C#", "bytes": "13499009" }, { "name": "C++", "bytes": "14428468" }, { "name": "CMake", "bytes": "39873" }, { "name": "GLSL", "bytes": "6610" }, { "name": "HTML", "bytes": "420498" }, { "name": "Inno Setup", "bytes": "3199" }, { "name": "Java", "bytes": "13302" }, { "name": "Limbo", "bytes": "15313" }, { "name": "Lua", "bytes": "303246" }, { "name": "M4", "bytes": "836" }, { "name": "Makefile", "bytes": "147790" }, { "name": "NSIS", "bytes": "3447" }, { "name": "Objective-C", "bytes": "207179" }, { "name": "Perl", "bytes": "78" }, { "name": "Python", "bytes": "34858" }, { "name": "Roff", "bytes": "5448" }, { "name": "Shell", "bytes": "26787" }, { "name": "SourcePawn", "bytes": "7395" } ], "symlink_target": "" }
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>Header Files</title> <link rel="stylesheet" href="../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset"> <link rel="up" href="../circular_buffer.html" title="Chapter&#160;7.&#160;Boost.Circular Buffer"> <link rel="prev" href="examples.html" title="More Examples"> <link rel="next" href="concepts.html" title="Modelled Concepts"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../boost.png"></td> <td align="center"><a href="../../../index.html">Home</a></td> <td align="center"><a href="../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="examples.html"><img src="../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../circular_buffer.html"><img src="../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../index.html"><img src="../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="concepts.html"><img src="../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h2 class="title" style="clear: both"> <a name="circular_buffer.headers"></a><a class="link" href="headers.html" title="Header Files">Header Files</a> </h2></div></div></div> <p> The circular buffer library is defined in the file <a href="../../../boost/circular_buffer.hpp" 
target="_top">circular_buffer.hpp</a>. </p> <pre class="programlisting"><span class="preprocessor">#include</span> <span class="special">&lt;</span><span class="identifier">boost</span><span class="special">/</span><span class="identifier">circular_buffer</span><span class="special">.</span><span class="identifier">hpp</span><span class="special">&gt;</span> </pre> <p> (There is also a forward declaration for the <code class="computeroutput"><a class="link" href="../boost/circular_buffer.html" title="Class template circular_buffer">circular_buffer</a></code> in the header file <a href="../../../boost/circular_buffer_fwd.hpp" target="_top">circular_buffer_fwd.hpp</a>). </p> <p> The <code class="computeroutput"><a class="link" href="../boost/circular_buffer.html" title="Class template circular_buffer">circular_buffer</a></code> is defined in the file <a href="../../../boost/circular_buffer/base.hpp" target="_top">base.hpp</a>. </p> <p> The <code class="computeroutput"><a class="link" href="../boost/circular_buffe_idp46178672.html" title="Class template circular_buffer_space_optimized">circular_buffer_space_optimized</a></code> is defined in the file <a href="../../../boost/circular_buffer/space_optimized.hpp" target="_top">space_optimized.hpp</a>. </p> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2003-2013 Jan Gaspar<p> Distributed under the Boost Software License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="examples.html"><img src="../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../circular_buffer.html"><img src="../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../index.html"><img src="../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="concepts.html"><img src="../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
{ "content_hash": "fc3d283022e6796621e1b0202ff90fdf", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 394, "avg_line_length": 70.25, "alnum_prop": 0.6583629893238434, "repo_name": "PatidarWeb/poedit", "id": "d28ed2334657ad6d5d6a0d4b2f84497cfbcc7db3", "size": "4215", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "deps/boost/doc/html/circular_buffer/headers.html", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "13167" }, { "name": "C++", "bytes": "814442" }, { "name": "Inno Setup", "bytes": "10148" }, { "name": "Objective-C", "bytes": "4458" }, { "name": "Objective-C++", "bytes": "12149" }, { "name": "Python", "bytes": "3040" }, { "name": "Ruby", "bytes": "220" }, { "name": "Shell", "bytes": "10288" } ], "symlink_target": "" }
<?php if (class_exists('PHP_CodeSniffer_Standards_IncorrectPatternException', true) === false) { $error = 'Class PHP_CodeSniffer_Standards_IncorrectPatternException not found'; throw new PHP_CodeSniffer_Exception($error); } /** * Processes pattern strings and checks that the code conforms to the pattern. * * This test essentially checks that code is correctly formatted with whitespace. * * @category PHP * @package PHP_CodeSniffer * @author Greg Sherwood <gsherwood@squiz.net> * @author Marc McIntyre <mmcintyre@squiz.net> * @copyright 2006-2012 Squiz Pty Ltd (ABN 77 084 670 600) * @license https://github.com/squizlabs/PHP_CodeSniffer/blob/master/licence.txt BSD Licence * @version Release: @package_version@ * @link http://pear.php.net/package/PHP_CodeSniffer */ abstract class PHP_CodeSniffer_Standards_AbstractPatternSniff implements PHP_CodeSniffer_Sniff { /** * If true, comments will be ignored if they are found in the code. * * @var boolean */ public $ignoreComments = false; /** * The current file being checked. * * @var string */ protected $currFile = ''; /** * The parsed patterns array. * * @var array */ private $_parsedPatterns = array(); /** * Tokens that this sniff wishes to process outside of the patterns. * * @var array(int) * @see registerSupplementary() * @see processSupplementary() */ private $_supplementaryTokens = array(); /** * Positions in the stack where errors have occurred. * * @var array() */ private $_errorPos = array(); /** * Constructs a PHP_CodeSniffer_Standards_AbstractPatternSniff. * * @param boolean $ignoreComments If true, comments will be ignored. */ public function __construct($ignoreComments=null) { // This is here for backwards compatibility. if ($ignoreComments !== null) { $this->ignoreComments = $ignoreComments; } $this->_supplementaryTokens = $this->registerSupplementary(); }//end __construct() /** * Registers the tokens to listen to. 
* * Classes extending <i>AbstractPatternTest</i> should implement the * <i>getPatterns()</i> method to register the patterns they wish to test. * * @return array(int) * @see process() */ public final function register() { $listenTypes = array(); $patterns = $this->getPatterns(); foreach ($patterns as $pattern) { $parsedPattern = $this->_parse($pattern); // Find a token position in the pattern that we can use // for a listener token. $pos = $this->_getListenerTokenPos($parsedPattern); $tokenType = $parsedPattern[$pos]['token']; $listenTypes[] = $tokenType; $patternArray = array( 'listen_pos' => $pos, 'pattern' => $parsedPattern, 'pattern_code' => $pattern, ); if (isset($this->_parsedPatterns[$tokenType]) === false) { $this->_parsedPatterns[$tokenType] = array(); } $this->_parsedPatterns[$tokenType][] = $patternArray; }//end foreach return array_unique(array_merge($listenTypes, $this->_supplementaryTokens)); }//end register() /** * Returns the token types that the specified pattern is checking for. * * Returned array is in the format: * <code> * array( * T_WHITESPACE => 0, // 0 is the position where the T_WHITESPACE token * // should occur in the pattern. * ); * </code> * * @param array $pattern The parsed pattern to find the acquire the token * types from. * * @return array(int => int) */ private function _getPatternTokenTypes($pattern) { $tokenTypes = array(); foreach ($pattern as $pos => $patternInfo) { if ($patternInfo['type'] === 'token') { if (isset($tokenTypes[$patternInfo['token']]) === false) { $tokenTypes[$patternInfo['token']] = $pos; } } } return $tokenTypes; }//end _getPatternTokenTypes() /** * Returns the position in the pattern that this test should register as * a listener for the pattern. * * @param array $pattern The pattern to acquire the listener for. * * @return int The postition in the pattern that this test should register * as the listener. * @throws PHP_CodeSniffer_Exception If we could not determine a token * to listen for. 
*/ private function _getListenerTokenPos($pattern) { $tokenTypes = $this->_getPatternTokenTypes($pattern); $tokenCodes = array_keys($tokenTypes); $token = PHP_CodeSniffer_Tokens::getHighestWeightedToken($tokenCodes); // If we could not get a token. if ($token === false) { $error = 'Could not determine a token to listen for'; throw new PHP_CodeSniffer_Exception($error); } return $tokenTypes[$token]; }//end _getListenerTokenPos() /** * Processes the test. * * @param PHP_CodeSniffer_File $phpcsFile The PHP_CodeSniffer file where the * token occured. * @param int $stackPtr The postion in the tokens stack * where the listening token type was * found. * * @return void * @see register() */ public final function process(PHP_CodeSniffer_File $phpcsFile, $stackPtr) { $file = $phpcsFile->getFilename(); if ($this->currFile !== $file) { // We have changed files, so clean up. $this->_errorPos = array(); $this->currFile = $file; } $tokens = $phpcsFile->getTokens(); if (in_array($tokens[$stackPtr]['code'], $this->_supplementaryTokens) === true) { $this->processSupplementary($phpcsFile, $stackPtr); } $type = $tokens[$stackPtr]['code']; // If the type is not set, then it must have been a token registered // with registerSupplementary(). if (isset($this->_parsedPatterns[$type]) === false) { return; } $allErrors = array(); // Loop over each pattern that is listening to the current token type // that we are processing. foreach ($this->_parsedPatterns[$type] as $patternInfo) { // If processPattern returns false, then the pattern that we are // checking the code with must not be designed to check that code. $errors = $this->processPattern($patternInfo, $phpcsFile, $stackPtr); if ($errors === false) { // The pattern didn't match. continue; } else if (empty($errors) === true) { // The pattern matched, but there were no errors. 
break; } foreach ($errors as $stackPtr => $error) { if (isset($this->_errorPos[$stackPtr]) === false) { $this->_errorPos[$stackPtr] = true; $allErrors[$stackPtr] = $error; } } } foreach ($allErrors as $stackPtr => $error) { $phpcsFile->addError($error, $stackPtr); } }//end process() /** * Processes the pattern and verifies the code at $stackPtr. * * @param array $patternInfo Information about the pattern used * for checking, which includes are * parsed token representation of the * pattern. * @param PHP_CodeSniffer_File $phpcsFile The PHP_CodeSniffer file where the * token occured. * @param int $stackPtr The postion in the tokens stack where * the listening token type was found. * * @return array(errors) */ protected function processPattern( $patternInfo, PHP_CodeSniffer_File $phpcsFile, $stackPtr ) { $tokens = $phpcsFile->getTokens(); $pattern = $patternInfo['pattern']; $patternCode = $patternInfo['pattern_code']; $errors = array(); $found = ''; $ignoreTokens = array(T_WHITESPACE); if ($this->ignoreComments === true) { $ignoreTokens = array_merge($ignoreTokens, PHP_CodeSniffer_Tokens::$commentTokens); } $origStackPtr = $stackPtr; $hasError = false; if ($patternInfo['listen_pos'] > 0) { $stackPtr--; for ($i = ($patternInfo['listen_pos'] - 1); $i >= 0; $i--) { if ($pattern[$i]['type'] === 'token') { if ($pattern[$i]['token'] === T_WHITESPACE) { if ($tokens[$stackPtr]['code'] === T_WHITESPACE) { $found = $tokens[$stackPtr]['content'].$found; } // Only check the size of the whitespace if this is not // the first token. We don't care about the size of // leading whitespace, just that there is some. if ($i !== 0) { if ($tokens[$stackPtr]['content'] !== $pattern[$i]['value']) { $hasError = true; } } } else { // Check to see if this important token is the same as the // previous important token in the pattern. If it is not, // then the pattern cannot be for this piece of code. 
$prev = $phpcsFile->findPrevious( $ignoreTokens, $stackPtr, null, true ); if ($prev === false || $tokens[$prev]['code'] !== $pattern[$i]['token'] ) { return false; } // If we skipped past some whitespace tokens, then add them // to the found string. $tokenContent = $phpcsFile->getTokensAsString( ($prev + 1), ($stackPtr - $prev - 1) ); $found = $tokens[$prev]['content'].$tokenContent.$found; if (isset($pattern[($i - 1)]) === true && $pattern[($i - 1)]['type'] === 'skip' ) { $stackPtr = $prev; } else { $stackPtr = ($prev - 1); } }//end if } else if ($pattern[$i]['type'] === 'skip') { // Skip to next piece of relevant code. if ($pattern[$i]['to'] === 'parenthesis_closer') { $to = 'parenthesis_opener'; } else { $to = 'scope_opener'; } // Find the previous opener. $next = $phpcsFile->findPrevious( $ignoreTokens, $stackPtr, null, true ); if ($next === false || isset($tokens[$next][$to]) === false) { // If there was not opener, then we must be // using the wrong pattern. return false; } if ($to === 'parenthesis_opener') { $found = '{'.$found; } else { $found = '('.$found; } $found = '...'.$found; // Skip to the opening token. 
$stackPtr = ($tokens[$next][$to] - 1); } else if ($pattern[$i]['type'] === 'string') { $found = 'abc'; } else if ($pattern[$i]['type'] === 'newline') { if ($this->ignoreComments === true && in_array($tokens[$stackPtr]['code'], PHP_CodeSniffer_Tokens::$commentTokens) === true ) { $startComment = $phpcsFile->findPrevious( PHP_CodeSniffer_Tokens::$commentTokens, ($stackPtr - 1), null, true ); if ($tokens[$startComment]['line'] !== $tokens[($startComment + 1)]['line']) { $startComment++; } $tokenContent = $phpcsFile->getTokensAsString( $startComment, ($stackPtr - $startComment + 1) ); $found = $tokenContent.$found; $stackPtr = ($startComment - 1); } if ($tokens[$stackPtr]['code'] === T_WHITESPACE) { if ($tokens[$stackPtr]['content'] !== $phpcsFile->eolChar) { $found = $tokens[$stackPtr]['content'].$found; // This may just be an indent that comes after a newline // so check the token before to make sure. If it is a newline, we // can ignore the error here. if (($tokens[($stackPtr - 1)]['content'] !== $phpcsFile->eolChar) && ($this->ignoreComments === true && in_array($tokens[($stackPtr - 1)]['code'], PHP_CodeSniffer_Tokens::$commentTokens) === false) ) { $hasError = true; } else { $stackPtr--; } } else { $found = 'EOL'.$found; } } else { $found = $tokens[$stackPtr]['content'].$found; $hasError = true; } if ($hasError === false && $pattern[($i - 1)]['type'] !== 'newline') { // Make sure they only have 1 newline. $prev = $phpcsFile->findPrevious($ignoreTokens, ($stackPtr - 1), null, true); if ($prev !== false && $tokens[$prev]['line'] !== $tokens[$stackPtr]['line']) { $hasError = true; } } }//end if }//end for }//end if $stackPtr = $origStackPtr; $lastAddedStackPtr = null; $patternLen = count($pattern); for ($i = $patternInfo['listen_pos']; $i < $patternLen; $i++) { if ($pattern[$i]['type'] === 'token') { if ($pattern[$i]['token'] === T_WHITESPACE) { if ($this->ignoreComments === true) { // If we are ignoring comments, check to see if this current // token is a comment. 
If so skip it. if (in_array($tokens[$stackPtr]['code'], PHP_CodeSniffer_Tokens::$commentTokens) === true) { continue; } // If the next token is a comment, the we need to skip the // current token as we should allow a space before a // comment for readability. if (in_array($tokens[($stackPtr + 1)]['code'], PHP_CodeSniffer_Tokens::$commentTokens) === true) { continue; } } $tokenContent = ''; if ($tokens[$stackPtr]['code'] === T_WHITESPACE) { if (isset($pattern[($i + 1)]) === false) { // This is the last token in the pattern, so just compare // the next token of content. $tokenContent = $tokens[$stackPtr]['content']; } else { // Get all the whitespace to the next token. $next = $phpcsFile->findNext( PHP_CodeSniffer_Tokens::$emptyTokens, $stackPtr, null, true ); $tokenContent = $phpcsFile->getTokensAsString( $stackPtr, ($next - $stackPtr) ); $lastAddedStackPtr = $stackPtr; $stackPtr = $next; } if ($stackPtr !== $lastAddedStackPtr) { $found .= $tokenContent; } } else { if ($stackPtr !== $lastAddedStackPtr) { $found .= $tokens[$stackPtr]['content']; $lastAddedStackPtr = $stackPtr; } }//end if if (isset($pattern[($i + 1)]) === true && $pattern[($i + 1)]['type'] === 'skip' ) { // The next token is a skip token, so we just need to make // sure the whitespace we found has *at least* the // whitespace required. if (strpos($tokenContent, $pattern[$i]['value']) !== 0) { $hasError = true; } } else { if ($tokenContent !== $pattern[$i]['value']) { $hasError = true; } } } else { // Check to see if this important token is the same as the // next important token in the pattern. If it is not, then // the pattern cannot be for this piece of code. $next = $phpcsFile->findNext( $ignoreTokens, $stackPtr, null, true ); if ($next === false || $tokens[$next]['code'] !== $pattern[$i]['token'] ) { // The next important token did not match the pattern. 
return false; } if ($lastAddedStackPtr !== null) { if (($tokens[$next]['code'] === T_OPEN_CURLY_BRACKET || $tokens[$next]['code'] === T_CLOSE_CURLY_BRACKET) && isset($tokens[$next]['scope_condition']) === true && $tokens[$next]['scope_condition'] > $lastAddedStackPtr ) { // This is a brace, but the owner of it is after the current // token, which means it does not belong to any token in // our pattern. This means the pattern is not for us. return false; } if (($tokens[$next]['code'] === T_OPEN_PARENTHESIS || $tokens[$next]['code'] === T_CLOSE_PARENTHESIS) && isset($tokens[$next]['parenthesis_owner']) === true && $tokens[$next]['parenthesis_owner'] > $lastAddedStackPtr ) { // This is a bracket, but the owner of it is after the current // token, which means it does not belong to any token in // our pattern. This means the pattern is not for us. return false; } }//end if // If we skipped past some whitespace tokens, then add them // to the found string. if (($next - $stackPtr) > 0) { $hasComment = false; for ($j = $stackPtr; $j < $next; $j++) { $found .= $tokens[$j]['content']; if (in_array($tokens[$j]['code'], PHP_CodeSniffer_Tokens::$commentTokens) === true) { $hasComment = true; } } // If we are not ignoring comments, this additional // whitespace or comment is not allowed. If we are // ignoring comments, there needs to be at least one // comment for this to be allowed. if ($this->ignoreComments === false || ($this->ignoreComments === true && $hasComment === false) ) { $hasError = true; } // Even when ignoring comments, we are not allowed to include // newlines without the pattern specifying them, so // everything should be on the same line. 
if ($tokens[$next]['line'] !== $tokens[$stackPtr]['line']) { $hasError = true; } }//end if if ($next !== $lastAddedStackPtr) { $found .= $tokens[$next]['content']; $lastAddedStackPtr = $next; } if (isset($pattern[($i + 1)]) === true && $pattern[($i + 1)]['type'] === 'skip' ) { $stackPtr = $next; } else { $stackPtr = ($next + 1); } }//end if } else if ($pattern[$i]['type'] === 'skip') { if ($pattern[$i]['to'] === 'unknown') { $next = $phpcsFile->findNext( $pattern[($i + 1)]['token'], $stackPtr ); if ($next === false) { // Couldn't find the next token, sowe we must // be using the wrong pattern. return false; } $found .= '...'; $stackPtr = $next; } else { // Find the previous opener. $next = $phpcsFile->findPrevious( PHP_CodeSniffer_Tokens::$blockOpeners, $stackPtr ); if ($next === false || isset($tokens[$next][$pattern[$i]['to']]) === false ) { // If there was not opener, then we must // be using the wrong pattern. return false; } $found .= '...'; if ($pattern[$i]['to'] === 'parenthesis_closer') { $found .= ')'; } else { $found .= '}'; } // Skip to the closing token. $stackPtr = ($tokens[$next][$pattern[$i]['to']] + 1); }//end if } else if ($pattern[$i]['type'] === 'string') { if ($tokens[$stackPtr]['code'] !== T_STRING) { $hasError = true; } if ($stackPtr !== $lastAddedStackPtr) { $found .= 'abc'; $lastAddedStackPtr = $stackPtr; } $stackPtr++; } else if ($pattern[$i]['type'] === 'newline') { // Find the next token that contains a newline character. $newline = 0; for ($j = $stackPtr; $j < $phpcsFile->numTokens; $j++) { if (strpos($tokens[$j]['content'], $phpcsFile->eolChar) !== false) { $newline = $j; break; } } if ($newline === 0) { // We didn't find a newline character in the rest of the file. $next = ($phpcsFile->numTokens - 1); $hasError = true; } else { if ($this->ignoreComments === false) { // The newline character cannot be part of a comment. 
if (in_array($tokens[$newline]['code'], PHP_CodeSniffer_Tokens::$commentTokens) === true) { $hasError = true; } } if ($newline === $stackPtr) { $next = ($stackPtr + 1); } else { // Check that there were no significant tokens that we // skipped over to find our newline character. $next = $phpcsFile->findNext( $ignoreTokens, $stackPtr, null, true ); if ($next < $newline) { // We skipped a non-ignored token. $hasError = true; } else { $next = ($newline + 1); } } }//end if if ($stackPtr !== $lastAddedStackPtr) { $found .= $phpcsFile->getTokensAsString( $stackPtr, ($next - $stackPtr) ); $diff = ($next - $stackPtr); $lastAddedStackPtr = ($next - 1); } $stackPtr = $next; }//end if }//end for if ($hasError === true) { $error = $this->prepareError($found, $patternCode); $errors[$origStackPtr] = $error; } return $errors; }//end processPattern() /** * Prepares an error for the specified patternCode. * * @param string $found The actual found string in the code. * @param string $patternCode The expected pattern code. * * @return string The error message. */ protected function prepareError($found, $patternCode) { $found = str_replace("\r\n", '\n', $found); $found = str_replace("\n", '\n', $found); $found = str_replace("\r", '\n', $found); $found = str_replace('EOL', '\n', $found); $expected = str_replace('EOL', '\n', $patternCode); $error = "Expected \"$expected\"; found \"$found\""; return $error; }//end prepareError() /** * Returns the patterns that should be checked. * * @return array(string) */ protected abstract function getPatterns(); /** * Registers any supplementary tokens that this test might wish to process. * * A sniff may wish to register supplementary tests when it wishes to group * an arbitary validation that cannot be performed using a pattern, with * other pattern tests. 
* * @return array(int) * @see processSupplementary() */ protected function registerSupplementary() { return array(); }//end registerSupplementary() /** * Processes any tokens registered with registerSupplementary(). * * @param PHP_CodeSniffer_File $phpcsFile The PHP_CodeSniffer file where to * process the skip. * @param int $stackPtr The position in the tokens stack to * process. * * @return void * @see registerSupplementary() */ protected function processSupplementary( PHP_CodeSniffer_File $phpcsFile, $stackPtr ) { }//end processSupplementary() /** * Parses a pattern string into an array of pattern steps. * * @param string $pattern The pattern to parse. * * @return array The parsed pattern array. * @see _createSkipPattern() * @see _createTokenPattern() */ private function _parse($pattern) { $patterns = array(); $length = strlen($pattern); $lastToken = 0; $firstToken = 0; for ($i = 0; $i < $length; $i++) { $specialPattern = false; $isLastChar = ($i === ($length - 1)); $oldFirstToken = $firstToken; if (substr($pattern, $i, 3) === '...') { // It's a skip pattern. The skip pattern requires the // content of the token in the "from" position and the token // to skip to. $specialPattern = $this->_createSkipPattern($pattern, ($i - 1)); $lastToken = ($i - $firstToken); $firstToken = ($i + 3); $i = ($i + 2); if ($specialPattern['to'] !== 'unknown') { $firstToken++; } } else if (substr($pattern, $i, 3) === 'abc') { $specialPattern = array('type' => 'string'); $lastToken = ($i - $firstToken); $firstToken = ($i + 3); $i = ($i + 2); } else if (substr($pattern, $i, 3) === 'EOL') { $specialPattern = array('type' => 'newline'); $lastToken = ($i - $firstToken); $firstToken = ($i + 3); $i = ($i + 2); } if ($specialPattern !== false || $isLastChar === true) { // If we are at the end of the string, don't worry about a limit. if ($isLastChar === true) { // Get the string from the end of the last skip pattern, if any, // to the end of the pattern string. 
$str = substr($pattern, $oldFirstToken); } else { // Get the string from the end of the last special pattern, // if any, to the start of this special pattern. if ($lastToken === 0) { // Note that if the last special token was zero characters ago, // there will be nothing to process so we can skip this bit. // This happens if you have something like: EOL... in your pattern. $str = ''; } else { $str = substr($pattern, $oldFirstToken, $lastToken); } } if ($str !== '') { $tokenPatterns = $this->_createTokenPattern($str); foreach ($tokenPatterns as $tokenPattern) { $patterns[] = $tokenPattern; } } // Make sure we don't skip the last token. if ($isLastChar === false && $i === ($length - 1)) { $i--; } }//end if // Add the skip pattern *after* we have processed // all the tokens from the end of the last skip pattern // to the start of this skip pattern. if ($specialPattern !== false) { $patterns[] = $specialPattern; } }//end for return $patterns; }//end _parse() /** * Creates a skip pattern. * * @param string $pattern The pattern being parsed. * @param string $from The token content that the skip pattern starts from. * * @return array The pattern step. * @see _createTokenPattern() * @see _parse() */ private function _createSkipPattern($pattern, $from) { $skip = array('type' => 'skip'); $nestedParenthesis = 0; $nestedBraces = 0; for ($start = $from; $start >= 0; $start--) { switch ($pattern[$start]) { case '(': if ($nestedParenthesis === 0) { $skip['to'] = 'parenthesis_closer'; } $nestedParenthesis--; break; case '{': if ($nestedBraces === 0) { $skip['to'] = 'scope_closer'; } $nestedBraces--; break; case '}': $nestedBraces++; break; case ')': $nestedParenthesis++; break; } if (isset($skip['to']) === true) { break; } } if (isset($skip['to']) === false) { $skip['to'] = 'unknown'; } return $skip; }//end _createSkipPattern() /** * Creates a token pattern. * * @param string $str The tokens string that the pattern should match. * * @return array The pattern step. 
* @see _createSkipPattern() * @see _parse() */ private function _createTokenPattern($str) { // Don't add a space after the closing php tag as it will add a new // whitespace token. $tokens = token_get_all('<?php '.$str.'?>'); // Remove the <?php tag from the front and the end php tag from the back. $tokens = array_slice($tokens, 1, (count($tokens) - 2)); foreach ($tokens as &$token) { $token = PHP_CodeSniffer::standardiseToken($token); } $patterns = array(); foreach ($tokens as $patternInfo) { $patterns[] = array( 'type' => 'token', 'token' => $patternInfo['code'], 'value' => $patternInfo['content'], ); } return $patterns; }//end _createTokenPattern() }//end class ?>
{ "content_hash": "8d3b4f8966b9182019ce4b20847ff170", "timestamp": "", "source": "github", "line_count": 949, "max_line_length": 163, "avg_line_length": 37.59957850368809, "alnum_prop": 0.423238607701362, "repo_name": "drBenway/siteResearch", "id": "ef7ded90ef58f66a9ba8d593ee1cb5677cf4d470", "size": "36151", "binary": false, "copies": "6", "ref": "refs/heads/gh-pages", "path": "vendor/squizlabs/php_codesniffer/CodeSniffer/Standards/AbstractPatternSniff.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "17025" }, { "name": "JavaScript", "bytes": "103190" }, { "name": "PHP", "bytes": "149397" }, { "name": "Shell", "bytes": "184" } ], "symlink_target": "" }
Title: The XSF Infrastructure Team Date: 2010-07-15 11:52:16 Author: admin Slug: the-xsf-infrastructure-team Category: page Tags: Summary: description: Link: http://xmpp.org/participate/become-a-member/the-xsf-infrastructure-team/ post_id: 794 The XSF's Infrastructure Team is responsible for maintaining and improving the machines, software, and other tools used by the XSF to deliver its websites, discussion lists, chatrooms, and other services. Its particular areas of responsibility include: * Several physical server machines hosted at USSHC * Operating system maintenance (Debian GNU/Linux) * Web server and associated software (lighttpd, MySQL, WordPress, MediaWiki) * Tools for XEP publication (Python scripts, shell scripts, XSLT) * Email server and list software (Postfix and Mailman) * Source control (Subversion and various products donated by Atlassian) * XMPP server, chatrooms, and bots (Prosody and homegrown software) * DNS (Bind, with mirrors) * Digital certificates * Data backups (provided by ASET at Penn State University) The team is limited to elected members of the XSF (and invited others at the discretion of the team). Participants are recruited from among the XSF membership by the team lead and approved by a simple majority of the existing team members. The team has a private email list and chatroom, and uses a [wiki page](http://wiki.xmpp.org/web/XSF_Infrastructure) for coordination.
{ "content_hash": "f41fcc2f2737682f2f7f9489c2440f5e", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 373, "avg_line_length": 59.291666666666664, "alnum_prop": 0.7947997189037245, "repo_name": "xsf/site-archived", "id": "158d27f78c841d8cae24c1af3e38d819dbf3f293", "size": "1426", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "content/pages/the-xsf-infrastructure-team.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "156653" }, { "name": "JavaScript", "bytes": "61508" }, { "name": "Python", "bytes": "1306" } ], "symlink_target": "" }
package org.apache.spark.sql.execution.joins import org.apache.spark.broadcast.Broadcast import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.optimizer.{BuildLeft, BuildRight, BuildSide} import org.apache.spark.sql.catalyst.plans._ import org.apache.spark.sql.catalyst.plans.physical._ import org.apache.spark.sql.execution.{ExplainUtils, SparkPlan} import org.apache.spark.sql.execution.metric.SQLMetrics import org.apache.spark.util.collection.{BitSet, CompactBuffer} case class BroadcastNestedLoopJoinExec( left: SparkPlan, right: SparkPlan, buildSide: BuildSide, joinType: JoinType, condition: Option[Expression]) extends BaseJoinExec { override def leftKeys: Seq[Expression] = Nil override def rightKeys: Seq[Expression] = Nil override lazy val metrics = Map( "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows")) /** BuildRight means the right relation <=> the broadcast relation. 
*/ private val (streamed, broadcast) = buildSide match { case BuildRight => (left, right) case BuildLeft => (right, left) } override def simpleStringWithNodeId(): String = { val opId = ExplainUtils.getOpId(this) s"$nodeName $joinType ${buildSide} ($opId)".trim } override def requiredChildDistribution: Seq[Distribution] = buildSide match { case BuildLeft => BroadcastDistribution(IdentityBroadcastMode) :: UnspecifiedDistribution :: Nil case BuildRight => UnspecifiedDistribution :: BroadcastDistribution(IdentityBroadcastMode) :: Nil } private[this] def genResultProjection: UnsafeProjection = joinType match { case LeftExistence(j) => UnsafeProjection.create(output, output) case other => // Always put the stream side on left to simplify implementation // both of left and right side could be null UnsafeProjection.create( output, (streamed.output ++ broadcast.output).map(_.withNullability(true))) } override def output: Seq[Attribute] = { joinType match { case _: InnerLike => left.output ++ right.output case LeftOuter => left.output ++ right.output.map(_.withNullability(true)) case RightOuter => left.output.map(_.withNullability(true)) ++ right.output case FullOuter => left.output.map(_.withNullability(true)) ++ right.output.map(_.withNullability(true)) case j: ExistenceJoin => left.output :+ j.exists case LeftExistence(_) => left.output case x => throw new IllegalArgumentException( s"BroadcastNestedLoopJoin should not take $x as the JoinType") } } @transient private lazy val boundCondition = { if (condition.isDefined) { Predicate.create(condition.get, streamed.output ++ broadcast.output).eval _ } else { (r: InternalRow) => true } } /** * The implementation for InnerJoin. 
*/ private def innerJoin(relation: Broadcast[Array[InternalRow]]): RDD[InternalRow] = { streamed.execute().mapPartitionsInternal { streamedIter => val buildRows = relation.value val joinedRow = new JoinedRow streamedIter.flatMap { streamedRow => val joinedRows = buildRows.iterator.map(r => joinedRow(streamedRow, r)) if (condition.isDefined) { joinedRows.filter(boundCondition) } else { joinedRows } } } } /** * The implementation for these joins: * * LeftOuter with BuildRight * RightOuter with BuildLeft */ private def outerJoin(relation: Broadcast[Array[InternalRow]]): RDD[InternalRow] = { streamed.execute().mapPartitionsInternal { streamedIter => val buildRows = relation.value val joinedRow = new JoinedRow val nulls = new GenericInternalRow(broadcast.output.size) // Returns an iterator to avoid copy the rows. new Iterator[InternalRow] { // current row from stream side private var streamRow: InternalRow = null // have found a match for current row or not private var foundMatch: Boolean = false // the matched result row private var resultRow: InternalRow = null // the next index of buildRows to try private var nextIndex: Int = 0 private def findNextMatch(): Boolean = { if (streamRow == null) { if (!streamedIter.hasNext) { return false } streamRow = streamedIter.next() nextIndex = 0 foundMatch = false } while (nextIndex < buildRows.length) { resultRow = joinedRow(streamRow, buildRows(nextIndex)) nextIndex += 1 if (boundCondition(resultRow)) { foundMatch = true return true } } if (!foundMatch) { resultRow = joinedRow(streamRow, nulls) streamRow = null true } else { resultRow = null streamRow = null findNextMatch() } } override def hasNext(): Boolean = { resultRow != null || findNextMatch() } override def next(): InternalRow = { val r = resultRow resultRow = null r } } } } /** * The implementation for these joins: * * LeftSemi with BuildRight * Anti with BuildRight */ private def leftExistenceJoin( relation: Broadcast[Array[InternalRow]], exists: Boolean): 
RDD[InternalRow] = { assert(buildSide == BuildRight) streamed.execute().mapPartitionsInternal { streamedIter => val buildRows = relation.value val joinedRow = new JoinedRow if (condition.isDefined) { streamedIter.filter(l => buildRows.exists(r => boundCondition(joinedRow(l, r))) == exists ) } else if (buildRows.nonEmpty == exists) { streamedIter } else { Iterator.empty } } } private def existenceJoin(relation: Broadcast[Array[InternalRow]]): RDD[InternalRow] = { assert(buildSide == BuildRight) streamed.execute().mapPartitionsInternal { streamedIter => val buildRows = relation.value val joinedRow = new JoinedRow if (condition.isDefined) { val resultRow = new GenericInternalRow(Array[Any](null)) streamedIter.map { row => val result = buildRows.exists(r => boundCondition(joinedRow(row, r))) resultRow.setBoolean(0, result) joinedRow(row, resultRow) } } else { val resultRow = new GenericInternalRow(Array[Any](buildRows.nonEmpty)) streamedIter.map { row => joinedRow(row, resultRow) } } } } /** * The implementation for these joins: * * LeftOuter with BuildLeft * RightOuter with BuildRight * FullOuter * LeftSemi with BuildLeft * LeftAnti with BuildLeft * ExistenceJoin with BuildLeft */ private def defaultJoin(relation: Broadcast[Array[InternalRow]]): RDD[InternalRow] = { /** All rows that either match both-way, or rows from streamed joined with nulls. 
*/ val streamRdd = streamed.execute() val matchedBuildRows = streamRdd.mapPartitionsInternal { streamedIter => val buildRows = relation.value val matched = new BitSet(buildRows.length) val joinedRow = new JoinedRow streamedIter.foreach { streamedRow => var i = 0 while (i < buildRows.length) { if (boundCondition(joinedRow(streamedRow, buildRows(i)))) { matched.set(i) } i += 1 } } Seq(matched).toIterator } val matchedBroadcastRows = matchedBuildRows.fold( new BitSet(relation.value.length) )(_ | _) joinType match { case LeftSemi => assert(buildSide == BuildLeft) val buf: CompactBuffer[InternalRow] = new CompactBuffer() var i = 0 val rel = relation.value while (i < rel.length) { if (matchedBroadcastRows.get(i)) { buf += rel(i).copy() } i += 1 } return sparkContext.makeRDD(buf) case j: ExistenceJoin => val buf: CompactBuffer[InternalRow] = new CompactBuffer() var i = 0 val rel = relation.value while (i < rel.length) { val result = new GenericInternalRow(Array[Any](matchedBroadcastRows.get(i))) buf += new JoinedRow(rel(i).copy(), result) i += 1 } return sparkContext.makeRDD(buf) case LeftAnti => val notMatched: CompactBuffer[InternalRow] = new CompactBuffer() var i = 0 val rel = relation.value while (i < rel.length) { if (!matchedBroadcastRows.get(i)) { notMatched += rel(i).copy() } i += 1 } return sparkContext.makeRDD(notMatched) case o => } val notMatchedBroadcastRows: Seq[InternalRow] = { val nulls = new GenericInternalRow(streamed.output.size) val buf: CompactBuffer[InternalRow] = new CompactBuffer() val joinedRow = new JoinedRow joinedRow.withLeft(nulls) var i = 0 val buildRows = relation.value while (i < buildRows.length) { if (!matchedBroadcastRows.get(i)) { buf += joinedRow.withRight(buildRows(i)).copy() } i += 1 } buf } val matchedStreamRows = streamRdd.mapPartitionsInternal { streamedIter => val buildRows = relation.value val joinedRow = new JoinedRow val nulls = new GenericInternalRow(broadcast.output.size) streamedIter.flatMap { streamedRow => var i = 0 var 
foundMatch = false val matchedRows = new CompactBuffer[InternalRow] while (i < buildRows.length) { if (boundCondition(joinedRow(streamedRow, buildRows(i)))) { matchedRows += joinedRow.copy() foundMatch = true } i += 1 } if (!foundMatch && joinType == FullOuter) { matchedRows += joinedRow(streamedRow, nulls).copy() } matchedRows.iterator } } sparkContext.union( matchedStreamRows, sparkContext.makeRDD(notMatchedBroadcastRows) ) } protected override def doExecute(): RDD[InternalRow] = { val broadcastedRelation = broadcast.executeBroadcast[Array[InternalRow]]() val resultRdd = (joinType, buildSide) match { case (_: InnerLike, _) => innerJoin(broadcastedRelation) case (LeftOuter, BuildRight) | (RightOuter, BuildLeft) => outerJoin(broadcastedRelation) case (LeftSemi, BuildRight) => leftExistenceJoin(broadcastedRelation, exists = true) case (LeftAnti, BuildRight) => leftExistenceJoin(broadcastedRelation, exists = false) case (j: ExistenceJoin, BuildRight) => existenceJoin(broadcastedRelation) case _ => /** * LeftOuter with BuildLeft * RightOuter with BuildRight * FullOuter * LeftSemi with BuildLeft * LeftAnti with BuildLeft * ExistenceJoin with BuildLeft */ defaultJoin(broadcastedRelation) } val numOutputRows = longMetric("numOutputRows") resultRdd.mapPartitionsWithIndexInternal { (index, iter) => val resultProj = genResultProjection resultProj.initialize(index) iter.map { r => numOutputRows += 1 resultProj(r) } } } }
{ "content_hash": "5488e974b01713d2331d2488f384c319", "timestamp": "", "source": "github", "line_count": 370, "max_line_length": 93, "avg_line_length": 31.532432432432433, "alnum_prop": 0.6217536641810234, "repo_name": "wzhfy/spark", "id": "52b476f9cf1341941fe164eceef142acdb97330a", "size": "12467", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoinExec.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "50609" }, { "name": "Batchfile", "bytes": "25763" }, { "name": "C", "bytes": "1493" }, { "name": "CSS", "bytes": "24294" }, { "name": "Dockerfile", "bytes": "9556" }, { "name": "HTML", "bytes": "40561" }, { "name": "HiveQL", "bytes": "1890746" }, { "name": "Java", "bytes": "4213400" }, { "name": "JavaScript", "bytes": "218161" }, { "name": "Jupyter Notebook", "bytes": "31865" }, { "name": "Makefile", "bytes": "1591" }, { "name": "PLSQL", "bytes": "7715" }, { "name": "PLpgSQL", "bytes": "389551" }, { "name": "PowerShell", "bytes": "3879" }, { "name": "Python", "bytes": "3330124" }, { "name": "R", "bytes": "1238296" }, { "name": "Roff", "bytes": "36740" }, { "name": "SQLPL", "bytes": "9325" }, { "name": "Scala", "bytes": "34437552" }, { "name": "Shell", "bytes": "219852" }, { "name": "TSQL", "bytes": "483581" }, { "name": "Thrift", "bytes": "67584" }, { "name": "q", "bytes": "79845" } ], "symlink_target": "" }
package com.alibaba.rocketmq.store.stats; import com.alibaba.rocketmq.common.ThreadFactoryImpl; import com.alibaba.rocketmq.common.constant.LoggerName; import com.alibaba.rocketmq.common.stats.MomentStatsItemSet; import com.alibaba.rocketmq.common.stats.StatsItem; import com.alibaba.rocketmq.common.stats.StatsItemSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; public class BrokerStatsManager { private static final Logger log = LoggerFactory.getLogger(LoggerName.RocketmqStatsLoggerName); private final ScheduledExecutorService scheduledExecutorService = Executors .newSingleThreadScheduledExecutor(new ThreadFactoryImpl("BrokerStatsThread")); public static final String TOPIC_PUT_NUMS = "TOPIC_PUT_NUMS"; public static final String TOPIC_PUT_SIZE = "TOPIC_PUT_SIZE"; public static final String GROUP_GET_NUMS = "GROUP_GET_NUMS"; public static final String GROUP_GET_SIZE = "GROUP_GET_SIZE"; public static final String SNDBCK_PUT_NUMS = "SNDBCK_PUT_NUMS"; public static final String BROKER_PUT_NUMS = "BROKER_PUT_NUMS"; public static final String BROKER_GET_NUMS = "BROKER_GET_NUMS"; private final HashMap<String, StatsItemSet> statsTable = new HashMap<String, StatsItemSet>(); private final String clusterName; /** * 读磁盘落后统计 */ public static final String GROUP_GET_FALL = "GROUP_GET_FALL"; private final MomentStatsItemSet momentStatsItemSet = new MomentStatsItemSet(GROUP_GET_FALL, scheduledExecutorService, log); public BrokerStatsManager(String clusterName) { this.clusterName = clusterName; this.statsTable.put(TOPIC_PUT_NUMS, new StatsItemSet(TOPIC_PUT_NUMS, this.scheduledExecutorService, log)); this.statsTable.put(TOPIC_PUT_SIZE, new StatsItemSet(TOPIC_PUT_SIZE, this.scheduledExecutorService, log)); this.statsTable.put(GROUP_GET_NUMS, new StatsItemSet(GROUP_GET_NUMS, this.scheduledExecutorService, log)); this.statsTable.put(GROUP_GET_SIZE, new 
StatsItemSet(GROUP_GET_SIZE, this.scheduledExecutorService, log)); this.statsTable.put(SNDBCK_PUT_NUMS, new StatsItemSet(SNDBCK_PUT_NUMS, this.scheduledExecutorService, log)); this.statsTable.put(BROKER_PUT_NUMS, new StatsItemSet(BROKER_PUT_NUMS, this.scheduledExecutorService, log)); this.statsTable.put(BROKER_GET_NUMS, new StatsItemSet(BROKER_GET_NUMS, this.scheduledExecutorService, log)); } public void start() { } public void shutdown() { this.scheduledExecutorService.shutdown(); } public StatsItem getStatsItem(final String statsName, final String statsKey) { try { return this.statsTable.get(statsName).getStatsItem(statsKey); } catch (Exception e) { } return null; } public void incTopicPutNums(final String topic) { this.statsTable.get(TOPIC_PUT_NUMS).addValue(topic, 1, 1); } public void incTopicPutSize(final String topic, final int size) { this.statsTable.get(TOPIC_PUT_SIZE).addValue(topic, size, 1); } public void incGroupGetNums(final String group, final String topic, final int incValue) { this.statsTable.get(GROUP_GET_NUMS).addValue(topic + "@" + group, incValue, 1); } public void incGroupGetSize(final String group, final String topic, final int incValue) { this.statsTable.get(GROUP_GET_SIZE).addValue(topic + "@" + group, incValue, 1); } public void incBrokerPutNums() { this.statsTable.get(BROKER_PUT_NUMS).getAndCreateStatsItem(this.clusterName).getValue() .incrementAndGet(); } public void incBrokerGetNums(final int incValue) { this.statsTable.get(BROKER_GET_NUMS).getAndCreateStatsItem(this.clusterName).getValue() .addAndGet(incValue); } public void incSendBackNums(final String group, final String topic) { this.statsTable.get(SNDBCK_PUT_NUMS).addValue(topic + "@" + group, 1, 1); } public double tpsGroupGetNums(final String group, final String topic) { return this.statsTable.get(GROUP_GET_NUMS).getStatsDataInMinute(topic + "@" + group).getTps(); } public void recordDiskFallBehind(final String group, final String topic, final int queueId, final long fallBehind) { 
final String statsKey = String.format("%d@%s@%s", queueId, topic, group); this.momentStatsItemSet.getAndCreateStatsItem(statsKey).getValue().set(fallBehind); } }
{ "content_hash": "f751cecc859c647b2de802736754955e", "timestamp": "", "source": "github", "line_count": 127, "max_line_length": 109, "avg_line_length": 37.00787401574803, "alnum_prop": 0.7057446808510638, "repo_name": "humphery755/RocketMQ", "id": "1c39d1290a1d09825de1b10a7eecdf4de120154a", "size": "4714", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "rocketmq-store/src/main/java/com/alibaba/rocketmq/store/stats/BrokerStatsManager.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "127" }, { "name": "CSS", "bytes": "199" }, { "name": "HTML", "bytes": "118" }, { "name": "Java", "bytes": "3199070" }, { "name": "JavaScript", "bytes": "14727" }, { "name": "Shell", "bytes": "23428" } ], "symlink_target": "" }
<?php

/*
 * This file is part of the RollerworksDatagrid package.
 *
 * (c) Sebastiaan Stok <s.stok@rollerscapes.net>
 *
 * This source file is subject to the MIT license that is bundled
 * with this source code in the file LICENSE.
 */

namespace Rollerworks\Component\Datagrid\Tests\Extension\Core\DataTransformer;

use Rollerworks\Component\Datagrid\Extension\Core\DataTransformer\ValueFormatTransformer;

/**
 * Behavioural tests for ValueFormatTransformer.
 *
 * Covers plain pass-through, empty-value substitution (one global value or
 * one value per field), glue-joined array input, and the formatter hooks
 * (sprintf-style pattern or closure) in the combinations the transformer
 * supports.
 */
class ValueFormatTransformerTest extends \PHPUnit_Framework_TestCase
{
    public function testTransformWithDefaults()
    {
        // No configuration at all: values pass through untouched.
        $sut = new ValueFormatTransformer();

        $this->assertEquals('foo', $sut->transform('foo'));
        $this->assertEquals(' bar', $sut->transform(' bar'));
    }

    public function testTransformEmptyValue()
    {
        // '-' is substituted whenever the input is null or ''.
        $sut = new ValueFormatTransformer('-');

        $this->assertEquals('foo', $sut->transform('foo'));
        $this->assertEquals(' bar', $sut->transform(' bar'));
        $this->assertEquals('-', $sut->transform(null));
        $this->assertEquals('-', $sut->transform(''));
    }

    public function testTransformEmptyValueWithArray()
    {
        // Array input: each element is substituted, then joined with ','.
        $sut = new ValueFormatTransformer('-', ',');

        $this->assertEquals('foo', $sut->transform('foo'));
        $this->assertEquals('-', $sut->transform(null));
        $this->assertEquals('-', $sut->transform(''));
        $this->assertEquals('-,-', $sut->transform(['id' => '', 'name' => null]));
    }

    public function testTransformEmptyValuePerFieldWithArray()
    {
        // Empty values keyed per field; scalar empties get no substitute.
        $sut = new ValueFormatTransformer(['id' => '0', 'name' => 'NV'], ',', null, ['id', 'name']);

        $this->assertEquals('foo', $sut->transform('foo'));
        $this->assertEquals('', $sut->transform(null));
        $this->assertEquals('', $sut->transform(''));
        $this->assertEquals('0,NV', $sut->transform(['id' => '', 'name' => null]));
    }

    public function testTransformEmptyValueWithArrayAndFormatter()
    {
        // sprintf-style pattern applied to the (already substituted) row.
        $sut = new ValueFormatTransformer('-', null, '%s/%s');

        // Don't test none-array values as mixing these is not supported
        $this->assertEquals('1/who', $sut->transform(['id' => '1', 'name' => 'who']));
        $this->assertEquals('-/-', $sut->transform(['id' => '', 'name' => null]));
    }

    public function testTransformWithFormatter()
    {
        // Closure formatter receiving the whole row at once.
        $format = function ($row) {
            return $row['id'].'/%/'.$row['name'];
        };

        $sut = new ValueFormatTransformer('-', null, $format, ['id', 'name']);

        // Don't test none-array values as mixing these is not supported
        $this->assertEquals('1/%/who', $sut->transform(['id' => '1', 'name' => 'who']));
        $this->assertEquals('-/%/-', $sut->transform(['id' => '', 'name' => null]));
    }

    public function testTransformWithFormatterAndArray()
    {
        // Closure formatter applied to a scalar value (after substitution).
        $wrap = function ($value) {
            return '{{ '.$value.' }}';
        };

        $sut = new ValueFormatTransformer('-', null, $wrap);

        $this->assertEquals('{{ name }}', $sut->transform('name'));
        $this->assertEquals('{{ - }}', $sut->transform(null));
    }

    public function testTransformWithFormatterAndArrayAndGlue()
    {
        // Pattern formatter applied per element, then glued with ', '.
        $sut = new ValueFormatTransformer('-', ', ', '{{ %s }}');

        $this->assertEquals('{{ 1 }}, {{ who }}', $sut->transform(['id' => '1', 'name' => 'who']));
        $this->assertEquals('{{ - }}, {{ - }}', $sut->transform(['id' => '', 'name' => null]));
    }

    public function testTransformWithClosureFormatterAndArrayAndGlue()
    {
        // Same as above but with a closure instead of a pattern.
        $wrap = function ($value) {
            return '{{ '.$value.' }}';
        };

        $sut = new ValueFormatTransformer('-', ', ', $wrap);

        $this->assertEquals('{{ 1 }}, {{ who }}', $sut->transform(['id' => '1', 'name' => 'who']));
        $this->assertEquals('{{ - }}, {{ - }}', $sut->transform(['id' => '', 'name' => null]));
    }
}
{ "content_hash": "c1aef8899bde5496a737032f3cef5fcc", "timestamp": "", "source": "github", "line_count": 109, "max_line_length": 108, "avg_line_length": 37.944954128440365, "alnum_prop": 0.5921179883945842, "repo_name": "cordoval/rollerworks-datagrid", "id": "9a5c7a99daa939ef93c57cc2804cdab7a8e5f1f4", "size": "4136", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/Extension/Core/DataTransformer/ValueFormatTransformerTest.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "309631" } ], "symlink_target": "" }
FROM balenalib/orange-pi-lite-ubuntu:focal-build ENV GO_VERSION 1.16 RUN mkdir -p /usr/local/go \ && curl -SLO "http://resin-packages.s3.amazonaws.com/golang/v$GO_VERSION/go$GO_VERSION.linux-armv7hf.tar.gz" \ && echo "5d2c637632fc23139c992e7f5adce1e46bccebd5a43fc90f797050ae71f46ab9 go$GO_VERSION.linux-armv7hf.tar.gz" | sha256sum -c - \ && tar -xzf "go$GO_VERSION.linux-armv7hf.tar.gz" -C /usr/local/go --strip-components=1 \ && rm -f go$GO_VERSION.linux-armv7hf.tar.gz ENV GOROOT /usr/local/go ENV GOPATH /go ENV PATH $GOPATH/bin:/usr/local/go/bin:$PATH RUN mkdir -p "$GOPATH/src" "$GOPATH/bin" && chmod -R 777 "$GOPATH" WORKDIR $GOPATH CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@golang.sh" \ && echo "Running test-stack@golang" \ && chmod +x test-stack@golang.sh \ && bash test-stack@golang.sh \ && rm -rf test-stack@golang.sh RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu focal \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nGo v1.16 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && cp /bin/sh /bin/sh.real \ && mv /bin/sh-shim /bin/sh
{ "content_hash": "f4c6dc7c31f50468fb8dd0e1431621e8", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 671, "avg_line_length": 64.6774193548387, "alnum_prop": 0.7276807980049875, "repo_name": "nghiant2710/base-images", "id": "bb2debf9e02358726983520138d0db8cf1db40be", "size": "2026", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/golang/orange-pi-lite/ubuntu/focal/1.16/build/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "144558581" }, { "name": "JavaScript", "bytes": "16316" }, { "name": "Shell", "bytes": "368690" } ], "symlink_target": "" }
package com.comeb; import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.google.appengine.api.datastore.DatastoreService; import com.google.appengine.api.datastore.DatastoreServiceFactory; import com.google.appengine.api.datastore.Entity; public class Welcome extends HttpServlet{ @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { // TODO Auto-generated method stub String login = req.getParameter("login"); try{ DatastoreService dsf = DatastoreServiceFactory.getDatastoreService(); Entity e = new Entity("message"); e.setProperty("message", "Welcome "+login); dsf.put(e); }catch(Exception exception){ exception.printStackTrace(); } resp.getWriter().write("saved in db"); } @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { // TODO Auto-generated method stub doGet(req,resp); } }
{ "content_hash": "598bae787097e54d92c4b802cd0e8257", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 72, "avg_line_length": 27.51219512195122, "alnum_prop": 0.7783687943262412, "repo_name": "ComeBurguburu/myfirstengine", "id": "94bd96c136e928aa802c4a4de14c0fd069336890", "size": "1128", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/comeb/Welcome.java", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1254" }, { "name": "Java", "bytes": "12298" }, { "name": "JavaScript", "bytes": "1216" } ], "symlink_target": "" }
from django.utils import timezone
from django.shortcuts import render, get_object_or_404, redirect
from .models import Post, Comment, Notice
from .forms import PostForm, CommentForm, NoticeForm
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required


def post_list(request):
    """Show all posts published up to now, newest first."""
    posts = Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
    return render(request, 'blog/post_list.html', {'posts': posts})


def post_detail(request, pk):
    """Show a single post; on POST, attach a new comment by the current user."""
    post = get_object_or_404(Post, pk=pk)
    if request.method == "POST":
        form = CommentForm(request.POST)
        if form.is_valid():
            comment = form.save(commit=False)
            comment.author = request.user
            comment.post = post
            comment.save()
            return redirect('blog:post_detail', pk=post.pk)
        # Invalid form: fall through and re-render with the bound form so
        # the user sees the validation errors.
    else:
        form = CommentForm()
    return render(request, 'blog/post_detail.html', {'post': post, 'form': form})


def notice_list(request):
    """Show published notices; ``public`` is the Public-privacy subset."""
    notices = Notice.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
    public = Notice.objects.filter(privacy="Public").filter(published_date__lte=timezone.now()).order_by('-published_date')
    return render(request, 'blog/notice_list.html', {'notices': notices, 'public': public})


def notice_detail(request, pk):
    """Show a single notice."""
    notice = get_object_or_404(Notice, pk=pk)
    return render(request, 'blog/notice_detail.html', {'notice': notice})


@login_required
def post_new(request):
    """Create a new post, published immediately, authored by the current user."""
    if request.method == "POST":
        form = PostForm(request.POST)
        if form.is_valid():
            post = form.save(commit=False)
            post.author = request.user
            post.published_date = timezone.now()
            post.save()
            return redirect('blog:post_detail', pk=post.pk)
    else:
        form = PostForm()
    return render(request, 'blog/post_edit.html', {'form': form})


@login_required
def post_edit(request, pk):
    """Edit an existing post; authorship is reset to the current user."""
    post = get_object_or_404(Post, pk=pk)
    if request.method == "POST":
        form = PostForm(request.POST, instance=post)
        if form.is_valid():
            post = form.save(commit=False)
            post.author = request.user
            post.save()
            return redirect('blog:post_detail', pk=post.pk)
    else:
        form = PostForm(instance=post)
    return render(request, 'blog/post_edit.html', {'form': form})


@staff_member_required
def notice_new(request):
    """Create a new notice (staff only); handles file uploads."""
    if request.method == "POST":
        form = NoticeForm(request.POST, request.FILES)
        if form.is_valid():
            notice = form.save(commit=False)
            notice.author = request.user
            notice.published_date = timezone.now()
            notice.save()
            return redirect('blog:notice_detail', pk=notice.pk)
    else:
        form = NoticeForm()
    return render(request, 'blog/notice_edit.html', {'form': form})


@staff_member_required
def notice_edit(request, pk):
    """Edit an existing notice (staff only); handles file uploads."""
    notice = get_object_or_404(Notice, pk=pk)
    if request.method == "POST":
        form = NoticeForm(request.POST, request.FILES, instance=notice)
        if form.is_valid():
            notice = form.save(commit=False)
            notice.author = request.user
            notice.save()
            # Bug fix: was redirect('blog.views.notice_detail', ...) — the
            # pre-Django-1.8 dotted-path style, inconsistent with the
            # 'blog:' URL namespace used by every other view here.
            return redirect('blog:notice_detail', pk=notice.pk)
    else:
        form = NoticeForm(instance=notice)
    return render(request, 'blog/notice_edit.html', {'form': form})


@login_required
def post_remove(request, pk):
    """Delete a post and return to the post list."""
    post = get_object_or_404(Post, pk=pk)
    post.delete()
    return redirect('blog:post_list')


@staff_member_required
def notice_remove(request, pk):
    """Delete a notice and return to the notice list (staff only)."""
    notice = get_object_or_404(Notice, pk=pk)
    notice.delete()
    # Bug fix: was redirect('blog.views.notice_list') — dotted view paths
    # are unsupported in modern Django; use the namespaced URL name.
    return redirect('blog:notice_list')


@login_required
def comment_approve(request, pk):
    """Mark a comment as approved and return to its post."""
    comment = get_object_or_404(Comment, pk=pk)
    comment.approve()
    return redirect('blog:post_detail', pk=comment.post.pk)


@login_required
def comment_remove(request, pk):
    """Delete a comment and return to its post."""
    comment = get_object_or_404(Comment, pk=pk)
    post_pk = comment.post.pk
    comment.delete()
    return redirect('blog:post_detail', pk=post_pk)
{ "content_hash": "2a4d9f6a7f6c1ecb3ed70ec24069208a", "timestamp": "", "source": "github", "line_count": 148, "max_line_length": 123, "avg_line_length": 35.37162162162162, "alnum_prop": 0.6389684813753582, "repo_name": "RachellCalhoun/cathotel", "id": "e44c690f21d3715e3cfd8ea9b6a890312066a8b0", "size": "5236", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "blog/views.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "4016" }, { "name": "HTML", "bytes": "30678" }, { "name": "Python", "bytes": "33344" } ], "symlink_target": "" }
<html> <!-- Copyright 2011 Henry A Schimke Credit for the 'quick click' feature goes to Martin Weik <martin@weik.at>. The code provided by Martin Weik is marked between the #Martin_Weik below. (Thank you!) Settings capture and checking written by Henry A Schimke. See license.txt for details --> <head> <script src="jquery-1.5.2.min.js" type="text/javascript"></script> <script src="popup.js" type="text/javascript"></script> </head> <body> <div> <button id="enable-disable-button"> <span><script>document.write( chrome.extension.getBackgroundPage().EnDisButtonText );</script></span> </button> <button id="next-button"> <span><script>document.write( chrome.extension.getBackgroundPage().NextButtonMessage );</script></span> </button> </div> </body> </html>
{ "content_hash": "ce60c53b491024e4e593ae2f1519a8cb", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 105, "avg_line_length": 29.607142857142858, "alnum_prop": 0.6731001206272618, "repo_name": "hschimke/BrowseQueue", "id": "024c0225fd0599b7ff57e3527d5d1562885ac60c", "size": "829", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "popup.html", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "15540" } ], "symlink_target": "" }
<!DOCTYPE html> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" /> <meta name="generator" content="ApiGen 2.8.0" /> <title>Class Mailchimp_ServerError_MethodUnknown</title> <script type="text/javascript" src="resources/combined.js?394153670"></script> <script type="text/javascript" src="elementlist.js?2821599371"></script> <link rel="stylesheet" type="text/css" media="all" href="resources/style.css?3505392360" /> </head> <body> <div id="left"> <div id="menu"> <a href="index.html" title="Overview"><span>Overview</span></a> <div id="groups"> <h3>Namespaces</h3> <ul> <li class="active"><a href="namespace-None.html">None</a> </li> <li><a href="namespace-PHP.html">PHP</a> </li> </ul> </div> <hr /> <div id="elements"> <h3>Classes</h3> <ul> <li><a href="class-Mailchimp.html">Mailchimp</a></li> <li><a href="class-Mailchimp_Campaigns.html">Mailchimp_Campaigns</a></li> <li><a href="class-Mailchimp_Ecomm.html">Mailchimp_Ecomm</a></li> <li><a href="class-Mailchimp_Folders.html">Mailchimp_Folders</a></li> <li><a href="class-Mailchimp_Gallery.html">Mailchimp_Gallery</a></li> <li><a href="class-Mailchimp_Helper.html">Mailchimp_Helper</a></li> <li><a href="class-Mailchimp_Lists.html">Mailchimp_Lists</a></li> <li><a href="class-Mailchimp_Mobile.html">Mailchimp_Mobile</a></li> <li><a href="class-Mailchimp_Neapolitan.html">Mailchimp_Neapolitan</a></li> <li><a href="class-Mailchimp_Reports.html">Mailchimp_Reports</a></li> <li><a href="class-Mailchimp_Templates.html">Mailchimp_Templates</a></li> <li><a href="class-Mailchimp_Users.html">Mailchimp_Users</a></li> <li><a href="class-Mailchimp_Vip.html">Mailchimp_Vip</a></li> </ul> <h3>Exceptions</h3> <ul> <li><a href="class-Mailchimp_Absplit_UnknownError.html">Mailchimp_Absplit_UnknownError</a></li> <li><a href="class-Mailchimp_Absplit_UnknownSplitTest.html">Mailchimp_Absplit_UnknownSplitTest</a></li> <li><a href="class-Mailchimp_Absplit_UnknownTestType.html">Mailchimp_Absplit_UnknownTestType</a></li> 
<li><a href="class-Mailchimp_Absplit_UnknownWaitUnit.html">Mailchimp_Absplit_UnknownWaitUnit</a></li> <li><a href="class-Mailchimp_Absplit_UnknownWinnerType.html">Mailchimp_Absplit_UnknownWinnerType</a></li> <li><a href="class-Mailchimp_Absplit_WinnerNotSelected.html">Mailchimp_Absplit_WinnerNotSelected</a></li> <li><a href="class-Mailchimp_Avesta_Db_Exception.html">Mailchimp_Avesta_Db_Exception</a></li> <li><a href="class-Mailchimp_Campaign_BounceMissing.html">Mailchimp_Campaign_BounceMissing</a></li> <li><a href="class-Mailchimp_Campaign_DoesNotExist.html">Mailchimp_Campaign_DoesNotExist</a></li> <li><a href="class-Mailchimp_Campaign_InvalidAbsplit.html">Mailchimp_Campaign_InvalidAbsplit</a></li> <li><a href="class-Mailchimp_Campaign_InvalidAuto.html">Mailchimp_Campaign_InvalidAuto</a></li> <li><a href="class-Mailchimp_Campaign_InvalidContent.html">Mailchimp_Campaign_InvalidContent</a></li> <li><a href="class-Mailchimp_Campaign_InvalidOption.html">Mailchimp_Campaign_InvalidOption</a></li> <li><a href="class-Mailchimp_Campaign_InvalidRss.html">Mailchimp_Campaign_InvalidRss</a></li> <li><a href="class-Mailchimp_Campaign_InvalidSegment.html">Mailchimp_Campaign_InvalidSegment</a></li> <li><a href="class-Mailchimp_Campaign_InvalidStatus.html">Mailchimp_Campaign_InvalidStatus</a></li> <li><a href="class-Mailchimp_Campaign_InvalidTemplate.html">Mailchimp_Campaign_InvalidTemplate</a></li> <li><a href="class-Mailchimp_Campaign_NotSaved.html">Mailchimp_Campaign_NotSaved</a></li> <li><a href="class-Mailchimp_Campaign_StatsNotAvailable.html">Mailchimp_Campaign_StatsNotAvailable</a></li> <li><a href="class-Mailchimp_Email_AlreadySubscribed.html">Mailchimp_Email_AlreadySubscribed</a></li> <li><a href="class-Mailchimp_Email_AlreadyUnsubscribed.html">Mailchimp_Email_AlreadyUnsubscribed</a></li> <li><a href="class-Mailchimp_Email_NotExists.html">Mailchimp_Email_NotExists</a></li> <li><a href="class-Mailchimp_Email_NotSubscribed.html">Mailchimp_Email_NotSubscribed</a></li> <li><a 
href="class-Mailchimp_Error.html">Mailchimp_Error</a></li> <li><a href="class-Mailchimp_HttpError.html">Mailchimp_HttpError</a></li> <li><a href="class-Mailchimp_Invalid_Analytics.html">Mailchimp_Invalid_Analytics</a></li> <li><a href="class-Mailchimp_Invalid_ApiKey.html">Mailchimp_Invalid_ApiKey</a></li> <li><a href="class-Mailchimp_Invalid_AppKey.html">Mailchimp_Invalid_AppKey</a></li> <li><a href="class-Mailchimp_Invalid_DateTime.html">Mailchimp_Invalid_DateTime</a></li> <li><a href="class-Mailchimp_Invalid_EcommOrder.html">Mailchimp_Invalid_EcommOrder</a></li> <li><a href="class-Mailchimp_Invalid_Email.html">Mailchimp_Invalid_Email</a></li> <li><a href="class-Mailchimp_Invalid_Folder.html">Mailchimp_Invalid_Folder</a></li> <li><a href="class-Mailchimp_Invalid_IP.html">Mailchimp_Invalid_IP</a></li> <li><a href="class-Mailchimp_Invalid_Options.html">Mailchimp_Invalid_Options</a></li> <li><a href="class-Mailchimp_Invalid_PagingLimit.html">Mailchimp_Invalid_PagingLimit</a></li> <li><a href="class-Mailchimp_Invalid_PagingStart.html">Mailchimp_Invalid_PagingStart</a></li> <li><a href="class-Mailchimp_Invalid_SendType.html">Mailchimp_Invalid_SendType</a></li> <li><a href="class-Mailchimp_Invalid_Template.html">Mailchimp_Invalid_Template</a></li> <li><a href="class-Mailchimp_Invalid_TrackingOptions.html">Mailchimp_Invalid_TrackingOptions</a></li> <li><a href="class-Mailchimp_Invalid_URL.html">Mailchimp_Invalid_URL</a></li> <li><a href="class-Mailchimp_List_AlreadySubscribed.html">Mailchimp_List_AlreadySubscribed</a></li> <li><a href="class-Mailchimp_List_CannotRemoveEmailMerge.html">Mailchimp_List_CannotRemoveEmailMerge</a></li> <li><a href="class-Mailchimp_List_DoesNotExist.html">Mailchimp_List_DoesNotExist</a></li> <li><a href="class-Mailchimp_List_InvalidBounceMember.html">Mailchimp_List_InvalidBounceMember</a></li> <li><a href="class-Mailchimp_List_InvalidImport.html">Mailchimp_List_InvalidImport</a></li> <li><a 
href="class-Mailchimp_List_InvalidInterestFieldType.html">Mailchimp_List_InvalidInterestFieldType</a></li> <li><a href="class-Mailchimp_List_InvalidInterestGroup.html">Mailchimp_List_InvalidInterestGroup</a></li> <li><a href="class-Mailchimp_List_InvalidMergeField.html">Mailchimp_List_InvalidMergeField</a></li> <li><a href="class-Mailchimp_List_InvalidOption.html">Mailchimp_List_InvalidOption</a></li> <li><a href="class-Mailchimp_List_InvalidUnsubMember.html">Mailchimp_List_InvalidUnsubMember</a></li> <li><a href="class-Mailchimp_List_Merge_InvalidMergeID.html">Mailchimp_List_Merge_InvalidMergeID</a></li> <li><a href="class-Mailchimp_List_MergeFieldRequired.html">Mailchimp_List_MergeFieldRequired</a></li> <li><a href="class-Mailchimp_List_NotSubscribed.html">Mailchimp_List_NotSubscribed</a></li> <li><a href="class-Mailchimp_List_TooManyInterestGroups.html">Mailchimp_List_TooManyInterestGroups</a></li> <li><a href="class-Mailchimp_List_TooManyMergeFields.html">Mailchimp_List_TooManyMergeFields</a></li> <li><a href="class-Mailchimp_Max_Size_Reached.html">Mailchimp_Max_Size_Reached</a></li> <li><a href="class-Mailchimp_MC_ContentImport_InvalidArchive.html">Mailchimp_MC_ContentImport_InvalidArchive</a></li> <li><a href="class-Mailchimp_MC_InvalidPayment.html">Mailchimp_MC_InvalidPayment</a></li> <li><a href="class-Mailchimp_MC_PastedList_Duplicate.html">Mailchimp_MC_PastedList_Duplicate</a></li> <li><a href="class-Mailchimp_MC_PastedList_InvalidImport.html">Mailchimp_MC_PastedList_InvalidImport</a></li> <li><a href="class-Mailchimp_MC_SearchException.html">Mailchimp_MC_SearchException</a></li> <li><a href="class-Mailchimp_Module_Unknown.html">Mailchimp_Module_Unknown</a></li> <li><a href="class-Mailchimp_MonthlyPlan_Unknown.html">Mailchimp_MonthlyPlan_Unknown</a></li> <li><a href="class-Mailchimp_Order_TypeUnknown.html">Mailchimp_Order_TypeUnknown</a></li> <li><a href="class-Mailchimp_Parse_Exception.html">Mailchimp_Parse_Exception</a></li> <li><a 
href="class-Mailchimp_PDOException.html">Mailchimp_PDOException</a></li> <li><a href="class-Mailchimp_Request_TimedOut.html">Mailchimp_Request_TimedOut</a></li> <li><a href="class-Mailchimp_ServerError_InvalidParameters.html">Mailchimp_ServerError_InvalidParameters</a></li> <li class="active"><a href="class-Mailchimp_ServerError_MethodUnknown.html">Mailchimp_ServerError_MethodUnknown</a></li> <li><a href="class-Mailchimp_Too_Many_Connections.html">Mailchimp_Too_Many_Connections</a></li> <li><a href="class-Mailchimp_Unknown_Exception.html">Mailchimp_Unknown_Exception</a></li> <li><a href="class-Mailchimp_User_CannotSendCampaign.html">Mailchimp_User_CannotSendCampaign</a></li> <li><a href="class-Mailchimp_User_Disabled.html">Mailchimp_User_Disabled</a></li> <li><a href="class-Mailchimp_User_DoesExist.html">Mailchimp_User_DoesExist</a></li> <li><a href="class-Mailchimp_User_DoesNotExist.html">Mailchimp_User_DoesNotExist</a></li> <li><a href="class-Mailchimp_User_InvalidAction.html">Mailchimp_User_InvalidAction</a></li> <li><a href="class-Mailchimp_User_InvalidRole.html">Mailchimp_User_InvalidRole</a></li> <li><a href="class-Mailchimp_User_MissingEmail.html">Mailchimp_User_MissingEmail</a></li> <li><a href="class-Mailchimp_User_MissingModuleOutbox.html">Mailchimp_User_MissingModuleOutbox</a></li> <li><a href="class-Mailchimp_User_ModuleAlreadyPurchased.html">Mailchimp_User_ModuleAlreadyPurchased</a></li> <li><a href="class-Mailchimp_User_ModuleNotPurchased.html">Mailchimp_User_ModuleNotPurchased</a></li> <li><a href="class-Mailchimp_User_NotApproved.html">Mailchimp_User_NotApproved</a></li> <li><a href="class-Mailchimp_User_NotEnoughCredit.html">Mailchimp_User_NotEnoughCredit</a></li> <li><a href="class-Mailchimp_User_UnderMaintenance.html">Mailchimp_User_UnderMaintenance</a></li> <li><a href="class-Mailchimp_User_Unknown.html">Mailchimp_User_Unknown</a></li> <li><a href="class-Mailchimp_ValidationError.html">Mailchimp_ValidationError</a></li> <li><a 
href="class-Mailchimp_XML_RPC2_Exception.html">Mailchimp_XML_RPC2_Exception</a></li> <li><a href="class-Mailchimp_XML_RPC2_FaultException.html">Mailchimp_XML_RPC2_FaultException</a></li> <li><a href="class-Mailchimp_Zend_Uri_Exception.html">Mailchimp_Zend_Uri_Exception</a></li> </ul> </div> </div> </div> <div id="splitter"></div> <div id="right"> <div id="rightInner"> <form id="search"> <input type="hidden" name="cx" value="" /> <input type="hidden" name="ie" value="UTF-8" /> <input type="text" name="q" class="text" /> <input type="submit" value="Search" /> </form> <div id="navigation"> <ul> <li> <a href="index.html" title="Overview"><span>Overview</span></a> </li> <li> <a href="namespace-None.html" title="Summary of None"><span>Namespace</span></a> </li> <li class="active"> <span>Class</span> </li> </ul> <ul> <li> <a href="tree.html" title="Tree view of classes, interfaces, traits and exceptions"><span>Tree</span></a> </li> </ul> <ul> </ul> </div> <div id="content" class="class"> <h1>Class Mailchimp_ServerError_MethodUnknown</h1> <div class="description"> <p>None</p> </div> <dl class="tree"> <dd style="padding-left:0px"> <a href="class-Exception.html"><span>Exception</span></a> </dd> <dd style="padding-left:30px"> <img src="resources/inherit.png" alt="Extended by" /> <a href="class-Mailchimp_Error.html"><span>Mailchimp_Error</span></a> </dd> <dd style="padding-left:60px"> <img src="resources/inherit.png" alt="Extended by" /> <b><span>Mailchimp_ServerError_MethodUnknown</span></b> </dd> </dl> <div class="info"> <b>Located at</b> <a href="source-class-Mailchimp_ServerError_MethodUnknown.html#11-14" title="Go to source code">Mailchimp/Exceptions.php</a><br /> </div> <table class="summary inherited"> <caption>Methods inherited from <a href="class-Exception.html#methods">Exception</a></caption> <tr> <td><code> <a href="class-Exception.html#___construct">__construct()</a>, <a href="class-Exception.html#___toString">__toString()</a>, <a 
href="class-Exception.html#_getCode">getCode()</a>, <a href="class-Exception.html#_getFile">getFile()</a>, <a href="class-Exception.html#_getLine">getLine()</a>, <a href="class-Exception.html#_getMessage">getMessage()</a>, <a href="class-Exception.html#_getPrevious">getPrevious()</a>, <a href="class-Exception.html#_getTrace">getTrace()</a>, <a href="class-Exception.html#_getTraceAsString">getTraceAsString()</a> </code></td> </tr> </table> <table class="summary inherited"> <caption>Properties inherited from <a href="class-Exception.html#properties">Exception</a></caption> <tr> <td><code> <a href="class-Exception.html#$code"><var>$code</var></a>, <a href="class-Exception.html#$file"><var>$file</var></a>, <a href="class-Exception.html#$line"><var>$line</var></a>, <a href="class-Exception.html#$message"><var>$message</var></a> </code></td> </tr> </table> </div> <div id="footer"> API documentation generated by <a href="http://apigen.org">ApiGen 2.8.0</a> </div> </div> </div> </body> </html>
{ "content_hash": "5ea13666424ff6d86c0b2feaa32cf179", "timestamp": "", "source": "github", "line_count": 286, "max_line_length": 150, "avg_line_length": 47.57342657342657, "alnum_prop": 0.7060120535058063, "repo_name": "avorio/paraentender", "id": "d2554851763f15d529b25d3b899af60bf3d7df16", "size": "13606", "binary": false, "copies": "31", "ref": "refs/heads/master", "path": "code/sites/all/libraries/mailchimp/docs/class-Mailchimp_ServerError_MethodUnknown.html", "mode": "33261", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "5767" }, { "name": "C++", "bytes": "120240" }, { "name": "CSS", "bytes": "393217" }, { "name": "HTML", "bytes": "8899862" }, { "name": "JavaScript", "bytes": "756704" }, { "name": "PHP", "bytes": "9982507" }, { "name": "Shell", "bytes": "25637" }, { "name": "SourcePawn", "bytes": "128369" } ], "symlink_target": "" }
import os import sys import getpass from functools import wraps import ldap import ldap.modlist as modlist import cog.directory as dir import cog.util.passwd as passwd from cog.util.misc import loop_on from cog.objects.group import Group from cog.config.settings import Profiles from cog.config.templates import Templates accounts = Templates().get('accounts') settings = Profiles() class User(object): def __init__(self, name, account_data=None, groups=None, bind=False): """ User object, unsurprisingly. """ self.tree = dir.Tree() self.name = name self.exists = True self.base_dn = settings.user_dn self.ldap_query = settings.user_query % (settings.user_rdn, self.name) user_data = self.tree.search(self.base_dn, search_filter=self.ldap_query, bind=bind) if len(user_data) > 1: raise dir.MultipleObjectsFound("The user ID is not unique.") if len(user_data) == 1: self.data = user_data[0] self.uid = self.data.get('uid') else: self.exists = False self.uid = [name] self.data = account_data self.groups = groups def user_exists(method): """ Make sure that you're operating on an existing object. """ @wraps(method) def _user_exists(self, *args, **kwargs): if not self.exists: raise dir.ObjectNotFound("User ‘%s’ cannot be found." % self.name) return method(self, *args, **kwargs) return _user_exists def add(self): self.tree.add(self.data) self.exists = True if self.groups: for group in self.groups: try: self.addgroup(group) except: print "There was a problem with adding user %s to the group %s." 
% (self.name, group) @user_exists def replace_item(self, item, value): self.data.replace(item, value) @user_exists def append_to_item(self, item, value): self.data.append(item, value) @user_exists def remove_from_item(self, item, value): self.data.remove(item, value) @user_exists def commit_changes(self): self.tree.modify(self.data) @user_exists def find_groups(self): for uid in loop_on(self.uid): group_filter = '(&(objectClass=posixGroup)(|(memberUid=%s)(%s=%s)))' % (uid, settings.rfc2307bis_group_member_attribute, self.data.dn) groups = [x['cn'][0] for x in self.tree.search(search_filter=group_filter, attributes=['cn'])] yield groups @user_exists def strip_groups(self): for uid in self.uid: groups = [x['cn'][0] for x in self.tree.search(search_filter='(&(objectClass=posixGroup)(memberUid=%s))' % uid, attributes=['cn'])] for group in groups: self.delgroup(group) @user_exists def addgroup(self, user_group): group_obj = Group(user_group) for uid in self.uid: group_obj.add_uid(uid) group_obj.commit_changes() @user_exists def delgroup(self, user_group): group_obj = Group(user_group) for uid in self.uid: group_obj.del_uid(uid) group_obj.commit_changes() @user_exists def set_password(self, password=None): if not password: password = getpass.getpass('enter new LDAP password for %s: ' % self.name) self.data.replace('userPassword', passwd.make_sha512(password)) self.tree.modify(self.data) @user_exists def rename(self, new_name): self.tree.rename(self.data.dn, new_rdn='%s=%s' % (settings.user_rdn, new_name)) @user_exists def remove(self): self.strip_groups() self.tree.remove(self.data.dn) @user_exists def retire(self): self.set_password(passwd.random_string(32)) self.data.replace('gidNumber', accounts.get('retired').get('gidNumber')) self.tree.modify(self.data) self.tree.move(self.data.dn, new_parent=dir.get_account_base('retired')) self.strip_groups()
{ "content_hash": "08add296ff6b02b813ce2d025d68b7bc", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 146, "avg_line_length": 32.30769230769231, "alnum_prop": 0.5947619047619047, "repo_name": "jubalfh/cog", "id": "4a4929ad38202ff3ec26785cec8d6d267fc934e9", "size": "4500", "binary": false, "copies": "1", "ref": "refs/heads/trunk", "path": "cog/objects/user.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "67882" }, { "name": "Shell", "bytes": "562" } ], "symlink_target": "" }
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;

namespace Experimentation.Logic.ViewModels
{
    /// <summary>
    /// Base shape shared by feature view models: a validated name, a
    /// positive "friendly" numeric id, and a list of bucket names.
    /// </summary>
    public class BaseFeatureViewModel
    {
        /// <summary>
        /// Feature name. Validation rejects null/empty strings; per the
        /// error message it is expected to be a meaningful unique name.
        /// </summary>
        [Required(AllowEmptyStrings = false, ErrorMessage = "A feature name cannot be an empty string and must be a valid meaningful unique name.")]
        public string Name { get; set; }

        /// <summary>
        /// Human-friendly numeric identifier; validation requires >= 1.
        /// </summary>
        [Range(1, int.MaxValue)]
        public int FriendlyId { get; set; }

        /// <summary>
        /// Bucket names associated with this feature.
        /// NOTE(review): no validation attribute — presumably optional; confirm against callers.
        /// </summary>
        public List<string> BucketList { get; set; }
    }
}
{ "content_hash": "58eddaeb8e6061570ce0916745eb5215", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 115, "avg_line_length": 30.058823529411764, "alnum_prop": 0.675146771037182, "repo_name": "iby-dev/Experimentation-API", "id": "fd34920213a092bf5feced8a03274662d05b5e9b", "size": "511", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Services/Experimentation/Experimentation.Logic/ViewModels/BaseFeatureViewModel.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "47130" } ], "symlink_target": "" }
<?php $value = $package[0]; $post = null; if ($this->session->userdata('info_credit')) { $info_credit = $this->session->userdata('info_credit'); $post = $info_credit['fields']; } ?> <section class="section box-wapper-show-image box-wapper-secondary"> <div class="container"> <p class="h2 text-center"><strong>Secure Upgrade...</strong>Put your brand top of mind!</p> </div> </section> <section class="section box-wapper-show-image"> <div class="container"> <?php if ($post != null) : ?> <div class="alert alert-danger"> <strong>Payment fail</strong> sorry your payment failed. Please try again. </div> <?php else : ?> <?php if ($this->session->flashdata('message')) : ?> <div class="alert alert-success"><?php echo $this->session->flashdata('message'); ?></div> <?php endif; ?> <?php endif; ?> <div class="upgrade-panel panel-shadow"> <div class="row"> <div class="col-md-8 col-md-offset-2"> <div class="row"> <div class="col-sm-6 border-right-sm"> <div class="media"> <div class="media-left"> <span class="mo-num"><?php echo $value['name']; ?></span> </div> <div class="media-body media-middle"> <span class="mo-text">Month<br>plan</span> </div> </div> </div> <div class="col-sm-6 text-center-sm"> <span class="wrap"> <?php echo $value['summary']; ?> </span> </div> </div> </div> </div> </div> <div class="upgrade-panel none-style"> <p><img class="align-bottom" src="<?php echo skin_url('images/icon-lock.png') ?>"><big> Enter secure payment details</big></p> </div> <form class="form-horizontal form-gray" action="<?php echo base_url("/checkout/payment"); ?>" method="POST" id="form-purchase"> <div class="upgrade-panel"> <div class="custom"> <div class="checkbox check-yelow checkbox-circle"> <input id="payment_paypal" name="payment_paypal" class="" type="checkbox" value="<?php echo $value['id']; ?>"> <label for="designwall_newlettter">Pay with <img src="<?php echo skin_url('images/logo-paypal.png') ?>"></label> </div> </div> </div> <div class="space-10"></div> <div class="upgrade-panel 
remove-margin"> <div class="custom"> <div class="checkbox check-yelow checkbox-circle"> <input id="payment_credit" checked="checked" name="payment_credit" class="" type="checkbox" value="1"> <label for="designwall_newlettter">Pay with a credit card</label> </div> </div> </div> <div class="upgrade-panel remove-border-top"> <div class="payment-logos text-center"> <img src="<?php echo skin_url('images/logo-payment-visa.png')?>" alt="Visa"> <img src="<?php echo skin_url('images/logo-payment-paypal.png')?>" alt="Paypal"> <img src="<?php echo skin_url('images/logo-payment-ae.png')?>" alt="American Express"> <img src="<?php echo skin_url('images/logo-payment-discover.png')?>" alt="Discover"> <img src="<?php echo skin_url('images/logo-payment-master.png')?>" alt="Master Card"> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label"></label> <div class="col-sm-8"> <div class="alert alert-danger error-client" style="display:none;"> </div> </div> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label">First Name:</label> <div class="col-sm-8"> <input type="text" class="form-control" title="First Name" data-valid="true" maxlength="50" id="first_name" name="first_name"> </div> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label">Last Name:</label> <div class="col-sm-8"> <input type="text" class="form-control" title="Last Name" data-valid="true" maxlength="50" id="last_name" name="last_name"> </div> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label">Address 1:</label> <div class="col-sm-8"> <input type="text" class="form-control" title="Address 1" data-valid="true" maxlength="200" id="address1" name="address1"> </div> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label">Address 2 (option):</label> <div class="col-sm-8"> <input type="text" class="form-control" title="Address 2" maxlength="200" id="address2" 
name="address2"> </div> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label">City/Zip:</label> <div class="col-sm-4"> <input type="text" class="form-control" title="City" data-valid="true" maxlength="50" id="city" name="city" placeholder="City"> </div> <div class="col-sm-4"> <input type="number" class="form-control format-number" title="Zip Code" maxlength="10" id="zipcode" name="zipcode" placeholder="Zip"> </div> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label">State/Country:</label> <div class="col-sm-4"> <input type="text" class="form-control" data-valid="true" title="State" maxlength="20" id="state" name="state" placeholder="State"> </div> <div class="col-sm-4"> <select name="country" class="form-control" title="Country" style="width:100%;" id="country" required="required"> <option value="GB">United Kingdom</option> <option value="US" selected="selected">United States</option> <option value="AF">Afghanistan</option> <option value="AL">Albania</option> <option value="DZ">Algeria</option> <option value="AS">American Samoa</option> <option value="AD">Andorra</option> <option value="AO">Angola</option> <option value="AI">Anguilla</option> <option value="AQ">Antarctica</option> <option value="AG">Antigua And Barbuda</option> <option value="AR">Argentina</option> <option value="AM">Armenia</option> <option value="AW">Aruba</option> <option value="AU">Australia</option> <option value="AT">Austria</option> <option value="AZ">Azerbaijan</option> <option value="BS">Bahamas</option> <option value="BD">Bangladesh</option> <option value="BB">Barbados</option> <option value="BY">Belarus</option> <option value="BE">Belgium</option> <option value="BZ">Belize</option> <option value="BJ">Benin</option> <option value="BM">Bermuda</option> <option value="BT">Bhutan</option> <option value="BO">Bolivia</option> <option value="BA">Bosnia And Herzegowina</option> <option value="BW">Botswana</option> <option 
value="BV">Bouvet Island</option> <option value="BR">Brazil</option> <option value="IO">British Indian Ocean Territory</option> <option value="BN">Brunei Darussalam</option> <option value="BG">Bulgaria</option> <option value="BF">Burkina Faso</option> <option value="BI">Burundi</option> <option value="KH">Cambodia</option> <option value="CM">Cameroon</option> <option value="CA">Canada</option> <option value="CV">Cape Verde</option> <option value="KY">Cayman Islands</option> <option value="CF">Central African Republic</option> <option value="TD">Chad</option> <option value="CL">Chile</option> <option value="CN">China</option> <option value="CX">Christmas Island</option> <option value="CC">Cocos (Keeling) Islands</option> <option value="CO">Colombia</option> <option value="KM">Comoros</option> <option value="CG">Congo</option> <option value="CD">Congo, The Democratic Republic Of The</option> <option value="CK">Cook Islands</option> <option value="CR">Costa Rica</option> <option value="CI">Cote D'Ivoire</option> <option value="HR">Croatia (Local Name: Hrvatska)</option> <option value="CU">Cuba</option> <option value="CY">Cyprus</option> <option value="CZ">Czech Republic</option> <option value="DK">Denmark</option> <option value="DJ">Djibouti</option> <option value="DM">Dominica</option> <option value="DO">Dominican Republic</option> <option value="TP">East Timor</option> <option value="EC">Ecuador</option> <option value="EG">Egypt</option> <option value="SV">El Salvador</option> <option value="GQ">Equatorial Guinea</option> <option value="ER">Eritrea</option> <option value="EE">Estonia</option> <option value="ET">Ethiopia</option> <option value="FK">Falkland Islands (Malvinas)</option> <option value="FO">Faroe Islands</option> <option value="FJ">Fiji</option> <option value="FI">Finland</option> <option value="FR">France</option> <option value="FX">France, Metropolitan</option> <option value="GF">French Guiana</option> <option value="PF">French Polynesia</option> 
<option value="TF">French Southern Territories</option> <option value="GA">Gabon</option> <option value="GM">Gambia</option> <option value="GE">Georgia</option> <option value="DE">Germany</option> <option value="GH">Ghana</option> <option value="GI">Gibraltar</option> <option value="GR">Greece</option> <option value="GL">Greenland</option> <option value="GD">Grenada</option> <option value="GP">Guadeloupe</option> <option value="GU">Guam</option> <option value="GT">Guatemala</option> <option value="GN">Guinea</option> <option value="GW">Guinea-Bissau</option> <option value="GY">Guyana</option> <option value="HT">Haiti</option> <option value="HM">Heard And Mc Donald Islands</option> <option value="VA">Holy See (Vatican City State)</option> <option value="HN">Honduras</option> <option value="HK">Hong Kong</option> <option value="HU">Hungary</option> <option value="IS">Iceland</option> <option value="IN">India</option> <option value="ID">Indonesia</option> <option value="IR">Iran (Islamic Republic Of)</option> <option value="IQ">Iraq</option> <option value="IE">Ireland</option> <option value="IL">Israel</option> <option value="IT">Italy</option> <option value="JM">Jamaica</option> <option value="JP">Japan</option> <option value="JO">Jordan</option> <option value="KZ">Kazakhstan</option> <option value="KE">Kenya</option> <option value="KI">Kiribati</option> <option value="KP">Korea, Democratic People's Republic Of</option> <option value="KR">Korea, Republic Of</option> <option value="KW">Kuwait</option> <option value="KG">Kyrgyzstan</option> <option value="LA">Lao People's Democratic Republic</option> <option value="LV">Latvia</option> <option value="LB">Lebanon</option> <option value="LS">Lesotho</option> <option value="LR">Liberia</option> <option value="LY">Libyan Arab Jamahiriya</option> <option value="LI">Liechtenstein</option> <option value="LT">Lithuania</option> <option value="LU">Luxembourg</option> <option value="MO">Macau</option> <option 
value="MK">Macedonia, Former Yugoslav Republic Of</option> <option value="MG">Madagascar</option> <option value="MW">Malawi</option> <option value="MY">Malaysia</option> <option value="MV">Maldives</option> <option value="ML">Mali</option> <option value="MT">Malta</option> <option value="MH">Marshall Islands</option> <option value="MQ">Martinique</option> <option value="MR">Mauritania</option> <option value="MU">Mauritius</option> <option value="YT">Mayotte</option> <option value="MX">Mexico</option> <option value="FM">Micronesia</option> <option value="MD">Moldova, Republic Of</option> <option value="MC">Monaco</option> <option value="MN">Mongolia</option> <option value="MS">Montserrat</option> <option value="MA">Morocco</option> <option value="MZ">Mozambique</option> <option value="MM">Myanmar</option> <option value="NA">Namibia</option> <option value="NR">Nauru</option> <option value="NP">Nepal</option> <option value="NL">Netherlands</option> <option value="AN">Netherlands Antilles</option> <option value="NC">New Caledonia</option> <option value="NZ">New Zealand</option> <option value="NI">Nicaragua</option> <option value="NE">Niger</option> <option value="NG">Nigeria</option> <option value="NU">Niue</option> <option value="NF">Norfolk Island</option> <option value="MP">Northern Mariana Islands</option> <option value="NO">Norway</option> <option value="OM">Oman</option> <option value="PK">Pakistan</option> <option value="PW">Palau</option> <option value="PA">Panama</option> <option value="PG">Papua New Guinea</option> <option value="PY">Paraguay</option> <option value="PE">Peru</option> <option value="PH">Philippines</option> <option value="PN">Pitcairn</option> <option value="PL">Poland</option> <option value="PT">Portugal</option> <option value="PR">Puerto Rico</option> <option value="QA">Qatar</option> <option value="RE">Reunion</option> <option value="RO">Romania</option> <option value="RU">Russian Federation</option> <option value="RW">Rwanda</option> 
<option value="KN">Saint Kitts And Nevis</option> <option value="LC">Saint Lucia</option> <option value="VC">Saint Vincent And The Grenadines</option> <option value="WS">Samoa</option> <option value="SM">San Marino</option> <option value="ST">Sao Tome And Principe</option> <option value="SA">Saudi Arabia</option> <option value="SN">Senegal</option> <option value="SC">Seychelles</option> <option value="SL">Sierra Leone</option> <option value="SG">Singapore</option> <option value="SK">Slovakia (Slovak Republic)</option> <option value="SI">Slovenia</option> <option value="SB">Solomon Islands</option> <option value="SO">Somalia</option> <option value="ZA">South Africa</option> <option value="GS">South Georgia, South Sandwich Islands</option> <option value="ES">Spain</option> <option value="LK">Sri Lanka</option> <option value="SH">St. Helena</option> <option value="PM">St. Pierre And Miquelon</option> <option value="SD">Sudan</option> <option value="SR">Suriname</option> <option value="SJ">Svalbard And Jan Mayen Islands</option> <option value="SZ">Swaziland</option> <option value="SE">Sweden</option> <option value="CH">Switzerland</option> <option value="SY">Syrian Arab Republic</option> <option value="TW">Taiwan</option> <option value="TJ">Tajikistan</option> <option value="TZ">Tanzania, United Republic Of</option> <option value="TH">Thailand</option> <option value="TG">Togo</option> <option value="TK">Tokelau</option> <option value="TO">Tonga</option> <option value="TT">Trinidad And Tobago</option> <option value="TN">Tunisia</option> <option value="TR">Turkey</option> <option value="TM">Turkmenistan</option> <option value="TC">Turks And Caicos Islands</option> <option value="TV">Tuvalu</option> <option value="UG">Uganda</option> <option value="UA">Ukraine</option> <option value="AE">United Arab Emirates</option> <option value="UM">United States Minor Outlying Islands</option> <option value="UY">Uruguay</option> <option value="UZ">Uzbekistan</option> <option 
value="VU">Vanuatu</option> <option value="VE">Venezuela</option> <option value="VN">Viet Nam</option> <option value="VG">Virgin Islands (British)</option> <option value="VI">Virgin Islands (U.S.)</option> <option value="WF">Wallis And Futuna Islands</option> <option value="EH">Western Sahara</option> <option value="YE">Yemen</option> <option value="YU">Yugoslavia</option> <option value="ZM">Zambia</option> <option value="ZW">Zimbabwe</option> </select> </div> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label">Credit Card Number:</label> <div class="col-sm-8"> <input type="number" class="form-control" title="Credit Card" maxlength="20" id="card_number" name="card_number"> </div> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label">Security Code:</label> <div class="col-sm-4"> <input type="text" class="form-control" title="Security Code" maxlength="6" data-valid="true" data-valid="true" id="security_code" name="security_code"> </div> <div class="col-sm-4"> <a class="form-icon-help" href="#" data-toggle="modal" data-target="#intro-security-code"><img src="<?php echo skin_url('images/icon-help.png')?>"></a> </div> </div> <div class="form-group"> <label for="inputEmail3" class="col-sm-4 control-label">Expiration Date:</label> <div class="col-sm-4"> <input type="number" class="form-control format-number-advance" title="Month" min="1" max="12" maxlength="2" id="month" name="month" placeholder="Month"> </div> <div class="col-sm-4"> <input type="number" class="form-control format-number-advance" title="Year" min="2016" max="3000" maxlength="4" id="year" name="year" placeholder="Year"> <input type="hidden" name="package_id" id="package_id" value="<?php echo $value['id']; ?>" /> <input type="hidden" name="card_type" id="card_type" value="" /> </div> </div> <button class="btn btn-lg btn-primary pull-right remove-margin submit-purchase">Complete Purchase</button> <div class="row"></div> <div 
class="space-20"></div> <p class="text-center">I agree with the Terms and Conditions and understand that this upgrade is non-refundable</p> <div class="space-10"></div> </div> <p class="text-center">Your credit card will be charged when you click Complete Purchase. A copy of your subscription details will be sent to you via email for your records. To manage your account, visit the advanced settings in Profile Page. At the end of your current upgrade, Dezignwall will automatically continue your subscription for the same period and amount as your current upgrade. Upgrade today and start promoting your brand.</p> </form> </div> </section> <div class="modal modal-no-radius fade" id="intro-security-code" tabindex="-1" role="dialog" aria-labelledby="Intro Security Code"> <div class="modal-dialog" role="document"> <div class="modal-content text-center"> <div class="modal-body"> <h3>What’s the security code (or CVV Code)?</h3> <div class="space-20"></div> <p>The security code is the 3 digit value printed on the signature panel located on the back of your card.
It is the last 3 numbers in that area.</p> <div class="space-20"></div> <div class="space-10"></div> <img src="<?php echo skin_url('images/img-intro-sercurity-code.png')?>"> <div class="space-20"></div> <div class="space-10"></div> <p>American Express Cards: the security code is the digit value printed (not-embossed) above your account number on the front of your card.</p> <div class="text-right"> <button class="btn btn-primary" type="button" data-dismiss="modal" aria-label="Close">Close</button> </div> </div> </div> </div> </div> <script src="<?php echo skin_url(); ?>/js/jquery.creditCardValidator.js"></script> <script type="text/javascript"> $("#payment_paypal").click(function () { var id = $(this).val(); $("#payment_credit").removeAttr("checked"); document.location.href = "<?php echo base_url('/checkout/process'); ?>/" + id; }); var valid_card = false; var card_type = 'unknown'; $(function() { <?php if ($post != null) : foreach ($post as $key => $val) : echo '$("#'.$key.'").val("' . $val . '");'; endforeach; endif; ?> $("#payment_paypal").removeAttr("checked"); $('#card_number').validateCreditCard(function(result) { valid_card = result.valid; card_type = (result.card_type == null ? 
'unknown' : result.card_type.name); $('#card_type').val(card_type); if (result.valid) { $(this).removeClass('warning'); } else { if ($(this).val().length > 0) { $(this).addClass('warning'); } } }); }); $(".submit-purchase").click(function () { $(this).attr('disabled','disabled'); var valid = valid_form($('#form-purchase'), "warning", false); if (valid == true) { if ($.trim($('#month').val()) == '') { $('#month').addClass('warning'); $(".error-client").html("Please enter required field Month"); $(".error-client").show(); $(this).removeAttr('disabled'); return false; } if (!valid_card) { $(".error-client").html("Invalid card number."); $(".error-client").show(); $(this).removeAttr('disabled'); return false; } $(".error-client").html(""); $(".error-client").hide(); $('#form-purchase').submit(); return true; } $(".error-client").html(valid); $(".error-client").show(); $(this).removeAttr('disabled'); return false; }); </script>
{ "content_hash": "514f2b97890aa6de777f233280ab1ce3", "timestamp": "", "source": "github", "line_count": 491, "max_line_length": 173, "avg_line_length": 54.76782077393075, "alnum_prop": 0.5034026254137072, "repo_name": "phanquanghiep123/dezignwall", "id": "1698d753c1fd3b60e4b4cac50b9797eead405fc1", "size": "26893", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "application/views/profile/upgrade.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ActionScript", "bytes": "97232" }, { "name": "ApacheConf", "bytes": "123" }, { "name": "Batchfile", "bytes": "21" }, { "name": "CSS", "bytes": "520865" }, { "name": "HTML", "bytes": "607672" }, { "name": "JavaScript", "bytes": "4451995" }, { "name": "PHP", "bytes": "18642021" }, { "name": "Shell", "bytes": "25" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_67) on Wed Oct 08 15:57:24 PDT 2014 --> <meta http-equiv="Content-Type" content="text/html" charset="UTF-8"> <title>Uses of Class org.apache.hadoop.hbase.security.visibility.ZKVisibilityLabelWatcher (HBase 0.98.7-hadoop2 API)</title> <meta name="date" content="2014-10-08"> <link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.hadoop.hbase.security.visibility.ZKVisibilityLabelWatcher (HBase 0.98.7-hadoop2 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.html" title="class in org.apache.hadoop.hbase.security.visibility">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../../index.html?org/apache/hadoop/hbase/security/visibility/class-use/ZKVisibilityLabelWatcher.html" 
target="_top">Frames</a></li> <li><a href="ZKVisibilityLabelWatcher.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class org.apache.hadoop.hbase.security.visibility.ZKVisibilityLabelWatcher" class="title">Uses of Class<br>org.apache.hadoop.hbase.security.visibility.ZKVisibilityLabelWatcher</h2> </div> <div class="classUseContainer">No usage of org.apache.hadoop.hbase.security.visibility.ZKVisibilityLabelWatcher</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../org/apache/hadoop/hbase/security/visibility/ZKVisibilityLabelWatcher.html" title="class in org.apache.hadoop.hbase.security.visibility">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a 
href="../../../../../../../index.html?org/apache/hadoop/hbase/security/visibility/class-use/ZKVisibilityLabelWatcher.html" target="_top">Frames</a></li> <li><a href="ZKVisibilityLabelWatcher.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2014 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p> </body> </html>
{ "content_hash": "a15c490de2571bd9c96f6c1b88f80678", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 199, "avg_line_length": 41.35042735042735, "alnum_prop": 0.6289789169078132, "repo_name": "gsoundar/mambo-ec2-deploy", "id": "19651856bf389c6ab0ed0eec6823b73bb6369f32", "size": "4838", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/hbase-0.98.7-hadoop2/docs/devapidocs/org/apache/hadoop/hbase/security/visibility/class-use/ZKVisibilityLabelWatcher.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "23179" }, { "name": "CSS", "bytes": "39965" }, { "name": "HTML", "bytes": "263271260" }, { "name": "Java", "bytes": "103085" }, { "name": "JavaScript", "bytes": "1347" }, { "name": "Python", "bytes": "4101" }, { "name": "Ruby", "bytes": "262588" }, { "name": "Shell", "bytes": "118548" } ], "symlink_target": "" }
from tkinter import TclError


class WidgetRedirector:
    """Intercept low-level Tk widget operations from Python.

    Every Tk widget is backed by a Tcl command named after the widget's
    pathname (widget._w); widget operations such as a Text widget's
    'insert' go through that command, even when triggered by default Tk
    bindings that never re-enter tkinter.  Binding <Key> via tkinter is
    therefore not enough to hook an insert: the Tcl 'insert' operation
    itself must be intercepted, so that a text.insert call in Python has
    the same effect as a key press.

    This class renames the widget's original Tcl command and installs a
    dispatcher under the widget's pathname.  Functions registered with
    register() then intercept the matching operations; the corresponding
    tkinter instance methods are shadowed as well.

    In IDLE, Percolator uses a WidgetRedirector to intercept Text
    commands; the bottom of a percolator chain invokes the original Tk
    widget operation.
    """

    def __init__(self, widget):
        '''Rename the widget's Tcl command and install the dispatcher.

        _operations: dict mapping operation name to registered function.
        widget: the tkinter widget whose Tcl command is intercepted.
        tk: the widget's Tcl interpreter (convenience alias).
        orig: new name given to the original Tcl command.

        Renaming raises TclError when the target name already exists, so
        at most one WidgetRedirector can wrap a given widget.
        '''
        self._operations = {}
        self.widget = widget
        self.tk = widget.tk
        pathname = widget._w
        self.orig = pathname + "_orig"
        # Move the widget's Tcl command out of the way ...
        self.tk.call("rename", pathname, self.orig)
        # ... and install our dispatcher under the widget's pathname.
        self.tk.createcommand(pathname, self.dispatch)

    def __repr__(self):
        return (f"{self.__class__.__name__}"
                f"({self.widget.__class__.__name__}<{self.widget._w}>)")

    def close(self):
        "Undo __init__: drop all registrations, restore the Tcl command."
        for operation in list(self._operations):
            self.unregister(operation)
        pathname = self.widget._w
        tk = self.widget.tk
        tk.deletecommand(pathname)
        # Put the original Tcl command back under its own name.
        tk.call("rename", self.orig, pathname)
        # Break references; should not be needed if the instance is
        # deleted after close, as in Percolator.
        del self.widget, self.tk

    def register(self, operation, function):
        '''Intercept *operation* with *function*; return the original command.

        Adds an operation -> function pair to ._operations and also sets
        a widget attribute that shadows the tkinter class instance
        method, so Python-side calls are intercepted independently of
        Tcl-level dispatch.  Registering a second function for the same
        operation replaces the first in both places.  The returned
        OriginalCommand lets *function* invoke the real widget operation.
        '''
        self._operations[operation] = function
        setattr(self.widget, operation, function)
        return OriginalCommand(self, operation)

    def unregister(self, operation):
        '''Forget a registration; return its function, or None if absent.

        Deleting the instance attribute un-shadows the class attribute.
        '''
        if operation not in self._operations:
            return None
        function = self._operations.pop(operation)
        try:
            delattr(self.widget, operation)
        except AttributeError:
            pass
        return function

    def dispatch(self, operation, *args):
        '''Tcl callback: route an operation to its hook or through to Tk.

        A registered function handles the operation itself; the
        operation is NOT forwarded to Tk unless that function chooses to
        call the OriginalCommand returned by register() (see
        ColorDelegator.py for an example).  Unregistered operations pass
        straight through to the renamed original command.  TclError is
        reported as "" rather than propagated into Tcl.
        '''
        handler = self._operations.get(operation)
        try:
            if handler:
                return handler(*args)
            return self.tk.call((self.orig, operation) + args)
        except TclError:
            return ""


class OriginalCommand:
    '''Callable proxy for a redirected Tk widget operation.

    Returned by WidgetRedirector.register(); a registered hook calls it
    to run the original operation:

        redir = WidgetRedirector(text)
        def my_insert(*args):
            print("insert", args)
            original_insert(*args)
        original_insert = redir.register("insert", my_insert)
    '''

    def __init__(self, redir, operation):
        '''Precompute what __call__ needs.

        .redir and .operation record the inputs for __repr__; .tk and
        .orig mirror attributes of redir (kept for compatibility with
        any recipient code that still reads them).
        '''
        self.redir = redir
        self.operation = operation
        self.tk = redir.tk
        self.orig = redir.orig
        self.tk_call = redir.tk.call
        self.orig_and_operation = (redir.orig, operation)

    def __repr__(self):
        return f"{self.__class__.__name__}({self.redir!r}, {self.operation!r})"

    def __call__(self, *args):
        return self.tk_call(self.orig_and_operation + args)


def _widget_redirector(parent):  # htest #
    "Manual test: echo Text 'insert' operations to stdout."
    from tkinter import Tk, Text
    import re

    root = Tk()
    root.title("Test WidgetRedirector")
    width, height, x, y = map(int, re.split('[x+]', parent.geometry()))
    root.geometry("+%d+%d" % (x, y + 150))
    text = Text(root)
    text.pack()
    text.focus_set()
    redir = WidgetRedirector(text)

    def my_insert(*args):
        print("insert", args)
        original_insert(*args)

    original_insert = redir.register("insert", my_insert)
    root.mainloop()


if __name__ == "__main__":
    import unittest
    unittest.main('idlelib.idle_test.test_widgetredir',
                  verbosity=2, exit=False)

    from idlelib.idle_test.htest import run
    run(_widget_redirector)
{ "content_hash": "3b3a6d4006341b33f45882d2e9546a14", "timestamp": "", "source": "github", "line_count": 176, "max_line_length": 79, "avg_line_length": 39.43181818181818, "alnum_prop": 0.6321325648414986, "repo_name": "sharhar/USB-Thing", "id": "67d7f61e623b4289bf55de2c641affd63b60e681", "size": "6940", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "UpdaterFiles/Lib/python-3.5.1.amd64/Lib/idlelib/WidgetRedirector.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "5015" }, { "name": "C", "bytes": "436714" }, { "name": "C#", "bytes": "8440" }, { "name": "C++", "bytes": "100530" }, { "name": "CSS", "bytes": "96" }, { "name": "F#", "bytes": "2310" }, { "name": "Forth", "bytes": "506" }, { "name": "GLSL", "bytes": "1040" }, { "name": "HTML", "bytes": "41126" }, { "name": "Jupyter Notebook", "bytes": "752587" }, { "name": "Makefile", "bytes": "895" }, { "name": "Mask", "bytes": "969" }, { "name": "PowerShell", "bytes": "1372" }, { "name": "Python", "bytes": "14041449" }, { "name": "Shell", "bytes": "13559" }, { "name": "Tcl", "bytes": "2173292" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?>
<!-- Fody weaver configuration (FodyWeavers.xml): each child element enables
     one weaver, here Stamp and ConfigureAwait. VerifyAssembly="true" asks
     Fody to verify the assembly after weaving. NOTE(review): per-weaver
     semantics come from the corresponding Fody addin packages - confirm
     against the project's package references. -->
<Weavers VerifyAssembly="true">
  <Stamp />
  <ConfigureAwait />
</Weavers>
{ "content_hash": "c71c7394230829a583894d71432d58c5", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 38, "avg_line_length": 22.8, "alnum_prop": 0.6578947368421053, "repo_name": "pomma89/CodeServices", "id": "e8f1a6cd5aeec2af26b43d1b2a30dfa52790355b", "size": "114", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "Platform Specific/CodeServices.MessageQueue.NET46/FodyWeavers.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "204" }, { "name": "C", "bytes": "11150" }, { "name": "C#", "bytes": "2231834" }, { "name": "Common Lisp", "bytes": "3721" }, { "name": "F#", "bytes": "2407" }, { "name": "HTML", "bytes": "24603" }, { "name": "PowerShell", "bytes": "178204" }, { "name": "Roff", "bytes": "4227" } ], "symlink_target": "" }
function ComplaintsChartMouseOverRectangle(svgObj){ this.svg = svgObj.svg; this.height = svgObj.height; this.width = svgObj.width; this.margin = svgObj.margin; this.data = svgObj.data; this.xScale = svgObj.xScale; this.yScale = svgObj.yScale; this.line = null; this.circles = []; this.BOROUGHS = ['BROOKLYN', 'BRONX', 'MANHATTAN', 'QUEENS', 'STATEN ISLAND']; } ComplaintsChartMouseOverRectangle.prototype.addToChart = function(){ var index = this.data['BRONX'].length - 1; this._addFocusElements(); this.line.update(index); this._updateCircles(index); this.drawRect(); }; ComplaintsChartMouseOverRectangle.prototype.drawRect = function(){ var self = this; this.svg.append('svg:rect') .attr('id', 'mouse-effects') .attr('width', (this.width - this.margin.left - this.margin.right)) .attr('height', (this.height - this.margin.top)) .attr('transform', 'translate(' + (this.margin.left + this.margin.right)+ ',0)') .style('pointer-events', 'all') .on('mouseover', function(){ self.line.style('display', 'inherit'); }) .on('mouseout', function(){ self.line.style('display', 'none'); }) .on('mousemove', function(){ self._mouseMove(this); }); }; // private methods ComplaintsChartMouseOverRectangle.prototype._addFocusElements = function(){ if( !this.line && !this.circles.length ){ this._createFocusLine(); this._createFocusCircles(); } }; ComplaintsChartMouseOverRectangle.prototype._createFocusLine = function(){ this.line = new ComplaintsChartFocusLine(this); }; ComplaintsChartMouseOverRectangle.prototype._createFocusCircles = function(){ var self = this; this.BOROUGHS.forEach(function(borough){ self.circles.push( new ComplaintsChartFocusCircle(self, borough) ); }); }; ComplaintsChartMouseOverRectangle.prototype._mouseMove = function(el){ var index = this._findIndex(el); this.line.update(index); this._updateCircles(index); }; ComplaintsChartMouseOverRectangle.prototype._findIndex = function(el){ var xPosition = this.xScale.invert(d3.mouse(el)[0]), rightIndex = 
this._bisectDate(this.data['BRONX'], xPosition, 1), leftIndex = rightIndex - 1 leftDatum = this.data['BRONX'][leftIndex], rightDaturm = this.data['BRONX'][rightIndex]; return xPosition - leftDatum.date > rightDaturm.date - xPosition ? rightIndex : leftIndex; }; ComplaintsChartMouseOverRectangle.prototype._updateCircles = function(index){ this.circles.forEach(function(circle){ circle.update(index); }); }; ComplaintsChartMouseOverRectangle.prototype._bisectDate = d3.bisector( function(d) { return d.date; } ).left;
{ "content_hash": "a033630cd22ef7359d2301e188dafcd4", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 84, "avg_line_length": 31.903614457831324, "alnum_prop": 0.7020392749244713, "repo_name": "heatseeknyc/heatseeknyc", "id": "3245683f34a3ef0d0804bf970012ab8d9689e43a", "size": "2684", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "app/assets/javascripts/complaints-chart/complaints-chart-mouse-over-rectangle.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "715" }, { "name": "HTML", "bytes": "154554" }, { "name": "JavaScript", "bytes": "42768" }, { "name": "Procfile", "bytes": "92" }, { "name": "Ruby", "bytes": "279498" }, { "name": "SCSS", "bytes": "62020" } ], "symlink_target": "" }
mybbcodeSettings = { nameSpace: "bbcode", // Useful to prevent multi-instances CSS conflict previewParserPath: '~/sets/bbcode/preview.php', // path to your BBCode parser markupSet: [ {name:'Bold', key:'B', openWith:'[b]', closeWith:'[/b]'}, {name:'Italic', key:'I', openWith:'[i]', closeWith:'[/i]'}, {name:'Underline', key:'U', openWith:'[u]', closeWith:'[/u]'}, {separator:'---------------' }, {name:'Picture', key:'P', replaceWith:'[img][![Url]!][/img]'}, {name:'Link', key:'L', openWith:'[url=[![Url]!]]', closeWith:'[/url]', placeHolder:'Your text to link here...'}, {separator:'---------------' }, {name:'Size', key:'S', openWith:'[size=[![Text size]!]]', closeWith:'[/size]', dropMenu :[ {name:'Big', openWith:'[size=200]', closeWith:'[/size]' }, {name:'Normal', openWith:'[size=100]', closeWith:'[/size]' }, {name:'Small', openWith:'[size=50]', closeWith:'[/size]' } ]}, {separator:'---------------' }, {name:'Bulleted list', openWith:'[list]\n', closeWith:'\n[/list]'}, {name:'Numeric list', openWith:'[list=[![Starting number]!]]\n', closeWith:'\n[/list]'}, {name:'List item', openWith:'[*] '}, {separator:'---------------' }, {name:'Quotes', openWith:'[quote]', closeWith:'[/quote]'}, {name:'Code', openWith:'[code]', closeWith:'[/code]'}, {separator:'---------------' }, {name:'Clean', className:"clean", replaceWith:function(markitup) { return markitup.selection.replace(/\[(.*?)\]/g, "") } }, {name:'Preview', className:"preview", call:'preview' } ] }
{ "content_hash": "2100b6029e9edec9611364b45ccd9d96", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 125, "avg_line_length": 52.48275862068966, "alnum_prop": 0.5624178712220762, "repo_name": "ladea/Laravel-Markitup", "id": "abdde2f02ccac46994f4ede165a120d59fdd9ff3", "size": "2094", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "public/markitup/sets/bbcode/set.js", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
using Nest.Resolvers.Converters; using Newtonsoft.Json; using System; using System.Collections.Generic; using System.Linq; using System.Text; namespace Nest { [JsonObject] [JsonConverter(typeof(ReadAsTypeConverter<YearlySchedule>))] public interface IYearlySchedule : ISchedule { [JsonProperty("in")] Month In { get; set; } [JsonProperty("on")] int On { get; set; } [JsonProperty("at")] string At { get; set; } } public class YearlySchedule : ScheduleBase, IYearlySchedule { public Month In { get; set; } public int On { get; set; } public string At { get; set; } internal override void ContainIn(IScheduleContainer container) { container.Yearly = this; } } public class YearlyScheduleDescriptor : IYearlySchedule { private IYearlySchedule Self { get { return this; } } public YearlyScheduleDescriptor In(Month month) { Self.In = month; return this; } public YearlyScheduleDescriptor On(int day) { Self.On = day; return this; } public YearlyScheduleDescriptor At(string time) { Self.At = time; return this; } Month IYearlySchedule.In { get; set; } int IYearlySchedule.On { get; set; } string IYearlySchedule.At { get; set; } } }
{ "content_hash": "d19054c5766f8abf1530eaba52a24c48", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 64, "avg_line_length": 19.125, "alnum_prop": 0.6928104575163399, "repo_name": "CSGOpenSource/elasticsearch-watcher-net", "id": "d8bab88b0880100b4b2ed657b690fc2d5b8602d6", "size": "1226", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/Nest.Watcher/Domain/Schedule/YearlySchedule.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2239" }, { "name": "C#", "bytes": "392385" }, { "name": "F#", "bytes": "15612" }, { "name": "PowerShell", "bytes": "3797" }, { "name": "Shell", "bytes": "1869" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>color: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.5.0 / color - 1.0.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> color <small> 1.0.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-11-23 11:02:22 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-11-23 11:02:22 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-num base Num library distributed with the OCaml compiler base-threads base base-unix base camlp5 7.14 Preprocessor-pretty-printer of OCaml conf-findutils 1 Virtual package relying on findutils conf-perl 2 
Virtual package relying on perl coq 8.5.0 Formal proof management system num 0 The Num library for arbitrary-precision integer and rational arithmetic ocaml 4.04.2 The OCaml compiler (virtual package) ocaml-base-compiler 4.04.2 Official 4.04.2 release ocaml-config 1 OCaml Switch Configuration # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;frederic.blanqui@inria.fr&quot; homepage: &quot;http://color.inria.fr/&quot; license: &quot;CeCILL&quot; build: [ [make &quot;-j%{jobs}%&quot;] ] install: [make &quot;install&quot;] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.4pl2&quot; &amp; &lt; &quot;8.5~&quot;} ] authors: [ &quot;Frédéric Blanqui&quot; &quot;Adam Koprowski&quot; &quot;Sébastien Hinderer&quot; &quot;Pierre-Yves Strub&quot; &quot;Sidi Ould Biha&quot; &quot;Solange Coupet-Grimal&quot; &quot;William Delobel&quot; &quot;Hans Zantema&quot; &quot;Stéphane Leroux&quot; &quot;Léo Ducas&quot; &quot;Johannes Waldmann&quot; &quot;Qiand Wang&quot; &quot;Lianyi Zhang&quot; &quot;Sorin Stratulat&quot; ] synopsis: &quot;A library on rewriting theory and termination&quot; url { src: &quot;http://files.inria.fr/blanqui/color/color.1.0.0.tar.gz&quot; checksum: &quot;md5=5277b9cfe0cb609f32dd5deb7795fb45&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-color.1.0.0 coq.8.5.0</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.5.0). 
The following dependencies couldn&#39;t be met: - coq-color -&gt; coq &lt; 8.5~ -&gt; ocaml &lt; 4.03.0 base of this switch (use `--unlock-base&#39; to force) No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-color.1.0.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "4dae4fcbc053cf02fa9b3c19a530dbe1", "timestamp": "", "source": "github", "line_count": 173, "max_line_length": 159, "avg_line_length": 38.335260115606935, "alnum_prop": 0.5278950542822678, "repo_name": "coq-bench/coq-bench.github.io", "id": "9da6b4fc8f95c9464f76687dfd6e09fb4c8b67d2", "size": "6662", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.04.2-2.0.5/released/8.5.0/color/1.0.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
let JointModel = Model.createComponent("JointModel"); JointModel.defineMethod("construct", function construct() { // Construct models Object.defineProperty(this, "models", { value: {} }); }); JointModel.defineMethod("modelDidUpdate", function modelDidUpdate(model) { }); JointModel.defineMethod("retainModel", function retainModel(model) { // Hook on model model.jointModels[this.modelId] = this; // Remember models this.models[model.modelId] = model; }); JointModel.defineMethod("releaseModel", function releaseModel(model) { // Release model hook so model would no longer send message to this model delete model.jointModels[this.modelId]; // Forget model delete this.models[model.modelId]; }); JointModel.defineMethod("releaseAllModels", function releaseAllModels() { // Call to release hooks on all models Object.values(this.models).forEach(function (model) { this.releaseModel(model); }, this); });
{ "content_hash": "f836b030d0edf7175220e5e8ae0caa9d", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 75, "avg_line_length": 28.696969696969695, "alnum_prop": 0.7296726504751848, "repo_name": "many-to-many/ica", "id": "705ac761085cd47a512272274240a36101a400d4", "size": "948", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "scripts/component/JointModel.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "27042" }, { "name": "HTML", "bytes": "28050" }, { "name": "JavaScript", "bytes": "221674" }, { "name": "PHP", "bytes": "96046" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <project> <!-- NMML reference: https://gist.github.com/1763850 --> <!-- metadata, make sure 'package' is at least 3 segments (ie. com.mycompany.myproject) --> <meta title="[PR] InfiniShip" package="br.com.yuiti.lab.infiniship.PRInfiniShip" version="1.0.0" company="Fabio Y. Goto" /> <!-- output --> <app main="br.com.yuiti.lab.infiniship.Main" file="PRInfiniShip" path="bin" /> <window background="#ffffff" fps="60" /> <window width="256" height="256" unless="mobile" /> <window orientation="landscape" vsync="false" antialiasing="0" if="cpp" /> <!-- classpath, haxe libs --> <source path="src" /> <haxelib name="openfl" /> <haxelib name="actuate" /> <!-- assets --> <icon path="assets/icon.svg" /> <assets path="assets/img" rename="img" /> <!-- optimize output <haxeflag name="-dce full" /> --> </project>
{ "content_hash": "f800b6c1d99a21a2761016576a4798d9", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 124, "avg_line_length": 32.48148148148148, "alnum_prop": 0.6408209806157354, "repo_name": "yuigoto/infiniship", "id": "f10531ce87f6e9da84c0b3db70c8d6b7a3ca75a8", "size": "877", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "haxe/application.xml", "mode": "33188", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "16714" }, { "name": "Haxe", "bytes": "49023" }, { "name": "JavaScript", "bytes": "257942" }, { "name": "PHP", "bytes": "23724" }, { "name": "Processing", "bytes": "11060" } ], "symlink_target": "" }
from GroundedScan.dataset import GroundedScan from GroundedScan.grammar import Derivation from GroundedScan.world import Situation from GroundedScan.world import Position from GroundedScan.world import Object from GroundedScan.world import INT_TO_DIR from GroundedScan.world import PositionedObject from GroundedScan.helpers import numpy_array_to_image from GroundedScan.helpers import image_to_numpy_array import os import time import numpy as np import logging import shutil logging.getLogger("PyQt5").disabled = True logging.getLogger('matplotlib.font_manager').disabled = True logger = logging.getLogger("GroundedScan") TEST_DIRECTORY = "test_dir" TEST_PATH = os.path.join(os.getcwd(), TEST_DIRECTORY) if not os.path.exists(TEST_PATH): os.mkdir(TEST_PATH) EXAMPLES_TO_TEST = 10000 intransitive_verbs = ["walk"] transitive_verbs = ["push", "pull"] adverbs = ["cautiously"] nouns = ["circle", "cylinder", "square"] color_adjectives = ["red", "blue", "green", "yellow"] size_adjectives = ["big", "small"] TEST_DATASET = GroundedScan(intransitive_verbs=intransitive_verbs, transitive_verbs=transitive_verbs, adverbs=adverbs, nouns=nouns, color_adjectives=color_adjectives, size_adjectives=size_adjectives, percentage_train=0.8, min_object_size=1, max_object_size=4, sample_vocabulary='default', save_directory=TEST_DIRECTORY, grid_size=15, type_grammar="adverb") TEST_DATASET_NONCE = GroundedScan(intransitive_verbs=1, transitive_verbs=2, adverbs=1, nouns=3, color_adjectives=4, size_adjectives=2, percentage_train=0.8, min_object_size=1, max_object_size=4, sample_vocabulary='sample', save_directory=TEST_DIRECTORY, grid_size=15, type_grammar="adverb") TEST_SITUATION_1 = Situation(grid_size=15, agent_position=Position(row=7, column=2), agent_direction=INT_TO_DIR[0], target_object=PositionedObject(object=Object(size=2, color='red', shape='circle'), position=Position(row=10, column=4), vector=np.array([1, 0, 1])), placed_objects=[PositionedObject(object=Object(size=2, color='red', 
shape='circle'), position=Position(row=10, column=4), vector=np.array([1, 0, 1])), PositionedObject(object=Object(size=4, color='green', shape='circle'), position=Position(row=3, column=12), vector=np.array([0, 1, 0]))], carrying=None) TEST_SITUATION_2 = Situation(grid_size=15, agent_position=Position(row=7, column=2), agent_direction=INT_TO_DIR[0], target_object=PositionedObject(object=Object(size=4, color='red', shape='circle'), position=Position(row=10, column=4), vector=np.array([1, 0, 1])), placed_objects=[PositionedObject(object=Object(size=4, color='red', shape='circle'), position=Position(row=10, column=4), vector=np.array([1, 0, 1])), PositionedObject(object=Object(size=4, color='green', shape='cylinder'), position=Position(row=3, column=12), vector=np.array([0, 1, 0]))], carrying=None) TEST_SITUATION_3 = Situation(grid_size=15, agent_position=Position(row=7, column=2), agent_direction=INT_TO_DIR[0], target_object=None, placed_objects=[PositionedObject(object=Object(size=1, color='red', shape='circle'), position=Position(row=10, column=4), vector=np.array([1, 0, 1])), PositionedObject(object=Object(size=2, color='green', shape='circle'), position=Position(row=3, column=1), vector=np.array([0, 1, 0]))], carrying=None) TEST_SITUATION_4 = Situation(grid_size=15, agent_position=Position(row=7, column=2), agent_direction=INT_TO_DIR[0], target_object=None, placed_objects=[PositionedObject(object=Object(size=2, color='red', shape='circle'), position=Position(row=10, column=4), vector=np.array([1, 0, 1])), PositionedObject(object=Object(size=4, color='red', shape='circle'), position=Position(row=3, column=1), vector=np.array([0, 1, 0]))], carrying=None) def test_save_and_load_dataset(dataset): start = time.time() dataset.get_data_pairs(max_examples=EXAMPLES_TO_TEST) dataset.save_dataset("test.txt") dataset.save_dataset_statistics(split="train") dataset.save_dataset_statistics(split="test") test_grounded_scan = 
GroundedScan.load_dataset_from_file(os.path.join(TEST_DIRECTORY, "test.txt"), TEST_DIRECTORY) for example_one, example_two in zip(dataset.get_examples_with_image("train"), test_grounded_scan.get_examples_with_image("train")): assert dataset.command_repr(example_one["input_command"]) == test_grounded_scan.command_repr( example_two["input_command"]), "test_save_and_load_dataset FAILED" assert dataset.command_repr(example_one["target_command"]) == test_grounded_scan.command_repr( example_two["target_command"]), "test_save_and_load_dataset FAILED" assert np.array_equal(example_one["situation_image"], example_two["situation_image"]),\ "test_save_and_load_dataset FAILED" assert dataset.command_repr(example_one["input_meaning"]) == test_grounded_scan.command_repr( example_two["input_meaning"]), "test_save_and_load_dataset FAILED" os.remove(os.path.join(TEST_DIRECTORY, "test.txt")) end = time.time() logger.info("test_save_and_load_dataset PASSED in {} seconds".format(end - start)) return def test_save_and_load_dataset_nonce(): start = time.time() TEST_DATASET_NONCE.get_data_pairs(max_examples=EXAMPLES_TO_TEST) TEST_DATASET_NONCE.save_dataset("test.txt") TEST_DATASET_NONCE.save_dataset_statistics(split="train") TEST_DATASET_NONCE.save_dataset_statistics(split="test") test_grounded_scan = GroundedScan.load_dataset_from_file(os.path.join(TEST_DIRECTORY, "test.txt"), TEST_DIRECTORY) for example_one, example_two in zip(TEST_DATASET_NONCE.get_examples_with_image("train"), test_grounded_scan.get_examples_with_image("train")): assert TEST_DATASET_NONCE.command_repr(example_one["input_command"]) == test_grounded_scan.command_repr( example_two["input_command"]), "test_save_and_load_dataset FAILED" assert TEST_DATASET_NONCE.command_repr(example_one["target_command"]) == test_grounded_scan.command_repr( example_two["target_command"]), "test_save_and_load_dataset FAILED" assert np.array_equal(example_one["situation_image"], example_two["situation_image"]),\ "test_save_and_load_dataset 
FAILED" assert TEST_DATASET_NONCE.command_repr(example_one["input_meaning"]) == test_grounded_scan.command_repr( example_two["input_meaning"]), "test_save_and_load_dataset FAILED" os.remove(os.path.join(TEST_DIRECTORY, "test.txt")) end = time.time() logger.info("test_save_and_load_dataset PASSED in {} seconds".format(end - start)) return def test_derivation_from_rules(dataset): start = time.time() derivation, arguments = dataset.sample_command() rules_list = [] lexicon = {} derivation.to_rules(rules_list, lexicon) test = Derivation.from_rules(rules_list, lexicon=lexicon) assert ' '.join(test.words()) == ' '.join(derivation.words()), "test_derivation_from_rules FAILED" end = time.time() logger.info("test_derivation_from_rules PASSED in {} seconds".format(end - start)) def test_derivation_from_string(dataset): start = time.time() derivation, arguments = dataset.sample_command() derivation_str = derivation.__repr__() rules_str, lexicon_str = derivation_str.split(';') new_derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) assert ' '.join(new_derivation.words()) == ' '.join(derivation.words()), "test_derivation_from_string FAILED" end = time.time() logger.info("test_derivation_from_string PASSED in {} seconds".format(end - start)) def test_demonstrate_target_commands_one(dataset): """Test that target commands sequence resulting from demonstrate_command is the same as the one executed by demonstrate_target_commands""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_intrans 'to' DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_intransitive -> {},T:to,T:a,T:{},NT:JJ -> {},T:{},NT:NN -> {}".format( translate_fn("walk"), translate_fn("walk"), translate_fn("small"), translate_fn("small"), translate_fn("circle"), translate_fn("circle") ) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) actual_target_commands, _, _ = dataset.demonstrate_command(derivation, 
TEST_SITUATION_1) command = ' '.join(derivation.words()) target_commands, _, _, _ = dataset.demonstrate_target_commands(command, TEST_SITUATION_1, actual_target_commands) assert ','.join(actual_target_commands) == ','.join(target_commands), \ "test_demonstrate_target_commands_one FAILED" end = time.time() logger.info("test_demonstrate_target_commands_one PASSED in {} seconds".format(end - start)) def test_demonstrate_target_commands_two(dataset): """Test that target commands sequence resulting from demonstrate_command for pushing a heavy objectis the same as the executed one by demonstrate_target_commands""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_trans DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_transitive -> {},T:a,T:{},NT:JJ -> {},T:{},NT:NN -> {}".format( translate_fn("push"), translate_fn("push"), translate_fn("big"), translate_fn("big"), translate_fn("circle"), translate_fn("circle") ) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) actual_target_commands, _, _ = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_2) command = ' '.join(derivation.words()) target_commands, _, _, _ = dataset.demonstrate_target_commands(command, TEST_SITUATION_2, actual_target_commands) assert ','.join(actual_target_commands) == ','.join(target_commands), "test_demonstrate_target_commands_two FAILED" end = time.time() logger.info("test_demonstrate_target_commands_two PASSED in {} seconds".format(end - start)) def test_demonstrate_target_commands_three(dataset): """Test that target commands sequence resulting from demonstrate_command for pushing a light object is the same as the executed one by demonstrate_target_commands""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_trans DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_transitive -> {},T:a,T:{},NT:JJ -> {},T:{},NT:NN -> 
{}".format( translate_fn("push"), translate_fn("push"), translate_fn("small"), translate_fn("small"), translate_fn("circle"), translate_fn("circle") ) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) actual_target_commands, _, _ = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_1) command = ' '.join(derivation.words()) target_commands, _, _, _ = dataset.demonstrate_target_commands(command, TEST_SITUATION_1, actual_target_commands) assert ','.join(actual_target_commands) == ','.join(target_commands), "test_demonstrate_target_commands_three FAILED" end = time.time() logger.info("test_demonstrate_target_commands_three PASSED in {} seconds".format(end - start)) def test_demonstrate_command_one(dataset): """Test pushing a light object (where one target command of 'push <dir>' results in movement of 1 grid).""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_trans DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_transitive -> {},T:a,T:{},NT:JJ -> {},T:{},NT:NN -> {}".format( translate_fn("push"), translate_fn("push"), translate_fn("small"), translate_fn("small"), translate_fn("circle"), translate_fn("circle") ) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) expected_target_commands = "walk,walk,turn right,walk,walk,walk,"\ "push,push,push,push" actual_target_commands, _, _ = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_1) assert expected_target_commands == ','.join(actual_target_commands), "test_demonstrate_command_one FAILED" end = time.time() logger.info("test_demonstrate_command_one PASSED in {} seconds".format(end - start)) def test_demonstrate_command_two(dataset): """Test pushing a heavy object (where one target command of 'push <dir>' results in movement of 1 grid).""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_trans DP,ROOT -> VP" translate_fn = 
dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_transitive -> {},T:a,T:{},NT:JJ -> {},T:{},NT:NN -> {}".format( translate_fn("push"), translate_fn("push"), translate_fn("small"), translate_fn("small"), translate_fn("circle"), translate_fn("circle") ) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) expected_target_commands = "walk,walk,turn right,walk,walk,walk," \ "push,push,push,push,push,push,push,push" actual_target_commands, _, _ = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_2) assert expected_target_commands == ','.join(actual_target_commands), "test_demonstrate_command_two FAILED" end = time.time() logger.info("test_demonstrate_command_two PASSED in {} seconds".format(end - start)) def test_demonstrate_command_three(dataset): """Test walk to a small circle, tests that the function demonstrate command is able to find the target small circle even if that circle isn't explicitly set as the target object in the situation (which it wouldn't be at test time). 
""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_intrans 'to' DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_intransitive -> {},T:to,T:a,T:{},NT:JJ -> {},T:{},NT:NN -> {}".format( translate_fn("walk"), translate_fn("walk"), translate_fn("small"), translate_fn("small"), translate_fn("circle"), translate_fn("circle") ) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) expected_target_commands = "walk,walk,turn right,walk,walk,walk" actual_target_commands, _, _ = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_3) assert expected_target_commands == ','.join(actual_target_commands), "test_demonstrate_command_three FAILED" end = time.time() logger.info("test_demonstrate_command_three PASSED in {} seconds".format(end - start)) def test_demonstrate_command_four(dataset): """Test walk to a small circle, tests that the function demonstrate command is able to find the target big circle even if that circle isn't explicitly set as the target object in the situation (which it wouldn't be at test time). 
""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_intrans 'to' DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_intransitive -> {},T:to,T:a,T:{},NT:JJ -> {},T:{},NT:NN -> {}".format( translate_fn("walk"), translate_fn("walk"), translate_fn("big"), translate_fn("big"), translate_fn("circle"), translate_fn("circle") ) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) expected_target_commands = "turn left,turn left,walk,turn right,walk,walk,walk,walk" actual_target_commands, _, _ = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_3) assert expected_target_commands == ','.join(actual_target_commands), "test_demonstrate_command_four FAILED" end = time.time() logger.info("test_demonstrate_command_four PASSED in {} seconds".format(end - start)) def test_demonstrate_command_five(dataset): """Test that when referring to a small red circle and two present in the world, it finds the correct one.""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,NP -> JJ NP,DP -> 'a' NP,VP -> VV_intrans 'to' DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_intransitive -> {},T:to,T:a,T:{},NT:JJ -> {}:JJ -> {},T:{},T:{},NT:"\ "NN -> {}".format(translate_fn("walk"), translate_fn("walk"), translate_fn("red"), translate_fn("small"), translate_fn("red"), translate_fn("small"), translate_fn("circle"), translate_fn("circle")) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) expected_target_commands = "walk,walk,turn right,walk,walk,walk" actual_target_commands, _, _ = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_4) assert expected_target_commands == ','.join(actual_target_commands), "test_demonstrate_command_five FAILED" end = time.time() logger.info("test_demonstrate_command_five PASSED in {} seconds".format(end - start)) def test_demonstrate_command_six(dataset): """Test 
that when referring to a small red circle but only one red circle is present, demonstrate_commands fails.""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,NP -> JJ NP,DP -> 'a' NP,VP -> VV_intrans 'to' DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_intransitive -> {},T:to,T:a,T:{},NT:JJ -> {}:JJ -> {},T:{},T:{},NT:" \ "NN -> {}".format(translate_fn("walk"), translate_fn("walk"), translate_fn("red"), translate_fn("small"), translate_fn("red"), translate_fn("small"), translate_fn("circle"), translate_fn("circle")) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) expected_target_commands = "" try: actual_target_commands, _, _ = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_3) except AssertionError: actual_target_commands = "" assert expected_target_commands == ','.join(actual_target_commands), "test_demonstrate_command_six FAILED" end = time.time() logger.info("test_demonstrate_command_six PASSED in {} seconds".format(end - start)) def test_find_referred_target_one(dataset): """Test that for particular referred targets, the Derivation class identifies it correctly.""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,NP -> JJ NP,DP -> 'a' NP,VP -> VV_intrans 'to' DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_intransitive -> {},T:to,T:a,T:{},NT:JJ -> {}:JJ -> {},T:{},T:{},NT:" \ "NN -> {}".format(translate_fn("walk"), translate_fn("walk"), translate_fn("red"), translate_fn("small"), translate_fn("red"), translate_fn("small"), translate_fn("circle"), translate_fn("circle")) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) arguments = [] derivation.meaning(arguments) assert len(arguments) == 1, "test_find_referred_target_one FAILED." 
target_str, target_predicate = arguments.pop().to_predicate() translate_fn_word = dataset._vocabulary.translate_word translated_target_str = ' '.join([translate_fn_word(word) for word in target_str.split()]) assert translated_target_str == "red circle", "test_find_referred_target FAILED." assert target_predicate["noun"] == translate_fn("circle"), "test_find_referred_target_one FAILED." assert target_predicate["size"] == translate_fn("small"), "test_find_referred_target_one FAILED." assert target_predicate["color"] == translate_fn("red"), "test_find_referred_target_one FAILED." end = time.time() logger.info("test_find_referred_target_one PASSED in {} seconds".format(end - start)) def test_find_referred_target_two(dataset): """Test that for particular referred targets, the Derivation class identifies it correctly.""" start = time.time() rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_intrans 'to' DP,ROOT -> VP" translate_fn = dataset._vocabulary.translate_meaning lexicon_str = "T:{},NT:VV_intransitive -> {},T:to,T:a,T:{},NT:JJ -> {},T:{},NT:NN -> {}".format( translate_fn("walk"), translate_fn("walk"), translate_fn("big"), translate_fn("big"), translate_fn("circle"), translate_fn("circle") ) derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar) arguments = [] derivation.meaning(arguments) assert len(arguments) == 1, "test_find_referred_target_two FAILED." target_str, target_predicate = arguments.pop().to_predicate() translate_fn_word = dataset._vocabulary.translate_word translated_target_str = ' '.join([translate_fn_word(word) for word in target_str.split()]) assert translated_target_str == "circle", "test_find_referred_target_two FAILED." assert target_predicate["noun"] == translate_fn("circle"), "test_find_referred_target_two FAILED." assert target_predicate["size"] == translate_fn("big"), "test_find_referred_target_two FAILED." assert target_predicate["color"] == translate_fn(""), "test_find_referred_target_two FAILED." 
end = time.time() logger.info("test_find_referred_target_two PASSED in {} seconds".format(end - start)) def test_generate_possible_targets_one(dataset): """Test that for particular referred targets, the right possible target objects get generated.""" start = time.time() translate_meaning = dataset._vocabulary.translate_meaning target_predicate = {"noun": translate_meaning("circle"), "color": translate_meaning("red"), "size": translate_meaning("big")} translate_word = dataset._vocabulary.translate_word expected_possible_targets = {(2, "red", "circle"), (3, "red", "circle"), (4, "red", "circle")} actual_possible_targets = dataset.generate_possible_targets( referred_size=translate_word(target_predicate["size"]), referred_color=translate_word(target_predicate["color"]), referred_shape=translate_word(target_predicate["noun"])) for actual_possible_target in actual_possible_targets: assert actual_possible_target in expected_possible_targets, "test_generate_possible_targets_one FAILED." end = time.time() logger.info("test_generate_possible_targets_one PASSED in {} seconds".format(end - start)) def test_generate_possible_targets_two(dataset): """Test that for particular referred targets, the right possible target objects get generated.""" start = time.time() translate_meaning = dataset._vocabulary.translate_meaning target_predicate = {"noun": translate_meaning("circle"), "color": translate_meaning("red"), "size": translate_meaning("small")} translate_word = dataset._vocabulary.translate_word expected_possible_targets = {(1, "red", "circle"), (2, "red", "circle"), (3, "red", "circle"), (1, "blue", "circle"), (2, "blue", "circle"), (3, "blue", "circle"), (1, "green", "circle"), (2, "green", "circle"), (3, "green", "circle")} actual_possible_targets = dataset.generate_possible_targets( referred_size=translate_word(target_predicate["size"]), referred_color=translate_word(target_predicate["color"]), referred_shape=translate_word(target_predicate["noun"])) for 
expected_possible_target, actual_possible_target in zip(expected_possible_targets, actual_possible_targets): assert actual_possible_target in expected_possible_targets, "test_generate_possible_targets_two FAILED." end = time.time() logger.info("test_generate_possible_targets_two PASSED in {} seconds".format(end - start)) def test_generate_situations_one(dataset): """Test that when a small green circle is referred to there exist no smaller green circles than the target object in the world and at least one larger green circle.""" start = time.time() translate_meaning = dataset._vocabulary.translate_meaning target_shape = "circle" target_color = "green" target_size = 2 referred_size = translate_meaning("small") referred_color = translate_meaning("green") referred_shape = translate_meaning("circle") situation_specifications = dataset.generate_situations(num_resampling=1) relevant_situation = situation_specifications[target_shape][target_color][target_size].pop() dataset.initialize_world_from_spec(relevant_situation, referred_size=referred_size, referred_color=referred_color, referred_shape=referred_shape, actual_size=target_size, sample_percentage=0.5 ) smallest_object = dataset._world.object_positions("green circle", object_size="small").pop() assert smallest_object == relevant_situation["target_position"], "test_generate_situations_one FAILED." other_related_objects = dataset._world.object_positions("green circle") larger_objects = [] for size, sized_objects in other_related_objects: if size < target_size: assert not sized_objects, "test_generate_situations_one FAILED." elif size > target_size: larger_objects.extend(sized_objects) assert len(larger_objects) >= 1, "test_generate_situations_one FAILED." 
    end = time.time()
    logger.info("test_generate_situations_one PASSED in {} seconds".format(end - start))


def test_generate_situations_two(dataset):
    """Test that when a big green circle is referred to there exists no larger green circles and the exists at
    least one smaller green circle."""
    start = time.time()
    translate_meaning = dataset._vocabulary.translate_meaning
    target_shape = "circle"
    target_color = "green"
    target_size = 2
    referred_size = translate_meaning("big")
    referred_color = translate_meaning("green")
    referred_shape = translate_meaning("circle")
    situation_specifications = dataset.generate_situations(num_resampling=1)
    relevant_situation = situation_specifications[target_shape][target_color][target_size].pop()
    dataset.initialize_world_from_spec(relevant_situation, referred_size=referred_size,
                                       referred_color=referred_color,
                                       referred_shape=referred_shape,
                                       actual_size=target_size,
                                       sample_percentage=0.5
                                       )
    largest_object = dataset._world.object_positions("green circle", object_size="big").pop()
    assert largest_object == relevant_situation["target_position"], "test_generate_situations_two FAILED."
    other_related_objects = dataset._world.object_positions("green circle")
    smaller_objects = []
    for size, sized_objects in other_related_objects:
        if size > target_size:
            # Nothing may be larger than the referred "big" target.
            assert not sized_objects, "test_generate_situations_two FAILED."
        elif size < target_size:
            smaller_objects.extend(sized_objects)
    assert len(smaller_objects) >= 1, "test_generate_situations_two FAILED."
    end = time.time()
    logger.info("test_generate_situations_two PASSED in {} seconds".format(end - start))


def test_generate_situations_three(dataset):
    """Test that for particular commands the right situations get matched."""
    start = time.time()
    translate_meaning = dataset._vocabulary.translate_meaning
    target_shape = "circle"
    target_color = "green"
    target_size = 2
    referred_size = translate_meaning("big")
    referred_shape = translate_meaning("circle")
    situation_specifications = dataset.generate_situations(num_resampling=1)
    relevant_situation = situation_specifications[target_shape][target_color][target_size].pop()
    # No color is referred to here (referred_color=""), so the query below is shape-only.
    dataset.initialize_world_from_spec(relevant_situation, referred_size=referred_size,
                                       referred_color="",
                                       referred_shape=referred_shape,
                                       actual_size=target_size,
                                       sample_percentage=0.5
                                       )
    largest_object = dataset._world.object_positions("circle", object_size="big").pop()
    assert largest_object == relevant_situation["target_position"], "test_generate_situations_three FAILED."
    other_related_objects = dataset._world.object_positions("circle")
    smaller_objects = []
    for size, sized_objects in other_related_objects:
        if size > target_size:
            assert not sized_objects, "test_generate_situations_three FAILED."
        elif size < target_size:
            smaller_objects.extend(sized_objects)
    assert len(smaller_objects) >= 1, "test_generate_situations_three FAILED."
    end = time.time()
    logger.info("test_generate_situations_three PASSED in {} seconds".format(end - start))


def test_situation_representation_eq():
    """Test that situations compare equal to themselves and unequal to different situations."""
    start = time.time()
    test_situations = [TEST_SITUATION_1, TEST_SITUATION_2, TEST_SITUATION_3, TEST_SITUATION_4]
    for i, test_situation_1 in enumerate(test_situations):
        for j, test_situation_2 in enumerate(test_situations):
            if i == j:
                assert test_situation_1 == test_situation_2, "test_situation_representation_eq FAILED."
            else:
                assert test_situation_1 != test_situation_2, "test_situation_representation_eq FAILED."
    end = time.time()
    logger.info("test_situation_representation_eq PASSED in {} seconds".format(end - start))


def test_example_representation_eq(dataset):
    """Test that the function for comparing examples returns true when exactly the same example is passed twice."""
    start = time.time()
    rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_intrans 'to' DP,ROOT -> VP"
    translate_fn = dataset._vocabulary.translate_meaning
    lexicon_str = "T:{},NT:VV_intransitive -> {},T:to,T:a,T:{},NT:JJ -> {},T:{},NT:NN -> {}".format(
        translate_fn("walk"), translate_fn("walk"), translate_fn("big"), translate_fn("big"),
        translate_fn("circle"), translate_fn("circle")
    )
    derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar)
    arguments = []
    derivation.meaning(arguments)
    target_str, target_predicate = arguments.pop().to_predicate()
    adverb = ""
    for word in derivation.words():
        if word in dataset._vocabulary.get_adverbs():
            adverb = word
    target_commands, _, target_action = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_1)
    # NOTE(review): fill_example/get_data_pairs are called on the module-level TEST_DATASET
    # while the assertions below read from the `dataset` fixture — confirm this is intended.
    TEST_DATASET.fill_example(derivation.words(), derivation, TEST_SITUATION_1, target_commands, target_action,
                              target_predicate, visualize=False, splits=["train"], adverb=adverb)
    TEST_DATASET.get_data_pairs(max_examples=10, num_resampling=2)
    for split, examples in dataset._data_pairs.items():
        for example in examples:
            assert dataset.compare_examples(example, example), "test_example_representation_eq FAILED."
    end = time.time()
    logger.info("test_example_representation_eq PASSED in {} seconds".format(end - start))


def test_example_representation(dataset):
    """Test that when you save an example in its representation its the same if you parse it again."""
    start = time.time()
    rules_str = "NP -> NN,NP -> JJ NP,DP -> 'a' NP,VP -> VV_intrans 'to' DP,ROOT -> VP"
    translate_fn = dataset._vocabulary.translate_meaning
    lexicon_str = "T:{},NT:VV_intransitive -> {},T:to,T:a,T:{},NT:JJ -> {},T:{},NT:NN -> {}".format(
        translate_fn("walk"), translate_fn("walk"), translate_fn("big"), translate_fn("big"),
        translate_fn("circle"), translate_fn("circle")
    )
    derivation = Derivation.from_str(rules_str, lexicon_str, dataset._grammar)
    arguments = []
    derivation.meaning(arguments)
    target_str, target_predicate = arguments.pop().to_predicate()
    adverb = ""
    for word in derivation.words():
        if word in dataset._vocabulary.get_adverbs():
            adverb = word
    target_commands, _, target_action = dataset.demonstrate_command(derivation, initial_situation=TEST_SITUATION_1)
    dataset.fill_example(derivation.words(), derivation, TEST_SITUATION_1, target_commands, target_action,
                         target_predicate, visualize=False, splits=["train"], adverb=adverb)
    example = dataset._data_pairs["train"].pop()
    # Round-trip: parse the stored representation back and compare each component.
    (parsed_command, parsed_meaning, parsed_derivation, parsed_situation,
     parsed_target_commands, _, parsed_action) = dataset.parse_example(
        example
    )
    assert example["command"] == dataset.command_repr(parsed_command), "test_example_representation FAILED."
    assert example["meaning"] == dataset.command_repr(parsed_meaning), "test_example_representation FAILED."
    assert example["derivation"] == dataset.derivation_repr(parsed_derivation), "test_example_representation "\
                                                                                "FAILED."
    situation = Situation.from_representation(example["situation"])
    assert situation == parsed_situation, "test_example_representation FAILED."
    assert example["target_commands"] == dataset.command_repr(parsed_target_commands), \
        "test_example_representation FAILED."
    assert example["verb_in_command"] == dataset._vocabulary.translate_word(parsed_action),\
        "test_example_representation FAILED."
    assert example["referred_target"] == ' '.join([dataset._vocabulary.translate_word(target_predicate["size"]),
                                                   dataset._vocabulary.translate_word(target_predicate["color"]),
                                                   dataset._vocabulary.translate_word(target_predicate["noun"])]),\
        "test_example_representation FAILED."
    end = time.time()
    logger.info("test_example_representation PASSED in {} seconds".format(end - start))


def test_initialize_world(dataset):
    """Test that two the same situations get represented in exactly the same image by rendering.py and
    minigrid.py"""
    start = time.time()
    test_situations = [TEST_SITUATION_1, TEST_SITUATION_2, TEST_SITUATION_3, TEST_SITUATION_4]
    # Save the current world state so it can be restored at the end of the test.
    current_situation = dataset._world.get_current_situation()
    current_mission = dataset._world.mission
    for i, test_situation_1 in enumerate(test_situations):
        for j, test_situation_2 in enumerate(test_situations):
            dataset._world.clear_situation()
            dataset.initialize_world(test_situation_1)
            situation_1 = dataset._world.get_current_situation()
            dataset._world.clear_situation()
            dataset.initialize_world(test_situation_2)
            situation_2 = dataset._world.get_current_situation()
            if i == j:
                assert situation_1 == situation_2, "test_initialize_world FAILED."
            else:
                assert situation_1 != situation_2, "test_initialize_world FAILED."
    dataset.initialize_world(current_situation, mission=current_mission)
    end = time.time()
    logger.info("test_initialize_world PASSED in {} seconds".format(end - start))


def test_image_representation_situations(dataset):
    """Test that situations are still the same when they need to be in image / numpy RGB array form."""
    start = time.time()
    current_situation = dataset._world.get_current_situation()
    current_mission = dataset._world.mission
    test_situations = [TEST_SITUATION_1, TEST_SITUATION_2, TEST_SITUATION_3, TEST_SITUATION_4]
    for i, test_situation_1 in enumerate(test_situations):
        for j, test_situation_2 in enumerate(test_situations):
            dataset._world.clear_situation()
            dataset.initialize_world(test_situation_1)
            # Render, write to disk, re-read, and check the round trip is lossless.
            np_situation_image_1 = dataset._world.render(mode='human').getArray()
            numpy_array_to_image(np_situation_image_1, os.path.join(TEST_DIRECTORY, "test_im_1.png"))
            np_situation_image_1_reread = image_to_numpy_array(os.path.join(TEST_DIRECTORY, "test_im_1.png"))
            assert np.array_equal(np_situation_image_1,
                                  np_situation_image_1_reread), "test_image_representation_situations FAILED."
            dataset._world.clear_situation()
            dataset.initialize_world(test_situation_2)
            np_situation_image_2 = dataset._world.render().getArray()
            numpy_array_to_image(np_situation_image_2, os.path.join(TEST_DIRECTORY, "test_im_2.png"))
            np_situation_image_2_reread = image_to_numpy_array(os.path.join(TEST_DIRECTORY, "test_im_2.png"))
            assert np.array_equal(np_situation_image_2,
                                  np_situation_image_2_reread), "test_image_representation_situations FAILED."
            if i == j:
                assert np.array_equal(np_situation_image_1, np_situation_image_2), \
                    "test_image_representation_situations FAILED."
            else:
                assert not np.array_equal(np_situation_image_1, np_situation_image_2), \
                    "test_image_representation_situations FAILED."
    os.remove(os.path.join(TEST_DIRECTORY, "test_im_1.png"))
    os.remove(os.path.join(TEST_DIRECTORY, "test_im_2.png"))
    dataset.initialize_world(current_situation, mission=current_mission)
    end = time.time()
    logger.info("test_image_representation_situations PASSED in {} seconds".format(end - start))


def test_encode_situation(dataset):
    """Test that a hand-built situation encodes to the expected grid tensor."""
    start = time.time()
    current_situation = dataset._world.get_current_situation()
    current_mission = dataset._world.mission
    # The agent and the red circle share cell (7, 2); a green circle sits at (3, 12).
    test_situation = Situation(grid_size=15, agent_position=Position(row=7, column=2),
                               agent_direction=INT_TO_DIR[0],
                               target_object=PositionedObject(object=Object(size=2, color='red', shape='circle'),
                                                              position=Position(row=7, column=2),
                                                              vector=np.array([1, 0, 1])),
                               placed_objects=[PositionedObject(object=Object(size=2, color='red', shape='circle'),
                                                                position=Position(row=7, column=2),
                                                                vector=np.array([1, 0, 1])),
                                               PositionedObject(object=Object(size=4, color='green', shape='circle'),
                                                                position=Position(row=3, column=12),
                                                                vector=np.array([0, 1, 0]))], carrying=None)
    dataset._world.clear_situation()
    dataset.initialize_world(test_situation)
    # Last 5 channels: 1 agent-presence channel followed by 4 direction channels.
    expected_numpy_array = np.zeros([15, 15, dataset._world.grid._num_attributes_object + 1 + 4], dtype='uint8')
    expected_numpy_array[7, 2, -5] = 1
    expected_numpy_array[7, 2, -4:] = np.array([1, 0, 0, 0])
    expected_numpy_array[7, 2, :-5] = dataset._object_vocabulary.get_object_vector(shape='circle', color='red',
                                                                                   size=2)
    expected_numpy_array[3, 12, :-5] = dataset._object_vocabulary.get_object_vector(shape='circle', color='green',
                                                                                    size=4)
    encoded_numpy_array = dataset._world.grid.encode(agent_row=7, agent_column=2, agent_direction=0)
    assert np.array_equal(expected_numpy_array, encoded_numpy_array), "test_encode_situation FAILED."
    dataset.initialize_world(current_situation, mission=current_mission)
    end = time.time()
    logger.info("test_encode_situation PASSED in {} seconds".format(end - start))


def test_k_shot_generalization(dataset):
    """Test that the generalization splits contain only their own examples, and that the
    training split holds at most k examples of each held-out condition."""
    start = time.time()
    current_situation = dataset._world.get_current_situation()
    current_mission = dataset._world.mission
    k_shot_generalization = 5
    dataset.get_data_pairs(max_examples=100000, num_resampling=1, other_objects_sample_percentage=0.5,
                           split_type="generalization", k_shot_generalization=k_shot_generalization)
    # Test that all the splits only contain examples related to their split.
    visual_split_examples = dataset._data_pairs["visual"]
    for example in visual_split_examples:
        target_object = example["situation"]["target_object"]["object"]
        assert target_object["shape"] == "square" and target_object["color"] == "red", \
            "test_k_shot_generalization FAILED in split visual."
    situational_split_1 = dataset._data_pairs["situational_1"]
    for example in situational_split_1:
        direction_to_target = example["situation"]["direction_to_target"]
        assert direction_to_target == "sw", "test_k_shot_generalization FAILED in split situational_1."
    situational_split_2 = dataset._data_pairs["situational_2"]
    for example in situational_split_2:
        referred_target = example["referred_target"]
        assert "small" in referred_target, \
            "test_k_shot_generalization FAILED in split situational_2."
        target_size = example["situation"]["target_object"]["object"]["size"]
        assert target_size == '2', "test_k_shot_generalization FAILED in split situational_2."
    contextual_split = dataset._data_pairs["contextual"]
    for example in contextual_split:
        assert (dataset._vocabulary.translate_meaning(example["verb_in_command"])
                in dataset._vocabulary.get_transitive_verbs()), \
            "test_k_shot_generalization FAILED in split contextual."
        target_object = example["situation"]["target_object"]["object"]
        assert target_object["shape"] == "square" and target_object["size"] == '3', \
            "test_k_shot_generalization FAILED in split contextual."

    # Test that the training set doesn't contain more than k examples of each of the test splits.
    examples_per_split = {"visual": 0, "situational_1": 0, "situational_2": 0, "contextual": 0, "adverb_1": 0}
    for example in dataset._data_pairs["train"]:
        target_object = example["situation"]["target_object"]["object"]
        target_size = target_object["size"]
        direction_to_target = example["situation"]["direction_to_target"]
        referred_target = example["referred_target"]
        if target_object["shape"] == "square" and target_object["color"] == "red":
            examples_per_split["visual"] += 1
        if direction_to_target == "sw":
            examples_per_split["situational_1"] += 1
        # NOTE(review): target_size is compared to int 2 here but to the string '2' in the
        # situational_2 split check above — confirm which representation the data uses.
        if "small" in referred_target and target_size == 2:
            examples_per_split["situational_2"] += 1
        if (dataset._vocabulary.translate_meaning(example["verb_in_command"])
                in dataset._vocabulary.get_transitive_verbs() and
                target_object["shape"] == "square" and target_object["size"] == '3'):
            examples_per_split["contextual"] += 1
    for split, examples_count in examples_per_split.items():
        if split == "adverb_1":
            assert examples_count == k_shot_generalization, \
                "test_k_shot_generalization FAILED in split train for split {}.".format(split)
        else:
            assert examples_count == 0, "test_k_shot_generalization FAILED in split train for split {}.".format(split)
    dataset.initialize_world(current_situation, mission=current_mission)
    end = time.time()
    logger.info("test_k_shot_generalization PASSED in {} seconds".format(end - start))


def run_all_tests():
    """Run every test above against both the regular and the nonce test dataset."""
    test_save_and_load_dataset(TEST_DATASET)
    test_save_and_load_dataset(TEST_DATASET_NONCE)
    test_save_and_load_dataset_nonce()
    test_derivation_from_rules(TEST_DATASET)
    test_derivation_from_rules(TEST_DATASET_NONCE)
    test_derivation_from_string(TEST_DATASET)
    test_derivation_from_string(TEST_DATASET_NONCE)
    test_demonstrate_target_commands_one(TEST_DATASET)
    test_demonstrate_target_commands_one(TEST_DATASET_NONCE)
    test_demonstrate_target_commands_two(TEST_DATASET)
    test_demonstrate_target_commands_two(TEST_DATASET_NONCE)
    test_demonstrate_target_commands_three(TEST_DATASET)
    test_demonstrate_target_commands_three(TEST_DATASET_NONCE)
    test_demonstrate_command_one(TEST_DATASET)
    test_demonstrate_command_one(TEST_DATASET_NONCE)
    test_demonstrate_command_two(TEST_DATASET)
    test_demonstrate_command_two(TEST_DATASET_NONCE)
    test_demonstrate_command_three(TEST_DATASET)
    test_demonstrate_command_three(TEST_DATASET_NONCE)
    test_demonstrate_command_four(TEST_DATASET)
    test_demonstrate_command_four(TEST_DATASET_NONCE)
    test_demonstrate_command_five(TEST_DATASET)
    test_demonstrate_command_five(TEST_DATASET_NONCE)
    test_demonstrate_command_six(TEST_DATASET)
    test_demonstrate_command_six(TEST_DATASET_NONCE)
    test_find_referred_target_one(TEST_DATASET)
    test_find_referred_target_one(TEST_DATASET_NONCE)
    test_find_referred_target_two(TEST_DATASET)
    test_find_referred_target_two(TEST_DATASET_NONCE)
    test_generate_possible_targets_one(TEST_DATASET)
    test_generate_possible_targets_one(TEST_DATASET_NONCE)
    test_generate_possible_targets_two(TEST_DATASET)
    test_generate_possible_targets_two(TEST_DATASET_NONCE)
    test_generate_situations_one(TEST_DATASET)
    test_generate_situations_one(TEST_DATASET_NONCE)
    test_generate_situations_two(TEST_DATASET)
    test_generate_situations_two(TEST_DATASET_NONCE)
    test_generate_situations_three(TEST_DATASET)
    test_generate_situations_three(TEST_DATASET_NONCE)
    test_situation_representation_eq()
    test_example_representation_eq(TEST_DATASET)
    test_example_representation_eq(TEST_DATASET_NONCE)
    test_example_representation(TEST_DATASET)
    test_example_representation(TEST_DATASET_NONCE)
    test_initialize_world(TEST_DATASET)
    test_initialize_world(TEST_DATASET_NONCE)
    test_image_representation_situations(TEST_DATASET)
    test_image_representation_situations(TEST_DATASET_NONCE)
    test_encode_situation(TEST_DATASET)
    test_encode_situation(TEST_DATASET_NONCE)
    # The k-shot generalization tests are currently disabled.
    #test_k_shot_generalization(TEST_DATASET)
    #test_k_shot_generalization(TEST_DATASET_NONCE)
    shutil.rmtree(TEST_DIRECTORY)
{ "content_hash": "17da3717f7b50ae84b76c43d95d8aea2", "timestamp": "", "source": "github", "line_count": 809, "max_line_length": 121, "avg_line_length": 60.58961681087763, "alnum_prop": 0.6309647673256217, "repo_name": "LauraRuis/groundedSCAN", "id": "b2a65f4fa6f094c996c527515c6b18195577f403", "size": "49063", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "GroundedScan/dataset_test.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "284405" } ], "symlink_target": "" }
#ifndef WASMINT_SEXPRPARSER_H #define WASMINT_SEXPRPARSER_H #include "CharacterStream.h" #include "SExpr.h" #include "StringCharacterStream.h" #include "FileCharacterStream.h" namespace wasm_module { namespace sexpr { class UnknownDataAtEndOfStream : public std::exception { }; ExceptionMessage(InvalidEscapeSequence) class SExprParser { CharacterStream &stream_; void parseValues(SExpr &parent, bool allowsEndOfStream); public: SExprParser(CharacterStream& stream); SExpr parse(bool allowExitBeforeEOF = false); static SExpr parseString(const std::string& str) { StringCharacterStream stream(str); SExprParser parser(stream); return parser.parse(); } static SExpr parseFile(const std::string& filePath) { FileCharacterStream stream(filePath); SExprParser parser(stream); return parser.parse(); } }; }} #endif //WASMINT_SEXPRPARSER_H
{ "content_hash": "2b5c9acb22ceb40a0ad5e7d61d8ae801", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 64, "avg_line_length": 22.931818181818183, "alnum_prop": 0.6600594648166501, "repo_name": "WebAssembly/wasmint", "id": "a03cfa6133b5e701be25564689fad06a043c1890", "size": "1619", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "wasm-module/src/sexpr_parsing/SExprParser.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "10699" }, { "name": "C++", "bytes": "877467" }, { "name": "CMake", "bytes": "20207" }, { "name": "Shell", "bytes": "3206" } ], "symlink_target": "" }
from __future__ import absolute_import, division, print_function # Import standard modules import copy import numpy as np # Import astronomical modules from astropy.stats import sigma_clip, sigma_clipped_stats # Import the relevant PTS classes and modules from . import general from ..basics.mask import Mask # ----------------------------------------------------------------- # Calculate sigma-to-FWHM and FWHM-to-sigma conversion factors sigma_to_fwhm = (8 * np.log(2))**0.5 fwhm_to_sigma = 1.0 / sigma_to_fwhm # ----------------------------------------------------------------- def sigma_clip_mask_list(data, sigma=3.0, mask=None): """ This function ... :param data: :param sigma: :param mask: :return: """ masked_list = sigma_clip(data, sigma=sigma, iters=None, copy=False) new_mask = copy.deepcopy(mask) if mask is not None else [0]*len(data) for i, masked in enumerate(masked_list.mask): if masked: new_mask[i] = True # Return the new or updated mask return new_mask # ----------------------------------------------------------------- def sigma_clip_mask(data, sigma_level=3.0, mask=None): """ This function ... 
:param data: :param sigma_level: :param mask: :return: """ # Split the x, y and z values of the data, without the masked values x_values, y_values, z_values = general.split_xyz(data, mask=mask) # Sigma-clip z-values that are outliers masked_z_values = sigma_clip(z_values, sigma=sigma_level, iters=None, copy=False) # Copy the mask or create a new one if none was provided new_mask = copy.deepcopy(mask) if mask is not None else Mask(np.zeros_like(data)) for i, masked in enumerate(masked_z_values.mask): if masked: x = x_values[i] y = y_values[i] new_mask[y,x] = True #if not isinstance(new_mask, Mask): print(new_mask, mask) # Assert the mask is of type 'Mask' assert isinstance(new_mask, Mask) # Return the new or updated mask return new_mask # ----------------------------------------------------------------- def sigma_clipped_median(data, sigma=3.0, mask=None): """ This function ... :param data: :param sigma: :param mask: :return: """ # Calculate the sigma-clipped mean and median _, median, _ = sigma_clipped_stats(data, mask=mask, sigma=sigma) # Return the median value return median # ----------------------------------------------------------------- def sigma_clipped_statistics(data, sigma=3.0, mask=None): """ This function ... :param data: :param sigma: :param mask: :return: """ # Calculate the sigma-clipped mean and median mean, median, stddev = sigma_clipped_stats(data, mask=mask, sigma=sigma) # Return the statistical parameters return mean, median, stddev # ----------------------------------------------------------------- def sigma_clip_split(input_list, criterion, sigma=3.0, only_high=False, only_low=False, nans="low"): """ This function ... 
:param input_list: :param criterion: :param sigma: :param only_high: :param only_low: :param nans: :return: """ # Initialize an empty list of widths determinants = [] # Loop over all the star candidates and calculate their width for item in input_list: determinants.append(criterion(item)) # Use sigma clipping to seperate stars and unidentified objects mask = sigma_clip_mask_list(determinants, sigma=sigma) # Calculate the mean value of the determinants that are not masked mean = np.ma.mean(np.ma.masked_array(determinants, mask=mask)) # Create a seperate list for the stars and for the ufos valid_list = [] invalid_list = [] # Loop over all items in the input list, putting them in either the valid or invalid list for index, item in enumerate(input_list): value = criterion(item) if only_high: if mask[index] and value > mean: invalid_list.append(item) else: valid_list.append(item) elif only_low: if mask[index] and value < mean: invalid_list.append(item) else: valid_list.append(item) else: if mask[index]: invalid_list.append(item) else: valid_list.append(item) # Return the valid and invalid lists return valid_list, invalid_list # ----------------------------------------------------------------- def cutoff(values, method, limit): """ This function ... 
:param values: :param method: :param limit: """ # Percentage method if method == "percentage": # Create a sorted list for the input values sorted_values = sorted(values) # Determine the splitting point split = (1.0-limit) * len(sorted_values) index = int(round(split)) # Return the corresponding value in the sorted list return sorted_values[index] # Sigma-clipping method elif method == "sigma_clip": # Perform sigma clipping on the input list masked_values = sigma_clip(np.array(values), sigma=limit, iters=None, copy=False) # Calculate the maximum of the masked array return np.ma.max(masked_values) else: raise ValueError("Invalid cutoff method (must be 'percentage' or 'sigma_clip'") # -----------------------------------------------------------------
{ "content_hash": "02828f0c15e262d49b502255de53a08a", "timestamp": "", "source": "github", "line_count": 205, "max_line_length": 100, "avg_line_length": 26.692682926829267, "alnum_prop": 0.5778508771929824, "repo_name": "Stargrazer82301/CAAPR", "id": "28a575ee13d515782b2f7d4ab760baf7046b9194", "size": "5964", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CAAPR/CAAPR_AstroMagic/PTS/pts/magic/tools/statistics.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "51" }, { "name": "CSS", "bytes": "21972" }, { "name": "HTML", "bytes": "2408" }, { "name": "Prolog", "bytes": "16433" }, { "name": "Python", "bytes": "4465217" }, { "name": "Shell", "bytes": "3793" } ], "symlink_target": "" }
require_relative '../../request'

module Rack
  module Auth
    # Wraps a Rack env hash and exposes the pieces of its HTTP
    # Authorization header (scheme and parameters), memoizing each lookup.
    class AbstractRequest
      def initialize(env)
        @env = env
      end

      # Lazily-built Rack::Request for the wrapped env.
      def request
        @request ||= Request.new(@env)
      end

      # True when an authorization header is present and non-nil.
      def provided?
        !authorization_key.nil? && valid?
      end

      # True when the env actually carries a value under the detected key.
      def valid?
        !@env[authorization_key].nil?
      end

      # The header split into at most two pieces: scheme and the rest.
      def parts
        @parts ||= @env[authorization_key].split(' ', 2)
      end

      # Lower-cased authorization scheme (e.g. "basic"), or nil when absent.
      def scheme
        @scheme ||= parts.first&.downcase
      end

      # Everything after the scheme.
      def params
        @params ||= parts.last
      end

      private

      # Header key variants checked in order of preference.
      AUTHORIZATION_KEYS = ['HTTP_AUTHORIZATION', 'X-HTTP_AUTHORIZATION', 'X_HTTP_AUTHORIZATION']

      # First key variant present in the env, memoized.
      def authorization_key
        @authorization_key ||= AUTHORIZATION_KEYS.find { |candidate| @env.key?(candidate) }
      end
    end
  end
end
{ "content_hash": "d8092b6c0315adebeafe06c47e03d1e3", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 97, "avg_line_length": 17.48936170212766, "alnum_prop": 0.5559610705596107, "repo_name": "MikeMcQuaid/brew", "id": "f872331563ebbd8bf9527c1cd83969a3553b2160", "size": "853", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "Library/Homebrew/vendor/bundle/ruby/2.6.0/gems/rack-3.0.0/lib/rack/auth/abstract/request.rb", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Dockerfile", "bytes": "1852" }, { "name": "HTML", "bytes": "29627" }, { "name": "PostScript", "bytes": "485" }, { "name": "Roff", "bytes": "108664" }, { "name": "Ruby", "bytes": "6013617" }, { "name": "Shell", "bytes": "264029" }, { "name": "Swift", "bytes": "2161" } ], "symlink_target": "" }
//---------------------------------------------------------------------------- // <copyright file="Level.cs" company="Delft University of Technology"> // Copyright 2015, Delft University of Technology // // This software is licensed under the terms of the MIT License. // A copy of the license should be included with this software. If not, // see http://opensource.org/licenses/MIT for the full license. // </copyright> //---------------------------------------------------------------------------- namespace Level { using System.Collections.ObjectModel; /// <summary> /// Information about a level. /// </summary> public class Level { /// <summary> /// Description of level properties. /// </summary> public readonly LevelProperties Properties; /// <summary> /// List of objects within the level. /// </summary> public readonly ReadOnlyCollection<LevelObject> Objects; /// <summary> /// Initializes a new instance of the <see cref="Level"/> class. /// </summary> /// <param name="properties">Level properties.</param> /// <param name="objects">Objects within level.</param> public Level(LevelProperties properties, ReadOnlyCollection<LevelObject> objects) { this.Properties = properties; this.Objects = objects; } } }
{ "content_hash": "94196ccd7912bbb26f16b341af5f8134", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 89, "avg_line_length": 36.5, "alnum_prop": 0.5321917808219178, "repo_name": "thijser/ARGAME", "id": "7942c5decbe1e6c928ad49f69bb8be574e07a0ff", "size": "1462", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ARGame/Assets/Scripts/Level/Level.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1479" }, { "name": "C", "bytes": "85437" }, { "name": "C#", "bytes": "286398" }, { "name": "C++", "bytes": "1376379" }, { "name": "GLSL", "bytes": "15967" }, { "name": "PostScript", "bytes": "49885" }, { "name": "Prolog", "bytes": "2499" }, { "name": "QMake", "bytes": "4623" }, { "name": "TeX", "bytes": "194350" } ], "symlink_target": "" }
from __future__ import unicode_literals import copy import os import pickle import warnings from django.core.exceptions import SuspiciousOperation from django.http import (QueryDict, HttpResponse, HttpResponseRedirect, HttpResponsePermanentRedirect, HttpResponseNotAllowed, HttpResponseNotModified, StreamingHttpResponse, SimpleCookie, BadHeaderError, parse_cookie) from django.test import TestCase from django.utils.encoding import smart_str from django.utils import six from django.utils import unittest class QueryDictTests(unittest.TestCase): def test_missing_key(self): q = QueryDict(str('')) self.assertRaises(KeyError, q.__getitem__, 'foo') def test_immutability(self): q = QueryDict(str('')) self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar') self.assertRaises(AttributeError, q.setlist, 'foo', ['bar']) self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar']) self.assertRaises(AttributeError, q.update, {'foo': 'bar'}) self.assertRaises(AttributeError, q.pop, 'foo') self.assertRaises(AttributeError, q.popitem) self.assertRaises(AttributeError, q.clear) def test_immutable_get_with_default(self): q = QueryDict(str('')) self.assertEqual(q.get('foo', 'default'), 'default') def test_immutable_basic_operations(self): q = QueryDict(str('')) self.assertEqual(q.getlist('foo'), []) if not six.PY3: self.assertEqual(q.has_key('foo'), False) self.assertEqual('foo' in q, False) self.assertEqual(list(six.iteritems(q)), []) self.assertEqual(list(six.iterlists(q)), []) self.assertEqual(list(six.iterkeys(q)), []) self.assertEqual(list(six.itervalues(q)), []) self.assertEqual(len(q), 0) self.assertEqual(q.urlencode(), '') def test_single_key_value(self): """Test QueryDict with one key/value pair""" q = QueryDict(str('foo=bar')) self.assertEqual(q['foo'], 'bar') self.assertRaises(KeyError, q.__getitem__, 'bar') self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar') self.assertEqual(q.get('foo', 'default'), 'bar') self.assertEqual(q.get('bar', 
'default'), 'default') self.assertEqual(q.getlist('foo'), ['bar']) self.assertEqual(q.getlist('bar'), []) self.assertRaises(AttributeError, q.setlist, 'foo', ['bar']) self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar']) if not six.PY3: self.assertTrue(q.has_key('foo')) self.assertTrue('foo' in q) if not six.PY3: self.assertFalse(q.has_key('bar')) self.assertFalse('bar' in q) self.assertEqual(list(six.iteritems(q)), [('foo', 'bar')]) self.assertEqual(list(six.iterlists(q)), [('foo', ['bar'])]) self.assertEqual(list(six.iterkeys(q)), ['foo']) self.assertEqual(list(six.itervalues(q)), ['bar']) self.assertEqual(len(q), 1) self.assertRaises(AttributeError, q.update, {'foo': 'bar'}) self.assertRaises(AttributeError, q.pop, 'foo') self.assertRaises(AttributeError, q.popitem) self.assertRaises(AttributeError, q.clear) self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar') self.assertEqual(q.urlencode(), 'foo=bar') def test_urlencode(self): q = QueryDict(str(''), mutable=True) q['next'] = '/a&b/' self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F') self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/') q = QueryDict(str(''), mutable=True) q['next'] = '/t\xebst&key/' self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F') self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/') def test_mutable_copy(self): """A copy of a QueryDict is mutable.""" q = QueryDict(str('')).copy() self.assertRaises(KeyError, q.__getitem__, "foo") q['name'] = 'john' self.assertEqual(q['name'], 'john') def test_mutable_delete(self): q = QueryDict(str('')).copy() q['name'] = 'john' del q['name'] self.assertFalse('name' in q) def test_basic_mutable_operations(self): q = QueryDict(str('')).copy() q['name'] = 'john' self.assertEqual(q.get('foo', 'default'), 'default') self.assertEqual(q.get('name', 'default'), 'john') self.assertEqual(q.getlist('name'), ['john']) self.assertEqual(q.getlist('foo'), []) q.setlist('foo', ['bar', 'baz']) self.assertEqual(q.get('foo', 
'default'), 'baz') self.assertEqual(q.getlist('foo'), ['bar', 'baz']) q.appendlist('foo', 'another') self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another']) self.assertEqual(q['foo'], 'another') if not six.PY3: self.assertTrue(q.has_key('foo')) self.assertTrue('foo' in q) self.assertEqual(sorted(list(six.iteritems(q))), [('foo', 'another'), ('name', 'john')]) self.assertEqual(sorted(list(six.iterlists(q))), [('foo', ['bar', 'baz', 'another']), ('name', ['john'])]) self.assertEqual(sorted(list(six.iterkeys(q))), ['foo', 'name']) self.assertEqual(sorted(list(six.itervalues(q))), ['another', 'john']) self.assertEqual(len(q), 2) q.update({'foo': 'hello'}) self.assertEqual(q['foo'], 'hello') self.assertEqual(q.get('foo', 'not available'), 'hello') self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello']) self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello']) self.assertEqual(q.pop('foo', 'not there'), 'not there') self.assertEqual(q.get('foo', 'not there'), 'not there') self.assertEqual(q.setdefault('foo', 'bar'), 'bar') self.assertEqual(q['foo'], 'bar') self.assertEqual(q.getlist('foo'), ['bar']) self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar']) q.clear() self.assertEqual(len(q), 0) def test_multiple_keys(self): """Test QueryDict with two key/value pairs with same keys.""" q = QueryDict(str('vote=yes&vote=no')) self.assertEqual(q['vote'], 'no') self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar') self.assertEqual(q.get('vote', 'default'), 'no') self.assertEqual(q.get('foo', 'default'), 'default') self.assertEqual(q.getlist('vote'), ['yes', 'no']) self.assertEqual(q.getlist('foo'), []) self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz']) self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz']) self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar']) if not six.PY3: self.assertEqual(q.has_key('vote'), True) self.assertEqual('vote' in q, True) if not six.PY3: 
self.assertEqual(q.has_key('foo'), False) self.assertEqual('foo' in q, False) self.assertEqual(list(six.iteritems(q)), [('vote', 'no')]) self.assertEqual(list(six.iterlists(q)), [('vote', ['yes', 'no'])]) self.assertEqual(list(six.iterkeys(q)), ['vote']) self.assertEqual(list(six.itervalues(q)), ['no']) self.assertEqual(len(q), 1) self.assertRaises(AttributeError, q.update, {'foo': 'bar'}) self.assertRaises(AttributeError, q.pop, 'foo') self.assertRaises(AttributeError, q.popitem) self.assertRaises(AttributeError, q.clear) self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar') self.assertRaises(AttributeError, q.__delitem__, 'vote') if not six.PY3: def test_invalid_input_encoding(self): """ QueryDicts must be able to handle invalid input encoding (in this case, bad UTF-8 encoding). This test doesn't apply under Python 3 because the URL is a string and not a bytestring. """ q = QueryDict(str(b'foo=bar&foo=\xff')) self.assertEqual(q['foo'], '\ufffd') self.assertEqual(q.getlist('foo'), ['bar', '\ufffd']) def test_pickle(self): q = QueryDict(str('')) q1 = pickle.loads(pickle.dumps(q, 2)) self.assertEqual(q == q1, True) q = QueryDict(str('a=b&c=d')) q1 = pickle.loads(pickle.dumps(q, 2)) self.assertEqual(q == q1, True) q = QueryDict(str('a=b&c=d&a=1')) q1 = pickle.loads(pickle.dumps(q, 2)) self.assertEqual(q == q1, True) def test_update_from_querydict(self): """Regression test for #8278: QueryDict.update(QueryDict)""" x = QueryDict(str("a=1&a=2"), mutable=True) y = QueryDict(str("a=3&a=4")) x.update(y) self.assertEqual(x.getlist('a'), ['1', '2', '3', '4']) def test_non_default_encoding(self): """#13572 - QueryDict with a non-default encoding""" q = QueryDict(str('cur=%A4'), encoding='iso-8859-15') self.assertEqual(q.encoding, 'iso-8859-15') self.assertEqual(list(six.iteritems(q)), [('cur', '€')]) self.assertEqual(q.urlencode(), 'cur=%A4') q = q.copy() self.assertEqual(q.encoding, 'iso-8859-15') self.assertEqual(list(six.iteritems(q)), [('cur', '€')]) 
self.assertEqual(q.urlencode(), 'cur=%A4') self.assertEqual(copy.copy(q).encoding, 'iso-8859-15') self.assertEqual(copy.deepcopy(q).encoding, 'iso-8859-15') class HttpResponseTests(unittest.TestCase): def test_headers_type(self): r = HttpResponse() # The following tests explicitly test types in addition to values # because in Python 2 u'foo' == b'foo'. # ASCII unicode or bytes values are converted to native strings. r['key'] = 'test' self.assertEqual(r['key'], str('test')) self.assertIsInstance(r['key'], str) r['key'] = 'test'.encode('ascii') self.assertEqual(r['key'], str('test')) self.assertIsInstance(r['key'], str) # Latin-1 unicode or bytes values are also converted to native strings. r['key'] = 'café' self.assertEqual(r['key'], smart_str('café', 'latin-1')) self.assertIsInstance(r['key'], str) r['key'] = 'café'.encode('latin-1') self.assertEqual(r['key'], smart_str('café', 'latin-1')) self.assertIsInstance(r['key'], str) # Other unicode values are MIME-encoded (there's no way to pass them as bytes). 
r['key'] = '†' self.assertEqual(r['key'], str('=?utf-8?b?4oCg?=')) self.assertIsInstance(r['key'], str) # The response also converts unicode or bytes keys to strings, but requires # them to contain ASCII r = HttpResponse() del r['Content-Type'] r['foo'] = 'bar' l = list(r.items()) self.assertEqual(len(l), 1) self.assertEqual(l[0], ('foo', 'bar')) self.assertIsInstance(l[0][0], str) r = HttpResponse() del r['Content-Type'] r[b'foo'] = 'bar' l = list(r.items()) self.assertEqual(len(l), 1) self.assertEqual(l[0], ('foo', 'bar')) self.assertIsInstance(l[0][0], str) r = HttpResponse() self.assertRaises(UnicodeError, r.__setitem__, 'føø', 'bar') self.assertRaises(UnicodeError, r.__setitem__, 'føø'.encode('utf-8'), 'bar') def test_newlines_in_headers(self): # Bug #10188: Do not allow newlines in headers (CR or LF) r = HttpResponse() self.assertRaises(BadHeaderError, r.__setitem__, 'test\rstr', 'test') self.assertRaises(BadHeaderError, r.__setitem__, 'test\nstr', 'test') def test_dict_behavior(self): """ Test for bug #14020: Make HttpResponse.get work like dict.get """ r = HttpResponse() self.assertEqual(r.get('test'), None) def test_non_string_content(self): #Bug 16494: HttpResponse should behave consistently with non-strings r = HttpResponse(12345) self.assertEqual(r.content, b'12345') #test content via property r = HttpResponse() r.content = 12345 self.assertEqual(r.content, b'12345') def test_iter_content(self): r = HttpResponse(['abc', 'def', 'ghi']) self.assertEqual(r.content, b'abcdefghi') #test iter content via property r = HttpResponse() r.content = ['idan', 'alex', 'jacob'] self.assertEqual(r.content, b'idanalexjacob') r = HttpResponse() r.content = [1, 2, 3] self.assertEqual(r.content, b'123') #test retrieval explicitly using iter (deprecated) and odd inputs r = HttpResponse() r.content = ['1', '2', 3, '\u079e'] with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always", PendingDeprecationWarning) my_iter = iter(r) 
self.assertEqual(w[0].category, PendingDeprecationWarning) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always", PendingDeprecationWarning) result = list(my_iter) self.assertEqual(w[0].category, PendingDeprecationWarning) #'\xde\x9e' == unichr(1950).encode('utf-8') self.assertEqual(result, [b'1', b'2', b'3', b'\xde\x9e']) self.assertEqual(r.content, b'123\xde\x9e') #with Content-Encoding header r = HttpResponse() r['Content-Encoding'] = 'winning' r.content = [b'abc', b'def'] self.assertEqual(r.content, b'abcdef') r.content = ['\u079e'] self.assertRaises(TypeError if six.PY3 else UnicodeEncodeError, getattr, r, 'content') # .content can safely be accessed multiple times. r = HttpResponse(iter(['hello', 'world'])) self.assertEqual(r.content, r.content) self.assertEqual(r.content, b'helloworld') # accessing the iterator works (once) after accessing .content self.assertEqual(b''.join(r), b'helloworld') self.assertEqual(b''.join(r), b'') # accessing .content still works self.assertEqual(r.content, b'helloworld') # XXX accessing .content doesn't work if the response was iterated first # XXX change this when the deprecation completes in HttpResponse r = HttpResponse(iter(['hello', 'world'])) with warnings.catch_warnings(): warnings.simplefilter("ignore", PendingDeprecationWarning) self.assertEqual(b''.join(r), b'helloworld') self.assertEqual(r.content, b'') # not the expected result! # additional content can be written to the response. 
r = HttpResponse(iter(['hello', 'world'])) self.assertEqual(r.content, b'helloworld') r.write('!') self.assertEqual(r.content, b'helloworld!') def test_iterator_isnt_rewound(self): # Regression test for #13222 r = HttpResponse('abc') i = iter(r) self.assertEqual(list(i), [b'abc']) self.assertEqual(list(i), []) def test_file_interface(self): r = HttpResponse() r.write(b"hello") self.assertEqual(r.tell(), 5) r.write("привет") self.assertEqual(r.tell(), 17) r = HttpResponse(['abc']) r.write('def') self.assertEqual(r.tell(), 6) self.assertEqual(r.content, b'abcdef') def test_unsafe_redirect(self): bad_urls = [ 'data:text/html,<script>window.alert("xss")</script>', 'mailto:test@example.com', 'file:///etc/passwd', ] for url in bad_urls: self.assertRaises(SuspiciousOperation, HttpResponseRedirect, url) self.assertRaises(SuspiciousOperation, HttpResponsePermanentRedirect, url) class HttpResponseSubclassesTests(TestCase): def test_redirect(self): response = HttpResponseRedirect('/redirected/') self.assertEqual(response.status_code, 302) # Test that standard HttpResponse init args can be used response = HttpResponseRedirect('/redirected/', content='The resource has temporarily moved', content_type='text/html') self.assertContains(response, 'The resource has temporarily moved', status_code=302) def test_not_modified(self): response = HttpResponseNotModified() self.assertEqual(response.status_code, 304) # 304 responses should not have content/content-type with self.assertRaises(AttributeError): response.content = "Hello dear" self.assertNotIn('content-type', response) def test_not_allowed(self): response = HttpResponseNotAllowed(['GET']) self.assertEqual(response.status_code, 405) # Test that standard HttpResponse init args can be used response = HttpResponseNotAllowed(['GET'], content='Only the GET method is allowed', content_type='text/html') self.assertContains(response, 'Only the GET method is allowed', status_code=405) class StreamingHttpResponseTests(TestCase): def 
test_streaming_response(self): r = StreamingHttpResponse(iter(['hello', 'world'])) # iterating over the response itself yields bytestring chunks. chunks = list(r) self.assertEqual(chunks, [b'hello', b'world']) for chunk in chunks: self.assertIsInstance(chunk, six.binary_type) # and the response can only be iterated once. self.assertEqual(list(r), []) # even when a sequence that can be iterated many times, like a list, # is given as content. r = StreamingHttpResponse(['abc', 'def']) self.assertEqual(list(r), [b'abc', b'def']) self.assertEqual(list(r), []) # streaming responses don't have a `content` attribute. self.assertFalse(hasattr(r, 'content')) # and you can't accidentally assign to a `content` attribute. with self.assertRaises(AttributeError): r.content = 'xyz' # but they do have a `streaming_content` attribute. self.assertTrue(hasattr(r, 'streaming_content')) # that exists so we can check if a response is streaming, and wrap or # replace the content iterator. r.streaming_content = iter(['abc', 'def']) r.streaming_content = (chunk.upper() for chunk in r.streaming_content) self.assertEqual(list(r), [b'ABC', b'DEF']) # coercing a streaming response to bytes doesn't return a complete HTTP # message like a regular response does. it only gives us the headers. r = StreamingHttpResponse(iter(['hello', 'world'])) self.assertEqual( six.binary_type(r), b'Content-Type: text/html; charset=utf-8') # and this won't consume its content. self.assertEqual(list(r), [b'hello', b'world']) # additional content cannot be written to the response. r = StreamingHttpResponse(iter(['hello', 'world'])) with self.assertRaises(Exception): r.write('!') # and we can't tell the current position. with self.assertRaises(Exception): r.tell() class FileCloseTests(TestCase): def test_response(self): filename = os.path.join(os.path.dirname(__file__), 'abc.txt') # file isn't closed until we close the response. 
file1 = open(filename) r = HttpResponse(file1) self.assertFalse(file1.closed) r.close() self.assertTrue(file1.closed) # don't automatically close file when we finish iterating the response. file1 = open(filename) r = HttpResponse(file1) self.assertFalse(file1.closed) with warnings.catch_warnings(): warnings.simplefilter("ignore", PendingDeprecationWarning) list(r) self.assertFalse(file1.closed) r.close() self.assertTrue(file1.closed) # when multiple file are assigned as content, make sure they are all # closed with the response. file1 = open(filename) file2 = open(filename) r = HttpResponse(file1) r.content = file2 self.assertFalse(file1.closed) self.assertFalse(file2.closed) r.close() self.assertTrue(file1.closed) self.assertTrue(file2.closed) def test_streaming_response(self): filename = os.path.join(os.path.dirname(__file__), 'abc.txt') # file isn't closed until we close the response. file1 = open(filename) r = StreamingHttpResponse(file1) self.assertFalse(file1.closed) r.close() self.assertTrue(file1.closed) # when multiple file are assigned as content, make sure they are all # closed with the response. 
file1 = open(filename) file2 = open(filename) r = StreamingHttpResponse(file1) r.streaming_content = file2 self.assertFalse(file1.closed) self.assertFalse(file2.closed) r.close() self.assertTrue(file1.closed) self.assertTrue(file2.closed) class CookieTests(unittest.TestCase): def test_encode(self): """ Test that we don't output tricky characters in encoded value """ c = SimpleCookie() c['test'] = "An,awkward;value" self.assertTrue(";" not in c.output().rstrip(';')) # IE compat self.assertTrue("," not in c.output().rstrip(';')) # Safari compat def test_decode(self): """ Test that we can still preserve semi-colons and commas """ c = SimpleCookie() c['test'] = "An,awkward;value" c2 = SimpleCookie() c2.load(c.output()) self.assertEqual(c['test'].value, c2['test'].value) def test_decode_2(self): """ Test that we haven't broken normal encoding """ c = SimpleCookie() c['test'] = b"\xf0" c2 = SimpleCookie() c2.load(c.output()) self.assertEqual(c['test'].value, c2['test'].value) def test_nonstandard_keys(self): """ Test that a single non-standard cookie name doesn't affect all cookies. Ticket #13007. """ self.assertTrue('good_cookie' in parse_cookie('good_cookie=yes;bad:cookie=yes').keys()) def test_repeated_nonstandard_keys(self): """ Test that a repeated non-standard name doesn't affect all cookies. Ticket #15852 """ self.assertTrue('good_cookie' in parse_cookie('a:=b; a:=c; good_cookie=yes').keys()) def test_httponly_after_load(self): """ Test that we can use httponly attribute on cookies that we load """ c = SimpleCookie() c.load("name=val") c['name']['httponly'] = True self.assertTrue(c['name']['httponly'])
{ "content_hash": "4791c2c88d2a89661d268a658f321075", "timestamp": "", "source": "github", "line_count": 589, "max_line_length": 95, "avg_line_length": 39.49575551782682, "alnum_prop": 0.5927868288698792, "repo_name": "chrisfranzen/django", "id": "2d172ad0e0aa1faec2009dfd8f5ea083e716593a", "size": "23309", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/regressiontests/httpwrappers/tests.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "42663" }, { "name": "HTML", "bytes": "95024" }, { "name": "JavaScript", "bytes": "94313" }, { "name": "Python", "bytes": "8216479" }, { "name": "Shell", "bytes": "809" }, { "name": "Smarty", "bytes": "130" } ], "symlink_target": "" }
class Teammates::InvitationsController < Devise::InvitationsController before_filter :configure_permitted_parameters protected def configure_permitted_parameters devise_parameter_sanitizer.permit( :accept_invitation, keys: [:name, :city, :email, :password, :password_confirmation, :invitation_token] ) end private def invite_resource super do |u| u.team = current_teammate.team end end # def accept_resource # resource = resource_class.accept_invitation!(update_resource_params) # Analytics.report('invite.accept', resource.id) # resource # end end
{ "content_hash": "63b9dcbfca8e10ffd5249cb60cde4433", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 74, "avg_line_length": 22.571428571428573, "alnum_prop": 0.6930379746835443, "repo_name": "yakovenkodenis/itivation-hackathon", "id": "dc340bf0390c4b50ba4f404e621abf3e8b1dd6b7", "size": "632", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/controllers/teammates/invitations_controller.rb", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "2842671" }, { "name": "HTML", "bytes": "102108" }, { "name": "JavaScript", "bytes": "1861743" }, { "name": "Ruby", "bytes": "101931" } ], "symlink_target": "" }
package com.quakearts.syshub.webapp.helpers.utils; import java.io.IOException; import java.io.StringWriter; public class HtmlUtils { private HtmlUtils() { } public static String escape(String value){ try { StringWriter stringWriter = new StringWriter(); com.sun.faces.util.HtmlUtils.writeText(stringWriter, true, true, new char[1028], value.toCharArray()); return stringWriter.toString(); } catch (IOException e) { return ""; } } }
{ "content_hash": "125ab3b5e7b10f6d6eef5adde87d9be7", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 105, "avg_line_length": 23.95, "alnum_prop": 0.6931106471816284, "repo_name": "kwakutwumasi/Quakearts-JSF-Webtools", "id": "1fc5f1a5189e64bb77c1f5592750ff7fc589419b", "size": "1083", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "qa-syshub-webapp/src/main/java/com/quakearts/syshub/webapp/helpers/utils/HtmlUtils.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "774347" }, { "name": "HTML", "bytes": "155531" }, { "name": "Java", "bytes": "3057604" }, { "name": "JavaScript", "bytes": "4569" }, { "name": "Smarty", "bytes": "277" } ], "symlink_target": "" }
<!DOCTYPE HTML> <html ng-app="queueapp"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>Twitter Queue Bot Admin Panel</title> <link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.1/css/bootstrap.min.css"> <link rel="stylesheet" href="assets/css/main.css"> <script src="https://code.jquery.com/jquery-2.1.0.min.js"></script> <script src="https://netdna.bootstrapcdn.com/bootstrap/3.1.1/js/bootstrap.min.js"></script> <script src="https://code.angularjs.org/1.3.0-beta.5/angular.min.js"></script> <script src="assets/js/app.js"></script> </head> <body ng-controller="QueueController"> <div class="modal fade" id="warningModal"> <div class="modal-dialog"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button> <h4 class="modal-title">Too Many Characters!</h4> </div> <div class="modal-body"> <p>This tweet is over 140 characters. Make sure you only have 140 characters or less!</p> </div> <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">Ok</button> </div> </div> </div> </div> <div id="wrapper" class="container"> <header> <div class="row"> <div class="col-md-12 center"> <h1>{{title}} Admin Panel</h1> <p><a href="https://twitter.com/{{twitter}}" target="_blank">View @{{twitter}} Twitter Account</a></p> </div> </div> </header> <section class="row"> <article class="col-md-6"> <h3>Next Tweet:</h3> <div class="alert alert-info center featured-tweet"> &quot;<span id="next-tweet">{{getFirstTweet().content}}</span>&quot; </div> </article> <article class="col-md-6"> <h3>Add a Tweet:</h3> <form role="form"> <div class="form-group"> <textarea class="form-control" rows="2" id="tweetBody" ng-model="newTweet" ng-change="checkLength();" placeholder="Here is my tweet. 
#hashtag"></textarea> </div> <div class="form-group center"> <button type="button" class="btn btn-primary btn-lg" id="submitTweet" ng-click="submitTweet();">Add to Queue</button> </div> </form> <p class="center" ng-class="{red: newTweet.length > 140}">{{ 140 - newTweet.length }} chars left</p> </article> </section> <section class="row"> <article class="col-md-12"> <h3>Tweet Queue:</h3> <table class="table table-striped table-bordered center"> <tr ng-repeat="tweet in tweets | orderBy:'+order'"> <!-- Order + 1 because order is 0-indexed --> <td class="tweet-id">{{tweet.order + 1}}</td> <td class="tweet-body">{{tweet.content}}</td> <td class="tweet-actions"> <button type="button" class="btn btn-sm btn-danger tweet-btn" ng-click="deleteTweet(tweet);"> <span class="glyphicon glyphicon-trash"></span> </button> <button type="button" class="btn btn-sm btn-primary tweet-btn" ng-click="escalateTweet(tweet);"> <span class="glyphicon glyphicon-arrow-up"></span> </button> </td> </tr> </table> </article> </section> <footer> <p class="center">Made with Harlan Haskins's <a href="https://github.com/harlanhaskins/Twitter-Queue-Bot" target="_blank">Twitter Queue Bot</a></p> <p class="center">Grade-A Web Design by Ben Centra</p> <footer> <!--<audio class="hide"> <source src="assets/audio/submitSound.mp3" preload="auto" type="audio/mp3"> Your browser does not support the audio element. </audio>--> </div> </body> </html>
{ "content_hash": "75cc42d5439cf6406e7272b6752b3067", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 163, "avg_line_length": 42.79347826086956, "alnum_prop": 0.5946151892303785, "repo_name": "bencentra/Twitter-Queue-Bot", "id": "2b6b1b7e933051d6dfe4cb1025d03615449dad58", "size": "3937", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "frontend/index.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
package seedu.taskmaster.logic.commands; /** * Template for the two other add commands to follow * Stores messages that are shared by both add commands * @author User * */ public abstract class AddCommand extends Command { public static final String COMMAND_WORD = "add"; public static String MESSAGE_DUPLICATE_TASK = "This task already exists in the task list"; protected static final int INDEX_OFFSET = 1; }
{ "content_hash": "143ab8750371e25add48845bd41b3326", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 94, "avg_line_length": 32.92307692307692, "alnum_prop": 0.7406542056074766, "repo_name": "CS2103AUG2016-W09-C2/main", "id": "4d108f547cc88af2ece5741a728edbd2d659012d", "size": "428", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/seedu/taskmaster/logic/commands/AddCommand.java", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "246" }, { "name": "CSS", "bytes": "10063" }, { "name": "HTML", "bytes": "5692750" }, { "name": "Java", "bytes": "527687" }, { "name": "XSLT", "bytes": "6290" } ], "symlink_target": "" }
bool GetModuleFileNameWrapper(HMODULE hModule, pal::string_t* recv) { pal::string_t path; DWORD dwModuleFileName = MAX_PATH / 2; do { path.resize(dwModuleFileName * 2); dwModuleFileName = GetModuleFileNameW(hModule, (LPWSTR)path.data(), path.size()); } while (dwModuleFileName == path.size()); if (dwModuleFileName != 0) { *recv = path; return true; } return false; } pal::string_t pal::to_lower(const pal::string_t& in) { pal::string_t ret = in; std::transform(ret.begin(), ret.end(), ret.begin(), ::towlower); return ret; } pal::string_t pal::to_string(int value) { return std::to_wstring(value); } pal::string_t pal::get_timestamp() { std::time_t t = std::time(0); const std::size_t elems = 100; char_t buf[elems]; std::wcsftime(buf, elems, _X("%c %Z"), std::gmtime(&t)); return pal::string_t(buf); } bool pal::touch_file(const pal::string_t& path) { HANDLE hnd = ::CreateFileW(path.c_str(), 0, 0, NULL, CREATE_NEW, FILE_ATTRIBUTE_NORMAL, NULL); if (hnd == INVALID_HANDLE_VALUE) { trace::verbose(_X("Failed to leave breadcrumb, HRESULT: 0x%X"), HRESULT_FROM_WIN32(GetLastError())); return false; } ::CloseHandle(hnd); return true; } bool pal::getcwd(pal::string_t* recv) { recv->clear(); pal::char_t buf[MAX_PATH]; DWORD result = GetCurrentDirectoryW(MAX_PATH, buf); if (result < MAX_PATH) { recv->assign(buf); return true; } else if (result != 0) { std::vector<pal::char_t> str; str.resize(result); result = GetCurrentDirectoryW(str.size(), str.data()); assert(result <= str.size()); if (result != 0) { recv->assign(str.data()); return true; } } assert(result == 0); trace::error(_X("Failed to obtain working directory, HRESULT: 0x%X"), HRESULT_FROM_WIN32(GetLastError())); return false; } bool pal::load_library(const string_t* in_path, dll_t* dll) { string_t path = *in_path; // LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR: // In framework-dependent apps, coreclr would come from another directory than the host, // so make sure coreclr dependencies can be resolved from coreclr.dll load dir. 
if (LongFile::IsPathNotFullyQualified(path)) { if (!pal::realpath(&path)) { trace::error(_X("Failed to load the dll from [%s], HRESULT: 0x%X"), path.c_str(), HRESULT_FROM_WIN32(GetLastError())); return false; } } //Adding the assert to ensure relative paths which are not just filenames are not used for LoadLibrary Calls assert(!LongFile::IsPathNotFullyQualified(path) || !LongFile::ContainsDirectorySeparator(path)); *dll = ::LoadLibraryExW(path.c_str(), NULL, LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR | LOAD_LIBRARY_SEARCH_DEFAULT_DIRS); if (*dll == nullptr) { trace::error(_X("Failed to load the dll from [%s], HRESULT: 0x%X"), path.c_str(), HRESULT_FROM_WIN32(GetLastError())); return false; } // Pin the module HMODULE dummy_module; if (!::GetModuleHandleExW(GET_MODULE_HANDLE_EX_FLAG_PIN, path.c_str(), &dummy_module)) { trace::error(_X("Failed to pin library [%s] in [%s]"), path.c_str(), _STRINGIFY(__FUNCTION__)); return false; } if (trace::is_enabled()) { string_t buf; GetModuleFileNameWrapper(*dll, &buf); trace::info(_X("Loaded library from %s"), buf.c_str()); } return true; } pal::proc_t pal::get_symbol(dll_t library, const char* name) { auto result = ::GetProcAddress(library, name); if (result == nullptr) { trace::info(_X("Probed for and did not resolve library symbol %s"), name); } return result; } void pal::unload_library(dll_t library) { // No-op. On windows, we pin the library, so it can't be unloaded. } static bool get_wow_mode_program_files(pal::string_t* recv) { #if defined(_TARGET_AMD64_) pal::char_t* env_key = _X("ProgramFiles(x86)"); #else pal::char_t* env_key = _X("ProgramFiles"); #endif return get_file_path_from_env(env_key,recv); } bool pal::get_default_breadcrumb_store(string_t* recv) { recv->clear(); pal::string_t prog_dat; if (!get_file_path_from_env(_X("ProgramData"), &prog_dat)) { // We should have the path in prog_dat. 
trace::verbose(_X("Failed to read default breadcrumb store [%s]"), prog_dat.c_str()); return false; } recv->assign(prog_dat); append_path(recv, _X("Microsoft")); append_path(recv, _X("NetFramework")); append_path(recv, _X("BreadcrumbStore")); return true; } bool pal::get_default_servicing_directory(string_t* recv) { if (!get_wow_mode_program_files(recv)) { return false; } append_path(recv, _X("coreservicing")); return true; } bool pal::get_default_installation_dir(pal::string_t* recv) { pal::char_t* program_files_dir; if (pal::is_running_in_wow64()) { program_files_dir = _X("ProgramFiles(x86)"); } else { program_files_dir = _X("ProgramFiles"); } if (!get_file_path_from_env(program_files_dir, recv)) { return false; } append_path(recv, _X("dotnet")); return true; } bool get_sdk_self_registered_dir(pal::string_t* recv) { #if !defined(_TARGET_AMD64_) && !defined(_TARGET_X86_) // Self-registered SDK installation directory is only supported for x64 and x86 architectures. return false; #else recv->clear(); // ***Used only for testing*** pal::string_t environmentOverride; if (pal::getenv(_X("_DOTNET_TEST_SDK_SELF_REGISTERED_DIR"), &environmentOverride)) { recv->assign(environmentOverride); return true; } // *************************** DWORD size = 0; const HKEY hkey = HKEY_LOCAL_MACHINE; // The registry search occurs in the 32-bit registry in all cases. 
const DWORD flags = RRF_RT_REG_SZ | RRF_SUBKEY_WOW6432KEY; pal::string_t sub_key = pal::string_t(_X("SOFTWARE\\dotnet\\Setup\\InstalledVersions\\")) + get_arch() + pal::string_t(_X("\\sdk")); pal::char_t* value = _X("InstallLocation"); // Determine the size of the buffer LONG result = ::RegGetValueW(hkey, sub_key.c_str(), value, flags, nullptr, nullptr, &size); if (result != ERROR_SUCCESS || size == 0) { return false; } // Get the key's value std::vector<pal::char_t> buffer(size/sizeof(pal::char_t)); result = ::RegGetValueW(hkey, sub_key.c_str(), value, flags, nullptr, &buffer[0], &size); if (result != ERROR_SUCCESS) { return false; } recv->assign(buffer.data()); return true; #endif } bool pal::get_global_dotnet_dirs(std::vector<pal::string_t>* dirs) { pal::string_t default_dir; pal::string_t custom_dir; bool dir_found = false; if (get_sdk_self_registered_dir(&custom_dir)) { dirs->push_back(custom_dir); dir_found = true; } if (get_default_installation_dir(&default_dir)) { // Avoid duplicate global dirs. if (!dir_found || !are_paths_equal_with_normalized_casing(custom_dir, default_dir)) { dirs->push_back(default_dir); dir_found = true; } } return dir_found; } // To determine the OS version, we are going to use RtlGetVersion API // since GetVersion call can be shimmed on Win8.1+. typedef NTSTATUS (WINAPI *pFuncRtlGetVersion)(RTL_OSVERSIONINFOW *); pal::string_t pal::get_current_os_rid_platform() { pal::string_t ridOS; RTL_OSVERSIONINFOW osinfo; // Init the buffer ZeroMemory(&osinfo, sizeof(osinfo)); osinfo.dwOSVersionInfoSize = sizeof(osinfo); HMODULE hmodNtdll = LoadLibrary("ntdll.dll"); if (hmodNtdll != NULL) { pFuncRtlGetVersion pRtlGetVersion = (pFuncRtlGetVersion)GetProcAddress(hmodNtdll, "RtlGetVersion"); if (pRtlGetVersion) { if ((*pRtlGetVersion)(&osinfo) == 0) { // Win7 RID is the minimum supported version. 
int majorVer = 6; int minorVer = 1; if (osinfo.dwMajorVersion > majorVer) { majorVer = osinfo.dwMajorVersion; // Reset the minor version since we picked a different major version. minorVer = 0; } if (osinfo.dwMinorVersion > minorVer) { minorVer = osinfo.dwMinorVersion; } if (majorVer == 6) { switch(minorVer) { case 1: ridOS.append(_X("win7")); break; case 2: ridOS.append(_X("win8")); break; case 3: default: // For unknown version, we will support the highest RID that we know for this major version. ridOS.append(_X("win81")); break; } } else if (majorVer >= 10) { // Return the major version for use in RID computation without applying any cap. ridOS.append(_X("win")); ridOS.append(pal::to_string(majorVer)); } } } } return ridOS; } bool pal::is_path_rooted(const string_t& path) { return path.length() >= 2 && path[1] == L':'; } // Returns true only if an env variable can be read successfully to be non-empty. bool pal::getenv(const char_t* name, string_t* recv) { recv->clear(); auto length = ::GetEnvironmentVariableW(name, nullptr, 0); if (length == 0) { auto err = GetLastError(); if (err != ERROR_ENVVAR_NOT_FOUND) { trace::error(_X("Failed to read environment variable [%s], HRESULT: 0x%X"), name, HRESULT_FROM_WIN32(GetLastError())); } return false; } auto buf = new char_t[length]; if (::GetEnvironmentVariableW(name, buf, length) == 0) { trace::error(_X("Failed to read environment variable [%s], HRESULT: 0x%X"), name, HRESULT_FROM_WIN32(GetLastError())); return false; } recv->assign(buf); delete[] buf; return true; } int pal::xtoi(const char_t* input) { return ::_wtoi(input); } bool pal::get_own_executable_path(string_t* recv) { return GetModuleFileNameWrapper(NULL, recv); } static bool wchar_convert_helper(DWORD code_page, const char* cstr, int len, pal::string_t* out) { out->clear(); // No need of explicit null termination, so pass in the actual length. 
size_t size = ::MultiByteToWideChar(code_page, 0, cstr, len, nullptr, 0); if (size == 0) { return false; } out->resize(size, '\0'); return ::MultiByteToWideChar(code_page, 0, cstr, len, &(*out)[0], out->size()) != 0; } bool pal::utf8_palstring(const std::string& str, pal::string_t* out) { return wchar_convert_helper(CP_UTF8, &str[0], str.size(), out); } bool pal::pal_utf8string(const pal::string_t& str, std::vector<char>* out) { out->clear(); // Pass -1 as we want explicit null termination in the char buffer. size_t size = ::WideCharToMultiByte(CP_UTF8, 0, str.c_str(), -1, nullptr, 0, nullptr, nullptr); if (size == 0) { return false; } out->resize(size, '\0'); return ::WideCharToMultiByte(CP_UTF8, 0, str.c_str(), -1, out->data(), out->size(), nullptr, nullptr) != 0; } bool pal::pal_clrstring(const pal::string_t& str, std::vector<char>* out) { return pal_utf8string(str, out); } bool pal::clr_palstring(const char* cstr, pal::string_t* out) { return wchar_convert_helper(CP_UTF8, cstr, ::strlen(cstr), out); } // Return if path is valid and file exists, return true and adjust path as appropriate. 
bool pal::realpath(string_t* path, bool skip_error_logging) { if (LongFile::IsNormalized(path->c_str())) { WIN32_FILE_ATTRIBUTE_DATA data; if (GetFileAttributesExW(path->c_str(), GetFileExInfoStandard, &data) != 0) { return true; } } char_t buf[MAX_PATH]; auto size = ::GetFullPathNameW(path->c_str(), MAX_PATH, buf, nullptr); if (size == 0) { if (!skip_error_logging) { trace::error(_X("Error resolving full path [%s]"), path->c_str()); } return false; } string_t str; if (size < MAX_PATH) { str.assign(buf); } else { str.resize(size + LongFile::UNCExtendedPathPrefix.length(), 0); size = ::GetFullPathNameW(path->c_str(), size, (LPWSTR)str.data(), nullptr); assert(size <= str.size()); if (size == 0) { if (!skip_error_logging) { trace::error(_X("Error resolving full path [%s]"), path->c_str()); } return false; } const string_t* prefix = &LongFile::ExtendedPrefix; //Check if the resolved path is a UNC. By default we assume relative path to resolve to disk if (str.compare(0, LongFile::UNCPathPrefix.length(), LongFile::UNCPathPrefix) == 0) { prefix = &LongFile::UNCExtendedPathPrefix; str.erase(0, LongFile::UNCPathPrefix.length()); size = size - LongFile::UNCPathPrefix.length(); } str.insert(0, *prefix); str.resize(size + prefix->length()); str.shrink_to_fit(); } WIN32_FILE_ATTRIBUTE_DATA data; if (GetFileAttributesExW(str.c_str(), GetFileExInfoStandard, &data) != 0) { *path = str; return true; } return false; } bool pal::file_exists(const string_t& path) { if (path.empty()) { return false; } string_t tmp(path); return pal::realpath(&tmp, true); } static void readdir(const pal::string_t& path, const pal::string_t& pattern, bool onlydirectories, std::vector<pal::string_t>* list) { assert(list != nullptr); std::vector<pal::string_t>& files = *list; pal::string_t normalized_path(path); if (LongFile::ShouldNormalize(normalized_path)) { if (!pal::realpath(&normalized_path)) { return; } } pal::string_t search_string(normalized_path); append_path(&search_string, pattern.c_str()); 
WIN32_FIND_DATAW data = { 0 }; auto handle = ::FindFirstFileExW(search_string.c_str(), FindExInfoStandard, &data, FindExSearchNameMatch, NULL, 0); if (handle == INVALID_HANDLE_VALUE) { return; } do { if (!onlydirectories || (data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY)) { pal::string_t filepath(data.cFileName); if (filepath != _X(".") && filepath != _X("..")) { files.push_back(filepath); } } } while (::FindNextFileW(handle, &data)); ::FindClose(handle); } void pal::readdir(const string_t& path, const string_t& pattern, std::vector<pal::string_t>* list) { ::readdir(path, pattern, false, list); } void pal::readdir(const string_t& path, std::vector<pal::string_t>* list) { ::readdir(path, _X("*"), false, list); } void pal::readdir_onlydirectories(const pal::string_t& path, const string_t& pattern, std::vector<pal::string_t>* list) { ::readdir(path, pattern, true, list); } void pal::readdir_onlydirectories(const pal::string_t& path, std::vector<pal::string_t>* list) { ::readdir(path, _X("*"), true, list); } bool pal::is_running_in_wow64() { BOOL fWow64Process = FALSE; if (!IsWow64Process(GetCurrentProcess(), &fWow64Process)) { return false; } return (fWow64Process != FALSE); } bool pal::are_paths_equal_with_normalized_casing(const string_t& path1, const string_t& path2) { // On Windows, paths are case-insensitive return (strcasecmp(path1.c_str(), path2.c_str()) == 0); }
{ "content_hash": "ca628edc990ff75018ff4aeacc81a6c0", "timestamp": "", "source": "github", "line_count": 580, "max_line_length": 136, "avg_line_length": 28.03448275862069, "alnum_prop": 0.5771832718327183, "repo_name": "ericstj/core-setup", "id": "5b79ee208bd09341eaea8006675c9154a52784bb", "size": "16693", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/corehost/common/pal.windows.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "18941" }, { "name": "C", "bytes": "5100" }, { "name": "C#", "bytes": "796232" }, { "name": "C++", "bytes": "762899" }, { "name": "CMake", "bytes": "47817" }, { "name": "Dockerfile", "bytes": "12606" }, { "name": "HTML", "bytes": "44274" }, { "name": "Makefile", "bytes": "220" }, { "name": "PowerShell", "bytes": "65397" }, { "name": "Python", "bytes": "19476" }, { "name": "Roff", "bytes": "6044" }, { "name": "Shell", "bytes": "165931" } ], "symlink_target": "" }
SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "93f9cd4492a138ca8e32f2d17c8f904b", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "3962d0bea1c8fe7d912bdbb11e06164fd94f41f7", "size": "185", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Campanulaceae/Campanula/Campanula collina/ Syn. Campanula collina major/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
layout: page title: About permalink: /about/ --- I'm Ky-Cuong ("key-coon"), a software engineer at [Stripe](https://stripe.com/), where I was also an intern. I was previously an intern at [AppFolio](http://www.appfolioinc.com/). I completed my undergrad as a computer science major at [UCLA](http://www.ucla.edu/), where I was heavily involved with [UCLA ACM](http://uclaacm.com/). Through the power of the web, here are links to my: * [Resume](/Ky-Cuong.pdf) * [GitHub](https://github.com/KyCodeHuynh) * [LinkedIn](https://www.linkedin.com/in/kycuong) To relax, I can usually be found climbing, running, or reading. ## Licensing of Blog Content See the [README](https://github.com/KyCodeHuynh/kycodehuynh.github.io/blob/master/README.md) for the GitHub repo of this site.
{ "content_hash": "e69fd122bc419fdcb9a239aac60a762c", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 236, "avg_line_length": 39.1, "alnum_prop": 0.7276214833759591, "repo_name": "KyCodeHuynh/kycodehuynh.github.io", "id": "978e781015b2f9df130f0e583092b392f45c03c6", "size": "786", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "about.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6900" }, { "name": "HTML", "bytes": "11200" }, { "name": "JavaScript", "bytes": "739" } ], "symlink_target": "" }
package util; import com.mongodb.MongoClient; import me.deployfor.model.EmailAuth; import me.deployfor.model.Person; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.Ignore; import org.mongodb.morphia.Morphia; /** * * @author Juliano Macedo */ public class EmailUtilTest { private String name; private String email; private MongoClient mongo; private Morphia morphia; private EmailUtil instance; public EmailUtilTest() { } @BeforeClass public static void setUpClass() { } @AfterClass public static void tearDownClass() { } @Before public void setUp() { name = "Junit Tester"; email = "junit@tester.com"; mongo = MongoUtil.getMongoConnection(); morphia = new Morphia(); morphia.map(EmailAuth.class); instance = new EmailUtil(mongo, morphia); } @After public void tearDown() { } /** * Test of sendEmail method, of class EmailUtil. */ @Ignore public void testSendEmail_Sucess() { Person person = new Person(name, email); instance.sendEmail(person); } /** * Test of sendEmail method, of class EmailUtil. */ @Test public void testSendEmail_Unsucess() { Person person = null; instance.sendEmail(person); } }
{ "content_hash": "d10a37c89bc8c0e86d19e74cf9193c1d", "timestamp": "", "source": "github", "line_count": 70, "max_line_length": 52, "avg_line_length": 20.34285714285714, "alnum_prop": 0.6411516853932584, "repo_name": "JulianoR/simple-jsf-morphia-mdl", "id": "d907a5f884d7dba7f5bd987bb4a26d15214c0b11", "size": "1523", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "deployforme/src/test/java/util/EmailUtilTest.java", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "27495" }, { "name": "HTML", "bytes": "25707" }, { "name": "Java", "bytes": "62021" }, { "name": "JavaScript", "bytes": "1188" } ], "symlink_target": "" }
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68a.c Label Definition File: CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32.label.xml Template File: sources-sink-68a.tmpl.c */ /* * @description * CWE: 591 Sensitive Data Storage in Improperly Locked Memory * BadSource: Allocate memory for sensitive data without using VirtualLock() to lock the buffer into memory * GoodSource: Allocate memory for sensitive data and use VirtualLock() to lock the buffer into memory * Sink: * BadSink : Authenticate the user using LogonUserW() * Flow Variant: 68 Data flow: data passed as a global variable from one function to another in different source files * * */ #include "std_testcase.h" #include <wchar.h> #include <windows.h> #pragma comment(lib, "advapi32.lib") wchar_t * CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68_badData; wchar_t * CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68_goodG2BData; #ifndef OMITBAD /* bad function declaration */ void CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68b_badSink(); void CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68_bad() { wchar_t * password; /* Initialize Data */ password = L""; password = (wchar_t *)malloc(100*sizeof(wchar_t)); if (password == NULL) { printLine("Memory could not be allocated"); exit(1); } /* FLAW: Do not lock the memory */ /* INCIDENTAL FLAW: CWE-259 Hardcoded Password */ wcscpy(password, L"Password1234!"); CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68_badData = password; CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68b_badSink(); } #endif /* OMITBAD */ #ifndef OMITGOOD /* good function declarations */ void CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68b_goodG2BSink(); /* goodG2B uses the GoodSource with the BadSink */ static void goodG2B() { wchar_t * 
password; /* Initialize Data */ password = L""; password = (wchar_t *)malloc(100*sizeof(wchar_t)); if (password == NULL) { printLine("Memory could not be allocated"); exit(1); } /* FIX: Use VirtualLock() to lock the buffer into memory */ if(!VirtualLock(password, 100*sizeof(wchar_t))) { printLine("Memory could not be locked"); exit(1); } /* INCIDENTAL FLAW: CWE-259 Hardcoded Password */ wcscpy(password, L"Password1234!"); CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68_goodG2BData = password; CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68b_goodG2BSink(); } void CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68_good() { goodG2B(); } #endif /* OMITGOOD */ /* Below is the main(). It is only used when building this testcase on * its own for testing or for building a binary to use in testing binary * analysis tools. It is not used when compiling all the testcases as one * application, which is how source code analysis tools are tested. */ #ifdef INCLUDEMAIN int main(int argc, char * argv[]) { /* seed randomness */ srand( (unsigned)time(NULL) ); #ifndef OMITGOOD printLine("Calling good()..."); CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68_good(); printLine("Finished good()"); #endif /* OMITGOOD */ #ifndef OMITBAD printLine("Calling bad()..."); CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68_bad(); printLine("Finished bad()"); #endif /* OMITBAD */ return 0; } #endif
{ "content_hash": "6e4936de6e525aa13d31c060e58cf8f1", "timestamp": "", "source": "github", "line_count": 113, "max_line_length": 118, "avg_line_length": 34.08849557522124, "alnum_prop": 0.6825025960539979, "repo_name": "JianpingZeng/xcc", "id": "eff55c6029d3a995094958e723fb08dc876b747a", "size": "3852", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "xcc/test/juliet/testcases/CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory/CWE591_Sensitive_Data_Storage_in_Improperly_Locked_Memory__w32_wchar_t_68a.c", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
extern BOOLEAN xWaitNextEvent(INTEGER em, EventRecord *evt, LONGINT sleep, RgnHandle mousergn); extern BOOLEAN xGetNextEvent(INTEGER em, EventRecord *evt); extern OSErr xPBHRename(HFileParam *pb, BOOLEAN async); extern OSErr xPBHCreate(HFileParam *pb, BOOLEAN async); extern OSErr xPBDirCreate(HFileParam *pb, BOOLEAN async); extern OSErr xPBHDelete(HFileParam *pb, BOOLEAN async); extern OSErr xPBRead(ioParam *pb, BOOLEAN async); extern OSErr xPBWrite(ioParam *pb, BOOLEAN async); extern OSErr xPBClose(ioParam *pb, BOOLEAN async); extern OSErr xPBHOpen(HFileParam *pb, BOOLEAN async); extern OSErr xPBHOpenRF(HFileParam *pb, BOOLEAN async); extern OSErr xPBGetCatInfo(CInfoPBPtr pb, BOOLEAN async); extern OSErr xPBSetCatInfo(CInfoPBPtr pb, BOOLEAN async); extern OSErr xPBCatMove(CMovePBPtr pb, BOOLEAN async); extern OSErr xPBGetVInfo(volumeParam *pb, BOOLEAN async); extern OSErr xPBUnmountVol(volumeParam *pb); extern OSErr xPBEject(volumeParam *pb); extern OSErr xPBAllocate(ParmBlkPtr pb, BOOLEAN async); extern OSErr xPBAllocContig(ParmBlkPtr pb, BOOLEAN async); extern OSErr xPBHGetFInfo(ParmBlkPtr pb, BOOLEAN async); extern OSErr xPBSetEOF(ParmBlkPtr pb, BOOLEAN async); extern OSErr myPBHRename(HFileParam *pb, BOOLEAN async); extern OSErr myPBHCreate(HFileParam *pb, BOOLEAN async); extern OSErr myPBDirCreate(HFileParam *pb, BOOLEAN async); extern OSErr myPBHDelete(HFileParam *pb, BOOLEAN async); extern OSErr myPBRead(ioParam *pb, BOOLEAN async); extern OSErr myPBWrite(ioParam *pb, BOOLEAN async); extern OSErr myPBClose(ioParam *pb, BOOLEAN async); extern OSErr myPBHOpen(HFileParam *pb, BOOLEAN async); extern OSErr myPBHOpenRF(HFileParam *pb, BOOLEAN async); extern OSErr myPBGetCatInfo(CInfoPBPtr pb, BOOLEAN async); extern OSErr myPBSetCatInfo(CInfoPBPtr pb, BOOLEAN async); extern OSErr myPBCatMove(CMovePBPtr pb, BOOLEAN async); extern OSErr myPBGetVInfo(volumeParam *pb, BOOLEAN async); extern OSErr myPBUnmountVol(volumeParam *pb); extern OSErr myPBEject(volumeParam 
*pb); extern OSErr myPBAllocate(ioParam *pb, BOOLEAN async); extern OSErr myPBAllocContig(ioParam *pb, BOOLEAN async); extern OSErr myPBHGetFInfo(HFileParam *pb, BOOLEAN async); extern OSErr myPBSetEOF(ioParam *pb, BOOLEAN async); extern OSErr myPBOpenWD(WDPBPtr pb, BOOLEAN async); extern OSErr myPBCloseWD(WDPBPtr pb, BOOLEAN async); extern OSErr myPBFlushFile(ioParam *pb, BOOLEAN async);
{ "content_hash": "a76fcc3702db472b90871e5487463ba9", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 74, "avg_line_length": 51.04255319148936, "alnum_prop": 0.8053355564818675, "repo_name": "MaddTheSane/executor", "id": "4bc4a72e5d2a5a68cd4574883c58bb25b4f346ef", "size": "2399", "binary": false, "copies": "2", "ref": "refs/heads/CppPort", "path": "src/config/front-ends/nextstep/HFS_XFer/xbar.h", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "78587" }, { "name": "Awk", "bytes": "5579" }, { "name": "C", "bytes": "1598474" }, { "name": "C++", "bytes": "4606280" }, { "name": "GDB", "bytes": "36" }, { "name": "Inno Setup", "bytes": "25291" }, { "name": "M4", "bytes": "5802" }, { "name": "Makefile", "bytes": "62397" }, { "name": "Mathematica", "bytes": "9405" }, { "name": "NewLisp", "bytes": "35906" }, { "name": "Objective-C", "bytes": "25209" }, { "name": "Objective-C++", "bytes": "101300" }, { "name": "Perl", "bytes": "33397" }, { "name": "Perl 6", "bytes": "2970" }, { "name": "Roff", "bytes": "19007" }, { "name": "Shell", "bytes": "65013" }, { "name": "Yacc", "bytes": "27639" } ], "symlink_target": "" }
/* Styles for the CSV previewer panel. */

/* Slim custom scrollbar (WebKit-only pseudo-elements). */
.csvpreviewer::-webkit-scrollbar {
    -webkit-appearance: none;
    background-color: #ccc;
    width: 2px;
}
.csvpreviewer::-webkit-scrollbar-thumb {
    border-radius: 5px;
    background-color: rgba(0,0,0,.5);
    -webkit-box-shadow: 0 0 1px rgba(255,255,255,.5);
}

.csvpreviewer {
    /* TODO: find out how to fill the available width and scroll the overflow */
    width: 820px;
    max-width: 100%;
    overflow-x: scroll;
}

.csvpreviewer table {
    /* border-collapse only applies to tables, so it belongs here,
       not on the td rule where it previously had no effect. */
    border-collapse: collapse;
}
.csvpreviewer table th {
    padding: 3px;
}
.csvpreviewer table select {
    /* Keep dropdowns from stretching the table layout. */
    max-width: 10em;
}
.csvpreviewer table td {
    border: 1px solid #ccc;
    padding: 5px;
    background: #fff;
}

#HasHeader, #ClearData {
    padding-left: 5px;
}
{ "content_hash": "0c89345b4fe0841a3fcedd610be96094", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 60, "avg_line_length": 16.023809523809526, "alnum_prop": 0.687964338781575, "repo_name": "botzkobg/silverstripe-importexport", "id": "95dba37ad74ab0647909be17758f92fe0c7a7d83", "size": "673", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "css/csvpreviewer.css", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1000" }, { "name": "JavaScript", "bytes": "2662" }, { "name": "PHP", "bytes": "79135" }, { "name": "Scheme", "bytes": "2558" } ], "symlink_target": "" }
package transfertcp;

import java.io.DataOutputStream;

/**
 * Singly linked FIFO queue of pending update nodes.
 *
 * @author JoseCaceres
 */
public class ListaUpdate {

    /** Head of the queue; null when the queue is empty. */
    private nodoUpdate cab;

    public ListaUpdate() {
        this.cab = null;
    }

    /**
     * Appends a node at the tail of the queue.
     *
     * @param dato node to enqueue; its successor link is used as-is
     */
    public void add(nodoUpdate dato) {
        if (cab == null) {
            cab = dato;
            return;
        }
        // Walk to the last node and hook the new one after it.
        nodoUpdate last = cab;
        while (last.getSuc() != null) {
            last = last.getSuc();
        }
        last.setSuc(dato);
    }

    /** @return the head node, or null if the queue is empty */
    public nodoUpdate getCab() {
        return cab;
    }

    /**
     * Runs the update at the head of the queue, then removes it.
     *
     * Fix: the original dereferenced {@code cab} unconditionally and threw
     * a NullPointerException on an empty queue; now that case is a no-op.
     * The head is only advanced after {@code actualizar} returns, matching
     * the original ordering (a throwing update leaves the queue intact).
     *
     * @param salida stream the update writes to
     */
    public void ejecutar(DataOutputStream salida) {
        if (cab == null) {
            return; // nothing queued
        }
        cab.actualizar(salida);
        cab = cab.getSuc();
    }
}
{ "content_hash": "3eaa71a74c1628cd925741137a6811b5", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 51, "avg_line_length": 19.5, "alnum_prop": 0.47863247863247865, "repo_name": "joalcapa/transfertcp", "id": "06fddc4e1c4b13993dda0a1f84d8601d0de12785", "size": "819", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "transfertcp/ListaUpdate.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "89479" } ], "symlink_target": "" }
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Gecko DOM code. * * The Initial Developer of the Original Code is * Mozilla Foundation. * Portions created by the Initial Developer are Copyright (C) 2008 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Peter Van der Beken <peterv@propagandism.org> * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. 
* * ***** END LICENSE BLOCK ***** */ #ifndef nsWrapperCache_h___ #define nsWrapperCache_h___ #include "nsCycleCollectionParticipant.h" struct JSObject; struct JSContext; class nsContentUtils; class XPCWrappedNativeScope; typedef PRUptrdiff PtrBits; #define NS_WRAPPERCACHE_IID \ { 0x6f3179a1, 0x36f7, 0x4a5c, \ { 0x8c, 0xf1, 0xad, 0xc8, 0x7c, 0xde, 0x3e, 0x87 } } /** * Class to store the wrapper for an object. This can only be used with objects * that only have one non-security wrapper at a time (for an XPCWrappedNative * this is usually ensured by setting an explicit parent in the PreCreate hook * for the class). * * An instance of nsWrapperCache can be gotten from an object that implements * a wrapper cache by calling QueryInterface on it. Note that this breaks XPCOM * rules a bit (this object doesn't derive from nsISupports). * * The cache can store objects other than wrappers. We allow wrappers to use a * separate JSObject to store their state (mostly expandos). If the wrapper is * collected and we want to preserve this state we actually store the state * object in the cache. * * The cache can store 3 types of objects: * * If WRAPPER_IS_PROXY is not set (IsProxy() returns false): * - a slim wrapper or the JSObject of an XPCWrappedNative wrapper * * If WRAPPER_IS_PROXY is set (IsProxy() returns true): * - a proxy wrapper * - an expando object * * If a proxy wrapper is GCed and it has an expando object we'll store the * expando object in the cache. If we create a new proxy wrapper and the cache * contains an expando object we'll store the expando object in the new wrapper * and store the new wrapper in the cache. Unlinking from the cycle collector * clears anything stored in the cache. * * A number of the methods are implemented in nsWrapperCacheInlines.h because we * have to include some JS headers that don't play nicely with the rest of the * codebase. Include nsWrapperCacheInlines.h if you need to call those methods. 
*/ class nsWrapperCache { friend class nsContentUtils; public: NS_DECLARE_STATIC_IID_ACCESSOR(NS_WRAPPERCACHE_IID) nsWrapperCache() : mWrapperPtrBits(0) { } ~nsWrapperCache() { NS_ASSERTION(!PreservingWrapper(), "Destroying cache with a preserved wrapper!"); RemoveExpandoObject(); } /** * Get the cached wrapper. * * This getter clears the gray bit before handing out the JSObject which means * that the object is guaranteed to be kept alive past the next CC. */ JSObject* GetWrapper() const; /** * Get the cached wrapper. * * This getter does not change the color of the JSObject meaning that the * object returned is not guaranteed to be kept alive past the next CC. * * This should only be called if you are certain that the return value won't * be passed into a JS API function and that it won't be stored without being * rooted (or otherwise signaling the stored value to the CC). */ JSObject* GetWrapperPreserveColor() const; /** * Get the expando object, used for storing expando properties, if there is * one available. If the cache holds a DOM proxy binding that proxy's expando * object will be returned. * * This getter does not change the color of the JSObject meaning that the * object returned is not guaranteed to be kept alive past the next CC. * * This should only be called if you are certain that the return value won't * be passed into a JS API function and that it won't be stored without being * rooted (or otherwise signaling the stored value to the CC). */ JSObject* GetExpandoObjectPreserveColor() const; void SetWrapper(JSObject* aWrapper); /** * Clear the wrapper, but keep the expando object alive if the wrapper has * one. This should be called from the finalizer for the wrapper. */ void ClearWrapper(); /** * Clear the wrapper if it's a proxy, doesn't keep the expando object alive. * This should be called when unlinking the cache. 
*/ void ClearWrapperIfProxy(); bool PreservingWrapper() { return (mWrapperPtrBits & WRAPPER_BIT_PRESERVED) != 0; } void SetIsProxy() { NS_ASSERTION(!mWrapperPtrBits, "This flag should be set before creating any wrappers."); mWrapperPtrBits = WRAPPER_IS_PROXY; } void ClearIsProxy() { NS_ASSERTION(!mWrapperPtrBits || mWrapperPtrBits == WRAPPER_IS_PROXY, "This flag should be cleared before creating any wrappers."); mWrapperPtrBits = 0; } bool IsProxy() const { return (mWrapperPtrBits & WRAPPER_IS_PROXY) != 0; } /** * Wrap the object corresponding to this wrapper cache. If non-null is * returned, the object has already been stored in the wrapper cache and the * value set in triedToWrap is meaningless. If null is returned then * triedToWrap indicates whether an error occurred, if it's false then the * object doesn't actually support creating a wrapper through its WrapObject * hook. */ virtual JSObject* WrapObject(JSContext *cx, XPCWrappedNativeScope *scope, bool *triedToWrap) { *triedToWrap = false; return nsnull; } private: // Only meant to be called by nsContentUtils. void SetPreservingWrapper(bool aPreserve) { if(aPreserve) { mWrapperPtrBits |= WRAPPER_BIT_PRESERVED; } else { mWrapperPtrBits &= ~WRAPPER_BIT_PRESERVED; } } JSObject *GetJSObjectFromBits() const { return reinterpret_cast<JSObject*>(mWrapperPtrBits & ~kWrapperBitMask); } void SetWrapperBits(void *aWrapper) { mWrapperPtrBits = reinterpret_cast<PtrBits>(aWrapper) | (mWrapperPtrBits & WRAPPER_IS_PROXY); } void RemoveExpandoObject(); static JSObject *GetExpandoFromSlot(JSObject *obj); /** * If this bit is set then we're preserving the wrapper, which in effect ties * the lifetime of the JS object stored in the cache to the lifetime of the * native object. 
We rely on the cycle collector to break the cycle that this * causes between the native object and the JS object, so it is important that * any native object that supports preserving of its wrapper * traces/traverses/unlinks the cached JS object (see * NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER, * NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS and * NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER). */ enum { WRAPPER_BIT_PRESERVED = 1 << 0 }; /** * If this bit is set then the wrapper for the native object is a proxy. Note * that that doesn't necessarily mean that the JS object stored in the cache * is a JS proxy, as we sometimes store objects other than the wrapper in the * cache. */ enum { WRAPPER_IS_PROXY = 1 << 1 }; enum { kWrapperBitMask = (WRAPPER_BIT_PRESERVED | WRAPPER_IS_PROXY) }; PtrBits mWrapperPtrBits; }; NS_DEFINE_STATIC_IID_ACCESSOR(nsWrapperCache, NS_WRAPPERCACHE_IID) #define NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY \ if ( aIID.Equals(NS_GET_IID(nsWrapperCache)) ) { \ *aInstancePtr = static_cast<nsWrapperCache*>(this); \ return NS_OK; \ } #endif /* nsWrapperCache_h___ */
{ "content_hash": "3990b5e0ecf4988a9e95b978e40063bb", "timestamp": "", "source": "github", "line_count": 251, "max_line_length": 80, "avg_line_length": 36.322709163346616, "alnum_prop": 0.6977075792475596, "repo_name": "bwp/SeleniumWebDriver", "id": "e330f2fe3b447db9dd68150a3968cbb3563bbe23", "size": "9117", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "third_party/gecko-10/win32/include/nsWrapperCache.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "22162" }, { "name": "C", "bytes": "302788" }, { "name": "C#", "bytes": "2090580" }, { "name": "C++", "bytes": "771128" }, { "name": "Java", "bytes": "7874195" }, { "name": "JavaScript", "bytes": "14218193" }, { "name": "Objective-C", "bytes": "368823" }, { "name": "Python", "bytes": "634315" }, { "name": "Ruby", "bytes": "757466" }, { "name": "Shell", "bytes": "6429" } ], "symlink_target": "" }
package xxxxx.yyyyy.zzzzz.xyz5.simple.impl; import java.util.Collection; import java.util.Map; import xxxxx.yyyyy.zzzzz.xyz5.api.Right; import xxxxx.yyyyy.zzzzz.xyz5.api.RuleSession; import xxxxx.yyyyy.zzzzz.xyz5.api.ThreadUnsafe; import xxxxx.yyyyy.zzzzz.xyz5.api.TypedHandle; import xxxxx.yyyyy.zzzzz.xyz5.simple.store.TypedHandleStore; @lombok.extern.slf4j.Slf4j @ThreadUnsafe public class RightImpl implements Right { protected final RuleSession ruleSession; protected final Map<String, TypedHandleStore> temporal; public RightImpl(RuleSession ruleSession, Map<String, TypedHandleStore> temporal) { this.ruleSession = ruleSession; this.temporal = temporal; } @Override public <T> T global(String name, Class<T> type) { return this.ruleSession.getGlobal(name, type); } @Override public <T> Collection<TypedHandle<T>> var(String name, Class<T> type) { TypedHandleStore typedHandleStore = this.temporal.get(name); if (typedHandleStore == null) { throw new IllegalArgumentException(String.format("Undefined name [%s]", name)); } // FIXME Check Type ? return typedHandleStore.getHandles(type); } @Override public <T> void insert(T object, Class<T> type) { this.ruleSession.insert(object, type); } @Override public <T> void insert(Collection<T> objects, Class<T> type) { this.ruleSession.insert(objects, type); } @Override public <T> void delete(TypedHandle<T> handle) { this.ruleSession.delete(handle); } }
{ "content_hash": "decbea3a052ce8eb1e9f7085d1fc6869", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 91, "avg_line_length": 30.037735849056602, "alnum_prop": 0.6915829145728644, "repo_name": "namioka/xyz5", "id": "bc665fa26a91fcd43bf8da30221bc661bcb48d1a", "size": "2225", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "xyz5-simple/src/main/java/xxxxx/yyyyy/zzzzz/xyz5/simple/impl/RightImpl.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "84638" }, { "name": "Shell", "bytes": "173" } ], "symlink_target": "" }
#ifndef _GEXECUTE_H_ #define _GEXECUTE_H_ #include <wx/string.h> #include <vector> struct GettextError { int line; wxString text; }; typedef std::vector<GettextError> GettextErrors; /** Executes command. Writes stderr output to \a stderrOutput if not NULL, and logs it with wxLogError otherwise. \return true if program exited with exit code 0, false otherwise. */ extern bool ExecuteGettext(const wxString& cmdline); /// Like ExecuteGettext(), but stores error output parsed into per-item entries. extern bool ExecuteGettextAndParseOutput(const wxString& cmdline, GettextErrors& errors); extern wxString QuoteCmdlineArg(const wxString& s); extern wxString GetGettextPackagePath(); #endif // _GEXECUTE_H_
{ "content_hash": "dc35bf39094c2cfce78680cc5ad4458b", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 80, "avg_line_length": 23.484848484848484, "alnum_prop": 0.7135483870967742, "repo_name": "alexhenrie/poedit", "id": "e2fa48286fa3c438a5e468858a761b9aec9ae22f", "size": "1972", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "src/gexecute.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "48113" }, { "name": "C++", "bytes": "1285031" }, { "name": "Inno Setup", "bytes": "11180" }, { "name": "M4", "bytes": "103958" }, { "name": "Makefile", "bytes": "9507" }, { "name": "Objective-C", "bytes": "16519" }, { "name": "Objective-C++", "bytes": "14681" }, { "name": "Python", "bytes": "6594" }, { "name": "Ruby", "bytes": "292" }, { "name": "Shell", "bytes": "11982" } ], "symlink_target": "" }
<?php /** * Class for HTML code creation * * @package Framework * @subpackage View */ class html { protected $tagname; protected $content; protected $attrib = array(); protected $allowed = array(); public static $doctype = 'xhtml'; public static $lc_tags = true; public static $common_attrib = array('id','class','style','title','align','unselectable','tabindex','role'); public static $containers = array('iframe','div','span','p','h1','h2','h3','ul','form','textarea','table','thead','tbody','tr','th','td','style','script'); public static $bool_attrib = array('checked','multiple','disabled','selected','autofocus','readonly'); /** * Constructor * * @param array $attrib Hash array with tag attributes */ public function __construct($attrib = array()) { if (is_array($attrib)) { $this->attrib = $attrib; } } /** * Return the tag code * * @return string The finally composed HTML tag */ public function show() { return self::tag($this->tagname, $this->attrib, $this->content, array_merge(self::$common_attrib, $this->allowed)); } /****** STATIC METHODS *******/ /** * Generic method to create a HTML tag * * @param string $tagname Tag name * @param array $attrib Tag attributes as key/value pairs * @param string $content Optinal Tag content (creates a container tag) * @param array $allowed List with allowed attributes, omit to allow all * * @return string The XHTML tag */ public static function tag($tagname, $attrib = array(), $content = null, $allowed = null) { if (is_string($attrib)) { $attrib = array('class' => $attrib); } $inline_tags = array('a','span','img'); $suffix = $attrib['nl'] || ($content && $attrib['nl'] !== false && !in_array($tagname, $inline_tags)) ? "\n" : ''; $tagname = self::$lc_tags ? strtolower($tagname) : $tagname; if (isset($content) || in_array($tagname, self::$containers)) { $suffix = $attrib['noclose'] ? $suffix : '</' . $tagname . '>' . $suffix; unset($attrib['noclose'], $attrib['nl']); return '<' . $tagname . self::attrib_string($attrib, $allowed) . 
'>' . $content . $suffix; } else { return '<' . $tagname . self::attrib_string($attrib, $allowed) . '>' . $suffix; } } /** * Return DOCTYPE tag of specified type * * @param string $type Document type (html5, xhtml, 'xhtml-trans, xhtml-strict) */ public static function doctype($type) { $doctypes = array( 'html5' => '<!DOCTYPE html>', 'xhtml' => '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">', 'xhtml-trans' => '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">', 'xhtml-strict' => '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">', ); if ($doctypes[$type]) { self::$doctype = preg_replace('/-\w+$/', '', $type); return $doctypes[$type]; } return ''; } /** * Derrived method for <div> containers * * @param mixed $attr Hash array with tag attributes or string with class name * @param string $cont Div content * * @return string HTML code * @see html::tag() */ public static function div($attr = null, $cont = null) { if (is_string($attr)) { $attr = array('class' => $attr); } return self::tag('div', $attr, $cont, array_merge(self::$common_attrib, array('onclick'))); } /** * Derrived method for <p> blocks * * @param mixed $attr Hash array with tag attributes or string with class name * @param string $cont Paragraph content * * @return string HTML code * @see html::tag() */ public static function p($attr = null, $cont = null) { if (is_string($attr)) { $attr = array('class' => $attr); } return self::tag('p', $attr, $cont, self::$common_attrib); } /** * Derrived method to create <img /> * * @param mixed $attr Hash array with tag attributes or string with image source (src) * * @return string HTML code * @see html::tag() */ public static function img($attr = null) { if (is_string($attr)) { $attr = array('src' => $attr); } return self::tag('img', $attr + array('alt' => ''), 
null, array_merge(self::$common_attrib, array('src','alt','width','height','border','usemap','onclick','onerror'))); } /** * Derrived method for link tags * * @param mixed $attr Hash array with tag attributes or string with link location (href) * @param string $cont Link content * * @return string HTML code * @see html::tag() */ public static function a($attr, $cont) { if (is_string($attr)) { $attr = array('href' => $attr); } return self::tag('a', $attr, $cont, array_merge(self::$common_attrib, array('href','target','name','rel','onclick','onmouseover','onmouseout','onmousedown','onmouseup'))); } /** * Derrived method for inline span tags * * @param mixed $attr Hash array with tag attributes or string with class name * @param string $cont Tag content * * @return string HTML code * @see html::tag() */ public static function span($attr, $cont) { if (is_string($attr)) { $attr = array('class' => $attr); } return self::tag('span', $attr, $cont, self::$common_attrib); } /** * Derrived method for form element labels * * @param mixed $attr Hash array with tag attributes or string with 'for' attrib * @param string $cont Tag content * * @return string HTML code * @see html::tag() */ public static function label($attr, $cont) { if (is_string($attr)) { $attr = array('for' => $attr); } return self::tag('label', $attr, $cont, array_merge(self::$common_attrib, array('for'))); } /** * Derrived method to create <iframe></iframe> * * @param mixed $attr Hash array with tag attributes or string with frame source (src) * * @return string HTML code * @see html::tag() */ public static function iframe($attr = null, $cont = null) { if (is_string($attr)) { $attr = array('src' => $attr); } return self::tag('iframe', $attr, $cont, array_merge(self::$common_attrib, array('src','name','width','height','border','frameborder','onload','allowfullscreen'))); } /** * Derrived method to create <script> tags * * @param mixed $attr Hash array with tag attributes or string with script source (src) * 
@param string $cont Javascript code to be placed as tag content * * @return string HTML code * @see html::tag() */ public static function script($attr, $cont = null) { if (is_string($attr)) { $attr = array('src' => $attr); } if ($cont) { if (self::$doctype == 'xhtml') $cont = "\n/* <![CDATA[ */\n" . $cont . "\n/* ]]> */\n"; else $cont = "\n" . $cont . "\n"; } return self::tag('script', $attr + array('type' => 'text/javascript', 'nl' => true), $cont, array_merge(self::$common_attrib, array('src','type','charset'))); } /** * Derrived method for line breaks * * @param array $attrib Associative arry with tag attributes * * @return string HTML code * @see html::tag() */ public static function br($attrib = array()) { return self::tag('br', $attrib); } /** * Create string with attributes * * @param array $attrib Associative array with tag attributes * @param array $allowed List of allowed attributes * * @return string Valid attribute string */ public static function attrib_string($attrib = array(), $allowed = null) { if (empty($attrib)) { return ''; } $allowed_f = array_flip((array)$allowed); $attrib_arr = array(); foreach ($attrib as $key => $value) { // skip size if not numeric if ($key == 'size' && !is_numeric($value)) { continue; } // ignore "internal" or empty attributes if ($key == 'nl' || $value === null) { continue; } // ignore not allowed attributes, except aria-* and data-* if (!empty($allowed)) { $is_data_attr = @substr_compare($key, 'data-', 0, 5) === 0; $is_aria_attr = @substr_compare($key, 'aria-', 0, 5) === 0; if (!$is_aria_attr && !$is_data_attr && !isset($allowed_f[$key])) { continue; } } // skip empty eventhandlers if (preg_match('/^on[a-z]+/', $key) && !$value) { continue; } // attributes with no value if (in_array($key, self::$bool_attrib)) { if ($value) { // @TODO: minimize attribute in non-xhtml mode $attrib_arr[] = $key . '="' . $key . '"'; } } else { $attrib_arr[] = $key . '="' . self::quote($value) . '"'; } } return count($attrib_arr) ? 
' '.implode(' ', $attrib_arr) : ''; } /** * Convert a HTML attribute string attributes to an associative array (name => value) * * @param string Input string * * @return array Key-value pairs of parsed attributes */ public static function parse_attrib_string($str) { $attrib = array(); $regexp = '/\s*([-_a-z]+)=(["\'])??(?(2)([^\2]*)\2|(\S+?))/Ui'; preg_match_all($regexp, stripslashes($str), $regs, PREG_SET_ORDER); // convert attributes to an associative array (name => value) if ($regs) { foreach ($regs as $attr) { $attrib[strtolower($attr[1])] = html_entity_decode($attr[3] . $attr[4]); } } return $attrib; } /** * Replacing specials characters in html attribute value * * @param string $str Input string * * @return string The quoted string */ public static function quote($str) { static $flags; if (!$flags) { $flags = ENT_COMPAT; if (defined('ENT_SUBSTITUTE')) { $flags |= ENT_SUBSTITUTE; } } return @htmlspecialchars($str, $flags, RCUBE_CHARSET); } } /** * Class to create an HTML input field * * @package Framework * @subpackage View */ class html_inputfield extends html { protected $tagname = 'input'; protected $type = 'text'; protected $allowed = array( 'type','name','value','size','tabindex','autocapitalize','required', 'autocomplete','checked','onchange','onclick','disabled','readonly', 'spellcheck','results','maxlength','src','multiple','accept', 'placeholder','autofocus','pattern' ); /** * Object constructor * * @param array $attrib Associative array with tag attributes */ public function __construct($attrib = array()) { if (is_array($attrib)) { $this->attrib = $attrib; } if ($attrib['type']) { $this->type = $attrib['type']; } } /** * Compose input tag * * @param string $value Field value * @param array $attrib Additional attributes to override * * @return string HTML output */ public function show($value = null, $attrib = null) { // overwrite object attributes if (is_array($attrib)) { $this->attrib = array_merge($this->attrib, $attrib); } // set value attribute 
if ($value !== null) { $this->attrib['value'] = $value; } // set type $this->attrib['type'] = $this->type; return parent::show(); } } /** * Class to create an HTML password field * * @package Framework * @subpackage View */ class html_passwordfield extends html_inputfield { protected $type = 'password'; } /** * Class to create an hidden HTML input field * * @package Framework * @subpackage View */ class html_hiddenfield extends html { protected $tagname = 'input'; protected $type = 'hidden'; protected $allowed = array('type','name','value','onchange','disabled','readonly'); protected $fields = array(); /** * Constructor * * @param array $attrib Named tag attributes */ public function __construct($attrib = null) { if (is_array($attrib)) { $this->add($attrib); } } /** * Add a hidden field to this instance * * @param array $attrib Named tag attributes */ public function add($attrib) { $this->fields[] = $attrib; } /** * Create HTML code for the hidden fields * * @return string Final HTML code */ public function show() { $out = ''; foreach ($this->fields as $attrib) { $out .= self::tag($this->tagname, array('type' => $this->type) + $attrib); } return $out; } } /** * Class to create HTML radio buttons * * @package Framework * @subpackage View */ class html_radiobutton extends html_inputfield { protected $type = 'radio'; /** * Get HTML code for this object * * @param string $value Value of the checked field * @param array $attrib Additional attributes to override * * @return string HTML output */ public function show($value = '', $attrib = null) { // overwrite object attributes if (is_array($attrib)) { $this->attrib = array_merge($this->attrib, $attrib); } // set value attribute $this->attrib['checked'] = ((string)$value == (string)$this->attrib['value']); return parent::show(); } } /** * Class to create HTML checkboxes * * @package Framework * @subpackage View */ class html_checkbox extends html_inputfield { protected $type = 'checkbox'; /** * Get HTML code for this 
object * * @param string $value Value of the checked field * @param array $attrib Additional attributes to override * * @return string HTML output */ public function show($value = '', $attrib = null) { // overwrite object attributes if (is_array($attrib)) { $this->attrib = array_merge($this->attrib, $attrib); } // set value attribute $this->attrib['checked'] = ((string)$value == (string)$this->attrib['value']); return parent::show(); } } /** * Class to create an HTML textarea * * @package Framework * @subpackage View */ class html_textarea extends html { protected $tagname = 'textarea'; protected $allowed = array('name','rows','cols','wrap','tabindex', 'onchange','disabled','readonly','spellcheck'); /** * Get HTML code for this object * * @param string $value Textbox value * @param array $attrib Additional attributes to override * * @return string HTML output */ public function show($value = '', $attrib = null) { // overwrite object attributes if (is_array($attrib)) { $this->attrib = array_merge($this->attrib, $attrib); } // take value attribute as content if (empty($value) && !empty($this->attrib['value'])) { $value = $this->attrib['value']; } // make shure we don't print the value attribute if (isset($this->attrib['value'])) { unset($this->attrib['value']); } if (!empty($value) && empty($this->attrib['is_escaped'])) { $value = self::quote($value); } return self::tag($this->tagname, $this->attrib, $value, array_merge(self::$common_attrib, $this->allowed)); } } /** * Builder for HTML drop-down menus * Syntax:<pre> * // create instance. 
arguments are used to set attributes of select-tag * $select = new html_select(array('name' => 'fieldname')); * * // add one option * $select->add('Switzerland', 'CH'); * * // add multiple options * $select->add(array('Switzerland','Germany'), array('CH','DE')); * * // generate pulldown with selection 'Switzerland' and return html-code * // as second argument the same attributes available to instanciate can be used * print $select->show('CH'); * </pre> * * @package Framework * @subpackage View */ class html_select extends html { protected $tagname = 'select'; protected $options = array(); protected $allowed = array('name','size','tabindex','autocomplete', 'multiple','onchange','disabled','rel'); /** * Add a new option to this drop-down * * @param mixed $names Option name or array with option names * @param mixed $values Option value or array with option values * @param array $attrib Additional attributes for the option entry */ public function add($names, $values = null, $attrib = array()) { if (is_array($names)) { foreach ($names as $i => $text) { $this->options[] = array('text' => $text, 'value' => $values[$i]) + $attrib; } } else { $this->options[] = array('text' => $names, 'value' => $values) + $attrib; } } /** * Get HTML code for this object * * @param string $select Value of the selection option * @param array $attrib Additional attributes to override * * @return string HTML output */ public function show($select = array(), $attrib = null) { // overwrite object attributes if (is_array($attrib)) { $this->attrib = array_merge($this->attrib, $attrib); } $this->content = "\n"; $select = (array)$select; foreach ($this->options as $option) { $attr = array( 'value' => $option['value'], 'selected' => (in_array($option['value'], $select, true) || in_array($option['text'], $select, true)) ? 
1 : null); $option_content = $option['text']; if (empty($this->attrib['is_escaped'])) { $option_content = self::quote($option_content); } $this->content .= self::tag('option', $attr + $option, $option_content, array('value','label','class','style','title','disabled','selected')); } return parent::show(); } } /** * Class to build an HTML table * * @package Framework * @subpackage View */ class html_table extends html { protected $tagname = 'table'; protected $allowed = array('id','class','style','width','summary', 'cellpadding','cellspacing','border'); private $header = array(); private $rows = array(); private $rowindex = 0; private $colindex = 0; /** * Constructor * * @param array $attrib Named tag attributes */ public function __construct($attrib = array()) { $default_attrib = self::$doctype == 'xhtml' ? array('summary' => '', 'border' => '0') : array(); $this->attrib = array_merge($attrib, $default_attrib); if (!empty($attrib['tagname']) && $attrib['tagname'] != 'table') { $this->tagname = $attrib['tagname']; $this->allowed = self::$common_attrib; } } /** * Add a table cell * * @param array $attr Cell attributes * @param string $cont Cell content */ public function add($attr, $cont) { if (is_string($attr)) { $attr = array('class' => $attr); } $cell = new stdClass; $cell->attrib = $attr; $cell->content = $cont; $this->rows[$this->rowindex]->cells[$this->colindex] = $cell; $this->colindex += max(1, intval($attr['colspan'])); if ($this->attrib['cols'] && $this->colindex >= $this->attrib['cols']) { $this->add_row(); } } /** * Add a table header cell * * @param array $attr Cell attributes * @param string $cont Cell content */ public function add_header($attr, $cont) { if (is_string($attr)) { $attr = array('class' => $attr); } $cell = new stdClass; $cell->attrib = $attr; $cell->content = $cont; $this->header[] = $cell; } /** * Remove a column from a table * Useful for plugins making alterations * * @param string $class */ public function remove_column($class) { // 
Remove the header foreach ($this->header as $index=>$header){ if ($header->attrib['class'] == $class){ unset($this->header[$index]); break; } } // Remove cells from rows foreach ($this->rows as $i=>$row){ foreach ($row->cells as $j=>$cell){ if ($cell->attrib['class'] == $class){ unset($this->rows[$i]->cells[$j]); break; } } } } /** * Jump to next row * * @param array $attr Row attributes */ public function add_row($attr = array()) { $this->rowindex++; $this->colindex = 0; $this->rows[$this->rowindex] = new stdClass; $this->rows[$this->rowindex]->attrib = $attr; $this->rows[$this->rowindex]->cells = array(); } /** * Set row attributes * * @param array $attr Row attributes * @param int $index Optional row index (default current row index) */ public function set_row_attribs($attr = array(), $index = null) { if (is_string($attr)) { $attr = array('class' => $attr); } if ($index === null) { $index = $this->rowindex; } // make sure row object exists (#1489094) if (!$this->rows[$index]) { $this->rows[$index] = new stdClass; } $this->rows[$index]->attrib = $attr; } /** * Get row attributes * * @param int $index Row index * * @return array Row attributes */ public function get_row_attribs($index = null) { if ($index === null) { $index = $this->rowindex; } return $this->rows[$index] ? $this->rows[$index]->attrib : null; } /** * Build HTML output of the table data * * @param array $attrib Table attributes * * @return string The final table HTML code */ public function show($attrib = null) { if (is_array($attrib)) { $this->attrib = array_merge($this->attrib, $attrib); } $thead = $tbody = ""; // include <thead> if (!empty($this->header)) { $rowcontent = ''; foreach ($this->header as $c => $col) { $rowcontent .= self::tag($this->_head_tagname(), $col->attrib, $col->content); } $thead = $this->tagname == 'table' ? 
self::tag('thead', null, self::tag('tr', null, $rowcontent, parent::$common_attrib)) : self::tag($this->_row_tagname(), array('class' => 'thead'), $rowcontent, parent::$common_attrib); } foreach ($this->rows as $r => $row) { $rowcontent = ''; foreach ($row->cells as $c => $col) { $rowcontent .= self::tag($this->_col_tagname(), $col->attrib, $col->content); } if ($r < $this->rowindex || count($row->cells)) { $tbody .= self::tag($this->_row_tagname(), $row->attrib, $rowcontent, parent::$common_attrib); } } if ($this->attrib['rowsonly']) { return $tbody; } // add <tbody> $this->content = $thead . ($this->tagname == 'table' ? self::tag('tbody', null, $tbody) : $tbody); unset($this->attrib['cols'], $this->attrib['rowsonly']); return parent::show(); } /** * Count number of rows * * @return The number of rows */ public function size() { return count($this->rows); } /** * Remove table body (all rows) */ public function remove_body() { $this->rows = array(); $this->rowindex = 0; } /** * Getter for the corresponding tag name for table row elements */ private function _row_tagname() { static $row_tagnames = array('table' => 'tr', 'ul' => 'li', '*' => 'div'); return $row_tagnames[$this->tagname] ?: $row_tagnames['*']; } /** * Getter for the corresponding tag name for table row elements */ private function _head_tagname() { static $head_tagnames = array('table' => 'th', '*' => 'span'); return $head_tagnames[$this->tagname] ?: $head_tagnames['*']; } /** * Getter for the corresponding tag name for table cell elements */ private function _col_tagname() { static $col_tagnames = array('table' => 'td', '*' => 'span'); return $col_tagnames[$this->tagname] ?: $col_tagnames['*']; } }
{ "content_hash": "2a0964ccccdd7e59304a4c0691b0dd14", "timestamp": "", "source": "github", "line_count": 931, "max_line_length": 162, "avg_line_length": 28.10418904403867, "alnum_prop": 0.5229122874068411, "repo_name": "georgehristov/EPESI", "id": "8c2534a589df54a5d980655fa7d18547d6b06235", "size": "27146", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "modules/CRM/Roundcube/RC/program/lib/Roundcube/html.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "5692" }, { "name": "JavaScript", "bytes": "9609" }, { "name": "PHP", "bytes": "27619" } ], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "479ac8365387552cbdd9730453becef0", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "5207213bef4e5bcd47f26a7544b44eccbdf1a789", "size": "194", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Malpighiales/Malpighiaceae/Heteropterys/Heteropterys laurifolia/Heteropterys laurifolia antillana/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
import { FloatingDrawer } from './variants/FloatingDrawer/FloatingDrawer'; export { FloatingDrawer };
{ "content_hash": "d25e4deffdff6f929b52a903764bc379", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 74, "avg_line_length": 34.333333333333336, "alnum_prop": 0.7766990291262136, "repo_name": "Talend/ui", "id": "421e7dcc90a0c5eaec2109400d5fc1824facbefa", "size": "103", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/design-system/src/components/WIP/Drawer/index.ts", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "809" }, { "name": "Groovy", "bytes": "9150" }, { "name": "HTML", "bytes": "183895" }, { "name": "Java", "bytes": "338" }, { "name": "JavaScript", "bytes": "4781212" }, { "name": "SCSS", "bytes": "699775" }, { "name": "Shell", "bytes": "62" }, { "name": "TypeScript", "bytes": "1291286" } ], "symlink_target": "" }
FROM balenalib/imx7-var-som-debian:sid-run ENV NODE_VERSION 12.22.1 ENV YARN_VERSION 1.22.4 RUN buildDeps='curl libatomic1' \ && set -x \ && for key in \ 6A010C5166006599AA17F08146C2130DFD2497F5 \ ; do \ gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \ gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \ gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \ done \ && apt-get update && apt-get install -y $buildDeps --no-install-recommends \ && rm -rf /var/lib/apt/lists/* \ && curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \ && echo "1bc056e1fef1c83059235d927edea2c1a2eee91ce654f45369a2af95c041e198 node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \ && tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \ && rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \ && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \ && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && mkdir -p /opt/yarn \ && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \ && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \ && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \ && npm config set unsafe-perm true -g --unsafe-perm \ && rm -rf /tmp/* CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh" \ && echo "Running test-stack@node" \ && chmod +x test-stack@node.sh \ && bash test-stack@node.sh \ && rm -rf test-stack@node.sh RUN [ ! 
-d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Sid \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v12.22.1, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && cp /bin/sh /bin/sh.real \ && mv /bin/sh-shim /bin/sh
{ "content_hash": "77b13b4d171db761cbf2a5da42974bcd", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 689, "avg_line_length": 64.82222222222222, "alnum_prop": 0.7034624614329791, "repo_name": "nghiant2710/base-images", "id": "6be301a26e1d2e89b588709bd641e77dfb5b6e24", "size": "2938", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/node/imx7-var-som/debian/sid/12.22.1/run/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "144558581" }, { "name": "JavaScript", "bytes": "16316" }, { "name": "Shell", "bytes": "368690" } ], "symlink_target": "" }
import json

import pika

from config import config

# Load the sample payload once at startup. Using a context manager fixes the
# original's leaked file handle (the file was opened but never closed).
with open("sample_queue_message.json", "r") as f:
    sample_data = json.load(f)

# Serialize once; the same JSON string is published and echoed below.
body = json.dumps(sample_data)

# Connect to the broker configured in config.py and declare the target queue.
# queue_declare is idempotent, so this is safe if the queue already exists.
connection = pika.BlockingConnection(pika.ConnectionParameters(
    host=config["server"]))
channel = connection.channel()
channel.queue_declare(queue=config["queue"])

# Publishing to the default ('') exchange routes directly to the queue named
# by routing_key.
channel.basic_publish(exchange='',
                      routing_key=config["queue"],
                      body=body)
# print() call form replaces the Python-2-only print statement; the separator
# behavior matches the original's comma output.
print(" [x] Sent ", body)

connection.close()
{ "content_hash": "e8126db38a50b247c6f85e0e998edf9a", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 63, "avg_line_length": 25.94736842105263, "alnum_prop": 0.6673427991886409, "repo_name": "adrianchifor/Flipcam-Backend", "id": "a6648fd60cef1b5842ca7f11a044d41748abb559", "size": "515", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "videoconcat/test_send.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "16015" }, { "name": "Python", "bytes": "2978" } ], "symlink_target": "" }
layout: post date: 2017-09-04 title: "Paloma Blanca Style 4412 Sleeveless Court Train Sheath/Column" category: Paloma Blanca tags: [Paloma Blanca,Sheath/Column,Sweetheart,Court Train,Sleeveless] --- ### Paloma Blanca Style 4412 Just **$339.99** ### Sleeveless Court Train Sheath/Column <table><tr><td>BRANDS</td><td>Paloma Blanca</td></tr><tr><td>Silhouette</td><td>Sheath/Column</td></tr><tr><td>Neckline</td><td>Sweetheart</td></tr><tr><td>Hemline/Train</td><td>Court Train</td></tr><tr><td>Sleeve</td><td>Sleeveless</td></tr></table> <a href="https://www.readybrides.com/en/paloma-blanca/513-paloma-blanca-style-4412.html"><img src="//img.readybrides.com/1735/paloma-blanca-style-4412.jpg" alt="Paloma Blanca Style 4412" style="width:100%;" /></a> <!-- break --><a href="https://www.readybrides.com/en/paloma-blanca/513-paloma-blanca-style-4412.html"><img src="//img.readybrides.com/1734/paloma-blanca-style-4412.jpg" alt="Paloma Blanca Style 4412" style="width:100%;" /></a> Buy it: [https://www.readybrides.com/en/paloma-blanca/513-paloma-blanca-style-4412.html](https://www.readybrides.com/en/paloma-blanca/513-paloma-blanca-style-4412.html)
{ "content_hash": "5532e9691a3749ea45115f737db14f38", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 250, "avg_line_length": 82.28571428571429, "alnum_prop": 0.7291666666666666, "repo_name": "HOLEIN/HOLEIN.github.io", "id": "1d5b0b28a60a7bccbdbfdc7459aff8fee9a3bdc4", "size": "1156", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2017-09-04-Paloma-Blanca-Style-4412-Sleeveless-Court-Train-SheathColumn.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "83876" }, { "name": "HTML", "bytes": "14547" }, { "name": "Ruby", "bytes": "897" } ], "symlink_target": "" }
set -e [[ -z $WORKSPACE ]] && WORKSPACE=`pwd` [[ -z $BOOTSTRAP ]] && BOOTSTRAP=false [[ -z $BASH_ENV ]] && BASH_ENV=`mktemp` [[ -z $USE_DOCKER ]] && USE_DOCKER=true set -u # Common definitions from latest bioconda-utils master have to be downloaded before setup.sh is executed. # This file can be used to set BIOCONDA_UTILS_TAG and MINICONDA_VER. source .circleci/common.sh # Set path echo "export PATH=$WORKSPACE/miniconda/bin:$PATH" >> $BASH_ENV source $BASH_ENV # Make sure the CircleCI config is up to date. # add upstream as some semi-randomly named temporary remote to diff against UPSTREAM_REMOTE=__upstream__$(mktemp -u XXXXXXXXXX) git remote add -t master $UPSTREAM_REMOTE https://github.com/bioconda/bioconda-recipes.git git fetch $UPSTREAM_REMOTE if ! git diff --quiet HEAD...$UPSTREAM_REMOTE/master -- .circleci/; then echo 'Your bioconda-recipes CI configuration is out of date.' echo 'Please update it to the latest version of the upstream master branch.' echo 'You can do this, e.g., by running:' echo ' git fetch https://github.com/bioconda/bioconda-recipes.git master' echo ' git merge FETCH_HEAD' exit 1 fi git remote remove $UPSTREAM_REMOTE if ! type bioconda-utils 2> /dev/null || [[ $BOOTSTRAP == "true" ]]; then echo "Setting up bioconda-utils..." 
# setup conda and bioconda-utils if not loaded from cache mkdir -p $WORKSPACE # step 1: download and install miniconda if [[ $OSTYPE == darwin* ]]; then tag="MacOSX" elif [[ $OSTYPE == linux* ]]; then tag="Linux" else echo "Unsupported OS: $OSTYPE" exit 1 fi curl -L -o miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-$MINICONDA_VER-$tag-x86_64.sh bash miniconda.sh -b -p $WORKSPACE/miniconda # step 2: setup channels $WORKSPACE/miniconda/bin/conda config --system --add channels defaults $WORKSPACE/miniconda/bin/conda config --system --add channels bioconda $WORKSPACE/miniconda/bin/conda config --system --add channels conda-forge # step 3: install bioconda-utils $WORKSPACE/miniconda/bin/conda install -y git pip --file https://raw.githubusercontent.com/bioconda/bioconda-utils/$BIOCONDA_UTILS_TAG/bioconda_utils/bioconda_utils-requirements.txt $WORKSPACE/miniconda/bin/pip install git+https://github.com/bioconda/bioconda-utils.git@$BIOCONDA_UTILS_TAG # step 4: configure local channel mkdir -p $WORKSPACE/miniconda/conda-bld/{noarch,linux-64,osx-64} $WORKSPACE/miniconda/bin/conda index $WORKSPACE/miniconda/conda-bld $WORKSPACE/miniconda/bin/conda config --system --add channels file://$WORKSPACE/miniconda/conda-bld # step 5: cleanup $WORKSPACE/miniconda/bin/conda clean -y --all rm miniconda.sh fi # Fetch the master branch for comparison (this can fail locally, if git remote # is configured via ssh and this is executed in a container). if [[ $BOOTSTRAP != "true" ]]; then git fetch origin +master:master || true fi
{ "content_hash": "276c967ec573ca99dcb115a512465b33", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 185, "avg_line_length": 40.28378378378378, "alnum_prop": 0.7081516269708151, "repo_name": "abims-sbr/bioconda-recipes", "id": "6c2677c33d077358578a0d82128d458ec907975b", "size": "2993", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": ".circleci/setup.sh", "mode": "33261", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "237" }, { "name": "C", "bytes": "102020" }, { "name": "Java", "bytes": "286" }, { "name": "M4", "bytes": "726" }, { "name": "Perl", "bytes": "99619" }, { "name": "Perl 6", "bytes": "23942" }, { "name": "Prolog", "bytes": "1044" }, { "name": "Python", "bytes": "371349" }, { "name": "Roff", "bytes": "996" }, { "name": "Shell", "bytes": "1493906" } ], "symlink_target": "" }
namespace EasyCpp
{
	namespace VFS
	{
		// VFSProvider implementation backed by the host operating system's
		// native filesystem. The base path supplied at construction is stored
		// in _base — presumably the root all provider paths resolve against;
		// confirm in the .cpp implementation.
		class DLL_EXPORT OSVFSProvider : public VFSProvider
		{
		public:
			// \param base Base directory for this provider (stored in _base).
			OSVFSProvider(const std::string& base);
			virtual ~OSVFSProvider();

			// Inherited from VFSProvider
			virtual bool ready() override;
			virtual bool exists(const Path & p) override;
			virtual void remove(const Path & p) override;
			virtual void rename(const Path & p, const Path & target) override;
			virtual std::vector<Path> getFiles(const Path & p) override;
			virtual InputOutputStreamPtr openIO(const Path& path) override;
			virtual InputStreamPtr openInput(const Path& path) override;
			virtual OutputStreamPtr openOutput(const Path& path) override;

			// Returns the process's current working directory.
			static std::string getCurrentWorkingDirectory();
		private:
			// Base directory captured at construction.
			std::string _base;
		};
	}
}
{ "content_hash": "f41685fafd88605db8487bff39b6c23d", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 69, "avg_line_length": 29.5, "alnum_prop": 0.7288135593220338, "repo_name": "Thalhammer/EasyCpp", "id": "df896cb0c3ea71f84ebb838c71f8f8fd89666a36", "size": "838", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "EasyCpp/VFS/OSVFSProvider/OSVFSProvider.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "351" }, { "name": "C++", "bytes": "677359" }, { "name": "Makefile", "bytes": "5900" } ], "symlink_target": "" }
namespace SharpArch.NHibernate.FluentNHibernate
{
    using System;

    using global::FluentNHibernate.Automapping;

    /// <summary>
    ///     Abstraction over the construction of a Fluent NHibernate
    ///     <see cref="AutoPersistenceModel" />.
    /// </summary>
    public interface IAutoPersistenceModelGenerator
    {
        /// <summary>
        ///     Builds and returns the auto-persistence model.
        /// </summary>
        AutoPersistenceModel Generate();
    }
}
{ "content_hash": "ba4d3883555436cc96be0fb2b4385587", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 51, "avg_line_length": 20.363636363636363, "alnum_prop": 0.7410714285714286, "repo_name": "libinqi/Sharp-Architecture", "id": "afbfbdcadc07875fea13e94f4e19c156f75038b2", "size": "224", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "Solutions/SharpArch.NHibernate/FluentNHibernate/IAutoPersistenceModelGenerator.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "867" }, { "name": "C#", "bytes": "347556" }, { "name": "HTML", "bytes": "6020" }, { "name": "PowerShell", "bytes": "236" }, { "name": "Shell", "bytes": "7609" } ], "symlink_target": "" }
module Less::Js::Routes
  # Rails integration point: registers this gem's rake tasks with the host
  # application when Rails loads its railties.
  class Railtie < ::Rails::Railtie
    rake_tasks do
      # __FILE__ lives in lib/less-js-routes/, so the tasks file resolves to
      # lib/tasks/less_js_routes.tasks one directory up.
      load File.dirname(__FILE__) + '/../tasks/less_js_routes.tasks'
    end
  end
end
{ "content_hash": "07e91e775fa450f8e32fe6728634a046", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 68, "avg_line_length": 23.285714285714285, "alnum_prop": 0.6319018404907976, "repo_name": "stevenbristol/less-js-routes", "id": "314d3b8719a95623804bbf26ace6c88c48275851", "size": "181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/less-js-routes/railtie.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "10107" } ], "symlink_target": "" }
package com.xeiam.xchange.examples.cavirtex.marketdata;

import java.io.IOException;

import com.xeiam.xchange.Exchange;
import com.xeiam.xchange.ExchangeFactory;
import com.xeiam.xchange.currency.CurrencyPair;
import com.xeiam.xchange.dto.marketdata.OrderBook;
import com.xeiam.xchange.service.polling.PollingMarketDataService;
import com.xeiam.xchange.virtex.VirtExExchange;
import com.xeiam.xchange.virtex.dto.marketdata.VirtExDepth;
import com.xeiam.xchange.virtex.service.polling.VirtExMarketDataServiceRaw;

/**
 * Demonstrate requesting Order Book at VirtEx.
 *
 * Shows the same query twice: once through the generic XChange
 * {@link PollingMarketDataService} API and once through the VirtEx-specific
 * raw service.
 */
public class DepthDemo {

  public static void main(String[] args) throws IOException {

    // Use the factory to get the VirtEx exchange API using default settings
    Exchange cavirtex = ExchangeFactory.INSTANCE.createExchange(VirtExExchange.class.getName());

    // Interested in the public polling market data feed (no authentication)
    PollingMarketDataService marketDataService = cavirtex.getPollingMarketDataService();

    generic(marketDataService);
    raw((VirtExMarketDataServiceRaw) marketDataService);
  }

  /**
   * Fetches the BTC/CAD order book via the exchange-agnostic API and prints
   * its size plus the first ask and bid entries.
   */
  private static void generic(PollingMarketDataService marketDataService) throws IOException {

    // Get the latest order book data for BTC/CAD
    OrderBook orderBook = marketDataService.getOrderBook(CurrencyPair.BTC_CAD);

    System.out.println("Current Order Book size for BTC / CAD: " + (orderBook.getAsks().size() + orderBook.getBids().size()));
    System.out.println("First Ask: " + orderBook.getAsks().get(0).toString());
    System.out.println("First Bid: " + orderBook.getBids().get(0).toString());

    System.out.println(orderBook.toString());
  }

  /**
   * Fetches the same order book via the VirtEx-specific raw DTO API.
   *
   * NOTE(review): the labels say "Last Ask"/"Last Bid" but print index 0,
   * while generic() labels index 0 "First". Confirm the ordering of
   * VirtExDepth entries before trusting these labels.
   */
  private static void raw(VirtExMarketDataServiceRaw marketDataService) throws IOException {

    // Get the latest order book data for BTC/CAD
    VirtExDepth orderBook = marketDataService.getVirtExOrderBook("CAD");

    System.out.println("Current Order Book size for BTC / CAD: " + (orderBook.getAsks().size() + orderBook.getBids().size()));
    System.out.println("Last Ask: " + orderBook.getAsks().get(0)[0].toString());
    System.out.println("Last Bid: " + orderBook.getBids().get(0)[0].toString());

    System.out.println(orderBook.toString());
  }

}
{ "content_hash": "a1d6d06886a663e6afc2c6ea90c6750c", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 126, "avg_line_length": 36.47540983606557, "alnum_prop": 0.7555056179775281, "repo_name": "habibmasuro/XChange", "id": "fae4bba0b851e63e0c77b158c835e45faac4781a", "size": "3363", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "xchange-examples/src/main/java/com/xeiam/xchange/examples/cavirtex/marketdata/DepthDemo.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "2762129" } ], "symlink_target": "" }
package com.adamroughton.concentus.cluster;

import static org.junit.Assert.fail;

import java.util.concurrent.TimeUnit;

import org.apache.zookeeper.data.Stat;

import com.adamroughton.concentus.cluster.ClusterUtil.DataComparisonDelegate;
import com.adamroughton.concentus.data.cluster.kryo.ClusterState;
import com.adamroughton.concentus.data.cluster.kryo.StateEntry;
import com.adamroughton.concentus.util.Container;
import com.adamroughton.concentus.util.Util;
import com.esotericsoftware.kryo.Kryo;
import com.google.monitoring.runtime.instrumentation.common.com.google.common.base.Objects;
import com.netflix.curator.framework.CuratorFramework;

/**
 * Test helpers for reading and writing Kryo-serialized {@link StateEntry}
 * values on ZooKeeper nodes (via Curator) in cluster tests.
 */
public class HelperMethods {

	/*
	 *
	 * Service State methods
	 *
	 */

	/**
	 * Writes a state signal entry to the service's signal node, creating the
	 * node (and parents) if it does not yet exist.
	 *
	 * The entry's version is the current ZooKeeper node version plus one
	 * (-1 + 1 = 0 when the node is new).
	 *
	 * @return the version written into the signal entry
	 */
	public static <TState extends Enum<TState> & ClusterState> int signalState(CuratorFramework client,
			Kryo kryo,
			String servicePath,
			Class<TState> stateType,
			TState newState,
			Object data) throws Exception {
		String signalPath = CorePath.SERVICE_STATE_SIGNAL.getAbsolutePath(servicePath);
		Stat signalStat = client.checkExists().forPath(signalPath);
		int version;
		if (signalStat == null) {
			version = -1;
		} else {
			version = signalStat.getVersion();
		}
		StateEntry<TState> signalEntry = new StateEntry<>(stateType, newState, data, version + 1);
		byte[] signalEntryBytes = Util.toKryoBytes(kryo, signalEntry);
		if (signalStat == null) {
			client.create().creatingParentsIfNeeded().forPath(signalPath, signalEntryBytes);
		} else {
			client.setData().forPath(signalPath, signalEntryBytes);
		}
		return version + 1;
	}

	/**
	 * Serializes a {@link StateEntry} with the given state/data/version and
	 * stores it on the service's state node, creating the path if needed.
	 */
	public static <TState extends Enum<TState> & ClusterState> void setServiceState(CuratorFramework client,
			Kryo kryo,
			String servicePath,
			Class<TState> stateType,
			TState newState,
			int version,
			Object data) throws Exception {
		String statePath = CorePath.SERVICE_STATE.getAbsolutePath(servicePath);
		ClusterUtil.ensurePathCreated(client, statePath);
		StateEntry<TState> stateEntry = new StateEntry<TState>(stateType, newState, data, version);
		byte[] stateEntryBytes = Util.toKryoBytes(kryo, stateEntry);
		client.setData().forPath(statePath, stateEntryBytes);
	}

	/**
	 * Reads and deserializes the service's current state entry.
	 *
	 * Fails the surrounding JUnit test if the deserialized entry's state type
	 * does not match {@code expectedStateType}; returns {@code null} when the
	 * node holds no deserializable entry.
	 */
	@SuppressWarnings("unchecked")
	public static <TState extends Enum<TState> & ClusterState> StateEntry<TState> getServiceState(CuratorFramework client,
			Kryo kryo,
			String servicePath,
			Class<TState> expectedStateType) throws Exception {
		String statePath = CorePath.SERVICE_STATE.getAbsolutePath(servicePath);
		ClusterUtil.ensurePathCreated(client, statePath);
		byte[] stateEntryBytes = client.getData().forPath(statePath);
		StateEntry<?> stateEntryObj = Util.fromKryoBytes(kryo, stateEntryBytes, StateEntry.class);
		if (stateEntryObj == null) return null;
		Class<?> actualStateType = stateEntryObj.stateType();
		if (!expectedStateType.equals(actualStateType)) {
			fail("The state type " + actualStateType.getCanonicalName() +
					" did not match the expected type " + expectedStateType.getCanonicalName() + ".");
		}
		return (StateEntry<TState>) stateEntryObj;
	}

	/**
	 * Convenience overload of
	 * {@link #waitForState(CuratorFramework, Kryo, ExceptionCallback, String, Enum, Class, long, TimeUnit)}
	 * that ignores the entry's state data (expects it as plain Object).
	 */
	public static <TState extends Enum<TState> & ClusterState> void waitForState(CuratorFramework client,
			Kryo kryo,
			ExceptionCallback exCallback,
			String servicePath,
			TState state,
			long timeout,
			TimeUnit unit) throws Throwable {
		waitForState(client, kryo, exCallback, servicePath, state, Object.class, timeout, unit);
	}

	/**
	 * Blocks until the service's state node holds an entry whose state equals
	 * {@code state}, or the timeout elapses.
	 *
	 * On a match, the entry's state data is captured and returned (typed as
	 * {@code expectedStateDataType}). On timeout, any exceptions queued on
	 * {@code exCallback} are rethrown first, then the JUnit test is failed.
	 */
	public static <TState extends Enum<TState> & ClusterState, TData> TData waitForState(CuratorFramework client,
			final Kryo kryo,
			ExceptionCallback exCallback,
			String servicePath,
			TState state,
			final Class<TData> expectedStateDataType,
			long timeout,
			TimeUnit unit) throws Throwable {
		// Container lets the anonymous delegate hand the matched entry's data
		// back to this method.
		final Container<TData> dataContainer = new Container<>();
		DataComparisonDelegate<TState> compDelegate = new DataComparisonDelegate<TState>() {

			@Override
			public boolean matches(TState expected, byte[] actual) {
				StateEntry<?> stateEntryObj = Util.fromKryoBytes(kryo, actual, StateEntry.class);
				boolean isState = Objects.equal(expected, stateEntryObj.getState());
				if (isState && stateEntryObj != null) {
					dataContainer.set(stateEntryObj.getStateData(expectedStateDataType));
				}
				return isState;
			}
		};
		String statePath = CorePath.SERVICE_STATE.getAbsolutePath(servicePath);
		if (!ClusterUtil.waitForData(client, statePath, state, compDelegate, timeout, unit)) {
			exCallback.throwAnyExceptions();
			fail("Timed out waiting for state to be set");
		}
		return dataContainer.get();
	}

}
{ "content_hash": "2add16c1c81b6a9ed11f372b4ce1e4b7", "timestamp": "", "source": "github", "line_count": 105, "max_line_length": 131, "avg_line_length": 42.647619047619045, "alnum_prop": 0.7646270656543099, "repo_name": "adam-roughton/Concentus", "id": "7fd147304042bff35fd77e98cd0a2806910fb855", "size": "4478", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "Core/src/test/java/com/adamroughton/concentus/cluster/HelperMethods.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "6054" }, { "name": "Java", "bytes": "1225001" }, { "name": "Scala", "bytes": "41560" }, { "name": "Shell", "bytes": "1373" } ], "symlink_target": "" }
using System;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using Hl7.Fhir.Model;
using Spark.Engine.Core;
using Spark.Engine.Extensions;

namespace Spark.Engine.ExceptionHandling
{
    /// <summary>
    /// Maps exceptions raised while handling a FHIR request onto HTTP response
    /// messages whose body is an <see cref="OperationOutcome"/> resource.
    /// </summary>
    public class ExceptionResponseMessageFactory : IExceptionResponseMessageFactory
    {
        // Removed the unused private field "SparkException ex" that the
        // original declared but never read or wrote.

        /// <summary>
        /// Builds a response for <paramref name="exception"/>, dispatching on
        /// its runtime type. Each helper returns null when the exception is
        /// not of its type, so the most specific handler wins:
        /// SparkException (its own status code and outcome), then
        /// HttpResponseException (status from the embedded response), then a
        /// generic 500 for everything else. Returns null for a null exception.
        /// </summary>
        public HttpResponseMessage GetResponseMessage(Exception exception, HttpRequestMessage request)
        {
            if (exception == null)
                return null;

            return InternalCreateHttpResponseMessage(exception as SparkException, request)
                   ?? InternalCreateHttpResponseMessage(exception as HttpResponseException, request)
                   ?? InternalCreateHttpResponseMessage(exception, request);
        }

        /// <summary>
        /// SparkException carries its own status code and (optionally) an
        /// OperationOutcome; inner exception messages are appended to it.
        /// </summary>
        private HttpResponseMessage InternalCreateHttpResponseMessage(SparkException exception, HttpRequestMessage request)
        {
            if (exception == null)
                return null;

            OperationOutcome outcome = exception.Outcome ?? new OperationOutcome();
            outcome.AddAllInnerErrors(exception);
            // (Stray empty statement ";" after the return removed.)
            return request.CreateResponse(exception.StatusCode, outcome);
        }

        /// <summary>
        /// HttpResponseException: reuse the status code of the response it
        /// wraps; the outcome carries its reason phrase.
        /// </summary>
        private HttpResponseMessage InternalCreateHttpResponseMessage(HttpResponseException exception, HttpRequestMessage request)
        {
            if (exception == null)
                return null;

            OperationOutcome outcome = new OperationOutcome().AddError(exception.Response.ReasonPhrase);
            return request.CreateResponse(exception.Response.StatusCode, outcome);
        }

        /// <summary>
        /// Fallback for any other exception: 500 Internal Server Error with
        /// all inner exception messages collected into the outcome.
        /// </summary>
        private HttpResponseMessage InternalCreateHttpResponseMessage(Exception exception, HttpRequestMessage request)
        {
            if (exception == null)
                return null;

            OperationOutcome outcome = new OperationOutcome().AddAllInnerErrors(exception);
            return request.CreateResponse(HttpStatusCode.InternalServerError, outcome);
        }
    }
}
{ "content_hash": "c72ad06a2df6d769f5d7464abe3d9da1", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 130, "avg_line_length": 37.125, "alnum_prop": 0.6854256854256854, "repo_name": "aerosonik/SparkValidation", "id": "b631a3be28dee8d372317faaff61f29b22ae4a66", "size": "2081", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Spark.Engine/ExceptionHandling/ExceptionResponseMessageFactory.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ASP", "bytes": "99" }, { "name": "Batchfile", "bytes": "49" }, { "name": "C#", "bytes": "912293" }, { "name": "CSS", "bytes": "5040" }, { "name": "HTML", "bytes": "5069" }, { "name": "JavaScript", "bytes": "136201" }, { "name": "XSLT", "bytes": "10021" } ], "symlink_target": "" }
using System;

//Created by Schwartz
namespace DesktopBattle
{
    /// <summary>
    /// An exception that is generated when an element is accessed
    /// or removed from an array based collection that is already empty.
    /// </summary>
    public class UnderflowException : ApplicationException
    {
        /// <summary>
        /// Creates the exception.
        /// </summary>
        /// <param name="msg">Description of the underflow condition; passed
        /// through to <see cref="ApplicationException"/>.</param>
        public UnderflowException(string msg)
            : base(msg)
        {
        }
    }
}
{ "content_hash": "7a8bdae46760a97c593055f8aabcc0a7", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 72, "avg_line_length": 30.916666666666668, "alnum_prop": 0.6873315363881402, "repo_name": "PxlBuzzard/Desktop-Battle", "id": "2cb1cef2ca900ad92795a35729f1c898073546ae", "size": "373", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Apparition/Apparition/DataStructs/UnderflowException.cs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C#", "bytes": "107229" } ], "symlink_target": "" }
'use strict';

// This script depends on the following scripts:
//    /fs/resources/messaging-helpers.js
//    /fs/resources/messaging-blob-helpers.js
//    /fs/resources/messaging-serialize-helpers.js
//    /fs/resources/test-helpers.js
//    /common/get-host-info.sub.js
//    /service-workers/service-worker/resources/test-helpers.sub.js

// Define URL constants for cross origin windows.
const kRemoteOrigin = get_host_info().HTTPS_REMOTE_ORIGIN;
const kRemoteOriginDocumentMessageTarget = `${kRemoteOrigin}${base_path()}` +
    kDocumentMessageTarget;

// Sending a FileSystemHandle to a cross origin |target| through postMessage()
// must dispatch the 'messageerror' event.
//
// This test sends a FileSystemHandle to |target|. |target| responds with a
// serialized MessageEvent from the 'messageerror' event, allowing the test
// runner to verify MessageEvent properties.
async function do_send_message_error_test(
  test,
  root_dir,
  receiver,
  target,
  target_origin,
  // False when the MessageEvent's source is null.
  expected_has_source,
  // The origin of MessageEvents received by |target|.
  expected_origin) {
  const message_watcher = new EventWatcher(test, receiver, 'message');

  // Send a file to |target|.
  const file = await createFileWithContents(
    test, 'test-error-file', 'test-error-file-contents', root_dir);
  target.postMessage(
    {
      type: 'receive-file-system-handles',
      cloned_file_system_handles: [file]
    }, { targetOrigin: target_origin });

  // Wait for |target| to respond with results.
  let message_event = await message_watcher.wait_for('message');
  const first_response = message_event.data;
  assert_equals(first_response.type, 'serialized-message-error',
    'The test runner must receive a "serialized-message-error" message ' +
    'in response to a FileSystemFileHandle message.');

  // Verify the results.
  assert_equals_serialized_message_error_event(
    first_response.serialized_message_error_event,
    expected_origin, expected_has_source);

  // Send a directory to |target|.
  const directory = await createDirectory(
    test, 'test-error-directory', root_dir);
  target.postMessage(
    {
      type: 'receive-file-system-handles',
      cloned_file_system_handles: [directory]
    }, { targetOrigin: target_origin });

  // Wait for |target| to respond with results.
  message_event = await message_watcher.wait_for('message');
  const second_response = message_event.data;
  assert_equals(second_response.type, 'serialized-message-error',
    'The test runner must receive a "serialized-message-error" message ' +
    'response to a FileSystemDirectoryHandle message.');

  // Verify the results.
  assert_equals_serialized_message_error_event(
    second_response.serialized_message_error_event,
    expected_origin, expected_has_source);
}

// This test receives a FileSystemHandle from |target|. This test runner
// must dispatch the 'messageerror' event after receiving a handle from target.
async function do_receive_message_error_test(
  test,
  receiver,
  target,
  target_origin,
  // False when the MessageEvent's source is null.
  expected_has_source,
  // The origin of MessageEvents received by this test runner.
  expected_origin) {
  const error_watcher = new EventWatcher(test, receiver, 'messageerror');

  // Receive a file from |target|.
  target.postMessage(
    { type: 'create-file' }, { targetOrigin: target_origin });
  const first_error = await error_watcher.wait_for('messageerror');
  const serialized_first_error = serialize_message_error_event(first_error);
  assert_equals_serialized_message_error_event(
    serialized_first_error, expected_origin, expected_has_source);

  // Receive a directory from |target|.
  target.postMessage(
    { type: 'create-directory' }, { targetOrigin: target_origin });
  const second_error = await error_watcher.wait_for('messageerror');
  const serialized_second_error = serialize_message_error_event(second_error);
  assert_equals_serialized_message_error_event(
    serialized_second_error, expected_origin, expected_has_source);
}

// Performs the send message error test followed by the receive message error
// test.
async function do_send_and_receive_message_error_test(
  test,
  root_dir,
  receiver,
  target,
  target_origin,
  // False when the MessageEvent's source is null.
  expected_has_source,
  // The origin of MessageEvents received by |target|.
  expected_origin,
  // The origin of MessageEvents received by this test runner.
  expected_remote_origin) {
  await do_send_message_error_test(
    test, root_dir, receiver, target, target_origin, expected_has_source,
    expected_origin);
  await do_receive_message_error_test(
    test, receiver, target, target_origin, expected_has_source,
    expected_remote_origin);
}

// Runs the same test as do_send_message_error_test(), but uses a MessagePort.
// This test starts by establishing a message channel between the test runner
// and |target|.
async function do_send_message_port_error_test(
  test, root_dir, target, target_origin) {
  const message_port = create_message_channel(target, target_origin);
  await do_send_message_error_test(
    test, root_dir, /*receiver=*/message_port, /*target=*/message_port,
    /*target_origin=*/undefined, /*expected_has_source=*/false,
    /*expected_origin=*/'', /*expected_remote_origin=*/'');
}

// Runs the same test as do_receive_message_error_test(), but uses a MessagePort.
async function do_receive_message_port_error_test(
  test, target, target_origin) {
  const message_port = create_message_channel(target, target_origin);
  await do_receive_message_error_test(
    test, /*receiver=*/message_port, /*target=*/message_port,
    /*target_origin=*/undefined, /*expected_has_source=*/false,
    /*expected_origin=*/'');
}

// Runs the same test as do_send_and_receive_message_error_test(), but uses a
// MessagePort.
async function do_send_and_receive_message_port_error_test(
  test, root_dir, target, target_origin) {
  await do_send_message_port_error_test(
    test, root_dir, target, target_origin);
  await do_receive_message_port_error_test(
    test, target, target_origin);
}

directory_test(async (t, root_dir) => {
  const iframe = await add_iframe(
    t, { src: kRemoteOriginDocumentMessageTarget });
  await do_send_and_receive_message_error_test(
    t, root_dir, /*receiver=*/self, /*target=*/iframe.contentWindow,
    /*target_origin=*/'*', /*expected_has_source=*/true,
    /*expected_origin=*/location.origin,
    /*expected_remote_origin=*/kRemoteOrigin);
}, 'Fail to send and receive messages using a cross origin iframe.');

directory_test(async (t, root_dir) => {
  const iframe = await add_iframe(t, { src: kRemoteOriginDocumentMessageTarget });
  await do_send_and_receive_message_port_error_test(
    t, root_dir, /*target=*/iframe.contentWindow, /*target_origin=*/'*');
}, 'Fail to send and receive messages using a cross origin message port in ' +
  'an iframe.');

directory_test(async (t, root_dir) => {
  const iframe = await add_iframe(
    t, { src: kDocumentMessageTarget, sandbox: 'allow-scripts' });
  await do_send_message_error_test(
    t, root_dir, /*receiver=*/self, /*target=*/iframe.contentWindow,
    /*target_origin=*/'*', /*expected_has_source*/true,
    /*expected_origin=*/location.origin);
}, 'Fail to send to a sandboxed iframe.');

directory_test(async (t, root_dir) => {
  const iframe = await add_iframe(
    t, { src: kDocumentMessageTarget, sandbox: 'allow-scripts' });
  await do_send_message_port_error_test(
    t, root_dir, /*target=*/iframe.contentWindow, /*target_origin=*/'*');
}, 'Fail to send messages using a message port to a sandboxed ' +
  'iframe.');

directory_test(async (t, root_dir) => {
  const iframe_data_uri = await create_message_target_data_uri(t);
  const iframe = await add_iframe(t, { src: iframe_data_uri });
  await do_send_message_error_test(t, root_dir, /*receiver=*/self,
    /*target=*/iframe.contentWindow,
    /*target_origin=*/'*', /*expected_has_source*/true,
    /*expected_origin=*/location.origin);
  // Do not test receiving FileSystemHandles from the data URI iframe. Data URI
  // iframes are insecure and do not expose the File System APIs.
}, 'Fail to send messages to a data URI iframe.');

directory_test(async (t, root_dir) => {
  const iframe_data_uri = await create_message_target_data_uri(t);
  const iframe = await add_iframe(t, { src: iframe_data_uri });
  await do_send_message_port_error_test(
    t, root_dir, /*target=*/iframe.contentWindow, /*target_origin=*/'*');
}, 'Fail to send messages using a message port in a data URI iframe.');

directory_test(async (t, root_dir) => {
  const child_window = await open_window(t, kRemoteOriginDocumentMessageTarget);
  await do_send_and_receive_message_error_test(
    t, root_dir, /*receiver=*/self, /*target=*/child_window,
    /*target_origin=*/'*', /*expected_has_source=*/true,
    /*expected_origin=*/location.origin,
    /*expected_remote_origin=*/kRemoteOrigin);
}, 'Fail to send and receive messages using a cross origin window.');

directory_test(async (t, root_dir) => {
  const child_window = await open_window(t, kRemoteOriginDocumentMessageTarget);
  await do_send_message_port_error_test(
    t, root_dir, /*target=*/child_window, /*target_origin=*/'*');
}, 'Fail to send and receive messages using a cross origin message port in ' +
  'a window.');

directory_test(async (t, root_dir) => {
  const url = `${kDocumentMessageTarget}?pipe=header(Content-Security-Policy` +
    ', sandbox allow-scripts)';
  const child_window = await open_window(t, url);
  await do_send_message_error_test(
    t, root_dir, /*receiver=*/self, /*target=*/child_window,
    /*target_origin=*/'*', /*expected_has_source*/true,
    /*expected_origin=*/location.origin);
}, 'Fail to send messages to a sandboxed window.');

directory_test(async (t, root_dir) => {
  const url = `${kDocumentMessageTarget}?pipe=header(Content-Security-Policy` +
    ', sandbox allow-scripts)';
  const child_window = await open_window(t, url);
  await do_send_message_port_error_test(
    t, root_dir, /*target=*/child_window, /*target_origin=*/'*');
}, 'Fail to send messages using a message port to a sandboxed ' +
  'window.');
{ "content_hash": "19696780f32b67f7f40564c7fa168375", "timestamp": "", "source": "github", "line_count": 244, "max_line_length": 83, "avg_line_length": 41.83196721311475, "alnum_prop": 0.7101009111394141, "repo_name": "nwjs/chromium.src", "id": "7c97a7da484e3fca54d458d6b1fde4bf7c216406", "size": "10207", "binary": false, "copies": "13", "ref": "refs/heads/nw70", "path": "third_party/blink/web_tests/external/wpt/fs/script-tests/FileSystemBaseHandle-postMessage-Error.js", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
A structure for user input. For generic event programming, use [event](https://github.com/pistondevelopers/event). This library contains: * Press and release of buttons * Keyboard * Mouse * Unicode input * Window resize * Window focus * Modifier keys [How to contribute](https://github.com/PistonDevelopers/piston/blob/master/CONTRIBUTING.md)
{ "content_hash": "99ce87a01446efdc739e963200a2a04a", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 91, "avg_line_length": 23.133333333333333, "alnum_prop": 0.7752161383285303, "repo_name": "Potpourri/piston", "id": "331579a470fea7b4f2e96f6c10866f5eaa832b34", "size": "581", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "src/input/README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Rust", "bytes": "85877" } ], "symlink_target": "" }
"""Helpers for provisioning OSF users, projects, and uploads from
conference mailing-list messages."""

import uuid

import requests

from modularodm import Q
from modularodm.exceptions import ModularOdmException

from framework.auth import Auth
from website import security
from website import settings
from website.project import new_node
from website.models import User, Node, MailRecord


def record_message(message, created):
    """Persist an audit record of an incoming conference email.

    :param message: Incoming conference message; its ``raw`` payload is
        stored verbatim.
    :param created: List of objects (users/nodes) created while handling
        the message, linked to the record for later inspection.
    """
    record = MailRecord(
        data=message.raw,
        records=created,
    )
    record.save()


def get_or_create_user(fullname, address, is_spam):
    """Get or create user by email address.

    :param str fullname: User full name
    :param str address: User email address
    :param bool is_spam: User flagged as potential spam
    :return: Tuple of (user, created)
    """
    try:
        # Case-insensitive lookup; usernames are email addresses.
        user = User.find_one(Q('username', 'iexact', address))
        return user, False
    except ModularOdmException:
        # No matching user: create a pre-confirmed account with a
        # random throwaway password; the sender never chose one.
        password = str(uuid.uuid4())
        user = User.create_confirmed(address, password, fullname)
        user.verification_key = security.random_string(20)
        if is_spam:
            # Tag rather than reject, so a human can review later.
            user.system_tags.append('is_spam')
        user.save()
        return user, True


def get_or_create_node(title, user):
    """Get or create node by title and creating user.

    :param str title: Node title
    :param User user: User creating node
    :return: Tuple of (node, created)
    """
    try:
        # Scoped to nodes the user contributes to, so two different
        # users can each have a node with the same title.
        node = Node.find_one(
            Q('title', 'iexact', title)
            & Q('contributors', 'eq', user._id)
        )
        return node, False
    except ModularOdmException:
        node = new_node('project', title, user)
        return node, True


def provision_node(conference, message, node, user):
    """Configure a freshly created (or matched) node from a conference message.

    Sets the home wiki to the message body, adds conference admins as
    hidden contributors, applies privacy and tagging policy, then saves.

    :param Conference conference:
    :param ConferenceMessage message:
    :param Node node:
    :param User user:
    """
    auth = Auth(user=user)

    node.update_node_wiki('home', message.text, auth)
    # log=False: admin additions are bookkeeping, not user activity.
    node.add_contributors(prepare_contributors(conference.admins), log=False)

    # Only make the node public when the conference allows it AND the
    # message was not flagged as spam.
    if not message.is_spam and conference.public_projects:
        node.set_privacy('public', auth=auth)

    node.add_tag(message.conference_name, auth=auth)
    node.add_tag(message.conference_category, auth=auth)
    node.system_tags.extend(['emailed', message.conference_name, message.conference_category])
    if message.is_spam:
        node.system_tags.append('spam')

    node.save()


def prepare_contributors(admins):
    """Build contributor dicts granting each admin full, non-visible access."""
    return [
        {
            'user': admin,
            'permissions': ['read', 'write', 'admin'],
            'visible': False,
        }
        for admin in admins
    ]


def upload_attachment(user, node, attachment):
    """Upload a single email attachment to the node's OSF storage.

    Rewinds the attachment stream before reading, then PUTs the content
    to the waterbutler upload URL for the node.
    """
    from website.addons.osfstorage import utils as storage_utils
    # The stream may already have been consumed during parsing; rewind.
    attachment.seek(0)
    name = attachment.filename or settings.MISSING_FILE_NAME
    # NOTE(review): reads the whole attachment into memory — presumably
    # acceptable for email-sized attachments; confirm if large files occur.
    content = attachment.read()
    upload_url = storage_utils.get_waterbutler_upload_url(user, node, path=name)
    requests.put(
        upload_url,
        data=content,
    )


def upload_attachments(user, node, attachments):
    """Upload each attachment in ``attachments`` to ``node``."""
    for attachment in attachments:
        upload_attachment(user, node, attachment)
{ "content_hash": "5469b915ed9d2f03a1aa81bf6bd97b6b", "timestamp": "", "source": "github", "line_count": 111, "max_line_length": 94, "avg_line_length": 27.64864864864865, "alnum_prop": 0.6539589442815249, "repo_name": "himanshuo/osf.io", "id": "5360c49beccfaef409433677f6f06c85f2c53271", "size": "3094", "binary": false, "copies": "3", "ref": "refs/heads/develop", "path": "website/conferences/utils.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "78345" }, { "name": "HTML", "bytes": "34188" }, { "name": "JavaScript", "bytes": "885345" }, { "name": "Mako", "bytes": "442634" }, { "name": "Python", "bytes": "2536134" }, { "name": "Shell", "bytes": "234" } ], "symlink_target": "" }
using Chloe.Server.Models; namespace Chloe.Server.Dtos { public class DistributionListAddOrUpdateResponseDto: DistributionListDto { public DistributionListAddOrUpdateResponseDto(DistributionList entity) :base(entity) { } } }
{ "content_hash": "0d90933d3360800ae68293d225a297b8", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 78, "avg_line_length": 21.153846153846153, "alnum_prop": 0.6909090909090909, "repo_name": "QuinntyneBrown/azure-search-getting-started", "id": "2c64779f47dfbb9125d7639dd2784d1baf45fc1d", "size": "275", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Chloe/Server/Dtos/DistributionListAddOrUpdateResponseDto.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "536816" }, { "name": "CSS", "bytes": "21945" }, { "name": "HTML", "bytes": "105717" }, { "name": "JavaScript", "bytes": "140796" }, { "name": "TypeScript", "bytes": "626935" } ], "symlink_target": "" }
//
//  sqSqueakIPhoneInfoPlistInterface
//  SqueakNoOGLIPhone
//
//  Created by John M McIntosh on 9/1/08.
//

#import "sqSqueakIPhoneInfoPlistInterface.h"

// NSUserDefaults keys; these must match the "Key" entries declared in
// Settings.bundle/Root.plist so the in-app preferences UI and this code
// read/write the same values.
NSString * kwriteable_preferenceKey = @"writeable_preference";
NSString * kscrollableView_preferenceKey = @"scrollableView_preference";
NSString * kmemorySize_preferenceKey = @"memorySize_preference";

// C-side flag consumed by the VM core (defined elsewhere).
extern int gSqueakUseFileMappedMMAP;

@implementation sqSqueakIPhoneInfoPlistInterface

// Extends the superclass Info.plist parsing: forces the file-mapped-mmap
// image loading path on, then seeds NSUserDefaults from the Settings
// bundle the first time the app runs (when no preference has been written
// yet).
- (void) parseInfoPlist {
	NSAutoreleasePool * pool = [NSAutoreleasePool new];
	[super parseInfoPlist];
	// Always use mmap-backed image files on this platform; mirror the
	// choice into the C global the VM reads.
	self.SqueakUseFileMappedMMAP = YES;
	gSqueakUseFileMappedMMAP = 1;

	// If this key is absent, no defaults were ever registered/saved —
	// bootstrap them from the Settings bundle's declared DefaultValues.
	// NOTE(review): `defaults` ivar presumably comes from the superclass;
	// confirm in sqSqueakInfoPlistInterface.
	NSString *testValue = [defaults stringForKey: kwriteable_preferenceKey];
	if (testValue == nil) {
		// no default values have been set, create them here based on what's in our Settings bundle info
		//
		NSString *pathStr = [[NSBundle mainBundle] bundlePath];
		NSString *settingsBundlePath = [pathStr stringByAppendingPathComponent:@"Settings.bundle"];
		NSString *finalPath = [settingsBundlePath stringByAppendingPathComponent:@"Root.plist"];

		NSDictionary *settingsDict = [NSDictionary dictionaryWithContentsOfFile:finalPath];
		NSArray *prefSpecifierArray = [settingsDict objectForKey:@"PreferenceSpecifiers"];
		NSDictionary *prefItem;

		// Hard-coded fallbacks in case the Settings bundle is missing or
		// lacks a DefaultValue for a key.
		NSString *writeable_preferenceDefault = @"YES";
		NSString *scrollableView_preferenceDefault= @"NO";
		NSString *memorySize_preferenceDefault=@"33554432";

		for (prefItem in prefSpecifierArray) {
			NSString *keyValueStr = [prefItem objectForKey:@"Key"];
			id defaultValue = [prefItem objectForKey:@"DefaultValue"];

			if ([keyValueStr isEqualToString: kwriteable_preferenceKey]) {
				writeable_preferenceDefault = defaultValue;
			}
			if ([keyValueStr isEqualToString: kscrollableView_preferenceKey]) {
				scrollableView_preferenceDefault = defaultValue;
			}
			if ([keyValueStr isEqualToString: kmemorySize_preferenceKey]) {
				memorySize_preferenceDefault = defaultValue;
			}
		}

		// since no default values have been set (i.e. no preferences file created), create it here
		NSDictionary *appDefaults = [NSDictionary dictionaryWithObjectsAndKeys:
			writeable_preferenceDefault, kwriteable_preferenceKey,
			scrollableView_preferenceDefault, kscrollableView_preferenceKey,
			memorySize_preferenceDefault, kmemorySize_preferenceKey,
			nil];

		[[NSUserDefaults standardUserDefaults] registerDefaults: appDefaults];
		[[NSUserDefaults standardUserDefaults] synchronize];
	}
	[pool drain];
}

// Whether the Squeak image file may be written back to disk.
- (BOOL) imageIsWriteable {
	return [defaults boolForKey: kwriteable_preferenceKey];
}

// Whether the display should be hosted in a scrollable view.
- (BOOL) useScrollingView {
	return [defaults boolForKey: kscrollableView_preferenceKey];
}

// Object memory size in bytes (default 32 MB from the Settings bundle).
- (NSInteger) memorySize {
	return [defaults integerForKey: kmemorySize_preferenceKey];
}

@end
{ "content_hash": "f0a3b0ef385a8244eff384f2d99336aa", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 136, "avg_line_length": 34.57831325301205, "alnum_prop": 0.7439024390243902, "repo_name": "josephzizys/cog", "id": "5600113fe3e33aefb894df600e308778234fe9c0", "size": "4580", "binary": false, "copies": "2", "ref": "refs/heads/blessed", "path": "platforms/iOS/vm/iPhone/Classes/sqSqueakIPhoneInfoPlistInterface.m", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
package com.xjeffrose.xio.server; import org.junit.After; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.*; public class XioBootstrapTest { @Before public void setUp() throws Exception { } @After public void tearDown() throws Exception { } @Test public void testStart() throws Exception { } @Test public void testStop() throws Exception { } @Test public void testGetBoundPorts() throws Exception { } @Test public void testGetXioMetrics() throws Exception { } }
{ "content_hash": "c352f25cd9d1f53c9893c836f0c5b6af", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 52, "avg_line_length": 13.575, "alnum_prop": 0.6979742173112339, "repo_name": "andyday/xio", "id": "55807caa95bcf457c28a16bfe88fe9e549fca2ed", "size": "543", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/test/java/com/xjeffrose/xio/server/XioBootstrapTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "990" }, { "name": "Java", "bytes": "537112" }, { "name": "Makefile", "bytes": "25638" }, { "name": "Python", "bytes": "36905" }, { "name": "Scala", "bytes": "1681" }, { "name": "Shell", "bytes": "23832" }, { "name": "Thrift", "bytes": "1583" } ], "symlink_target": "" }
<?php include 'head.php'; include 'header.php'; include 'navbar.php'; include 'presentation.php'; include 'tuiles.php'; include 'carousel.php'; include 'formulaire.php'; ?> </div> </div> </body> </html>
{ "content_hash": "fac74f4842f9331c53968a45530c13a0", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 31, "avg_line_length": 6.822222222222222, "alnum_prop": 0.4527687296416938, "repo_name": "Messant/Maquette_graphique_ffss45", "id": "263a6be240ebcd9fa04500ad2bd682630ea09a89", "size": "307", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "153343" }, { "name": "PHP", "bytes": "14086" } ], "symlink_target": "" }
title: "测试的依赖项" weight: 6 type: docs --- <!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> # 用于测试的依赖项 Flink 提供了用于测试作业的实用程序,您可以将其添加为依赖项。 ## DataStream API 测试 如果要为使用 DataStream API 构建的作业开发测试用例,则需要添加以下依赖项: {{< artifact_tabs flink-test-utils withTestScope >}} 在各种测试实用程序中,该模块提供了 `MiniCluster` (一个可配置的轻量级 Flink 集群,能在 JUnit 测试中运行),可以直接执行作业。 有关如何使用这些实用程序的更多细节,请查看 [DataStream API 测试]({{< ref "docs/dev/datastream/testing" >}})。 ## Table API 测试 如果您想在您的 IDE 中本地测试 Table API 和 SQL 程序,除了前述提到的 `flink-test-utils` 之外,您还要添加以下依赖项: {{< artifact_tabs flink-table-test-utils withTestScope >}} 这将自动引入查询计划器和运行时,分别用于计划和执行查询。 {{< hint info >}} `flink-table-test-utils` 模块已在 Flink 1.15 中引入,目前被认为是实验性的。 {{< /hint >}}
{ "content_hash": "3158f797b647e73c6a2632e2104da55c", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 85, "avg_line_length": 29.541666666666668, "alnum_prop": 0.767277856135402, "repo_name": "xccui/flink", "id": "33e0e9e8f5a8ea23cefa0576781d4d2a513b5578", "size": "1882", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "docs/content.zh/docs/dev/configuration/testing.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "20596" }, { "name": "Batchfile", "bytes": "1863" }, { "name": "C", "bytes": "847" }, { "name": "Cython", "bytes": "138311" }, { "name": "Dockerfile", "bytes": "5579" }, { "name": "FreeMarker", "bytes": "100031" }, { "name": "GAP", "bytes": "139536" }, { "name": "HTML", "bytes": "188041" }, { "name": "HiveQL", "bytes": "213569" }, { "name": "Java", "bytes": "98600106" }, { "name": "JavaScript", "bytes": "7038" }, { "name": "Less", "bytes": "84321" }, { "name": "Makefile", "bytes": "5134" }, { "name": "Python", "bytes": "3206355" }, { "name": "Scala", "bytes": "10942352" }, { "name": "Shell", "bytes": "528784" }, { "name": "TypeScript", "bytes": "391270" }, { "name": "q", "bytes": "16671" } ], "symlink_target": "" }
<?php namespace CentralDB\Entity; use Doctrine\ORM\Mapping as ORM; /** * Menus * * @ORM\Table(name="menus") * @ORM\Entity */ class Menu { /** * @var integer * * @ORM\Column(name="menu_id", type="integer", precision=0, scale=0, nullable=false, unique=false) * @ORM\Id * @ORM\GeneratedValue(strategy="IDENTITY") */ private $menuId; /** * @var string * * @ORM\Column(name="menu_title", type="string", length=255, precision=0, scale=0, nullable=false, unique=false) */ private $menuTitle; /** * @var string * * @ORM\Column(name="menu_description", type="string", length=255, precision=0, scale=0, nullable=false, unique=false) */ private $menuDescription; /** * Get menuId * * @return integer */ public function getMenuId() { return $this->menuId; } /** * Set menuTitle * * @param string $menuTitle * @return Menus */ public function setMenuTitle($menuTitle) { $this->menuTitle = $menuTitle; return $this; } /** * Get menuTitle * * @return string */ public function getMenuTitle() { return $this->menuTitle; } /** * Set menuDescription * * @param string $menuDescription * @return Menus */ public function setMenuDescription($menuDescription) { $this->menuDescription = $menuDescription; return $this; } /** * Get menuDescription * * @return string */ public function getMenuDescription() { return $this->menuDescription; } }
{ "content_hash": "4d3f6a1ba44a2d7781ba67f036852534", "timestamp": "", "source": "github", "line_count": 94, "max_line_length": 122, "avg_line_length": 17.98936170212766, "alnum_prop": 0.5446481371969248, "repo_name": "zs425/accommfzf2", "id": "42a1cbeb62dc6808ec1b326956ba80e9dc16afd0", "size": "1691", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "module/CentralDB/src/CentralDB/Entity/Menu.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ASP", "bytes": "47912" }, { "name": "ActionScript", "bytes": "15982" }, { "name": "CSS", "bytes": "641938" }, { "name": "JavaScript", "bytes": "3192659" }, { "name": "PHP", "bytes": "1812385" } ], "symlink_target": "" }
namespace TelerikAcademyLearningSystem.Core.Model { public class AdminUser { public string Username { get; set; } public string Password { get; set; } } }
{ "content_hash": "413819650caf859ff5a040882f8fbe01", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 50, "avg_line_length": 20.444444444444443, "alnum_prop": 0.6413043478260869, "repo_name": "plamenti/Telerik2015", "id": "3a2ab2038429dcdb940d0ce4b06fe039a06e5353", "size": "186", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Automation/TALearningSystemTestingFramework/TelerikAcademyLearningSystem.Core/Model/AdminUser.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "2467" }, { "name": "C#", "bytes": "535857" }, { "name": "HTML", "bytes": "39617" }, { "name": "Python", "bytes": "48230" }, { "name": "Smalltalk", "bytes": "247052" } ], "symlink_target": "" }
@class DBTEAMGroupMemberInfo;
@class DBTEAMGroupsMembersListResult;

NS_ASSUME_NONNULL_BEGIN

#pragma mark - API Object

///
/// The `GroupsMembersListResult` struct.
///
/// One page of group members plus the pagination state (`cursor` /
/// `hasMore`) needed to fetch subsequent pages.
///
/// This class implements the `DBSerializable` protocol (serialize and
/// deserialize instance methods), which is required for all Obj-C SDK API route
/// objects.
///
@interface DBTEAMGroupsMembersListResult : NSObject <DBSerializable, NSCopying>

#pragma mark - Instance fields

/// (no description).
@property (nonatomic, readonly) NSArray<DBTEAMGroupMemberInfo *> *members;

/// Pass the cursor into `groupsMembersListContinue` to obtain additional group
/// members. (Only useful while `hasMore` is true.)
@property (nonatomic, readonly, copy) NSString *cursor;

/// Is true if there are additional group members that have not been returned
/// yet. An additional call to `groupsMembersListContinue` can retrieve them.
@property (nonatomic, readonly) NSNumber *hasMore;

#pragma mark - Constructors

///
/// Full constructor for the struct (exposes all instance variables).
///
/// @param members (no description).
/// @param cursor Pass the cursor into `groupsMembersListContinue` to obtain
/// additional group members.
/// @param hasMore Is true if there are additional group members that have not
/// been returned yet. An additional call to `groupsMembersListContinue` can
/// retrieve them.
///
/// @return An initialized instance.
///
- (instancetype)initWithMembers:(NSArray<DBTEAMGroupMemberInfo *> *)members
                         cursor:(NSString *)cursor
                        hasMore:(NSNumber *)hasMore;

// There is no designated no-arg initializer; all fields are required.
- (instancetype)init NS_UNAVAILABLE;

@end

#pragma mark - Serializer Object

///
/// The serialization class for the `GroupsMembersListResult` struct.
/// Converts instances to and from json-compatible dictionaries.
///
@interface DBTEAMGroupsMembersListResultSerializer : NSObject

///
/// Serializes `DBTEAMGroupsMembersListResult` instances.
///
/// @param instance An instance of the `DBTEAMGroupsMembersListResult` API
/// object.
///
/// @return A json-compatible dictionary representation of the
/// `DBTEAMGroupsMembersListResult` API object.
///
+ (nullable NSDictionary<NSString *, id> *)serialize:(DBTEAMGroupsMembersListResult *)instance;

///
/// Deserializes `DBTEAMGroupsMembersListResult` instances.
///
/// @param dict A json-compatible dictionary representation of the
/// `DBTEAMGroupsMembersListResult` API object.
///
/// @return An instantiation of the `DBTEAMGroupsMembersListResult` object.
///
+ (DBTEAMGroupsMembersListResult *)deserialize:(NSDictionary<NSString *, id> *)dict;

@end

NS_ASSUME_NONNULL_END
{ "content_hash": "896982cb67d601ebf69a769ff8e71c7d", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 95, "avg_line_length": 30.853658536585368, "alnum_prop": 0.7450592885375494, "repo_name": "dropbox/dropbox-sdk-obj-c", "id": "7ff0242af54924d780e3adc9c15f1b418f20f034", "size": "2716", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Source/ObjectiveDropboxOfficial/Shared/Generated/ApiObjects/Team/Headers/DBTEAMGroupsMembersListResult.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1288" }, { "name": "CSS", "bytes": "6608" }, { "name": "Objective-C", "bytes": "18903013" }, { "name": "Python", "bytes": "7087" }, { "name": "Ruby", "bytes": "1819" }, { "name": "Shell", "bytes": "4527" } ], "symlink_target": "" }
package com.sanbeg.sdk.stripedecoration;

import android.graphics.Canvas;
import android.graphics.drawable.Drawable;
import android.view.View;

import androidx.recyclerview.widget.RecyclerView;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code StripeDecoration}'s onDraw behavior.
 *
 * <p>Fixture geometry: a mocked vertical RecyclerView ({@code rv},
 * width PARENT_WIDTH) holding three children stacked at rows
 * [0, 12), [12, 24), [24, 36); and a mocked horizontal RecyclerView
 * ({@code horiz}, height PARENT_HEIGHT) holding three children at columns
 * [0, 23), [23, 46), [46, 69). Tests verify which stripes get drawn and
 * with which bounds, depending on the adapter position of the first child.
 *
 * Created by steve on 3/30/17.
 */
public class StripeDecorationTest {
    static final int PARENT_WIDTH = 123;
    static final int PARENT_HEIGHT = 234;
    static final int CHILD_HEIGHT = 12;
    static final int CHILD_WIDTH = 23;

    // Stripe drawables whose setBounds/draw calls are verified.
    @Mock
    Drawable stripe;
    @Mock
    Drawable stripe2;

    @Mock
    Canvas c;

    // Children of the vertical RecyclerView.
    @Mock
    View child0;
    @Mock
    View child1;
    @Mock
    View child2;

    // Children of the horizontal RecyclerView.
    @Mock
    View childv0;
    @Mock
    View childv1;
    @Mock
    View childv2;

    @Mock
    RecyclerView rv;

    @Mock
    RecyclerView horiz;

    @Before
    public void setUp() throws Exception {
        MockitoAnnotations.initMocks(this);

        // Vertical list: three children, each CHILD_HEIGHT tall, flush stack.
        when(rv.getChildCount()).thenReturn(3);
        when(rv.getWidth()).thenReturn(PARENT_WIDTH);
        when(rv.getChildAt(0)).thenReturn(child0);
        when(rv.getChildAt(1)).thenReturn(child1);
        when(rv.getChildAt(2)).thenReturn(child2);

        when(child0.getTop()).thenReturn(0);
        when(child0.getBottom()).thenReturn(CHILD_HEIGHT);
        when(child1.getTop()).thenReturn(CHILD_HEIGHT);
        when(child1.getBottom()).thenReturn(2 * CHILD_HEIGHT);
        when(child2.getTop()).thenReturn(2 * CHILD_HEIGHT);
        when(child2.getBottom()).thenReturn(3 * CHILD_HEIGHT);

        // Horizontal list: three children, each CHILD_WIDTH wide, flush row.
        when(horiz.getChildCount()).thenReturn(3);
        when(horiz.getHeight()).thenReturn(PARENT_HEIGHT);
        when(horiz.getChildAt(0)).thenReturn(childv0);
        when(horiz.getChildAt(1)).thenReturn(childv1);
        when(horiz.getChildAt(2)).thenReturn(childv2);

        when(childv0.getLeft()).thenReturn(0);
        when(childv0.getRight()).thenReturn(CHILD_WIDTH);
        when(childv1.getLeft()).thenReturn(CHILD_WIDTH);
        when(childv1.getRight()).thenReturn(2 * CHILD_WIDTH);
        when(childv2.getLeft()).thenReturn(2 * CHILD_WIDTH);
        when(childv2.getRight()).thenReturn(3 * CHILD_WIDTH);
    }

    // First child at adapter position 0: the single stripe lands on row 1.
    @Test
    public void testOnDrawTop() throws Exception {
        when(rv.getChildAdapterPosition(child0)).thenReturn(0);

        StripeDecoration decoration = new StripeDecoration(stripe);
        decoration.onDraw(c, rv, null);
        verify(stripe).setBounds(0, CHILD_HEIGHT, PARENT_WIDTH, 2 * CHILD_HEIGHT);
        verify(stripe).draw(c);
    }

    // Two drawables alternate: stripe on rows 0 and 2, stripe2 on row 1.
    @Test
    public void testOnDrawTwo() throws Exception {
        when(rv.getChildAdapterPosition(child0)).thenReturn(0);

        StripeDecoration decoration = new StripeDecoration(stripe);
        decoration.setDrawables(stripe, stripe2);
        decoration.onDraw(c, rv, null);
        verify(stripe).setBounds(0, 0, PARENT_WIDTH, CHILD_HEIGHT);
        verify(stripe2).setBounds(0, CHILD_HEIGHT, PARENT_WIDTH, 2 * CHILD_HEIGHT);
        verify(stripe).setBounds(0, 2 * CHILD_HEIGHT, PARENT_WIDTH, 3 * CHILD_HEIGHT);
        verify(stripe, times(2)).draw(c);
        verify(stripe2).draw(c);
    }

    // NO_POSITION is tolerated and behaves like position 0.
    @Test
    public void testOnDrawNoPosition() throws Exception {
        when(rv.getChildAdapterPosition(child0)).thenReturn(RecyclerView.NO_POSITION);

        StripeDecoration decoration = new StripeDecoration(stripe);
        decoration.onDraw(c, rv, null);
        verify(stripe).setBounds(0, CHILD_HEIGHT, PARENT_WIDTH, 2 * CHILD_HEIGHT);
        verify(stripe).draw(c);
    }

    // First child at position 1 shifts the parity: stripes on rows 0 and 2.
    @Test
    public void testOnDrawNext() throws Exception {
        when(rv.getChildAdapterPosition(child0)).thenReturn(1);

        StripeDecoration decoration = new StripeDecoration(stripe);
        decoration.onDraw(c, rv, null);
        verify(stripe).setBounds(0, 0, PARENT_WIDTH, CHILD_HEIGHT);
        verify(stripe).setBounds(0, 2 * CHILD_HEIGHT, PARENT_WIDTH, 3 * CHILD_HEIGHT);
        verify(stripe, times(2)).draw(c);
    }

    // Same parity behavior for the horizontal orientation (columns 0 and 2).
    @Test
    public void testOnDrawNextHoriz() throws Exception {
        when(horiz.getChildAdapterPosition(childv0)).thenReturn(1);

        StripeDecoration decoration = new StripeDecoration(stripe);
        decoration.setOrientation(StripeDecoration.HORIZONTAL);
        decoration.onDraw(c, horiz, null);
        verify(stripe).setBounds(0, 0, CHILD_WIDTH, PARENT_HEIGHT);
        verify(stripe).setBounds(2 * CHILD_WIDTH, 0, 3 * CHILD_WIDTH, PARENT_HEIGHT);
        verify(stripe, times(2)).draw(c);
    }

    // Orientation values other than VERTICAL/HORIZONTAL are rejected.
    @Test(expected = IllegalArgumentException.class)
    public void testBadOrientation() {
        StripeDecoration decoration = new StripeDecoration(stripe);
        decoration.setOrientation(42);
    }
}
#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MAGIC_WAND_GESTURE_PREDICTOR_H_ #define TENSORFLOW_LITE_MICRO_EXAMPLES_MAGIC_WAND_GESTURE_PREDICTOR_H_ extern int PredictGesture(float *output); #endif /* TENSORFLOW_LITE_MICRO_EXAMPLES_MAGIC_WAND_GESTURE_PREDICTOR_H_ */
{ "content_hash": "270a58503a0cd3b2644d290aab1a6c3e", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 75, "avg_line_length": 33, "alnum_prop": 0.7954545454545454, "repo_name": "finikorg/zephyr", "id": "660e72d58765b5a9b26b10b15f5206ec45231188", "size": "891", "binary": false, "copies": "4", "ref": "refs/heads/main", "path": "samples/modules/tflite-micro/magic_wand/src/gesture_predictor.hpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "445128" }, { "name": "Batchfile", "bytes": "110" }, { "name": "C", "bytes": "44321001" }, { "name": "C++", "bytes": "29292" }, { "name": "CMake", "bytes": "1369918" }, { "name": "Cadence", "bytes": "1501" }, { "name": "EmberScript", "bytes": "997" }, { "name": "Forth", "bytes": "1648" }, { "name": "GDB", "bytes": "1285" }, { "name": "Haskell", "bytes": "722" }, { "name": "JetBrains MPS", "bytes": "3152" }, { "name": "PLSQL", "bytes": "281" }, { "name": "Perl", "bytes": "215338" }, { "name": "Python", "bytes": "2251570" }, { "name": "Shell", "bytes": "171294" }, { "name": "SmPL", "bytes": "36840" }, { "name": "Smalltalk", "bytes": "1885" }, { "name": "SourcePawn", "bytes": "14890" }, { "name": "Tcl", "bytes": "5838" }, { "name": "VBA", "bytes": "294" }, { "name": "Verilog", "bytes": "6394" } ], "symlink_target": "" }
define(["Tone/core/Tone", "Tone/signal/Add", "Tone/signal/Subtract", "Tone/signal/Multiply", "Tone/signal/IfThenElse", "Tone/signal/OR", "Tone/signal/AND", "Tone/signal/NOT", "Tone/signal/GreaterThan", "Tone/signal/LessThan", "Tone/signal/Equal", "Tone/signal/EqualZero", "Tone/signal/GreaterThanZero", "Tone/signal/Abs", "Tone/signal/Negate", "Tone/signal/Max", "Tone/signal/Min", "Tone/signal/Modulo", "Tone/signal/Pow", "Tone/signal/AudioToGain"], function(Tone){ "use strict"; Tone.Expr = function(){ var expr = this._replacements(Array.prototype.slice.call(arguments)); var inputCount = this._parseInputs(expr); /** * hold onto all of the nodes for disposal * @type {Array} * @private */ this._nodes = []; /** * The inputs. The length is determined by the expression. * @type {Array} */ this.input = new Array(inputCount); //create a gain for each input for (var i = 0; i < inputCount; i++){ this.input[i] = this.context.createGain(); } //parse the syntax tree var tree = this._parseTree(expr); //evaluate the results var result; try { result = this._eval(tree); } catch (e){ this._disposeNodes(); throw new Error("Could evaluate expression: "+expr); } /** * The output node is the result of the expression * @type {Tone} */ this.output = result; }; Tone.extend(Tone.Expr, Tone.SignalBase); //some helpers to cut down the amount of code function applyBinary(Constructor, args, self){ var op = new Constructor(); self._eval(args[0]).connect(op, 0, 0); self._eval(args[1]).connect(op, 0, 1); return op; } function applyUnary(Constructor, args, self){ var op = new Constructor(); self._eval(args[0]).connect(op, 0, 0); return op; } function getNumber(arg){ return arg ? parseFloat(arg) : undefined; } function literalNumber(arg){ return arg && arg.args ? parseFloat(arg.args) : undefined; } /* * the Expressions that Tone.Expr can parse. 
* * each expression belongs to a group and contains a regexp * for selecting the operator as well as that operators method * * @type {Object} * @private */ Tone.Expr._Expressions = { //values "value" : { "signal" : { regexp : /^\d+\.\d+|^\d+/, method : function(arg){ var sig = new Tone.Signal(getNumber(arg)); return sig; } }, "input" : { regexp : /^\$\d/, method : function(arg, self){ return self.input[getNumber(arg.substr(1))]; } } }, //syntactic glue "glue" : { "(" : { regexp : /^\(/, }, ")" : { regexp : /^\)/, }, "," : { regexp : /^,/, } }, //functions "func" : { "abs" : { regexp : /^abs/, method : applyUnary.bind(this, Tone.Abs) }, "min" : { regexp : /^min/, method : applyBinary.bind(this, Tone.Min) }, "max" : { regexp : /^max/, method : applyBinary.bind(this, Tone.Max) }, "if" : { regexp : /^if/, method : function(args, self){ var op = new Tone.IfThenElse(); self._eval(args[0]).connect(op.if); self._eval(args[1]).connect(op.then); self._eval(args[2]).connect(op.else); return op; } }, "gt0" : { regexp : /^gt0/, method : applyUnary.bind(this, Tone.GreaterThanZero) }, "eq0" : { regexp : /^eq0/, method : applyUnary.bind(this, Tone.EqualZero) }, "mod" : { regexp : /^mod/, method : function(args, self){ var modulus = literalNumber(args[1]); var op = new Tone.Modulo(modulus); self._eval(args[0]).connect(op); return op; } }, "pow" : { regexp : /^pow/, method : function(args, self){ var exp = literalNumber(args[1]); var op = new Tone.Pow(exp); self._eval(args[0]).connect(op); return op; } }, "a2g" : { regexp : /^a2g/, method : function(args, self){ var op = new Tone.AudioToGain(); self._eval(args[0]).connect(op); return op; } }, }, //binary expressions "binary" : { "+" : { regexp : /^\+/, precedence : 1, method : applyBinary.bind(this, Tone.Add) }, "-" : { regexp : /^\-/, precedence : 1, method : function(args, self){ //both unary and binary op if (args.length === 1){ return applyUnary(Tone.Negate, args, self); } else { return applyBinary(Tone.Subtract, args, self); } 
} }, "*" : { regexp : /^\*/, precedence : 0, method : applyBinary.bind(this, Tone.Multiply) }, ">" : { regexp : /^\>/, precedence : 2, method : applyBinary.bind(this, Tone.GreaterThan) }, "<" : { regexp : /^</, precedence : 2, method : applyBinary.bind(this, Tone.LessThan) }, "==" : { regexp : /^==/, precedence : 3, method : applyBinary.bind(this, Tone.Equal) }, "&&" : { regexp : /^&&/, precedence : 4, method : applyBinary.bind(this, Tone.AND) }, "||" : { regexp : /^\|\|/, precedence : 5, method : applyBinary.bind(this, Tone.OR) }, }, //unary expressions "unary" : { "-" : { regexp : /^\-/, method : applyUnary.bind(this, Tone.Negate) }, "!" : { regexp : /^\!/, method : applyUnary.bind(this, Tone.NOT) }, }, }; /** * @param {string} expr the expression string * @return {number} the input count * @private */ Tone.Expr.prototype._parseInputs = function(expr){ var inputArray = expr.match(/\$\d/g); var inputMax = 0; if (inputArray !== null){ for (var i = 0; i < inputArray.length; i++){ var inputNum = parseInt(inputArray[i].substr(1)) + 1; inputMax = Math.max(inputMax, inputNum); } } return inputMax; }; /** * @param {Array} args an array of arguments * @return {string} the results of the replacements being replaced * @private */ Tone.Expr.prototype._replacements = function(args){ var expr = args.shift(); for (var i = 0; i < args.length; i++){ expr = expr.replace(/\%/i, args[i]); } return expr; }; /** * tokenize the expression based on the Expressions object * @param {string} expr * @return {Object} returns two methods on the tokenized list, next and peek * @private */ Tone.Expr.prototype._tokenize = function(expr){ var position = -1; var tokens = []; while(expr.length > 0){ expr = expr.trim(); var token = getNextToken(expr); tokens.push(token); expr = expr.substr(token.value.length); } function getNextToken(expr){ for (var type in Tone.Expr._Expressions){ var group = Tone.Expr._Expressions[type]; for (var opName in group){ var op = group[opName]; var reg = op.regexp; var 
match = expr.match(reg); if (match !== null){ return { type : type, value : match[0], method : op.method }; } } } throw new SyntaxError("Unexpected token "+expr); } return { next : function(){ return tokens[++position]; }, peek : function(){ return tokens[position + 1]; } }; }; /** * recursively parse the string expression into a syntax tree * * @param {string} expr * @return {Object} * @private */ Tone.Expr.prototype._parseTree = function(expr){ var lexer = this._tokenize(expr); var isUndef = this.isUndef.bind(this); function matchSyntax(token, syn) { return !isUndef(token) && token.type === "glue" && token.value === syn; } function matchGroup(token, groupName, prec) { var ret = false; var group = Tone.Expr._Expressions[groupName]; if (!isUndef(token)){ for (var opName in group){ var op = group[opName]; if (op.regexp.test(token.value)){ if (!isUndef(prec)){ if(op.precedence === prec){ return true; } } else { return true; } } } } return ret; } function parseExpression(precedence) { if (isUndef(precedence)){ precedence = 5; } var expr; if (precedence < 0){ expr = parseUnary(); } else { expr = parseExpression(precedence-1); } var token = lexer.peek(); while (matchGroup(token, "binary", precedence)) { token = lexer.next(); expr = { operator: token.value, method : token.method, args : [ expr, parseExpression(precedence) ] }; token = lexer.peek(); } return expr; } function parseUnary() { var token, expr; token = lexer.peek(); if (matchGroup(token, "unary")) { token = lexer.next(); expr = parseUnary(); return { operator: token.value, method : token.method, args : [expr] }; } return parsePrimary(); } function parsePrimary() { var token, expr; token = lexer.peek(); if (isUndef(token)) { throw new SyntaxError("Unexpected termination of expression"); } if (token.type === "func") { token = lexer.next(); return parseFunctionCall(token); } if (token.type === "value") { token = lexer.next(); return { method : token.method, args : token.value }; } if (matchSyntax(token, "(")) { 
lexer.next(); expr = parseExpression(); token = lexer.next(); if (!matchSyntax(token, ")")) { throw new SyntaxError("Expected )"); } return expr; } throw new SyntaxError("Parse error, cannot process token " + token.value); } function parseFunctionCall(func) { var token, args = []; token = lexer.next(); if (!matchSyntax(token, "(")) { throw new SyntaxError("Expected ( in a function call \"" + func.value + "\""); } token = lexer.peek(); if (!matchSyntax(token, ")")) { args = parseArgumentList(); } token = lexer.next(); if (!matchSyntax(token, ")")) { throw new SyntaxError("Expected ) in a function call \"" + func.value + "\""); } return { method : func.method, args : args, name : name }; } function parseArgumentList() { var token, expr, args = []; while (true) { expr = parseExpression(); if (isUndef(expr)) { // TODO maybe throw exception? break; } args.push(expr); token = lexer.peek(); if (!matchSyntax(token, ",")) { break; } lexer.next(); } return args; } return parseExpression(); }; /** * recursively evaluate the expression tree * @param {Object} tree * @return {AudioNode} the resulting audio node from the expression * @private */ Tone.Expr.prototype._eval = function(tree){ if (!this.isUndef(tree)){ var node = tree.method(tree.args, this); this._nodes.push(node); return node; } }; /** * dispose all the nodes * @private */ Tone.Expr.prototype._disposeNodes = function(){ for (var i = 0; i < this._nodes.length; i++){ var node = this._nodes[i]; if (this.isFunction(node.dispose)) { node.dispose(); } else if (this.isFunction(node.disconnect)) { node.disconnect(); } node = null; this._nodes[i] = null; } this._nodes = null; }; /** * clean up */ Tone.Expr.prototype.dispose = function(){ Tone.prototype.dispose.call(this); this._disposeNodes(); }; return Tone.Expr; });
{ "content_hash": "5573867d342c843f3d4d36b8d91e69c9", "timestamp": "", "source": "github", "line_count": 499, "max_line_length": 98, "avg_line_length": 22.4188376753507, "alnum_prop": 0.5694109233932243, "repo_name": "zeakd/CTP_project", "id": "d9323eb3e044c6e2b89ac1ed543347276e0c5448", "size": "11605", "binary": false, "copies": "13", "ref": "refs/heads/master", "path": "src/assets/bower_components/tone/Tone/signal/Expr.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "7944" }, { "name": "HTML", "bytes": "16223" }, { "name": "JavaScript", "bytes": "62373" } ], "symlink_target": "" }
<?php class KecamatanTest extends CDbTestCase { public $fixtures=array( 'kecamatans'=>'Kecamatan', ); public function testCreate() { } }
{ "content_hash": "09b590194f39bcc44c972ba35f7b0cd9", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 11.23076923076923, "alnum_prop": 0.6917808219178082, "repo_name": "ata/kkn", "id": "78c128c8f0b52c94adcfd61efe89a6195548fe09", "size": "146", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "protected/tests/unit/KecamatanTest.php", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "188073" }, { "name": "PHP", "bytes": "10026459" }, { "name": "Perl", "bytes": "35924" }, { "name": "Shell", "bytes": "6149" } ], "symlink_target": "" }
/* Copyright (c) 2006, Sun Microsystems, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Sun Microsystems nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE */ // conveniences extern int Indent; void printIndent(); extern oop catchThisOne; // utilities extern "C" void copy_oops_up(oop* from, oop* to, int count); extern "C" void set_oops(oop* to, int count, oop value = NULL); char* copy_string(char* s); char* copy_string(char* s, smi len); char* copy_c_heap_string(char* s); // copying oops must be accompanied by record_multistores for remembered set void copy_oops_down(oop* from, oop* to, int count); inline void copy_oops(oop* from, oop* to, int count) { 
copy_oops_up(from, to, count); } inline void copy_oops_overlapping(oop* from, oop* to, int count) { if (from < to) copy_oops_down(from + count, to + count, count); else if (from > to) copy_oops_up(from, to, count); } inline void copy_words(int* from, int* to, int count) { copy_oops((oop*) from, (oop*) to, count); } inline void set_words(int* from, int count, int value = 0) { set_oops((oop*) from, count, (oop) value); } inline int min(int a, int b) { return a < b ? a : b; } inline int max(int a, int b) { return a > b ? a : b; } inline int min(int a, int b, int c) { return a < b ? min(a, c) : min(b, c); } inline int max(int a, int b, int c) { return a > b ? max(a, c) : max(b, c); } #define between(p, low, high) ((void*)(p) >= (void*)(low) && (void*)(p) < (void*)(high)) inline void *align(void* p, int alignment) { int number = (int) p; int adjust = alignment - (number%alignment) % alignment; return (void*) (number + adjust); } // some useful constants const int K = 1024; const int M = K * K; const int oopSize = sizeof(oop); const int floatSize = sizeof(double); inline int byte_size(void* from, void* to) { return (char*) to - (char*) from; } // If your compiler or lint supports a pragma informing it that a // variable is unused, redefine these appropriately /* #ifdef __GNUC__ // GNU 2.6.2 can't disambiguate the overloaded Unused function #define Unused(x) #else //#endif */ inline void Unused(int x) { x, 0; } inline void Unused(void *x) { x, 0; }
{ "content_hash": "d48d84374fa6fa56fc21246418af502f", "timestamp": "", "source": "github", "line_count": 95, "max_line_length": 130, "avg_line_length": 37.31578947368421, "alnum_prop": 0.7091678420310297, "repo_name": "talksmall/Strongtalk", "id": "36ba61bbcccce8cf3444803957edee882cea817d", "size": "3618", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vm/memory/util.hpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "78417" }, { "name": "C", "bytes": "41379" }, { "name": "C++", "bytes": "4213901" }, { "name": "Perl", "bytes": "1053" } ], "symlink_target": "" }
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.tabmodel;

import android.os.SystemClock;

import org.chromium.base.annotations.CalledByNative;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.content_public.browser.WebContents;

/**
 * Bridges between the C++ and Java {@link TabModel} interfaces.
 */
public abstract class TabModelJniBridge implements TabModel {
    /** Whether this model holds incognito tabs; fixed at construction time. */
    private final boolean mIsIncognito;

    // TODO(dtrainor, simonb): Make these non-static so we don't break if we have multiple instances
    // of chrome running.  Also investigate how this affects document mode.
    // Uptime timestamp at which the current tab switch began; <= 0 means no switch is being timed.
    private static long sTabSwitchStartTime;
    // The kind of action that triggered the tab switch currently being timed.
    private static TabSelectionType sTabSelectionType;
    // Set once a visible tab produced a frame, enabling the "actual" latency metric to flush.
    private static boolean sTabSwitchLatencyMetricRequired;
    // Guards against logging the perceived-latency metric twice for a single switch.
    private static boolean sPerceivedTabSwitchLatencyMetricLogged;

    /** Native TabModelJniBridge pointer, which will be set by {@link #initializeNative()}. */
    private long mNativeTabModelJniBridge;

    /**
     * @param isIncognito Whether the model holds incognito tabs.
     */
    public TabModelJniBridge(boolean isIncognito) {
        mIsIncognito = isIncognito;
    }

    /** Initializes the native-side counterpart to this class. */
    protected void initializeNative() {
        assert mNativeTabModelJniBridge == 0;
        mNativeTabModelJniBridge = nativeInit(mIsIncognito);
    }

    /** @return Whether the native-side pointer has been initialized. */
    public boolean isNativeInitialized() {
        return mNativeTabModelJniBridge != 0;
    }

    @Override
    public void destroy() {
        if (isNativeInitialized()) {
            // This will invalidate all other native references to this object in child classes.
            nativeDestroy(mNativeTabModelJniBridge);
            mNativeTabModelJniBridge = 0;
        }
    }

    @Override
    public boolean isIncognito() {
        return mIsIncognito;
    }

    @Override
    public Profile getProfile() {
        return nativeGetProfileAndroid(mNativeTabModelJniBridge);
    }

    /** Broadcast a native-side notification that all tabs are now loaded from storage. */
    public void broadcastSessionRestoreComplete() {
        assert isNativeInitialized();
        nativeBroadcastSessionRestoreComplete(mNativeTabModelJniBridge);
    }

    /**
     * Called by subclasses when a Tab is added to the TabModel.
     * @param tab Tab being added to the model.
     */
    protected void tabAddedToModel(Tab tab) {
        if (isNativeInitialized()) nativeTabAddedToModel(mNativeTabModelJniBridge, tab);
    }

    /**
     * Sets the TabModel's index.
     * @param index Index of the Tab to select.
     */
    @CalledByNative
    private void setIndex(int index) {
        TabModelUtils.setIndex(this, index);
    }

    @Override
    @CalledByNative
    public abstract Tab getTabAt(int index);

    /**
     * Closes the Tab at a particular index.
     * @param index Index of the tab to close.
     * @return Whether the tab was successfully closed.
     */
    @CalledByNative
    protected abstract boolean closeTabAt(int index);

    /**
     * Creates a Tab with the given WebContents.
     * @param incognito Whether or not the tab is incognito.
     * @param webContents A {@link WebContents} object.
     * @param parentId ID of the parent.
     * @return Whether or not the Tab was successfully created.
     */
    @CalledByNative
    protected abstract boolean createTabWithWebContents(
            boolean incognito, WebContents webContents, int parentId);

    /**
     * Creates a Tab with the given WebContents for DevTools.
     * @param url URL to show.
     */
    @CalledByNative
    protected abstract Tab createNewTabForDevTools(String url);

    @Override
    @CalledByNative
    public abstract int getCount();

    @Override
    @CalledByNative
    public abstract int index();

    /** @return Whether or not a sync session is currently being restored. */
    @CalledByNative
    protected abstract boolean isSessionRestoreInProgress();

    /**
     * Register the start of tab switch latency timing. Called when setIndex() indicates a tab
     * switch event.
     * @param type The type of action that triggered the tab selection.
     */
    public static void startTabSwitchLatencyTiming(final TabSelectionType type) {
        sTabSwitchStartTime = SystemClock.uptimeMillis();
        sTabSelectionType = type;
        sTabSwitchLatencyMetricRequired = false;
        sPerceivedTabSwitchLatencyMetricLogged = false;
    }

    /**
     * Should be called when a visible {@link ChromeTab} gets a frame to render in the browser
     * process. If we don't get this call, we ignore requests to
     * {@link #flushActualTabSwitchLatencyMetric()}.
     */
    public static void setActualTabSwitchLatencyMetricRequired() {
        // No switch is being timed; nothing to arm.
        if (sTabSwitchStartTime <= 0) return;
        sTabSwitchLatencyMetricRequired = true;
    }

    /**
     * Logs the perceived tab switching latency metric. This will automatically be logged if
     * the actual metric is set and flushed.
     */
    public static void logPerceivedTabSwitchLatencyMetric() {
        if (sTabSwitchStartTime <= 0 || sPerceivedTabSwitchLatencyMetricLogged) return;

        flushTabSwitchLatencyMetric(true);
        sPerceivedTabSwitchLatencyMetricLogged = true;
    }

    /**
     * Flush the latency metric if called after the indication that a frame is ready.
     * Also logs the perceived metric first (if not logged yet), then resets the timing state.
     */
    public static void flushActualTabSwitchLatencyMetric() {
        if (sTabSwitchStartTime <= 0 || !sTabSwitchLatencyMetricRequired) return;
        logPerceivedTabSwitchLatencyMetric();
        flushTabSwitchLatencyMetric(false);

        sTabSwitchStartTime = 0;
        sTabSwitchLatencyMetricRequired = false;
    }

    /**
     * Records the elapsed switch time via the native histogram matching the selection type.
     * @param perceived Whether this is the perceived (true) or actual (false) latency variant.
     */
    private static void flushTabSwitchLatencyMetric(boolean perceived) {
        if (sTabSwitchStartTime <= 0) return;
        final long ms = SystemClock.uptimeMillis() - sTabSwitchStartTime;
        switch (sTabSelectionType) {
            case FROM_CLOSE:
                nativeLogFromCloseMetric(ms, perceived);
                break;
            case FROM_EXIT:
                nativeLogFromExitMetric(ms, perceived);
                break;
            case FROM_NEW:
                nativeLogFromNewMetric(ms, perceived);
                break;
            case FROM_USER:
                nativeLogFromUserMetric(ms, perceived);
                break;
        }
    }

    private native long nativeInit(boolean isIncognito);
    private native Profile nativeGetProfileAndroid(long nativeTabModelJniBridge);
    private native void nativeBroadcastSessionRestoreComplete(long nativeTabModelJniBridge);
    private native void nativeDestroy(long nativeTabModelJniBridge);
    private native void nativeTabAddedToModel(long nativeTabModelJniBridge, Tab tab);

    // Native methods for tab switch latency metrics.
    private static native void nativeLogFromCloseMetric(long ms, boolean perceived);
    private static native void nativeLogFromExitMetric(long ms, boolean perceived);
    private static native void nativeLogFromNewMetric(long ms, boolean perceived);
    private static native void nativeLogFromUserMetric(long ms, boolean perceived);
}
{ "content_hash": "6842eabf2bf947a23eed26e90a746b4e", "timestamp": "", "source": "github", "line_count": 203, "max_line_length": 100, "avg_line_length": 35.995073891625616, "alnum_prop": 0.6971397290269604, "repo_name": "vadimtk/chrome4sdp", "id": "e1288142ce1141615cc4a8bbda4bcddcb4471277", "size": "7307", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "chrome/android/java/src/org/chromium/chrome/browser/tabmodel/TabModelJniBridge.java", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
import logging import goaway.globalvars as globalvars logger = logging.getLogger(__name__) DATA_STORE_HANDLE_KIND_ATTR = "__store" NAME_ATTR = "__name" class ObjectHandle(object): """ Represents a shared object in a datstore. Instances of this class are returned by object handle constructors. Applications should not directly create these. Example: accumulators = goaway.StrictCentralized() accumulators.flowers = 0 accumulators.trees = 10 """ def __init__(self, data_store_kind, name): """ Args: data_store_kind: Name of the type of datastore to use (from globalvars) name: Name of the object, to identify its store. """ self.__dict__[DATA_STORE_HANDLE_KIND_ATTR] = data_store_kind self.__dict__[NAME_ATTR] = name def __getattr__(self, field): """ Hook when an attribute is fetched. """ store = globalvars.get_data_store(getattr(self, DATA_STORE_HANDLE_KIND_ATTR)) object_name = getattr(self, NAME_ATTR) value = store.get(object_name, field) return value def __setattr__(self, field, value): """ Hook when an attribute is set. """ store = globalvars.get_data_store(getattr(self, DATA_STORE_HANDLE_KIND_ATTR)) object_name = getattr(self, NAME_ATTR) store.set(object_name, field, value)
{ "content_hash": "31a95cfc220e43fd59d061f2a4cd6ab5", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 85, "avg_line_length": 30.934782608695652, "alnum_prop": 0.617709065354884, "repo_name": "anpere/goaway", "id": "820f6582610d4d6536ed231e35188f8098b4410b", "size": "1423", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "goaway/objecthandle.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "20495" } ], "symlink_target": "" }